Compare commits


1 Commit

Author SHA1 Message Date
066bcdcf98 Scaffolding LDAP MVP 2024-01-13 21:21:40 +07:00
4703 changed files with 206503 additions and 362302 deletions

View File

@ -8,17 +8,19 @@ ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218
# THIS IS A SAMPLE AUTH_SECRET KEY AND SHOULD NEVER BE USED FOR PRODUCTION
AUTH_SECRET=5lrMXKKWCVocS/uerPsl7V+TX/aaUaI7iDkgl3tSmLE=
# Postgres creds
POSTGRES_PASSWORD=infisical
POSTGRES_USER=infisical
POSTGRES_DB=infisical
# MongoDB
# Backend will connect to the MongoDB instance at connection string MONGO_URL which can either be a ref
# to the MongoDB container instance or Mongo Cloud
# Required
DB_CONNECTION_URI=postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB}
MONGO_URL=mongodb://root:example@mongo:27017/?authSource=admin
# Redis
REDIS_URL=redis://redis:6379
# Optional credentials for MongoDB container instance and Mongo-Express
MONGO_USERNAME=root
MONGO_PASSWORD=example
# Website URL
# Required
SITE_URL=http://localhost:8080
@ -26,8 +28,7 @@ SITE_URL=http://localhost:8080
# Mail/SMTP
SMTP_HOST=
SMTP_PORT=
SMTP_FROM_ADDRESS=
SMTP_FROM_NAME=
SMTP_NAME=
SMTP_USERNAME=
SMTP_PASSWORD=
@ -37,22 +38,16 @@ CLIENT_ID_HEROKU=
CLIENT_ID_VERCEL=
CLIENT_ID_NETLIFY=
CLIENT_ID_GITHUB=
CLIENT_ID_GITHUB_APP=
CLIENT_SLUG_GITHUB_APP=
CLIENT_ID_GITLAB=
CLIENT_ID_BITBUCKET=
CLIENT_SECRET_HEROKU=
CLIENT_SECRET_VERCEL=
CLIENT_SECRET_NETLIFY=
CLIENT_SECRET_GITHUB=
CLIENT_SECRET_GITHUB_APP=
CLIENT_SECRET_GITLAB=
CLIENT_SECRET_BITBUCKET=
CLIENT_SLUG_VERCEL=
CLIENT_PRIVATE_KEY_GITHUB_APP=
CLIENT_APP_ID_GITHUB_APP=
# Sentry (optional) for monitoring errors
SENTRY_DSN=
@ -70,46 +65,3 @@ CLIENT_SECRET_GITHUB_LOGIN=
CLIENT_ID_GITLAB_LOGIN=
CLIENT_SECRET_GITLAB_LOGIN=
CAPTCHA_SECRET=
NEXT_PUBLIC_CAPTCHA_SITE_KEY=
OTEL_TELEMETRY_COLLECTION_ENABLED=false
OTEL_EXPORT_TYPE=prometheus
OTEL_EXPORT_OTLP_ENDPOINT=
OTEL_OTLP_PUSH_INTERVAL=
OTEL_COLLECTOR_BASIC_AUTH_USERNAME=
OTEL_COLLECTOR_BASIC_AUTH_PASSWORD=
PLAIN_API_KEY=
PLAIN_WISH_LABEL_IDS=
SSL_CLIENT_CERTIFICATE_HEADER_KEY=
ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=true
# App Connections
# aws assume-role connection
INF_APP_CONNECTION_AWS_ACCESS_KEY_ID=
INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY=
# github oauth connection
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET=
#github app connection
INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET=
INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_APP_SLUG=
INF_APP_CONNECTION_GITHUB_APP_ID=
#gcp app connection
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=
# azure app connection
INF_APP_CONNECTION_AZURE_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRET=
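
The Postgres values above are reused inside DB_CONNECTION_URI via ${VAR} references. Assuming the consumer of this file performs shell-style interpolation (as that entry implies), the sample credentials resolve as in this sketch:

POSTGRES_USER=infisical POSTGRES_PASSWORD=infisical POSTGRES_DB=infisical
echo "postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB}"
# -> postgres://infisical:infisical@db:5432/infisical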

View File

@ -1,2 +0,0 @@
DB_CONNECTION_URI=
AUDIT_LOGS_DB_CONNECTION_URI=

View File

@ -1,4 +0,0 @@
REDIS_URL=redis://localhost:6379
DB_CONNECTION_URI=postgres://infisical:infisical@localhost/infisical?sslmode=disable
AUTH_SECRET=4bnfe4e407b8921c104518903515b218
ENCRYPTION_KEY=4bnfe4e407b8921c104518903515b218

View File

@ -1,12 +1,11 @@
# Description 📣
<!-- Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change. Here's how we expect a pull request to be : https://infisical.com/docs/contributing/getting-started/pull-requests -->
<!-- Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change. -->
## Type ✨
- [ ] Bug fix
- [ ] New feature
- [ ] Improvement
- [ ] Breaking change
- [ ] Documentation
@ -20,6 +19,4 @@
---
- [ ] I have read the [contributing guide](https://infisical.com/docs/contributing/getting-started/overview), agreed and acknowledged the [code of conduct](https://infisical.com/docs/contributing/getting-started/code-of-conduct). 📝
<!-- If you have any questions regarding contribution, here's the FAQ : https://infisical.com/docs/contributing/getting-started/faq -->
- [ ] I have read the [contributing guide](https://infisical.com/docs/contributing/overview), agreed and acknowledged the [code of conduct](https://infisical.com/docs/contributing/code-of-conduct). 📝

View File

@ -1,190 +0,0 @@
# inspired by https://www.photoroom.com/inside-photoroom/how-we-automated-our-changelog-thanks-to-chatgpt
import os
import requests
import re
from openai import OpenAI
import subprocess
from datetime import datetime
import uuid
# Constants
REPO_OWNER = "infisical"
REPO_NAME = "infisical"
TOKEN = os.environ["GITHUB_TOKEN"]
SLACK_WEBHOOK_URL = os.environ["SLACK_WEBHOOK_URL"]
OPENAI_API_KEY = os.environ["OPENAI_API_KEY"]
SLACK_MSG_COLOR = "#36a64f"
headers = {
"Authorization": f"Bearer {TOKEN}",
"Accept": "application/vnd.github+json",
"X-GitHub-Api-Version": "2022-11-28",
}
def set_multiline_output(name, value):
with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
delimiter = uuid.uuid1()
print(f'{name}<<{delimiter}', file=fh)
print(value, file=fh)
print(delimiter, file=fh)
def post_changelog_to_slack(changelog, tag):
slack_payload = {
"text": "Hey team, it's changelog time! :wave:",
"attachments": [
{
"color": SLACK_MSG_COLOR,
"title": f"🗓Infisical Changelog - {tag}",
"text": changelog,
}
],
}
response = requests.post(SLACK_WEBHOOK_URL, json=slack_payload)
if response.status_code != 200:
raise Exception("Failed to post changelog to Slack.")
def find_previous_release_tag(release_tag:str):
previous_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0", f"{release_tag}^"]).decode("utf-8").strip()
while not(previous_tag.startswith("infisical/")):
previous_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0", f"{previous_tag}^"]).decode("utf-8").strip()
return previous_tag
def get_tag_creation_date(tag_name):
url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/git/refs/tags/{tag_name}"
response = requests.get(url, headers=headers)
response.raise_for_status()
commit_sha = response.json()['object']['sha']
commit_url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/commits/{commit_sha}"
commit_response = requests.get(commit_url, headers=headers)
commit_response.raise_for_status()
creation_date = commit_response.json()['commit']['author']['date']
return datetime.strptime(creation_date, '%Y-%m-%dT%H:%M:%SZ')
def fetch_prs_between_tags(previous_tag_date:datetime, release_tag_date:datetime):
# Use GitHub API to fetch PRs merged between the commits
url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/pulls?state=closed&merged=true"
response = requests.get(url, headers=headers)
if response.status_code != 200:
raise Exception("Error fetching PRs from GitHub API!")
prs = []
for pr in response.json():
# Since release tags are recent, we take the last 100 closed PRs and then filter them by tag creation date
if pr["merged_at"] and datetime.strptime(pr["merged_at"],'%Y-%m-%dT%H:%M:%SZ') < release_tag_date and datetime.strptime(pr["merged_at"],'%Y-%m-%dT%H:%M:%SZ') > previous_tag_date:
prs.append(pr)
return prs
def extract_commit_details_from_prs(prs):
commit_details = []
for pr in prs:
commit_message = pr["title"]
commit_url = pr["html_url"]
pr_number = pr["number"]
branch_name = pr["head"]["ref"]
issue_numbers = re.findall(r"(www-\d+|web-\d+)", branch_name)
# If no issue numbers are found, add the PR details without issue numbers and URLs
if not issue_numbers:
commit_details.append(
{
"message": commit_message,
"pr_number": pr_number,
"pr_url": commit_url,
"issue_number": None,
"issue_url": None,
}
)
continue
for issue in issue_numbers:
commit_details.append(
{
"message": commit_message,
"pr_number": pr_number,
"pr_url": commit_url,
"issue_number": issue,
}
)
return commit_details
# Function to generate changelog using OpenAI
def generate_changelog_with_openai(commit_details):
commit_messages = []
for details in commit_details:
base_message = f"{details['pr_url']} - {details['message']}"
# Add the issue URL if available
# if details["issue_url"]:
# base_message += f" (Linear Issue: {details['issue_url']})"
commit_messages.append(base_message)
commit_list = "\n".join(commit_messages)
prompt = """
Generate a changelog for Infisical, an open-source SecretOps platform
The changelog should:
1. Be Informative: Using the provided list of GitHub commits, break them down into categories such as Features, Fixes & Improvements, and Technical Updates. Summarize each commit concisely, ensuring the key points are highlighted.
2. Have a Professional yet Friendly tone: The tone should be balanced, not too corporate or too informal.
3. Celebratory Introduction and Conclusion: Start the changelog with a celebratory note acknowledging the team's hard work and progress. End with a shoutout to the team and wishes for a pleasant weekend.
4. Formatting: you cannot use Markdown formatting, and you can only use emojis for the introductory paragraph or the conclusion paragraph, nowhere else.
5. Links: the syntax to create links is the following: `<http://www.example.com|This message is a link>`.
6. Linear Links: note that the Linear link is optional, include it only if provided.
7. Do not wrap your answer in a codeblock. Just output the text, nothing else
Here's a good example to follow, please try to match the formatting as closely as possible, only changing the content of the changelog and have some liberty with the introduction. Notice the importance of the formatting of a changelog item:
- <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>))
And here's an example of the full changelog:
*Features*
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
*Fixes & Improvements*
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
*Technical Updates*
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
Stay tuned for more exciting updates coming soon!
And here are the commits:
{}
""".format(
commit_list
)
client = OpenAI(api_key=OPENAI_API_KEY)
messages = [{"role": "user", "content": prompt}]
response = client.chat.completions.create(model="gpt-3.5-turbo", messages=messages)
if "error" in response.choices[0].message:
raise Exception("Error generating changelog with OpenAI!")
return response.choices[0].message.content.strip()
if __name__ == "__main__":
try:
# Get the latest and previous release tags
latest_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0"]).decode("utf-8").strip()
previous_tag = find_previous_release_tag(latest_tag)
latest_tag_date = get_tag_creation_date(latest_tag)
previous_tag_date = get_tag_creation_date(previous_tag)
prs = fetch_prs_between_tags(previous_tag_date,latest_tag_date)
pr_details = extract_commit_details_from_prs(prs)
# Generate changelog
changelog = generate_changelog_with_openai(pr_details)
post_changelog_to_slack(changelog,latest_tag)
# Print or post changelog to Slack
# set_multiline_output("changelog", changelog)
except Exception as e:
print(str(e))
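
For reference, the Generate Changelog workflow later in this diff drives this script roughly as follows; run it from a clone with tags fetched, and treat the credential values as placeholders:

export GITHUB_TOKEN="<github token>"
export OPENAI_API_KEY="<openai api key>"
export SLACK_WEBHOOK_URL="<slack incoming webhook url>"
python .github/resources/changelog-generator.py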

View File

@ -1,26 +0,0 @@
import os
from datetime import datetime, timedelta
def rename_migrations():
migration_folder = "./backend/src/db/migrations"
with open("added_files.txt", "r") as file:
changed_files = file.readlines()
# Find the latest file among the changed files
latest_timestamp = datetime.now() # utc time
for file_path in changed_files:
file_path = file_path.strip()
# bump each new file's timestamp by 1 second
latest_timestamp = latest_timestamp + timedelta(seconds=1)
new_filename = os.path.join(migration_folder, latest_timestamp.strftime("%Y%m%d%H%M%S") + f"_{file_path.split('_')[1]}")
old_filename = os.path.join(migration_folder, file_path)
os.rename(old_filename, new_filename)
print(f"Renamed {old_filename} to {new_filename}")
if len(changed_files) == 0:
print("No new files added to migration folder")
if __name__ == "__main__":
rename_migrations()
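
This diff does not include the step that writes added_files.txt, so the wiring below is only a hypothetical sketch: it assumes the file holds the basenames of newly added migration files and that the script lives under .github/resources/.

git diff --diff-filter=A --name-only HEAD^ HEAD -- backend/src/db/migrations | xargs -r -n1 basename > added_files.txt
python .github/resources/rename_migrations.py   # assumed path and file name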

82
.github/values.yaml vendored
View File

@ -1,57 +1,49 @@
## @section Common parameters
##
## @param nameOverride Override release name
##
nameOverride: ""
## @param fullnameOverride Override release fullname
##
fullnameOverride: ""
## @section Infisical backend parameters
## Documentation : https://infisical.com/docs/self-hosting/deployments/kubernetes
##
infisical:
autoDatabaseSchemaMigration: false
enabled: false
name: infisical
replicaCount: 3
backend:
enabled: true
name: backend
podAnnotations: {}
deploymentAnnotations:
secrets.infisical.com/auto-reload: "true"
replicaCount: 2
image:
repository: infisical/staging_infisical
tag: "latest"
pullPolicy: Always
kubeSecretRef: managed-backend-secret
service:
annotations: {}
type: ClusterIP
nodePort: ""
resources:
limits:
memory: 300Mi
deploymentAnnotations:
secrets.infisical.com/auto-reload: "true"
backendEnvironmentVariables: null
kubeSecretRef: "managed-secret"
## Mongo DB persistence
mongodb:
enabled: false
persistence:
enabled: false
## By default the backend will be connected to a Mongo instance within the cluster
## However, it is recommended to add a managed document DB connection string for production-use (DBaaS)
## Learn about connection string type here https://www.mongodb.com/docs/manual/reference/connection-string/
## e.g. "mongodb://<user>:<pass>@<host>:<port>/<database-name>"
mongodbConnection:
externalMongoDBConnectionString: ""
ingress:
## @param ingress.enabled Enable ingress
##
enabled: true
## @param ingress.ingressClassName Ingress class name
##
ingressClassName: nginx
## @param ingress.nginx.enabled Ingress controller
##
# nginx:
# enabled: true
## @param ingress.annotations Ingress annotations
##
annotations:
cert-manager.io/cluster-issuer: "letsencrypt-prod"
hostName: "gamma.infisical.com"
# annotations:
# kubernetes.io/ingress.class: "nginx"
# cert-manager.io/issuer: letsencrypt-nginx
hostName: gamma.infisical.com ## <- Replace with your own domain
tls:
- secretName: letsencrypt-prod
hosts:
- gamma.infisical.com
[]
# - secretName: letsencrypt-nginx
# hosts:
# - infisical.local
postgresql:
enabled: false
redis:
mailhog:
enabled: false

View File

@ -1,189 +0,0 @@
name: Build Binaries and Deploy
on:
workflow_dispatch:
inputs:
version:
description: "Version number"
required: true
type: string
defaults:
run:
working-directory: ./backend
jobs:
build-and-deploy:
strategy:
matrix:
arch: [x64, arm64]
os: [linux, win]
include:
- os: linux
target: node20-linux
- os: win
target: node20-win
runs-on: ${{ (matrix.arch == 'arm64' && matrix.os == 'linux') && 'ubuntu24-arm64' || 'ubuntu-latest' }}
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Set up Node.js
uses: actions/setup-node@v3
with:
node-version: 20
- name: Install pkg
run: npm install -g @yao-pkg/pkg
- name: Install dependencies (backend)
run: npm install
- name: Install dependencies (frontend)
run: npm install --prefix ../frontend
- name: Prerequisites for pkg
run: npm run binary:build
- name: Package into node binary
run: |
if [ "${{ matrix.os }}" != "linux" ]; then
pkg --no-bytecode --public-packages "*" --public --compress GZip --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }} .
else
pkg --no-bytecode --public-packages "*" --public --compress GZip --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core .
fi
# Set up .deb package structure (Debian/Ubuntu only)
- name: Set up .deb package structure
if: matrix.os == 'linux'
run: |
mkdir -p infisical-core/DEBIAN
mkdir -p infisical-core/usr/local/bin
cp ./binary/infisical-core infisical-core/usr/local/bin/
chmod +x infisical-core/usr/local/bin/infisical-core
- name: Create control file
if: matrix.os == 'linux'
run: |
cat <<EOF > infisical-core/DEBIAN/control
Package: infisical-core
Version: ${{ github.event.inputs.version }}
Section: base
Priority: optional
Architecture: ${{ matrix.arch == 'x64' && 'amd64' || matrix.arch }}
Maintainer: Infisical <daniel@infisical.com>
Description: Infisical Core standalone executable (app.infisical.com)
EOF
# Build .deb file (Debian/Ubuntu only)
- name: Build .deb package
if: matrix.os == 'linux'
run: |
dpkg-deb --build infisical-core
mv infisical-core.deb ./binary/infisical-core-${{matrix.arch}}.deb
### RPM
# Set up .rpm package structure
- name: Set up .rpm package structure
if: matrix.os == 'linux'
run: |
mkdir -p infisical-core-rpm/usr/local/bin
cp ./binary/infisical-core infisical-core-rpm/usr/local/bin/
chmod +x infisical-core-rpm/usr/local/bin/infisical-core
# Install RPM build tools
- name: Install RPM build tools
if: matrix.os == 'linux'
run: sudo apt-get update && sudo apt-get install -y rpm
# Create .spec file for RPM
- name: Create .spec file for RPM
if: matrix.os == 'linux'
run: |
cat <<EOF > infisical-core.spec
%global _enable_debug_package 0
%global debug_package %{nil}
%global __os_install_post /usr/lib/rpm/brp-compress %{nil}
Name: infisical-core
Version: ${{ github.event.inputs.version }}
Release: 1%{?dist}
Summary: Infisical Core standalone executable
License: Proprietary
URL: https://app.infisical.com
%description
Infisical Core standalone executable (app.infisical.com)
%install
mkdir -p %{buildroot}/usr/local/bin
cp %{_sourcedir}/infisical-core %{buildroot}/usr/local/bin/
%files
/usr/local/bin/infisical-core
%pre
%post
%preun
%postun
EOF
# Build .rpm file
- name: Build .rpm package
if: matrix.os == 'linux'
run: |
# Create necessary directories
mkdir -p rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS}
# Copy the binary directly to SOURCES
cp ./binary/infisical-core rpmbuild/SOURCES/
# Run rpmbuild with verbose output
rpmbuild -vv -bb \
--define "_topdir $(pwd)/rpmbuild" \
--define "_sourcedir $(pwd)/rpmbuild/SOURCES" \
--define "_rpmdir $(pwd)/rpmbuild/RPMS" \
--target ${{ matrix.arch == 'x64' && 'x86_64' || 'aarch64' }} \
infisical-core.spec
# Try to find the RPM file
find rpmbuild -name "*.rpm"
# Move the RPM file if found
if [ -n "$(find rpmbuild -name '*.rpm')" ]; then
mv $(find rpmbuild -name '*.rpm') ./binary/infisical-core-${{matrix.arch}}.rpm
else
echo "RPM file not found!"
exit 1
fi
- uses: actions/setup-python@v4
with:
python-version: "3.x" # Specify the Python version you need
- name: Install Python dependencies
run: |
python -m pip install --upgrade pip
pip install --upgrade cloudsmith-cli
# Publish .deb file to Cloudsmith (Debian/Ubuntu only)
- name: Publish to Cloudsmith (Debian/Ubuntu)
if: matrix.os == 'linux'
working-directory: ./backend
run: cloudsmith push deb --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.deb
# Publish .rpm file to Cloudsmith (Red Hat-based systems only)
- name: Publish .rpm to Cloudsmith
if: matrix.os == 'linux'
working-directory: ./backend
run: cloudsmith push rpm --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.rpm
# Publish .exe file to Cloudsmith (Windows only)
- name: Publish to Cloudsmith (Windows)
if: matrix.os == 'win'
working-directory: ./backend
run: cloudsmith push raw infisical/infisical-core ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }}.exe --republish --no-wait-for-sync --version ${{ github.event.inputs.version }} --api-key ${{ secrets.CLOUDSMITH_API_KEY }}
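
A quick local sanity check of the packages this workflow builds, using the file names produced by the mv commands above (x64 shown; dpkg-deb and rpm are standard tools):

dpkg-deb --info ./binary/infisical-core-x64.deb       # show .deb control metadata
dpkg-deb --contents ./binary/infisical-core-x64.deb   # confirm /usr/local/bin/infisical-core is packaged
rpm -qpl ./binary/infisical-core-x64.rpm              # list .rpm contents without installing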

View File

@ -3,7 +3,6 @@ on:
push:
tags:
- "infisical/v*.*.*"
- "!infisical/v*.*.*-postgres"
jobs:
backend-image:
@ -41,7 +40,6 @@ jobs:
load: true
context: backend
tags: infisical/infisical:test
platforms: linux/amd64,linux/arm64
- name: ⏻ Spawn backend container and dependencies
run: |
docker compose -f .github/resources/docker-compose.be-test.yml up --wait --quiet-pull
@ -93,7 +91,6 @@ jobs:
project: 64mmf0n610
context: frontend
tags: infisical/frontend:test
platforms: linux/amd64,linux/arm64
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
NEXT_INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}

View File

@ -1,38 +0,0 @@
name: Build patroni
on: [workflow_dispatch]
jobs:
patroni-image:
name: Build patroni
runs-on: ubuntu-latest
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
with:
repository: 'zalando/patroni'
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 🏗️ Build backend and push to docker hub
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
file: Dockerfile
tags: |
infisical/patroni:${{ steps.commit.outputs.short }}
infisical/patroni:latest
platforms: linux/amd64,linux/arm64

99
.github/workflows/build-staging-img.yml vendored Normal file
View File

@ -0,0 +1,99 @@
name: Build, Publish and Deploy to Gamma
on: [workflow_dispatch]
jobs:
infisical-image:
name: Build backend image
runs-on: ubuntu-latest
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
# - name: 🧪 Run tests
# run: npm run test:ci
# working-directory: backend
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 📦 Build backend and export to Docker
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
load: true
context: .
file: Dockerfile.standalone-infisical
tags: infisical/infisical:test
- name: ⏻ Spawn backend container and dependencies
run: |
docker compose -f .github/resources/docker-compose.be-test.yml up --wait --quiet-pull
- name: 🧪 Test backend image
run: |
./.github/resources/healthcheck.sh infisical-backend-test
- name: ⏻ Shut down backend container and dependencies
run: |
docker compose -f .github/resources/docker-compose.be-test.yml down
- name: 🏗️ Build backend and push
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
file: Dockerfile.standalone-infisical
tags: |
infisical/staging_infisical:${{ steps.commit.outputs.short }}
infisical/staging_infisical:latest
platforms: linux/amd64,linux/arm64
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
gamma-deployment:
name: Deploy to gamma
runs-on: ubuntu-latest
needs: [infisical-image]
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install infisical helm chart
run: |
helm repo add infisical-helm-charts 'https://dl.cloudsmith.io/public/infisical/helm-charts/helm/charts/'
helm repo update
- name: Install kubectl
uses: azure/setup-kubectl@v3
- name: Install doctl
uses: digitalocean/action-doctl@v2
with:
token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }}
- name: Save DigitalOcean kubeconfig with short-lived credentials
run: doctl kubernetes cluster kubeconfig save --expiry-seconds 600 k8s-1-25-4-do-0-nyc1-1670645170179
- name: switch to gamma namespace
run: kubectl config set-context --current --namespace=gamma
- name: test kubectl
run: kubectl get ingress
- name: Download helm values to file and upgrade gamma deploy
run: |
wget https://raw.githubusercontent.com/Infisical/infisical/main/.github/values.yaml
helm upgrade infisical infisical-helm-charts/infisical --values values.yaml --wait --install
if [[ $(helm status infisical) == *"FAILED"* ]]; then
echo "Helm upgrade failed"
exit 1
else
echo "Helm upgrade was successful"
fi
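
The test step above calls ./.github/resources/healthcheck.sh, which is not part of this diff. A plausible sketch of such a script, polling Docker's health status for the container name passed as the first argument, might look like this (an assumption, not the actual file):

#!/usr/bin/env sh
# Sketch only: wait up to 60 seconds for the named container to report healthy.
container="$1"
elapsed=0
while [ "$elapsed" -lt 60 ]; do
  if [ "$(docker inspect --format '{{.State.Health.Status}}' "$container" 2>/dev/null)" = "healthy" ]; then
    echo "$container is healthy"
    exit 0
  fi
  sleep 2
  elapsed=$((elapsed + 2))
done
echo "$container did not become healthy in time" >&2
exit 1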

View File

@ -1,77 +0,0 @@
name: "Check API For Breaking Changes"
on:
pull_request:
types: [opened, synchronize]
paths:
- "backend/src/server/routes/**"
- "backend/src/ee/routes/**"
jobs:
check-be-api-changes:
name: Check API Changes
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: Checkout source
uses: actions/checkout@v3
# - name: Setup Node 20
# uses: actions/setup-node@v3
# with:
# node-version: "20"
# uncomment this when testing locally using nektos/act
- uses: KengoTODA/actions-setup-docker-compose@v1
if: ${{ env.ACT }}
name: Install `docker compose` for local simulations
with:
version: "2.14.2"
- name: 📦Build the latest image
run: docker build --tag infisical-api .
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
- name: Start the server
run: |
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
env:
REDIS_URL: redis://172.17.0.1:6379
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
JWT_AUTH_SECRET: something-random
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- uses: actions/setup-go@v5
with:
go-version: '1.21.5'
- name: Wait for container to be stable and check logs
run: |
SECONDS=0
HEALTHY=0
while [ $SECONDS -lt 60 ]; do
if docker ps | grep infisical-api | grep -q healthy; then
echo "Container is healthy."
HEALTHY=1
break
fi
echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"
docker logs infisical-api
sleep 2
SECONDS=$((SECONDS+2))
done
if [ $HEALTHY -ne 1 ]; then
echo "Container did not become healthy in time"
exit 1
fi
- name: Install openapi-diff
run: go install github.com/tufin/oasdiff@latest
- name: Running OpenAPI Spec diff action
run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
- name: cleanup
run: |
docker compose -f "docker-compose.dev.yml" down
docker stop infisical-api
docker remove infisical-api
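
The breaking-change gate reduces to an oasdiff comparison, so it can be reproduced locally against a running dev server with the same two commands the workflow uses:

go install github.com/tufin/oasdiff@latest
oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR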

View File

@ -0,0 +1,43 @@
name: "Check Backend Pull Request"
on:
pull_request:
types: [opened, synchronize]
paths:
- "backend/**"
- "!backend/README.md"
- "!backend/.*"
- "backend/.eslintrc.js"
jobs:
check-be-pr:
name: Check
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 🔧 Setup Node 16
uses: actions/setup-node@v3
with:
node-version: "16"
cache: "npm"
cache-dependency-path: backend/package-lock.json
- name: 📦 Install dependencies
run: npm ci --only-production
working-directory: backend
# - name: 🧪 Run tests
# run: npm run test:ci
# working-directory: backend
# - name: 📁 Upload test results
# uses: actions/upload-artifact@v3
# if: always()
# with:
# name: be-test-results
# path: |
# ./backend/reports
# ./backend/coverage
- name: 🏗️ Run build
run: npm run build
working-directory: backend

View File

@ -1,35 +0,0 @@
name: "Check Backend PR types and lint"
on:
pull_request:
types: [opened, synchronize]
paths:
- "backend/**"
- "!backend/README.md"
- "!backend/.*"
- "backend/.eslintrc.js"
jobs:
check-be-pr:
name: Check TS and Lint
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 🔧 Setup Node 20
uses: actions/setup-node@v3
with:
node-version: "20"
cache: "npm"
cache-dependency-path: backend/package-lock.json
- name: Install dependencies
run: npm install
working-directory: backend
- name: Run type check
run: npm run type:check
working-directory: backend
- name: Run lint check
run: npm run lint
working-directory: backend

View File

@ -0,0 +1,36 @@
name: Check Frontend Pull Request
on:
pull_request:
types: [opened, synchronize]
paths:
- "frontend/**"
- "!frontend/README.md"
- "!frontend/.*"
- "frontend/.eslintrc.js"
jobs:
check-fe-pr:
name: Check
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 🔧 Setup Node 16
uses: actions/setup-node@v3
with:
node-version: "16"
cache: "npm"
cache-dependency-path: frontend/package-lock.json
- name: 📦 Install dependencies
run: npm ci --only-production --ignore-scripts
working-directory: frontend
# -
# name: 🧪 Run tests
# run: npm run test:ci
# working-directory: frontend
- name: 🏗️ Run build
run: npm run build
working-directory: frontend

View File

@ -1,35 +0,0 @@
name: Check Frontend Type and Lint check
on:
pull_request:
types: [opened, synchronize]
paths:
- "frontend/**"
- "!frontend/README.md"
- "!frontend/.*"
- "frontend/.eslintrc.js"
jobs:
check-fe-ts-lint:
name: Check Frontend Type and Lint check
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 🔧 Setup Node 20
uses: actions/setup-node@v3
with:
node-version: "20"
cache: "npm"
cache-dependency-path: frontend/package-lock.json
- name: 📦 Install dependencies
run: npm install
working-directory: frontend
- name: 🏗️ Run Type check
run: npm run type:check
working-directory: frontend
- name: 🏗️ Run Lint check
run: npm run lint:fix
working-directory: frontend

View File

@ -1,25 +0,0 @@
name: Check migration file edited
on:
pull_request:
types: [opened, synchronize]
paths:
- 'backend/src/db/migrations/**'
jobs:
rename:
runs-on: ubuntu-latest
steps:
- name: Check out repository
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Check any migration files are modified, renamed or duplicated.
run: |
git diff --name-status HEAD^ HEAD backend/src/db/migrations | { grep '^M\|^R\|^C' || true; } | cut -f2 | xargs -r -n1 basename > edited_files.txt
if [ -s edited_files.txt ]; then
echo "Exiting migration files cannot be modified."
cat edited_files.txt
exit 1
fi

View File

@ -1,262 +0,0 @@
name: Deployment pipeline
on: [workflow_dispatch]
permissions:
id-token: write
contents: read
concurrency:
group: "infisical-core-deployment"
cancel-in-progress: true
jobs:
infisical-tests:
name: Integration tests
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
uses: ./.github/workflows/run-backend-tests.yml
infisical-image:
name: Build
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 🏗️ Build backend and push to docker hub
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
file: Dockerfile.standalone-infisical
tags: |
infisical/staging_infisical:${{ steps.commit.outputs.short }}
infisical/staging_infisical:latest
platforms: linux/amd64,linux/arm64
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.commit.outputs.short }}
gamma-deployment:
name: Deploy to gamma
runs-on: ubuntu-latest
needs: [infisical-image]
environment:
name: Gamma
steps:
- uses: twingate/github-action@v1
with:
# The Twingate Service Key used to connect Twingate to the proper service
# Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
#
# Required
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: us-east-1
role-to-assume: arn:aws:iam::905418227878:role/deploy-new-ecs-img
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-gamma-stage --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-gamma-stage
cluster: infisical-gamma-stage
wait-for-service-stability: true
production-us:
name: US production deploy
runs-on: ubuntu-latest
needs: [gamma-deployment]
environment:
name: Production
steps:
- uses: twingate/github-action@v1
with:
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
AUDIT_LOGS_DB_CONNECTION_URI: ${{ secrets.AUDIT_LOGS_DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: us-east-1
role-to-assume: arn:aws:iam::381492033652:role/gha-make-prod-deployment
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core-platform
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-platform
cluster: infisical-core-platform
wait-for-service-stability: true
- name: Post slack message
uses: slackapi/slack-github-action@v2.0.0
with:
webhook: ${{ secrets.SLACK_DEPLOYMENT_WEBHOOK_URL }}
webhook-type: incoming-webhook
payload: |
text: "*Deployment Status Update*: ${{ job.status }}"
blocks:
- type: "section"
text:
type: "mrkdwn"
text: "*Deployment Status Update*: ${{ job.status }}"
- type: "section"
fields:
- type: "mrkdwn"
text: "*Application:*\nInfisical Core"
- type: "mrkdwn"
text: "*Instance Type:*\nShared Infisical Cloud"
- type: "section"
fields:
- type: "mrkdwn"
text: "*Region:*\nUS"
- type: "mrkdwn"
text: "*Git Tag:*\n<https://github.com/Infisical/infisical/commit/${{ steps.commit.outputs.short }}>"
production-eu:
name: EU production deploy
runs-on: ubuntu-latest
needs: [production-us]
environment:
name: production-eu
steps:
- uses: twingate/github-action@v1
with:
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: eu-central-1
role-to-assume: arn:aws:iam::345594589636:role/gha-make-prod-deployment
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core-platform
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-platform
cluster: infisical-core-platform
wait-for-service-stability: true
- name: Post slack message
uses: slackapi/slack-github-action@v2.0.0
with:
webhook: ${{ secrets.SLACK_DEPLOYMENT_WEBHOOK_URL }}
webhook-type: incoming-webhook
payload: |
text: "*Deployment Status Update*: ${{ job.status }}"
blocks:
- type: "section"
text:
type: "mrkdwn"
text: "*Deployment Status Update*: ${{ job.status }}"
- type: "section"
fields:
- type: "mrkdwn"
text: "*Application:*\nInfisical Core"
- type: "mrkdwn"
text: "*Instance Type:*\nShared Infisical Cloud"
- type: "section"
fields:
- type: "mrkdwn"
text: "*Region:*\nEU"
- type: "mrkdwn"
text: "*Git Tag:*\n<https://github.com/Infisical/infisical/commit/${{ steps.commit.outputs.short }}>"

View File

@ -1,34 +0,0 @@
name: Generate Changelog
permissions:
contents: write
on:
workflow_dispatch:
push:
tags:
- "infisical/v*.*.*-postgres"
jobs:
generate_changelog:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-tags: true
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.12.0"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install requests openai
- name: Generate Changelog and Post to Slack
id: gen-changelog
run: python .github/resources/changelog-generator.py
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -1,22 +0,0 @@
name: Release Infisical Core Helm chart
on: [workflow_dispatch]
jobs:
release:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-infisical-core-helm-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

View File

@ -0,0 +1,22 @@
name: Release Helm Charts
on: [workflow_dispatch]
jobs:
release:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-to-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

View File

@ -1,115 +0,0 @@
name: Release standalone docker image
on:
push:
tags:
- "infisical/v*.*.*-postgres"
jobs:
infisical-tests:
name: Run tests before deployment
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
uses: ./.github/workflows/run-backend-tests.yml
infisical-standalone:
name: Build infisical standalone image postgres
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
- name: ☁️ Checkout source
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
- name: version output
run: |
echo "Output Value: ${{ steps.version.outputs.major }}"
echo "Output Value: ${{ steps.version.outputs.minor }}"
echo "Output Value: ${{ steps.version.outputs.patch }}"
echo "Output Value: ${{ steps.version.outputs.version }}"
echo "Output Value: ${{ steps.version.outputs.version_type }}"
echo "Output Value: ${{ steps.version.outputs.increment }}"
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 📦 Build backend and export to Docker
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
tags: |
infisical/infisical:latest-postgres
infisical/infisical:${{ steps.commit.outputs.short }}
infisical/infisical:${{ steps.extract_version.outputs.version }}
platforms: linux/amd64,linux/arm64
file: Dockerfile.standalone-infisical
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
infisical-fips-standalone:
name: Build infisical standalone image postgres
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
- name: ☁️ Checkout source
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
- name: version output
run: |
echo "Output Value: ${{ steps.version.outputs.major }}"
echo "Output Value: ${{ steps.version.outputs.minor }}"
echo "Output Value: ${{ steps.version.outputs.patch }}"
echo "Output Value: ${{ steps.version.outputs.version }}"
echo "Output Value: ${{ steps.version.outputs.version_type }}"
echo "Output Value: ${{ steps.version.outputs.increment }}"
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 📦 Build backend and export to Docker
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
tags: |
infisical/infisical-fips:latest-postgres
infisical/infisical-fips:${{ steps.commit.outputs.short }}
infisical/infisical-fips:${{ steps.extract_version.outputs.version }}
platforms: linux/amd64,linux/arm64
file: Dockerfile.fips.standalone-infisical
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
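
The Extract version from tag steps above rely on POSIX parameter expansion to strip the tag prefix; a small illustration with a hypothetical tag value:

GITHUB_REF_NAME="infisical/v0.90.1-postgres"
echo "${GITHUB_REF_NAME#infisical/}"   # -> v0.90.1-postgres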

View File

@ -0,0 +1,78 @@
name: Release standalone docker image
on:
push:
tags:
- "infisical/v*.*.*"
jobs:
infisical-standalone:
name: Build infisical standalone image
runs-on: ubuntu-latest
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
- name: ☁️ Checkout source
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
- uses: paulhatch/semantic-version@v5.0.2
id: version
with:
# The prefix to use to identify tags
tag_prefix: "infisical-standalone/v"
# A string which, if present in a git commit, indicates that a change represents a
# major (breaking) change, supports regular expressions wrapped with '/'
major_pattern: "(MAJOR)"
# Same as above except indicating a minor change, supports regular expressions wrapped with '/'
minor_pattern: "(MINOR)"
# A string to determine the format of the version output
version_format: "${major}.${minor}.${patch}-prerelease${increment}"
# Optional path to check for changes. If any changes are detected in the path the
# 'changed' output will be true. Enter multiple paths separated by spaces.
change_path: "backend,frontend"
# Prevents pre-v1.0.0 version from automatically incrementing the major version.
# If enabled, when the major version is 0, major releases will be treated as minor and minor as patch. Note that the version_type output is unchanged.
enable_prerelease_mode: true
# - name: 🧪 Run tests
# run: npm run test:ci
# working-directory: backend
- name: version output
run: |
echo "Output Value: ${{ steps.version.outputs.major }}"
echo "Output Value: ${{ steps.version.outputs.minor }}"
echo "Output Value: ${{ steps.version.outputs.patch }}"
echo "Output Value: ${{ steps.version.outputs.version }}"
echo "Output Value: ${{ steps.version.outputs.version_type }}"
echo "Output Value: ${{ steps.version.outputs.increment }}"
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 📦 Build backend and export to Docker
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
tags: |
infisical/infisical:latest
infisical/infisical:${{ steps.commit.outputs.short }}
infisical/infisical:${{ steps.extract_version.outputs.version }}
platforms: linux/amd64,linux/arm64
file: Dockerfile.standalone-infisical
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
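
For reference, the version_format configured in the paulhatch/semantic-version step above yields strings like the following (the component values are hypothetical):

echo "0.46.3-prerelease12"   # major=0, minor=46, patch=3, increment=12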

58
.github/workflows/release_build.yml vendored Normal file
View File

@ -0,0 +1,58 @@
name: Build and release CLI
on:
push:
# run only against tags
tags:
- "infisical-cli/v*.*.*"
permissions:
contents: write
# packages: write
# issues: write
jobs:
goreleaser:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3
with:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: libssl1.1 => libssl1.0-dev for OSXCross
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt update && apt-cache policy libssl1.0-dev
sudo apt-get install libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: latest
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- name: Publish to CloudSmith
run: sh cli/upload_to_cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

View File

@ -1,131 +0,0 @@
name: Build and release CLI
on:
workflow_dispatch:
push:
# run only against tags
tags:
- "infisical-cli/v*.*.*"
permissions:
contents: write
jobs:
cli-integration-tests:
name: Run tests before deployment
uses: ./.github/workflows/run-cli-tests.yml
secrets:
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
npm-release:
runs-on: ubuntu-20.04
env:
working-directory: ./npm
needs:
- cli-integration-tests
- goreleaser
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Extract version
run: |
VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
echo "Version extracted: $VERSION"
echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV
- name: Print version
run: echo ${{ env.CLI_VERSION }}
- name: Setup Node
uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
with:
node-version: 20
cache: "npm"
cache-dependency-path: ./npm/package-lock.json
- name: Install dependencies
working-directory: ${{ env.working-directory }}
run: npm install --ignore-scripts
- name: Set NPM version
working-directory: ${{ env.working-directory }}
run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version
- name: Setup NPM
working-directory: ${{ env.working-directory }}
run: |
echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc
echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Pack NPM
working-directory: ${{ env.working-directory }}
run: npm pack
- name: Publish NPM
working-directory: ${{ env.working-directory }}
run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
goreleaser:
runs-on: ubuntu-20.04
needs: [cli-integration-tests]
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3
with:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: libssl1.1 => libssl1.0-dev for OSXCross
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt update && apt-cache policy libssl1.0-dev
sudo apt-get install libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: v1.26.2-pro
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- name: Publish to CloudSmith
run: sh cli/upload_to_cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

View File

@ -1,4 +1,4 @@
name: Release image + Helm chart K8s Operator
name: Release Docker image for K8 operator
on:
push:
tags:
@ -35,18 +35,3 @@ jobs:
tags: |
infisical/kubernetes-operator:latest
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
- name: Checkout
uses: actions/checkout@v2
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

View File

@ -1,47 +0,0 @@
name: "Run backend tests"
on:
pull_request:
types: [opened, synchronize]
paths:
- "backend/**"
- "!backend/README.md"
- "!backend/.*"
- "backend/.eslintrc.js"
workflow_call:
jobs:
check-be-pr:
name: Run integration test
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- uses: KengoTODA/actions-setup-docker-compose@v1
if: ${{ env.ACT }}
name: Install `docker compose` for local simulations
with:
version: "2.14.2"
- name: 🔧 Setup Node 20
uses: actions/setup-node@v3
with:
node-version: "20"
cache: "npm"
cache-dependency-path: backend/package-lock.json
- name: Install dependencies
run: npm install
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
- name: Start integration test
run: npm run test:e2e
working-directory: backend
env:
REDIS_URL: redis://172.17.0.1:6379
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
AUTH_SECRET: something-random
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- name: cleanup
run: |
docker compose -f "docker-compose.dev.yml" down

View File

@ -1,55 +0,0 @@
name: Go CLI Tests
on:
pull_request:
types: [opened, synchronize]
paths:
- "cli/**"
workflow_dispatch:
workflow_call:
secrets:
CLI_TESTS_UA_CLIENT_ID:
required: true
CLI_TESTS_UA_CLIENT_SECRET:
required: true
CLI_TESTS_SERVICE_TOKEN:
required: true
CLI_TESTS_PROJECT_ID:
required: true
CLI_TESTS_ENV_SLUG:
required: true
CLI_TESTS_USER_EMAIL:
required: true
CLI_TESTS_USER_PASSWORD:
required: true
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE:
required: true
jobs:
test:
defaults:
run:
working-directory: ./cli
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Go
uses: actions/setup-go@v4
with:
go-version: "1.21.x"
- name: Install dependencies
run: go get .
- name: Test with the Go CLI
env:
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
# INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
run: go test -v -count=1 ./test

15
.gitignore vendored
View File

@ -1,12 +1,11 @@
# backend
node_modules
.env
.env.test
.env.dev
.env.gamma
.env.prod
.env.infisical
.env.migration
*~
*.swp
*.swo
@ -59,17 +58,7 @@ yarn-error.log*
# Infisical init
.infisical.json
.infisicalignore
# Editor specific
.vscode/*
.idea/*
frontend-build
*.tgz
cli/infisical-merge
cli/test/infisical-merge
/backend/binary
/npm/bin
frontend-build

View File

@ -190,34 +190,10 @@ dockers:
- dockerfile: docker/alpine
goos: linux
goarch: amd64
use: buildx
ids:
- all-other-builds
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
- "infisical/cli:latest-amd64"
build_flag_templates:
- "--pull"
- "--platform=linux/amd64"
- dockerfile: docker/alpine
goos: linux
goarch: amd64
use: buildx
ids:
- all-other-builds
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
- "infisical/cli:latest-arm64"
build_flag_templates:
- "--pull"
- "--platform=linux/arm64"
docker_manifests:
- name_template: "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}"
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
- name_template: "infisical/cli:latest"
image_templates:
- "infisical/cli:latest-amd64"
- "infisical/cli:latest-arm64"
- "infisical/cli:{{ .Version }}"
- "infisical/cli:{{ .Major }}.{{ .Minor }}"
- "infisical/cli:{{ .Major }}"
- "infisical/cli:latest"

View File

@ -1,12 +1,6 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
# Check if infisical is installed
if ! command -v infisical >/dev/null 2>&1; then
echo "\nError: Infisical CLI is not installed. Please install the Infisical CLI before comitting.\n You can refer to the documentation at https://infisical.com/docs/cli/overview\n\n"
exit 1
fi
npx lint-staged
infisical scan git-changes --staged -v


@ -1,10 +1 @@
.github/resources/docker-compose.be-test.yml:generic-api-key:16
frontend/src/views/Project/MembersPage/components/IdentityTab/components/IdentityRoleForm/IdentityRbacSection.tsx:generic-api-key:206
frontend/src/views/Project/MembersPage/components/IdentityTab/components/IdentityRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:304
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/MemberRbacSection.tsx:generic-api-key:206
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:292
docs/self-hosting/configuration/envars.mdx:generic-api-key:106
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
docs/mint.json:generic-api-key:651
backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134
docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:104


@ -2,6 +2,6 @@
Thanks for taking the time to contribute! 😃 🚀
Please refer to our [Contributing Guide](https://infisical.com/docs/contributing/getting-started/overview) for instructions on how to contribute.
Please refer to our [Contributing Guide](https://infisical.com/docs/contributing/overview) for instructions on how to contribute.
We also have some 🔥amazing🔥 merch for our contributors. Please reach out to tony@infisical.com for more info 👀


@ -1,181 +0,0 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key
FROM node:20-slim AS base
FROM base AS frontend-dependencies
WORKDIR /app
COPY frontend/package.json frontend/package-lock.json ./
# Install dependencies
RUN npm ci --only-production --ignore-scripts
# Rebuild the source code only when needed
FROM base AS frontend-builder
WORKDIR /app
# Copy dependencies
COPY --from=frontend-dependencies /app/node_modules ./node_modules
# Copy all files
COPY /frontend .
ENV NODE_ENV production
ARG POSTHOG_HOST
ENV VITE_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV VITE_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV VITE_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
# Build
RUN npm run build
# Production image
FROM base AS frontend-runner
WORKDIR /app
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./
USER non-root-user
##
## BACKEND
##
FROM base AS backend-build
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
WORKDIR /app
# Required for pkcs11js and ODBC
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
unixodbc \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc \
&& rm -rf /var/lib/apt/lists/*
# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
COPY backend/package*.json ./
RUN npm ci --only-production
COPY /backend .
COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
RUN npm i -D tsconfig-paths
RUN npm run build
# Production stage
FROM base AS backend-runner
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
WORKDIR /app
# Required for pkcs11js and ODBC
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
unixodbc \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc \
&& rm -rf /var/lib/apt/lists/*
# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
COPY backend/package*.json ./
RUN npm ci --only-production
COPY --from=backend-build /app .
RUN mkdir frontend-build
# Production stage
FROM base AS production
# Install necessary packages including ODBC
RUN apt-get update && apt-get install -y \
ca-certificates \
curl \
git \
python3 \
make \
g++ \
unixodbc \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc \
openssh-client \
&& rm -rf /var/lib/apt/lists/*
# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
&& apt-get update && apt-get install -y infisical=0.31.1 \
&& rm -rf /var/lib/apt/lists/*
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
# Give non-root-user permission to update SSL certs
RUN chown -R non-root-user /etc/ssl/certs
RUN chown non-root-user /etc/ssl/certs/ca-certificates.crt
RUN chmod -R u+rwx /etc/ssl/certs
RUN chmod u+rw /etc/ssl/certs/ca-certificates.crt
RUN chown non-root-user /usr/sbin/update-ca-certificates
RUN chmod u+rx /usr/sbin/update-ca-certificates
## set pre baked keys
ARG POSTHOG_API_KEY
ENV POSTHOG_API_KEY=$POSTHOG_API_KEY
ARG INTERCOM_ID=intercom-id
ENV INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
WORKDIR /
COPY --from=backend-runner /app /backend
COPY --from=frontend-runner /app ./backend/frontend-build
ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
WORKDIR /backend
ENV TELEMETRY_ENABLED true
EXPOSE 8080
EXPOSE 443
USER non-root-user
CMD ["./standalone-entrypoint.sh"]


@ -1,9 +1,8 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key
FROM node:20-alpine AS base
FROM node:16-alpine AS base
FROM base AS frontend-dependencies
@ -12,7 +11,7 @@ RUN apk add --no-cache libc6-compat
WORKDIR /app
COPY frontend/package.json frontend/package-lock.json ./
COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./
# Install dependencies
RUN npm ci --only-production --ignore-scripts
@ -27,16 +26,15 @@ COPY --from=frontend-dependencies /app/node_modules ./node_modules
COPY /frontend .
ENV NODE_ENV production
ENV NEXT_PUBLIC_ENV production
ARG POSTHOG_HOST
ENV VITE_POSTHOG_HOST $POSTHOG_HOST
ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV VITE_POSTHOG_API_KEY $POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV VITE_INTERCOM_ID $INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
# Build
RUN npm run build
@ -48,10 +46,19 @@ WORKDIR /app
RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 non-root-user
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./
RUN mkdir -p /app/.next/cache/images && chown non-root-user:nodejs /app/.next/cache/images
VOLUME /app/.next/cache/images
COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
COPY --from=frontend-builder /app/public ./public
RUN chown non-root-user:nodejs ./public/data
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static
USER non-root-user
ENV NEXT_TELEMETRY_DISABLED 1
##
## BACKEND
##
@ -61,23 +68,11 @@ RUN addgroup --system --gid 1001 nodejs \
WORKDIR /app
# Install all required dependencies for build
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
COPY backend/package*.json ./
RUN npm ci --only-production
COPY /backend .
COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
RUN npm i -D tsconfig-paths
RUN npm run build
# Production stage
@ -85,20 +80,6 @@ FROM base AS backend-runner
WORKDIR /app
# Install all required dependencies for runtime
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
COPY backend/package*.json ./
RUN npm ci --only-production
@ -108,71 +89,40 @@ RUN mkdir frontend-build
# Production stage
FROM base AS production
RUN apk add --upgrade --no-cache ca-certificates
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.31.1 && apk add --no-cache git
WORKDIR /
# Install all required runtime dependencies
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev \
bash \
curl \
git \
openssh
# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Setup user permissions
RUN addgroup --system --gid 1001 nodejs \
&& adduser --system --uid 1001 non-root-user
# Give non-root-user permission to update SSL certs
RUN chown -R non-root-user /etc/ssl/certs
RUN chown non-root-user /etc/ssl/certs/ca-certificates.crt
RUN chmod -R u+rwx /etc/ssl/certs
RUN chmod u+rw /etc/ssl/certs/ca-certificates.crt
RUN chown non-root-user /usr/sbin/update-ca-certificates
RUN chmod u+rx /usr/sbin/update-ca-certificates
## set pre baked keys
ARG POSTHOG_API_KEY
ENV POSTHOG_API_KEY=$POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY
ARG INTERCOM_ID=intercom-id
ENV INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
WORKDIR /
COPY --from=backend-runner /app /backend
COPY --from=frontend-runner /app ./backend/frontend-build
ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
WORKDIR /backend
ENV TELEMETRY_ENABLED true
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js
EXPOSE 8080
EXPOSE 443
USER non-root-user
CMD ["./standalone-entrypoint.sh"]


@ -5,31 +5,13 @@ push:
docker-compose -f docker-compose.yml push
up-dev:
docker compose -f docker-compose.dev.yml up --build
docker-compose -f docker-compose.dev.yml up --build
up-dev-ldap:
docker compose -f docker-compose.dev.yml --profile ldap up --build
up-dev-metrics:
docker compose -f docker-compose.dev.yml --profile metrics up --build
i-dev:
infisical run -- docker-compose -f docker-compose.dev.yml up --build
up-prod:
docker-compose -f docker-compose.prod.yml up --build
docker-compose -f docker-compose.yml up --build
down:
docker compose -f docker-compose.dev.yml down
reviewable-ui:
cd frontend && \
npm run lint:fix && \
npm run type:check
reviewable-api:
cd backend && \
npm run lint:fix && \
npm run type:check
reviewable: reviewable-ui reviewable-api
up-dev-sso:
docker compose -f docker-compose.dev.yml --profile sso up --build
docker-compose down

README.md

File diff suppressed because one or more lines are too long


@ -1,3 +1,2 @@
vitest-environment-infisical.ts
vitest.config.ts
vitest.e2e.config.ts
node_modules
built

backend/.eslintrc

@ -0,0 +1,41 @@
{
"parser": "@typescript-eslint/parser",
"plugins": [
"@typescript-eslint",
"unused-imports"
],
"extends": [
"eslint:recommended",
"plugin:@typescript-eslint/eslint-recommended",
"plugin:@typescript-eslint/recommended"
],
"rules": {
"no-empty-function": "off",
"@typescript-eslint/no-empty-function": "off",
"no-console": 2,
"quotes": [
"error",
"double",
{
"avoidEscape": true
}
],
"comma-dangle": [
"error",
"only-multiline"
],
"@typescript-eslint/no-unused-vars": "off",
"unused-imports/no-unused-imports": "error",
"@typescript-eslint/no-extra-semi": "off", // added to be able to push
"unused-imports/no-unused-vars": [
"warn",
{
"vars": "all",
"varsIgnorePattern": "^_",
"args": "after-used",
"argsIgnorePattern": "^_"
}
],
"sort-imports": 1
}
}


@ -1,74 +0,0 @@
/* eslint-env node */
module.exports = {
env: {
es6: true,
node: true
},
extends: [
"eslint:recommended",
"plugin:@typescript-eslint/recommended",
"plugin:@typescript-eslint/recommended-type-checked",
"airbnb-base",
"airbnb-typescript/base",
"plugin:prettier/recommended",
"prettier"
],
plugins: ["@typescript-eslint", "simple-import-sort", "import"],
parser: "@typescript-eslint/parser",
parserOptions: {
project: true,
sourceType: "module",
tsconfigRootDir: __dirname
},
root: true,
overrides: [
{
files: ["./e2e-test/**/*", "./src/db/migrations/**/*"],
rules: {
"@typescript-eslint/no-unsafe-member-access": "off",
"@typescript-eslint/no-unsafe-assignment": "off",
"@typescript-eslint/no-unsafe-argument": "off",
"@typescript-eslint/no-unsafe-return": "off",
"@typescript-eslint/no-unsafe-call": "off"
}
}
],
rules: {
"@typescript-eslint/no-empty-function": "off",
"@typescript-eslint/no-unsafe-enum-comparison": "off",
"no-void": "off",
"consistent-return": "off", // my style
"import/order": "off", // for simple-import-order
"import/prefer-default-export": "off", // why
"no-restricted-syntax": "off",
// importing rules
"simple-import-sort/exports": "error",
"import/first": "error",
"import/newline-after-import": "error",
"import/no-duplicates": "error",
"simple-import-sort/imports": [
"warn",
{
groups: [
// Side effect imports.
["^\\u0000"],
// Node.js builtins prefixed with `node:`.
["^node:"],
// Packages.
// Things that start with a letter (or digit or underscore), or `@` followed by a letter.
["^@?\\w"],
["^@app"],
["@lib"],
["@server"],
// Absolute imports and other imports such as Vue-style `@/foo`.
// Anything not matched in another group.
["^"],
// Relative imports.
// Anything that starts with a dot.
["^\\."]
]
}
]
}
};

backend/.gitignore

@ -1 +0,0 @@
dist

backend/.prettierrc

@ -0,0 +1,7 @@
{
"singleQuote": false,
"printWidth": 100,
"trailingComma": "none",
"tabWidth": 2,
"semi": true
}


@ -1,7 +0,0 @@
{
"singleQuote": false,
"printWidth": 120,
"trailingComma": "none",
"tabWidth": 2,
"semi": true
}


@ -1,24 +1,8 @@
# Build stage
FROM node:20-alpine AS build
FROM node:16-alpine AS build
WORKDIR /app
# Required for pkcs11js
RUN apk --update add \
python3 \
make \
g++ \
openssh
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
COPY package*.json ./
RUN npm ci --only-production
@ -26,29 +10,13 @@ COPY . .
RUN npm run build
# Production stage
FROM node:20-alpine
FROM node:16-alpine
WORKDIR /app
ENV npm_config_cache /home/node/.npm
COPY package*.json ./
RUN apk --update add \
python3 \
make \
g++
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN npm ci --only-production && npm cache clean --force
COPY --from=build /app .
@ -60,8 +28,6 @@ RUN apk add --no-cache bash curl && curl -1sLf \
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js
ENV HOST=0.0.0.0
EXPOSE 4000
CMD ["npm", "start"]
CMD ["node", "build/index.js"]


@ -1,70 +0,0 @@
FROM node:20-alpine
# ? Set up a test SoftHSM module. In production a real HSM is used.
ARG SOFTHSM2_VERSION=2.5.0
ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
SOFTHSM2_SOURCES=/tmp/softhsm2
# install build dependencies including python3 (required for pkcs11js and, in part, for the TDS driver)
RUN apk --update add \
alpine-sdk \
autoconf \
automake \
git \
libtool \
openssl-dev \
python3 \
make \
g++ \
openssh
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# build and install SoftHSM2
RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
WORKDIR ${SOFTHSM2_SOURCES}
RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
&& sh autogen.sh \
&& ./configure --prefix=/usr/local --disable-gost \
&& make \
&& make install
WORKDIR /root
RUN rm -fr ${SOFTHSM2_SOURCES}
# install pkcs11-tool
RUN apk --update add opensc
RUN softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
# ? App setup
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.8.1 && apk add --no-cache git
WORKDIR /app
COPY package.json package.json
COPY package-lock.json package-lock.json
RUN npm install
COPY . .
ENV HOST=0.0.0.0
CMD ["npm", "run", "dev:docker"]


@ -1,4 +0,0 @@
{
"presets": ["@babel/preset-env", "@babel/preset-react"],
"plugins": ["@babel/plugin-syntax-import-attributes", "babel-plugin-transform-import-meta"]
}


@ -1,37 +0,0 @@
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { Lock } from "@app/lib/red-lock";
export const mockKeyStore = (): TKeyStoreFactory => {
const store: Record<string, string | number | Buffer> = {};
return {
setItem: async (key, value) => {
store[key] = value;
return "OK";
},
setItemWithExpiry: async (key, value) => {
store[key] = value;
return "OK";
},
deleteItem: async (key) => {
delete store[key];
return 1;
},
getItem: async (key) => {
const value = store[key];
if (typeof value === "string") {
return value;
}
return null;
},
incrementBy: async () => {
return 1;
},
acquireLock: () => {
return Promise.resolve({
release: () => {}
}) as Promise<Lock>;
},
waitTillReady: async () => {}
};
};
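A minimal usage sketch of the keystore mock above, assuming only the interface visible in this file; the key and value strings are placeholders:

// exercise the in-memory keystore mock the way a unit test might
const keyStore = mockKeyStore();
await keyStore.setItem("example-key", "example-value"); // resolves to "OK"
const hit = await keyStore.getItem("example-key"); // "example-value"
const miss = await keyStore.getItem("missing-key"); // null -- only string values are returned
await keyStore.deleteItem("example-key"); // resolves to 1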


@ -1,31 +0,0 @@
import { TQueueServiceFactory } from "@app/queue";
export const mockQueue = (): TQueueServiceFactory => {
const queues: Record<string, unknown> = {};
const workers: Record<string, unknown> = {};
const job: Record<string, unknown> = {};
const events: Record<string, unknown> = {};
return {
queue: async (name, jobData) => {
job[name] = jobData;
},
queuePg: async () => {},
initialize: async () => {},
shutdown: async () => undefined,
stopRepeatableJob: async () => true,
start: (name, jobFn) => {
queues[name] = jobFn;
workers[name] = jobFn;
},
startPg: async () => {},
listen: (name, event) => {
events[name] = event;
},
getRepeatableJobs: async () => [],
clearQueue: async () => {},
stopJobById: async () => {},
stopRepeatableJobByJobId: async () => true,
stopRepeatableJobByKey: async () => true
};
};
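A similar hedged sketch for the queue mock: it only records handlers and payloads, so no job ever runs in the background. The queue name and payload below are placeholders, and the real service types them more strictly than shown here:

// register a handler and enqueue a job against the mock; neither has side effects
const queueService = mockQueue();
queueService.start("example-queue", async () => {}); // handler is stored, never invoked
await queueService.queue("example-queue", { id: "example-job" }); // payload is stored, never processed
await queueService.shutdown();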


@ -1,13 +0,0 @@
import { TSmtpSendMail, TSmtpService } from "@app/services/smtp/smtp-service";
export const mockSmtpServer = (): TSmtpService => {
const storage: TSmtpSendMail[] = [];
return {
sendMail: async (data) => {
storage.push(data);
},
verify: async () => {
return true;
}
};
};


@ -1,71 +0,0 @@
import { OrgMembershipRole } from "@app/db/schemas";
import { seedData1 } from "@app/db/seed-data";
export const createIdentity = async (name: string, role: string) => {
const createIdentityRes = await testServer.inject({
method: "POST",
url: "/api/v1/identities",
body: {
name,
role,
organizationId: seedData1.organization.id
},
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(createIdentityRes.statusCode).toBe(200);
return createIdentityRes.json().identity;
};
export const deleteIdentity = async (id: string) => {
const deleteIdentityRes = await testServer.inject({
method: "DELETE",
url: `/api/v1/identities/${id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(deleteIdentityRes.statusCode).toBe(200);
return deleteIdentityRes.json().identity;
};
describe("Identity v1", async () => {
test("Create identity", async () => {
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
expect(newIdentity.name).toBe("mac1");
expect(newIdentity.authMethods).toEqual([]);
await deleteIdentity(newIdentity.id);
});
test("Update identity", async () => {
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
expect(newIdentity.name).toBe("mac1");
expect(newIdentity.authMethods).toEqual([]);
const updatedIdentity = await testServer.inject({
method: "PATCH",
url: `/api/v1/identities/${newIdentity.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
name: "updated-mac-1",
role: OrgMembershipRole.Member
}
});
expect(updatedIdentity.statusCode).toBe(200);
expect(updatedIdentity.json().identity.name).toBe("updated-mac-1");
await deleteIdentity(newIdentity.id);
});
test("Delete Identity", async () => {
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
const deletedIdentity = await deleteIdentity(newIdentity.id);
expect(deletedIdentity.name).toBe("mac1");
});
});


@ -1,44 +0,0 @@
import jsrp from "jsrp";
import { seedData1 } from "@app/db/seed-data";
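// The two tests below walk an SRP-style login: login1 exchanges the client's public key
// for a salt and server public key, the client derives a proof (M1) from them, and
// login2 trades that proof for an auth token.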
describe("Login V1 Router", async () => {
// eslint-disable-next-line
const client = new jsrp.client();
await new Promise((resolve) => {
client.init({ username: seedData1.email, password: seedData1.password }, () => resolve(null));
});
let clientProof: string;
test("Login first phase", async () => {
const res = await testServer.inject({
method: "POST",
url: "/api/v3/auth/login1",
body: {
email: "test@localhost.local",
clientPublicKey: client.getPublicKey()
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("serverPublicKey");
expect(payload).toHaveProperty("salt");
client.setSalt(payload.salt);
client.setServerPublicKey(payload.serverPublicKey);
clientProof = client.getProof(); // called M1
});
test("Login second phase", async () => {
const res = await testServer.inject({
method: "POST",
url: "/api/v3/auth/login2",
body: {
email: seedData1.email,
clientProof
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("token");
});
});


@ -1,19 +0,0 @@
import { seedData1 } from "@app/db/seed-data";
describe("Org V1 Router", async () => {
test("GET Org list", async () => {
const res = await testServer.inject({
method: "GET",
url: "/api/v1/organization",
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("organizations");
expect(payload).toEqual({
organizations: [expect.objectContaining({ name: seedData1.organization.name })]
});
});
});


@ -1,132 +0,0 @@
import { seedData1 } from "@app/db/seed-data";
import { DEFAULT_PROJECT_ENVS } from "@app/db/seeds/3-project";
const createProjectEnvironment = async (name: string, slug: string) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/workspace/${seedData1.project.id}/environments`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
name,
slug
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("environment");
return payload.environment;
};
const deleteProjectEnvironment = async (envId: string) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/workspace/${seedData1.project.id}/environments/${envId}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("environment");
return payload.environment;
};
describe("Project Environment Router", async () => {
test("Get default environments", async () => {
const res = await testServer.inject({
method: "GET",
url: `/api/v1/workspace/${seedData1.project.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("workspace");
// check for default environments
expect(payload).toEqual({
workspace: expect.objectContaining({
name: seedData1.project.name,
id: seedData1.project.id,
slug: seedData1.project.slug,
environments: expect.arrayContaining([
expect.objectContaining(DEFAULT_PROJECT_ENVS[0]),
expect.objectContaining(DEFAULT_PROJECT_ENVS[1]),
expect.objectContaining(DEFAULT_PROJECT_ENVS[2])
])
})
});
// ensure only the three default environments exist
expect(payload.workspace.environments.length).toBe(3);
});
const mockProjectEnv = { name: "temp", slug: "temp" }; // id will be filled in create op
test("Create environment", async () => {
const newEnvironment = await createProjectEnvironment(mockProjectEnv.name, mockProjectEnv.slug);
expect(newEnvironment).toEqual(
expect.objectContaining({
id: expect.any(String),
name: mockProjectEnv.name,
slug: mockProjectEnv.slug,
projectId: seedData1.project.id,
position: DEFAULT_PROJECT_ENVS.length + 1,
createdAt: expect.any(String),
updatedAt: expect.any(String)
})
);
await deleteProjectEnvironment(newEnvironment.id);
});
test("Update environment", async () => {
const newEnvironment = await createProjectEnvironment(mockProjectEnv.name, mockProjectEnv.slug);
const updatedName = { name: "temp#2", slug: "temp2" };
const res = await testServer.inject({
method: "PATCH",
url: `/api/v1/workspace/${seedData1.project.id}/environments/${newEnvironment.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
name: updatedName.name,
slug: updatedName.slug,
position: 1
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("environment");
expect(payload.environment).toEqual(
expect.objectContaining({
id: newEnvironment.id,
name: updatedName.name,
slug: updatedName.slug,
projectId: seedData1.project.id,
position: 1,
createdAt: expect.any(String),
updatedAt: expect.any(String)
})
);
await deleteProjectEnvironment(newEnvironment.id);
});
test("Delete environment", async () => {
const newEnvironment = await createProjectEnvironment(mockProjectEnv.name, mockProjectEnv.slug);
const deletedProjectEnvironment = await deleteProjectEnvironment(newEnvironment.id);
expect(deletedProjectEnvironment).toEqual(
expect.objectContaining({
id: deletedProjectEnvironment.id,
name: mockProjectEnv.name,
slug: mockProjectEnv.slug,
position: 5,
createdAt: expect.any(String),
updatedAt: expect.any(String)
})
);
});
});


@ -1,36 +0,0 @@
import { seedData1 } from "@app/db/seed-data";
import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
const createPolicy = async (dto: { name: string; secretPath: string; approvers: {type: ApproverType.User, id: string}[]; approvals: number }) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/secret-approvals`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
name: dto.name,
secretPath: dto.secretPath,
approvers: dto.approvers,
approvals: dto.approvals
}
});
expect(res.statusCode).toBe(200);
return res.json().approval;
};
describe("Secret approval policy router", async () => {
test("Create policy", async () => {
const policy = await createPolicy({
secretPath: "/",
approvals: 1,
approvers: [{id:seedData1.id, type: ApproverType.User}],
name: "test-policy"
});
expect(policy.name).toBe("test-policy");
});
});


@ -1,165 +0,0 @@
import { seedData1 } from "@app/db/seed-data";
const createFolder = async (dto: { path: string; name: string }) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
name: dto.name,
path: dto.path
}
});
expect(res.statusCode).toBe(200);
return res.json().folder;
};
const deleteFolder = async (dto: { path: string; id: string }) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/folders/${dto.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: dto.path
}
});
expect(res.statusCode).toBe(200);
return res.json().folder;
};
describe("Secret Folder Router", async () => {
test.each([
{ name: "folder1", path: "/" }, // one in root
{ name: "folder1", path: "/level1/level2" }, // then create a deep one creating intermediate ones
{ name: "folder2", path: "/" },
{ name: "folder1", path: "/level1/level2" } // this should not create folder return same thing
])("Create folder $name in $path", async ({ name, path }) => {
const createdFolder = await createFolder({ path, name });
// verify the shape of the created folder
expect(createdFolder).toEqual(
expect.objectContaining({
name,
id: expect.any(String)
})
);
await deleteFolder({ path, id: createdFolder.id });
});
test.each([
{
path: "/",
expected: {
folders: [{ name: "folder1" }, { name: "level1" }, { name: "folder2" }],
length: 3
}
},
{ path: "/level1/level2", expected: { folders: [{ name: "folder1" }], length: 1 } }
])("Get folders $path", async ({ path, expected }) => {
const newFolders = await Promise.all(expected.folders.map(({ name }) => createFolder({ name, path })));
const res = await testServer.inject({
method: "GET",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
query: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("folders");
expect(payload.folders.length >= expected.folders.length).toBeTruthy();
expect(payload).toEqual({
folders: expect.arrayContaining(expected.folders.map((el) => expect.objectContaining(el)))
});
await Promise.all(newFolders.map(({ id }) => deleteFolder({ path, id })));
});
test("Update a deep folder", async () => {
const newFolder = await createFolder({ name: "folder-updated", path: "/level1/level2" });
expect(newFolder).toEqual(
expect.objectContaining({
id: expect.any(String),
name: "folder-updated"
})
);
const resUpdatedFolders = await testServer.inject({
method: "GET",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
query: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/level1/level2"
}
});
expect(resUpdatedFolders.statusCode).toBe(200);
const updatedFolderList = JSON.parse(resUpdatedFolders.payload);
expect(updatedFolderList).toHaveProperty("folders");
expect(updatedFolderList.folders[0].name).toEqual("folder-updated");
await deleteFolder({ path: "/level1/level2", id: newFolder.id });
});
test("Delete a deep folder", async () => {
const newFolder = await createFolder({ name: "folder-updated", path: "/level1/level2" });
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/folders/${newFolder.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/level1/level2"
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("folder");
expect(payload.folder).toEqual(
expect.objectContaining({
id: expect.any(String),
name: "folder-updated"
})
);
const resUpdatedFolders = await testServer.inject({
method: "GET",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
query: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/level1/level2"
}
});
expect(resUpdatedFolders.statusCode).toBe(200);
const updatedFolderList = JSON.parse(resUpdatedFolders.payload);
expect(updatedFolderList).toHaveProperty("folders");
expect(updatedFolderList.folders.length).toEqual(0);
});
});


@ -1,808 +0,0 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
import { seedData1 } from "@app/db/seed-data";
describe("Secret Import Router", async () => {
test.each([
{ importEnv: "prod", importPath: "/" }, // one in root
{ importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones
])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
// create the import and verify the response shape
const payload = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath,
importEnv
});
expect(payload).toEqual(
expect.objectContaining({
id: expect.any(String),
importPath,
importEnv: expect.objectContaining({
name: expect.any(String),
slug: importEnv,
id: expect.any(String)
})
})
);
await deleteSecretImport({
id: payload.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
});
test("Get secret imports", async () => {
const createdImport1 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: "/",
importEnv: "prod"
});
const createdImport2 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: "/",
importEnv: "staging"
});
const res = await testServer.inject({
method: "GET",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
query: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/"
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImports");
expect(payload.secretImports.length).toBe(2);
expect(payload.secretImports).toEqual(
expect.arrayContaining([
expect.objectContaining({
id: expect.any(String),
importPath: "/",
importEnv: expect.objectContaining({
name: expect.any(String),
slug: "prod",
id: expect.any(String)
})
}),
expect.objectContaining({
id: expect.any(String),
importPath: "/",
importEnv: expect.objectContaining({
name: expect.any(String),
slug: "staging",
id: expect.any(String)
})
})
])
);
await deleteSecretImport({
id: createdImport1.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
await deleteSecretImport({
id: createdImport2.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
});
test("Update secret import position", async () => {
const prodImportDetails = { path: "/", envSlug: "prod" };
const stagingImportDetails = { path: "/", envSlug: "staging" };
const createdImport1 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: prodImportDetails.path,
importEnv: prodImportDetails.envSlug
});
const createdImport2 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: stagingImportDetails.path,
importEnv: stagingImportDetails.envSlug
});
const updateImportRes = await testServer.inject({
method: "PATCH",
url: `/api/v1/secret-imports/${createdImport1.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/",
import: {
position: 2
}
}
});
expect(updateImportRes.statusCode).toBe(200);
const payload = JSON.parse(updateImportRes.payload);
expect(payload).toHaveProperty("secretImport");
// verify the import now reflects the updated position
expect(payload.secretImport).toEqual(
expect.objectContaining({
id: expect.any(String),
importPath: expect.any(String),
position: 2,
importEnv: expect.objectContaining({
name: expect.any(String),
slug: expect.stringMatching(prodImportDetails.envSlug),
id: expect.any(String)
})
})
);
const secretImportsListRes = await testServer.inject({
method: "GET",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
query: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/"
}
});
expect(secretImportsListRes.statusCode).toBe(200);
const secretImportList = JSON.parse(secretImportsListRes.payload);
expect(secretImportList).toHaveProperty("secretImports");
expect(secretImportList.secretImports[1].id).toEqual(createdImport1.id);
expect(secretImportList.secretImports[0].id).toEqual(createdImport2.id);
await deleteSecretImport({
id: createdImport1.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
await deleteSecretImport({
id: createdImport2.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
});
test("Delete secret import position", async () => {
const createdImport1 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: "/",
importEnv: "prod"
});
const createdImport2 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: "/",
importEnv: "staging"
});
const deletedImport = await deleteSecretImport({
id: createdImport1.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
// verify the shape of the deleted import
expect(deletedImport).toEqual(
expect.objectContaining({
id: expect.any(String),
importPath: "/",
importEnv: expect.objectContaining({
name: expect.any(String),
slug: "prod",
id: expect.any(String)
})
})
);
const secretImportsListRes = await testServer.inject({
method: "GET",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
query: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/"
}
});
expect(secretImportsListRes.statusCode).toBe(200);
const secretImportList = JSON.parse(secretImportsListRes.payload);
expect(secretImportList).toHaveProperty("secretImports");
expect(secretImportList.secretImports.length).toEqual(1);
expect(secretImportList.secretImports[0].position).toEqual(1);
await deleteSecretImport({
id: createdImport2.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
});
});
// dev <- stage <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
"Secret import waterfall pattern testing - %path",
({ path: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const devImportFromStage = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "staging"
});
const stageImportFromProd = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "prod"
});
return async () => {
await deleteSecretImport({
id: stageImportFromProd.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging",
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: devImportFromStage.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check one level imported secret exist", async () => {
await createSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(secret.secretKey).toBe("STAGING_KEY");
expect(secret.secretValue).toBe("stage-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "STAGING_KEY",
secretValue: "stage-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
});
test("Check two level imported secret exist", async () => {
await createSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
expect(secret.secretKey).toBe("PROD_KEY");
expect(secret.secretValue).toBe("prod-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "PROD_KEY",
secretValue: "prod-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
}
);
// dev <- stage, dev <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
"Secret import multiple destination to one source pattern testing - %path",
({ path: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const devImportFromStage = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "staging"
});
const devImportFromProd = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "prod"
});
return async () => {
await deleteSecretImport({
id: devImportFromProd.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: devImportFromStage.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check imported secret exist", async () => {
await createSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
await createSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(secret.secretKey).toBe("STAGING_KEY");
expect(secret.secretValue).toBe("stage-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "STAGING_KEY",
secretValue: "stage-value"
})
])
}),
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "PROD_KEY",
secretValue: "prod-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
await deleteSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
}
);
// dev -> stage, prod
describe.each([{ path: "/" }, { path: "/deep" }])(
"Secret import one source to multiple destination pattern testing - %path",
({ path: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const stageImportFromDev = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: seedData1.environment.slug
});
const prodImportFromDev = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: seedData1.environment.slug
});
return async () => {
await deleteSecretImport({
id: prodImportFromDev.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod",
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: stageImportFromDev.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging",
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check imported secret exist", async () => {
await createSecretV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
await createSecretV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
const stagingSecret = await getSecretByNameV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(stagingSecret.secretKey).toBe("STAGING_KEY");
expect(stagingSecret.secretValue).toBe("stage-value");
const prodSecret = await getSecretByNameV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
expect(prodSecret.secretKey).toBe("PROD_KEY");
expect(prodSecret.secretValue).toBe("prod-value");
await deleteSecretV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
await deleteSecretV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
}
);


@ -1,406 +0,0 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
import { seedData1 } from "@app/db/seed-data";
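// Replication runs asynchronously, so the tests below create the source secret, sleep
// long enough for the replication job to finish, and only then read the replicated
// value from the importing environment.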
// dev <- stage <- prod
describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])(
"Secret replication waterfall pattern testing - %secretPath",
({ secretPath: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const devImportFromStage = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "staging",
isReplication: true
});
const stageImportFromProd = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "prod",
isReplication: true
});
return async () => {
await deleteSecretImport({
id: stageImportFromProd.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging",
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: devImportFromStage.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check one level imported secret exist", async () => {
await createSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
// wait 10 seconds for replication to finish
await new Promise((resolve) => {
setTimeout(resolve, 10000); // time to breathe for db
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(secret.secretKey).toBe("STAGING_KEY");
expect(secret.secretValue).toBe("stage-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "STAGING_KEY",
secretValue: "stage-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
});
test("Check two level imported secret exist", async () => {
await createSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
// wait 10 seconds for replication to finish
await new Promise((resolve) => {
setTimeout(resolve, 10000); // time to breathe for db
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
expect(secret.secretKey).toBe("PROD_KEY");
expect(secret.secretValue).toBe("prod-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "PROD_KEY",
secretValue: "prod-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
},
{ timeout: 30000 }
);
// dev <- stage, dev <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
"Secret replication 1-N pattern testing - %path",
({ path: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const devImportFromStage = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "staging",
isReplication: true
});
const devImportFromProd = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "prod",
isReplication: true
});
return async () => {
await deleteSecretImport({
id: devImportFromProd.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: devImportFromStage.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check imported secret exist", async () => {
await createSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
await createSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
// wait 10 seconds for replication to finish
await new Promise((resolve) => {
setTimeout(resolve, 10000); // time to breathe for db
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(secret.secretKey).toBe("STAGING_KEY");
expect(secret.secretValue).toBe("stage-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "STAGING_KEY",
secretValue: "stage-value"
})
])
}),
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "PROD_KEY",
secretValue: "prod-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
await deleteSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
},
{ timeout: 30000 }
);

View File

@ -1,9 +0,0 @@
describe("Status V1 Router", async () => {
test("Simple check", async () => {
const res = await testServer.inject({
method: "GET",
url: "/api/status"
});
expect(res.statusCode).toBe(200);
});
});

View File

@ -1,579 +0,0 @@
import crypto from "node:crypto";
import { SecretType, TSecrets } from "@app/db/schemas";
import { decryptSecret, encryptSecret, getUserPrivateKey, seedData1 } from "@app/db/seed-data";
import { decryptAsymmetric, decryptSymmetric128BitHexKeyUTF8, encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
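// Helper: creates a project service token with the given scopes and permissions.
// It fetches the encrypted project key, decrypts it with the seeded user's private key,
// re-encrypts it under a fresh random key, and returns "<serviceToken>.<randomKey>" so
// callers can recover the project key the same way the CLI does.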
const createServiceToken = async (
scopes: { environment: string; secretPath: string }[],
permissions: ("read" | "write")[]
) => {
const projectKeyRes = await testServer.inject({
method: "GET",
url: `/api/v2/workspace/${seedData1.project.id}/encrypted-key`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
const projectKeyEnc = JSON.parse(projectKeyRes.payload);
const userInfoRes = await testServer.inject({
method: "GET",
url: "/api/v2/users/me",
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
const { user: userInfo } = JSON.parse(userInfoRes.payload);
const privateKey = await getUserPrivateKey(seedData1.password, userInfo);
const projectKey = decryptAsymmetric({
ciphertext: projectKeyEnc.encryptedKey,
nonce: projectKeyEnc.nonce,
publicKey: projectKeyEnc.sender.publicKey,
privateKey
});
const randomBytes = crypto.randomBytes(16).toString("hex");
const { ciphertext, iv, tag } = encryptSymmetric128BitHexKeyUTF8(projectKey, randomBytes);
const serviceTokenRes = await testServer.inject({
method: "POST",
url: "/api/v2/service-token",
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
name: "test-token",
workspaceId: seedData1.project.id,
scopes,
encryptedKey: ciphertext,
iv,
tag,
permissions,
expiresIn: null
}
});
expect(serviceTokenRes.statusCode).toBe(200);
const serviceTokenInfo = serviceTokenRes.json();
expect(serviceTokenInfo).toHaveProperty("serviceToken");
expect(serviceTokenInfo).toHaveProperty("serviceTokenData");
return `${serviceTokenInfo.serviceToken}.${randomBytes}`;
};
const deleteServiceToken = async () => {
const serviceTokenListRes = await testServer.inject({
method: "GET",
url: `/api/v1/workspace/${seedData1.project.id}/service-token-data`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(serviceTokenListRes.statusCode).toBe(200);
const serviceTokens = JSON.parse(serviceTokenListRes.payload).serviceTokenData as { name: string; id: string }[];
expect(serviceTokens.length).toBeGreaterThan(0);
const serviceTokenInfo = serviceTokens.find(({ name }) => name === "test-token");
expect(serviceTokenInfo).toBeDefined();
const deleteTokenRes = await testServer.inject({
method: "DELETE",
url: `/api/v2/service-token/${serviceTokenInfo?.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(deleteTokenRes.statusCode).toBe(200);
};
const createSecret = async (dto: {
projectKey: string;
path: string;
key: string;
value: string;
comment: string;
type?: SecretType;
token: string;
}) => {
const createSecretReqBody = {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
type: dto.type || SecretType.Shared,
secretPath: dto.path,
...encryptSecret(dto.projectKey, dto.key, dto.value, dto.comment)
};
const createSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/${dto.key}`,
headers: {
authorization: `Bearer ${dto.token}`
},
body: createSecretReqBody
});
expect(createSecRes.statusCode).toBe(200);
const createdSecretPayload = JSON.parse(createSecRes.payload);
expect(createdSecretPayload).toHaveProperty("secret");
return createdSecretPayload.secret;
};
const deleteSecret = async (dto: { path: string; key: string; token: string }) => {
const deleteSecRes = await testServer.inject({
method: "DELETE",
url: `/api/v3/secrets/${dto.key}`,
headers: {
authorization: `Bearer ${dto.token}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
secretPath: dto.path
}
});
expect(deleteSecRes.statusCode).toBe(200);
const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
expect(updatedSecretPayload).toHaveProperty("secret");
return updatedSecretPayload.secret;
};
describe("Service token secret ops", async () => {
let serviceToken = "";
let projectKey = "";
let folderId = "";
beforeAll(async () => {
serviceToken = await createServiceToken(
[{ secretPath: "/**", environment: seedData1.environment.slug }],
["read", "write"]
);
// this ensures CLI-style service token decryption works correctly
const serviceTokenInfoRes = await testServer.inject({
method: "GET",
url: "/api/v2/service-token",
headers: {
authorization: `Bearer ${serviceToken}`
}
});
expect(serviceTokenInfoRes.statusCode).toBe(200);
const serviceTokenInfo = serviceTokenInfoRes.json();
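// the trailing dot-delimited segment is the random key appended by createServiceToken;
// it decrypts the project key embedded in the service token data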
const serviceTokenParts = serviceToken.split(".");
projectKey = decryptSymmetric128BitHexKeyUTF8({
key: serviceTokenParts[3],
tag: serviceTokenInfo.tag,
ciphertext: serviceTokenInfo.encryptedKey,
iv: serviceTokenInfo.iv
});
// create a deep folder
const folderCreate = await testServer.inject({
method: "POST",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
name: "folder",
path: "/nested1/nested2"
}
});
expect(folderCreate.statusCode).toBe(200);
folderId = folderCreate.json().folder.id;
});
afterAll(async () => {
await deleteServiceToken();
// delete the deep folder created in beforeAll
const deleteFolder = await testServer.inject({
method: "DELETE",
url: `/api/v1/folders/${folderId}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/nested1/nested2"
}
});
expect(deleteFolder.statusCode).toBe(200);
});
const testSecrets = [
{
path: "/",
secret: {
key: "ST-SEC",
value: "something-secret",
comment: "some comment"
}
},
{
path: "/nested1/nested2/folder",
secret: {
key: "NESTED-ST-SEC",
value: "something-secret",
comment: "some comment"
}
}
];
const getSecrets = async (environment: string, secretPath = "/") => {
const res = await testServer.inject({
method: "GET",
url: `/api/v3/secrets`,
headers: {
authorization: `Bearer ${serviceToken}`
},
query: {
secretPath,
environment,
workspaceId: seedData1.project.id
}
});
const secrets: TSecrets[] = JSON.parse(res.payload).secrets || [];
return secrets.map((el) => ({ ...decryptSecret(projectKey, el), type: el.type }));
};
test.each(testSecrets)("Create secret in path $path", async ({ secret, path }) => {
const createdSecret = await createSecret({ projectKey, path, ...secret, token: serviceToken });
const decryptedSecret = decryptSecret(projectKey, createdSecret);
expect(decryptedSecret.key).toEqual(secret.key);
expect(decryptedSecret.value).toEqual(secret.value);
expect(decryptedSecret.comment).toEqual(secret.comment);
expect(decryptedSecret.version).toEqual(1);
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
key: secret.key,
value: secret.value,
type: SecretType.Shared
})
])
);
await deleteSecret({ path, key: secret.key, token: serviceToken });
});
test.each(testSecrets)("Get secret by name in path $path", async ({ secret, path }) => {
await createSecret({ projectKey, path, ...secret, token: serviceToken });
const getSecByNameRes = await testServer.inject({
method: "GET",
url: `/api/v3/secrets/${secret.key}`,
headers: {
authorization: `Bearer ${serviceToken}`
},
query: {
secretPath: path,
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug
}
});
expect(getSecByNameRes.statusCode).toBe(200);
const getSecretByNamePayload = JSON.parse(getSecByNameRes.payload);
expect(getSecretByNamePayload).toHaveProperty("secret");
const decryptedSecret = decryptSecret(projectKey, getSecretByNamePayload.secret);
expect(decryptedSecret.key).toEqual(secret.key);
expect(decryptedSecret.value).toEqual(secret.value);
expect(decryptedSecret.comment).toEqual(secret.comment);
await deleteSecret({ path, key: secret.key, token: serviceToken });
});
test.each(testSecrets)("Update secret in path $path", async ({ path, secret }) => {
await createSecret({ projectKey, path, ...secret, token: serviceToken });
const updateSecretReqBody = {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
type: SecretType.Shared,
secretPath: path,
...encryptSecret(projectKey, secret.key, "new-value", secret.comment)
};
const updateSecRes = await testServer.inject({
method: "PATCH",
url: `/api/v3/secrets/${secret.key}`,
headers: {
authorization: `Bearer ${serviceToken}`
},
body: updateSecretReqBody
});
expect(updateSecRes.statusCode).toBe(200);
const updatedSecretPayload = JSON.parse(updateSecRes.payload);
expect(updatedSecretPayload).toHaveProperty("secret");
const decryptedSecret = decryptSecret(projectKey, updatedSecretPayload.secret);
expect(decryptedSecret.key).toEqual(secret.key);
expect(decryptedSecret.value).toEqual("new-value");
expect(decryptedSecret.comment).toEqual(secret.comment);
// list secret should have updated value
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
key: secret.key,
value: "new-value",
type: SecretType.Shared
})
])
);
await deleteSecret({ path, key: secret.key, token: serviceToken });
});
test.each(testSecrets)("Delete secret in path $path", async ({ secret, path }) => {
await createSecret({ projectKey, path, ...secret, token: serviceToken });
const deletedSecret = await deleteSecret({ path, key: secret.key, token: serviceToken });
const decryptedSecret = decryptSecret(projectKey, deletedSecret);
expect(decryptedSecret.key).toEqual(secret.key);
// shared secret deletion should delete personal ones also
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.not.arrayContaining([
expect.objectContaining({
key: secret.key,
type: SecretType.Shared
})
])
);
});
test.each(testSecrets)("Bulk create secrets in path $path", async ({ secret, path }) => {
const createSharedSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/batch`,
headers: {
authorization: `Bearer ${serviceToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
secretPath: path,
secrets: Array.from(Array(5)).map((_e, i) => ({
secretName: `BULK-${secret.key}-${i + 1}`,
...encryptSecret(projectKey, `BULK-${secret.key}-${i + 1}`, secret.value, secret.comment)
}))
}
});
expect(createSharedSecRes.statusCode).toBe(200);
const createSharedSecPayload = JSON.parse(createSharedSecRes.payload);
expect(createSharedSecPayload).toHaveProperty("secrets");
// bulk ones should exist
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining(
Array.from(Array(5)).map((_e, i) =>
expect.objectContaining({
key: `BULK-${secret.key}-${i + 1}`,
type: SecretType.Shared
})
)
)
);
await Promise.all(
Array.from(Array(5)).map((_e, i) =>
deleteSecret({ path, token: serviceToken, key: `BULK-${secret.key}-${i + 1}` })
)
);
});
test.each(testSecrets)("Bulk create fail on existing secret in path $path", async ({ secret, path }) => {
await createSecret({ projectKey, ...secret, key: `BULK-${secret.key}-1`, path, token: serviceToken });
const createSharedSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/batch`,
headers: {
authorization: `Bearer ${serviceToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
secretPath: path,
secrets: Array.from(Array(5)).map((_e, i) => ({
secretName: `BULK-${secret.key}-${i + 1}`,
...encryptSecret(projectKey, `BULK-${secret.key}-${i + 1}`, secret.value, secret.comment)
}))
}
});
expect(createSharedSecRes.statusCode).toBe(400);
await deleteSecret({ path, key: `BULK-${secret.key}-1`, token: serviceToken });
});
test.each(testSecrets)("Bulk update secrets in path $path", async ({ secret, path }) => {
await Promise.all(
Array.from(Array(5)).map((_e, i) =>
createSecret({ projectKey, token: serviceToken, ...secret, key: `BULK-${secret.key}-${i + 1}`, path })
)
);
const updateSharedSecRes = await testServer.inject({
method: "PATCH",
url: `/api/v3/secrets/batch`,
headers: {
authorization: `Bearer ${serviceToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
secretPath: path,
secrets: Array.from(Array(5)).map((_e, i) => ({
secretName: `BULK-${secret.key}-${i + 1}`,
...encryptSecret(projectKey, `BULK-${secret.key}-${i + 1}`, "update-value", secret.comment)
}))
}
});
expect(updateSharedSecRes.statusCode).toBe(200);
const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
expect(updateSharedSecPayload).toHaveProperty("secrets");
// bulk ones should exist
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining(
Array.from(Array(5)).map((_e, i) =>
expect.objectContaining({
key: `BULK-${secret.key}-${i + 1}`,
value: "update-value",
type: SecretType.Shared
})
)
)
);
await Promise.all(
Array.from(Array(5)).map((_e, i) =>
deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}`, token: serviceToken })
)
);
});
test.each(testSecrets)("Bulk delete secrets in path $path", async ({ secret, path }) => {
await Promise.all(
Array.from(Array(5)).map((_e, i) =>
createSecret({ projectKey, token: serviceToken, ...secret, key: `BULK-${secret.key}-${i + 1}`, path })
)
);
const deletedSharedSecRes = await testServer.inject({
method: "DELETE",
url: `/api/v3/secrets/batch`,
headers: {
authorization: `Bearer ${serviceToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
secretPath: path,
secrets: Array.from(Array(5)).map((_e, i) => ({
secretName: `BULK-${secret.key}-${i + 1}`
}))
}
});
expect(deletedSharedSecRes.statusCode).toBe(200);
const deletedSecretPayload = JSON.parse(deletedSharedSecRes.payload);
expect(deletedSecretPayload).toHaveProperty("secrets");
// bulk ones should no longer exist
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.not.arrayContaining(
Array.from(Array(5)).map((_e, i) =>
expect.objectContaining({
key: `BULK-${secret.value}-${i + 1}`,
type: SecretType.Shared
})
)
)
);
});
});
describe("Service token fail cases", async () => {
test("Unauthorized secret path access", async () => {
const serviceToken = await createServiceToken(
[{ secretPath: "/", environment: seedData1.environment.slug }],
["read", "write"]
);
const fetchSecrets = await testServer.inject({
method: "GET",
url: "/api/v3/secrets",
query: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
secretPath: "/nested/deep"
},
headers: {
authorization: `Bearer ${serviceToken}`
}
});
expect(fetchSecrets.statusCode).toBe(403);
expect(fetchSecrets.json().error).toBe("PermissionDenied");
await deleteServiceToken();
});
test("Unauthorized secret environment access", async () => {
const serviceToken = await createServiceToken(
[{ secretPath: "/", environment: seedData1.environment.slug }],
["read", "write"]
);
const fetchSecrets = await testServer.inject({
method: "GET",
url: "/api/v3/secrets",
query: {
workspaceId: seedData1.project.id,
environment: "prod",
secretPath: "/"
},
headers: {
authorization: `Bearer ${serviceToken}`
}
});
expect(fetchSecrets.statusCode).toBe(403);
expect(fetchSecrets.json().error).toBe("PermissionDenied");
await deleteServiceToken();
});
test("Unauthorized write operation", async () => {
const serviceToken = await createServiceToken(
[{ secretPath: "/", environment: seedData1.environment.slug }],
["read"]
);
const writeSecrets = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/NEW`,
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
type: SecretType.Shared,
secretPath: "/",
// the project key doesn't matter; the request fails on read-only access before encryption is validated
...encryptSecret(crypto.randomBytes(16).toString("hex"), "NEW", "value", "")
},
headers: {
authorization: `Bearer ${serviceToken}`
}
});
expect(writeSecrets.statusCode).toBe(403);
expect(writeSecrets.json().error).toBe("PermissionDenied");
// but read access should still work fine
const fetchSecrets = await testServer.inject({
method: "GET",
url: "/api/v3/secrets",
query: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
secretPath: "/"
},
headers: {
authorization: `Bearer ${serviceToken}`
}
});
expect(fetchSecrets.statusCode).toBe(200);
await deleteServiceToken();
});
});

View File

@ -1,86 +0,0 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretV2, deleteSecretV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
import { seedData1 } from "@app/db/seed-data";
describe("Secret Recursive Testing", async () => {
const projectId = seedData1.projectV3.id;
const folderAndSecretNames = [
{ name: "deep1", path: "/", expectedSecretCount: 4 },
{ name: "deep21", path: "/deep1", expectedSecretCount: 2 },
{ name: "deep3", path: "/deep1/deep2", expectedSecretCount: 1 },
{ name: "deep22", path: "/deep2", expectedSecretCount: 1 }
];
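// one secret (named after the folder) is created at each path above; expectedSecretCount is
// the number of secrets at or below that path when fetched recursively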
beforeAll(async () => {
const rootFolderIds: string[] = [];
for (const folder of folderAndSecretNames) {
// eslint-disable-next-line no-await-in-loop
const createdFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
secretPath: folder.path,
name: folder.name
});
if (folder.path === "/") {
rootFolderIds.push(createdFolder.id);
}
// eslint-disable-next-line no-await-in-loop
await createSecretV2({
secretPath: folder.path,
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
key: folder.name,
value: folder.name
});
}
return async () => {
await Promise.all(
rootFolderIds.map((id) =>
deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id,
workspaceId: projectId,
environmentSlug: "prod"
})
)
);
await deleteSecretV2({
authToken: jwtAuthToken,
secretPath: "/",
workspaceId: projectId,
environmentSlug: "prod",
key: folderAndSecretNames[0].name
});
};
});
test.each(folderAndSecretNames)("$path recursive secret fetching", async ({ path, expectedSecretCount }) => {
const secrets = await getSecretsV2({
authToken: jwtAuthToken,
secretPath: path,
workspaceId: projectId,
environmentSlug: "prod",
recursive: true
});
expect(secrets.secrets.length).toEqual(expectedSecretCount);
expect(secrets.secrets.sort((a, b) => a.secretKey.localeCompare(b.secretKey))).toEqual(
folderAndSecretNames
.filter((el) => el.path.startsWith(path))
.sort((a, b) => a.name.localeCompare(b.name))
.map((el) =>
expect.objectContaining({
secretKey: el.name,
secretValue: el.name
})
)
);
});
});

View File

@ -1,344 +0,0 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
import { seedData1 } from "@app/db/seed-data";
describe("Secret expansion", () => {
const projectId = seedData1.projectV3.id;
beforeAll(async () => {
const prodRootFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/",
name: "deep"
});
await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep",
name: "nested"
});
return async () => {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodRootFolder.id,
workspaceId: projectId,
environmentSlug: "prod"
});
};
});
test("Local secret reference", async () => {
const secrets = [
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "HELLO",
value: "world"
},
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "TEST",
// eslint-disable-next-line
value: "hello ${HELLO}"
}
];
for (const secret of secrets) {
// eslint-disable-next-line no-await-in-loop
await createSecretV2(secret);
}
const expandedSecret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "TEST"
});
expect(expandedSecret.secretValue).toBe("hello world");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken
});
expect(listSecrets.secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretKey: "TEST",
secretValue: "hello world"
})
])
);
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
});
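// cross-environment references use dot-delimited paths: ${<envSlug>.<folder>...<SECRET_KEY>}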
test("Cross environment secret reference", async () => {
const secrets = [
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep",
authToken: jwtAuthToken,
key: "DEEP_KEY_1",
value: "testing"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_1",
value: "reference"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_2",
// eslint-disable-next-line
value: "secret ${NESTED_KEY_1}"
},
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "KEY",
// eslint-disable-next-line
value: "hello ${prod.deep.DEEP_KEY_1} ${prod.deep.nested.NESTED_KEY_2}"
}
];
for (const secret of secrets) {
// eslint-disable-next-line no-await-in-loop
await createSecretV2(secret);
}
const expandedSecret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "KEY"
});
expect(expandedSecret.secretValue).toBe("hello testing secret reference");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken
});
expect(listSecrets.secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretKey: "KEY",
secretValue: "hello testing secret reference"
})
])
);
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
});
test("Non replicated secret import secret expansion on local reference and nested reference", async () => {
const secrets = [
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep",
authToken: jwtAuthToken,
key: "DEEP_KEY_1",
value: "testing"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_1",
value: "reference"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_2",
// eslint-disable-next-line
value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
},
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "KEY",
// eslint-disable-next-line
value: "hello world"
}
];
for (const secret of secrets) {
// eslint-disable-next-line no-await-in-loop
await createSecretV2(secret);
}
const secretImportFromProdToDev = await createSecretImport({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
importEnv: "prod",
importPath: "/deep/nested"
});
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretPath: "/deep/nested",
environment: "prod",
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "NESTED_KEY_1",
secretValue: "reference"
}),
expect.objectContaining({
secretKey: "NESTED_KEY_2",
secretValue: "secret reference testing"
})
])
})
])
);
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
await deleteSecretImport({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
authToken: jwtAuthToken,
id: secretImportFromProdToDev.id,
secretPath: "/"
});
});
test(
"Replicated secret import secret expansion on local reference and nested reference",
async () => {
const secrets = [
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep",
authToken: jwtAuthToken,
key: "DEEP_KEY_1",
value: "testing"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_1",
value: "reference"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_2",
// eslint-disable-next-line
value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
},
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "KEY",
// eslint-disable-next-line
value: "hello world"
}
];
for (const secret of secrets) {
// eslint-disable-next-line no-await-in-loop
await createSecretV2(secret);
}
const secretImportFromProdToDev = await createSecretImport({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
importEnv: "prod",
importPath: "/deep/nested",
isReplication: true
});
// wait 5 seconds for replication to finish
await new Promise((resolve) => {
setTimeout(resolve, 5000); // time to breathe for db
});
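// replicated imports are materialized under a reserved folder path
// (/__reserve_replication_<importId>) in the importing environment, asserted below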
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretPath: `/__reserve_replication_${secretImportFromProdToDev.id}`,
environment: seedData1.environment.slug,
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "NESTED_KEY_1",
secretValue: "reference"
}),
expect.objectContaining({
secretKey: "NESTED_KEY_2",
secretValue: "secret reference testing"
})
])
})
])
);
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
await deleteSecretImport({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
authToken: jwtAuthToken,
id: secretImportFromProdToDev.id,
secretPath: "/"
});
},
{ timeout: 10000 }
);
});

View File

@ -1,577 +0,0 @@
import { SecretType } from "@app/db/schemas";
import { seedData1 } from "@app/db/seed-data";
import { AuthMode } from "@app/services/auth/auth-type";
type TRawSecret = {
secretKey: string;
secretValue: string;
secretComment?: string;
version: number;
};
const createSecret = async (dto: { path: string; key: string; value: string; comment: string; type?: SecretType }) => {
const createSecretReqBody = {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
type: dto.type || SecretType.Shared,
secretPath: dto.path,
secretKey: dto.key,
secretValue: dto.value,
secretComment: dto.comment
};
const createSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/raw/${dto.key}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: createSecretReqBody
});
expect(createSecRes.statusCode).toBe(200);
const createdSecretPayload = JSON.parse(createSecRes.payload);
expect(createdSecretPayload).toHaveProperty("secret");
return createdSecretPayload.secret as TRawSecret;
};
const deleteSecret = async (dto: { path: string; key: string }) => {
const deleteSecRes = await testServer.inject({
method: "DELETE",
url: `/api/v3/secrets/raw/${dto.key}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
secretPath: dto.path
}
});
expect(deleteSecRes.statusCode).toBe(200);
const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
expect(updatedSecretPayload).toHaveProperty("secret");
return updatedSecretPayload.secret as TRawSecret;
};
describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }])(
"Secret V2 Architecture - $auth mode",
async ({ auth }) => {
let folderId = "";
let authToken = "";
const secretTestCases = [
{
path: "/",
secret: {
key: "SEC1",
value: "something-secret",
comment: "some comment"
}
},
{
path: "/nested1/nested2/folder",
secret: {
key: "NESTED-SEC1",
value: "something-secret",
comment: "some comment"
}
},
{
path: "/",
secret: {
key: "secret-key-2",
value: `-----BEGIN PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCa6eeFk+cMVqFn
hoVQDYgn2Ptp5Azysr2UPq6P73pCL9BzUtOXKZROqDyGehzzfg3wE2KdYU1Jk5Uq
fP0ZOWDIlM2SaVCSI3FW32o5+ZiggjpqcVdLFc/PS0S/ZdSmpPd8h11iO2brtIAI
ugTW8fcKlGSNUwx9aFmE7A6JnTRliTxB1l6QaC+YAwTK39VgeVH2gDSWC407aS15
QobAkaBKKmFkzB5D7i2ZJwt+uXJV/rbLmyDmtnw0lubciGn7NX9wbYef180fisqT
aPNAz0nPKk0fFH2Wd5MZixNGbrrpDA+FCYvI5doThZyT2hpj08qWP07oXXCAqw46
IEupNSILAgMBAAECggEBAIJb5KzeaiZS3B3O8G4OBQ5rJB3WfyLYUHnoSWLsBbie
nc392/ovThLmtZAAQE6SO85Tsb93+t64Z2TKqv1H8G658UeMgfWIB78v4CcLJ2mi
TN/3opqXrzjkQOTDHzBgT7al/mpETHZ6fOdbCemK0fVALGFUioUZg4M8VXtuI4Jw
q28jAyoRKrCrzda4BeQ553NZ4G5RvwhX3O2I8B8upTbt5hLcisBKy8MPLYY5LUFj
YKAP+raf6QLliP6KYHuVxUlgzxjLTxVG41etcyqqZF+foyiKBO3PU3n8oh++tgQP
ExOxiR0JSkBG5b+oOBD0zxcvo3/SjBHn0dJOZCSU2SkCgYEAyCe676XnNyBZMRD7
6trsaoiCWBpA6M8H44+x3w4cQFtqV38RyLy60D+iMKjIaLqeBbnay61VMzo24Bz3
EuF2n4+9k/MetLJ0NCw8HmN5k0WSMD2BFsJWG8glVbzaqzehP4tIclwDTYc1jQVt
IoV2/iL7HGT+x2daUwbU5kN5hK0CgYEAxiLB+fmjxJW7VY4SHDLqPdpIW0q/kv4K
d/yZBrCX799vjmFb9vLh7PkQUfJhMJ/ttJOd7EtT3xh4mfkBeLfHwVU0d/ahbmSH
UJu/E9ZGxAW3PP0kxHZtPrLKQwBnfq8AxBauIhR3rPSorQTIOKtwz1jMlHFSUpuL
3KeK2YfDYJcCgYEAkQnJOlNcAuRb/WQzSHIvktssqK8NjiZHryy3Vc0hx7j2jES2
HGI2dSVHYD9OSiXA0KFm3OTTsnViwm/60iGzFdjRJV6tR39xGUVcoyCuPnvRfUd0
PYvBXgxgkYpyYlPDcwp5CvWGJy3tLi1acgOIwIuUr3S38sL//t4adGk8q1kCgYB8
Jbs1Tl53BvrimKpwUNbE+sjrquJu0A7vL68SqgQJoQ7dP9PH4Ff/i+/V6PFM7mib
BQOm02wyFbs7fvKVGVJoqWK+6CIucX732x7W5yRgHtS5ukQXdbzt1Ek3wkEW98Cb
HTruz7RNAt/NyXlLSODeit1lBbx3Vk9EaxZtRsv88QKBgGn7JwXgez9NOyobsNIo
QVO80rpUeenSjuFi+R0VmbLKe/wgAQbYJ0xTAsQ0btqViMzB27D6mJyC+KUIwWNX
MN8a+m46v4kqvZkKL2c4gmDibyURNe/vCtCHFuanJS/1mo2tr4XDyEeiuK52eTd9
omQDpP86RX/hIIQ+JyLSaWYa
-----END PRIVATE KEY-----`,
comment:
"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation"
}
},
{
path: "/nested1/nested2/folder",
secret: {
key: "secret-key-3",
value: `-----BEGIN PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCa6eeFk+cMVqFn
hoVQDYgn2Ptp5Azysr2UPq6P73pCL9BzUtOXKZROqDyGehzzfg3wE2KdYU1Jk5Uq
fP0ZOWDIlM2SaVCSI3FW32o5+ZiggjpqcVdLFc/PS0S/ZdSmpPd8h11iO2brtIAI
ugTW8fcKlGSNUwx9aFmE7A6JnTRliTxB1l6QaC+YAwTK39VgeVH2gDSWC407aS15
QobAkaBKKmFkzB5D7i2ZJwt+uXJV/rbLmyDmtnw0lubciGn7NX9wbYef180fisqT
aPNAz0nPKk0fFH2Wd5MZixNGbrrpDA+FCYvI5doThZyT2hpj08qWP07oXXCAqw46
IEupNSILAgMBAAECggEBAIJb5KzeaiZS3B3O8G4OBQ5rJB3WfyLYUHnoSWLsBbie
nc392/ovThLmtZAAQE6SO85Tsb93+t64Z2TKqv1H8G658UeMgfWIB78v4CcLJ2mi
TN/3opqXrzjkQOTDHzBgT7al/mpETHZ6fOdbCemK0fVALGFUioUZg4M8VXtuI4Jw
q28jAyoRKrCrzda4BeQ553NZ4G5RvwhX3O2I8B8upTbt5hLcisBKy8MPLYY5LUFj
YKAP+raf6QLliP6KYHuVxUlgzxjLTxVG41etcyqqZF+foyiKBO3PU3n8oh++tgQP
ExOxiR0JSkBG5b+oOBD0zxcvo3/SjBHn0dJOZCSU2SkCgYEAyCe676XnNyBZMRD7
6trsaoiCWBpA6M8H44+x3w4cQFtqV38RyLy60D+iMKjIaLqeBbnay61VMzo24Bz3
EuF2n4+9k/MetLJ0NCw8HmN5k0WSMD2BFsJWG8glVbzaqzehP4tIclwDTYc1jQVt
IoV2/iL7HGT+x2daUwbU5kN5hK0CgYEAxiLB+fmjxJW7VY4SHDLqPdpIW0q/kv4K
d/yZBrCX799vjmFb9vLh7PkQUfJhMJ/ttJOd7EtT3xh4mfkBeLfHwVU0d/ahbmSH
UJu/E9ZGxAW3PP0kxHZtPrLKQwBnfq8AxBauIhR3rPSorQTIOKtwz1jMlHFSUpuL
3KeK2YfDYJcCgYEAkQnJOlNcAuRb/WQzSHIvktssqK8NjiZHryy3Vc0hx7j2jES2
HGI2dSVHYD9OSiXA0KFm3OTTsnViwm/60iGzFdjRJV6tR39xGUVcoyCuPnvRfUd0
PYvBXgxgkYpyYlPDcwp5CvWGJy3tLi1acgOIwIuUr3S38sL//t4adGk8q1kCgYB8
Jbs1Tl53BvrimKpwUNbE+sjrquJu0A7vL68SqgQJoQ7dP9PH4Ff/i+/V6PFM7mib
BQOm02wyFbs7fvKVGVJoqWK+6CIucX732x7W5yRgHtS5ukQXdbzt1Ek3wkEW98Cb
HTruz7RNAt/NyXlLSODeit1lBbx3Vk9EaxZtRsv88QKBgGn7JwXgez9NOyobsNIo
QVO80rpUeenSjuFi+R0VmbLKe/wgAQbYJ0xTAsQ0btqViMzB27D6mJyC+KUIwWNX
MN8a+m46v4kqvZkKL2c4gmDibyURNe/vCtCHFuanJS/1mo2tr4XDyEeiuK52eTd9
omQDpP86RX/hIIQ+JyLSaWYa
-----END PRIVATE KEY-----`,
comment:
"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation"
}
},
{
path: "/nested1/nested2/folder",
secret: {
key: "secret-key-3",
value:
"TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGRvIGVpdXNtb2QgdGVtcG9yIGluY2lkaWR1bnQgdXQgbGFib3JlIGV0IGRvbG9yZSBtYWduYSBhbGlxdWEuIFV0IGVuaW0gYWQgbWluaW0gdmVuaWFtLCBxdWlzIG5vc3RydWQgZXhlcmNpdGF0aW9uCg==",
comment: ""
}
}
];
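// cases cover short keys, multi-line PEM private keys, and a long base64 value,
// at both the root path and a nested folder path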
beforeAll(async () => {
if (auth === AuthMode.JWT) {
authToken = jwtAuthToken;
} else if (auth === AuthMode.IDENTITY_ACCESS_TOKEN) {
const identityLogin = await testServer.inject({
method: "POST",
url: "/api/v1/auth/universal-auth/login",
body: {
clientSecret: seedData1.machineIdentity.clientCredentials.secret,
clientId: seedData1.machineIdentity.clientCredentials.id
}
});
expect(identityLogin.statusCode).toBe(200);
authToken = identityLogin.json().accessToken;
}
// create a deep folder
const folderCreate = await testServer.inject({
method: "POST",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
name: "folder",
path: "/nested1/nested2"
}
});
expect(folderCreate.statusCode).toBe(200);
folderId = folderCreate.json().folder.id;
});
afterAll(async () => {
const deleteFolder = await testServer.inject({
method: "DELETE",
url: `/api/v1/folders/${folderId}`,
headers: {
authorization: `Bearer ${authToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
path: "/nested1/nested2"
}
});
expect(deleteFolder.statusCode).toBe(200);
});
const getSecrets = async (environment: string, secretPath = "/") => {
const res = await testServer.inject({
method: "GET",
url: `/api/v3/secrets/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
query: {
secretPath,
environment,
workspaceId: seedData1.projectV3.id
}
});
const secrets: TRawSecret[] = JSON.parse(res.payload).secrets || [];
return secrets;
};
test.each(secretTestCases)("Create secret in path $path", async ({ secret, path }) => {
const createdSecret = await createSecret({ path, ...secret });
expect(createdSecret.secretKey).toEqual(secret.key);
expect(createdSecret.secretValue).toEqual(secret.value);
expect(createdSecret.secretComment || "").toEqual(secret.comment);
expect(createdSecret.version).toEqual(1);
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretKey: secret.key,
secretValue: secret.value,
type: SecretType.Shared
})
])
);
await deleteSecret({ path, key: secret.key });
});
test.each(secretTestCases)("Get secret by name in path $path", async ({ secret, path }) => {
await createSecret({ path, ...secret });
const getSecByNameRes = await testServer.inject({
method: "GET",
url: `/api/v3/secrets/raw/${secret.key}`,
headers: {
authorization: `Bearer ${authToken}`
},
query: {
secretPath: path,
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug
}
});
expect(getSecByNameRes.statusCode).toBe(200);
const getSecretByNamePayload = JSON.parse(getSecByNameRes.payload);
expect(getSecretByNamePayload).toHaveProperty("secret");
const decryptedSecret = getSecretByNamePayload.secret as TRawSecret;
expect(decryptedSecret.secretKey).toEqual(secret.key);
expect(decryptedSecret.secretValue).toEqual(secret.value);
expect(decryptedSecret.secretComment || "").toEqual(secret.comment);
await deleteSecret({ path, key: secret.key });
});
if (auth === AuthMode.JWT) {
test.each(secretTestCases)(
"Creating personal secret without shared throw error in path $path",
async ({ secret }) => {
const createSecretReqBody = {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
type: SecretType.Personal,
secretKey: secret.key,
secretValue: secret.value,
secretComment: secret.comment
};
const createSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/raw/SEC2`,
headers: {
authorization: `Bearer ${authToken}`
},
body: createSecretReqBody
});
const payload = JSON.parse(createSecRes.payload);
expect(createSecRes.statusCode).toBe(400);
expect(payload.error).toEqual("BadRequest");
}
);
test.each(secretTestCases)("Creating personal secret in path $path", async ({ secret, path }) => {
await createSecret({ path, ...secret });
const createSecretReqBody = {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
type: SecretType.Personal,
secretPath: path,
secretKey: secret.key,
secretValue: "personal-value",
secretComment: secret.comment
};
const createSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/raw/${secret.key}`,
headers: {
authorization: `Bearer ${authToken}`
},
body: createSecretReqBody
});
expect(createSecRes.statusCode).toBe(200);
// list secrets should contain personal one and shared one
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretKey: secret.key,
secretValue: secret.value,
type: SecretType.Shared
}),
expect.objectContaining({
secretKey: secret.key,
secretValue: "personal-value",
type: SecretType.Personal
})
])
);
await deleteSecret({ path, key: secret.key });
});
test.each(secretTestCases)(
"Deleting personal one should not delete shared secret in path $path",
async ({ secret, path }) => {
await createSecret({ path, ...secret }); // shared one
await createSecret({ path, ...secret, type: SecretType.Personal });
// deleting the personal secret should leave the shared secret intact
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretKey: secret.key,
type: SecretType.Shared
}),
expect.not.objectContaining({
secretKey: secret.key,
type: SecretType.Personal
})
])
);
await deleteSecret({ path, key: secret.key });
}
);
}
test.each(secretTestCases)("Update secret in path $path", async ({ path, secret }) => {
await createSecret({ path, ...secret });
const updateSecretReqBody = {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
type: SecretType.Shared,
secretPath: path,
secretKey: secret.key,
secretValue: "new-value",
secretComment: secret.comment
};
const updateSecRes = await testServer.inject({
method: "PATCH",
url: `/api/v3/secrets/raw/${secret.key}`,
headers: {
authorization: `Bearer ${authToken}`
},
body: updateSecretReqBody
});
expect(updateSecRes.statusCode).toBe(200);
const updatedSecretPayload = JSON.parse(updateSecRes.payload);
expect(updatedSecretPayload).toHaveProperty("secret");
const decryptedSecret = updatedSecretPayload.secret;
expect(decryptedSecret.secretKey).toEqual(secret.key);
expect(decryptedSecret.secretValue).toEqual("new-value");
expect(decryptedSecret.secretComment || "").toEqual(secret.comment);
// list secret should have updated value
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretKey: secret.key,
secretValue: "new-value",
type: SecretType.Shared
})
])
);
await deleteSecret({ path, key: secret.key });
});
test.each(secretTestCases)("Delete secret in path $path", async ({ secret, path }) => {
await createSecret({ path, ...secret });
const deletedSecret = await deleteSecret({ path, key: secret.key });
expect(deletedSecret.secretKey).toEqual(secret.key);
// shared secret deletion should delete personal ones also
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.not.arrayContaining([
expect.objectContaining({
secretKey: secret.key,
type: SecretType.Shared
}),
expect.objectContaining({
secretKey: secret.key,
type: SecretType.Personal
})
])
);
});
test.each(secretTestCases)("Bulk create secrets in path $path", async ({ secret, path }) => {
const createSharedSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
secretPath: path,
secrets: Array.from(Array(5)).map((_e, i) => ({
secretKey: `BULK-${secret.key}-${i + 1}`,
secretValue: secret.value,
secretComment: secret.comment
}))
}
});
expect(createSharedSecRes.statusCode).toBe(200);
const createSharedSecPayload = JSON.parse(createSharedSecRes.payload);
expect(createSharedSecPayload).toHaveProperty("secrets");
// bulk ones should exist
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining(
Array.from(Array(5)).map((_e, i) =>
expect.objectContaining({
secretKey: `BULK-${secret.key}-${i + 1}`,
secretValue: secret.value,
type: SecretType.Shared
})
)
)
);
await Promise.all(
Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
);
});
test.each(secretTestCases)("Bulk create fail on existing secret in path $path", async ({ secret, path }) => {
await createSecret({ ...secret, key: `BULK-${secret.key}-1`, path });
const createSharedSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
secretPath: path,
secrets: Array.from(Array(5)).map((_e, i) => ({
secretKey: `BULK-${secret.key}-${i + 1}`,
secretValue: secret.value,
secretComment: secret.comment
}))
}
});
expect(createSharedSecRes.statusCode).toBe(400);
await deleteSecret({ path, key: `BULK-${secret.key}-1` });
});
test.each(secretTestCases)("Bulk update secrets in path $path", async ({ secret, path }) => {
await Promise.all(
Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path }))
);
const updateSharedSecRes = await testServer.inject({
method: "PATCH",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
secretPath: path,
secrets: Array.from(Array(5)).map((_e, i) => ({
secretKey: `BULK-${secret.key}-${i + 1}`,
secretValue: "update-value",
secretComment: secret.comment
}))
}
});
expect(updateSharedSecRes.statusCode).toBe(200);
const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
expect(updateSharedSecPayload).toHaveProperty("secrets");
// bulk ones should exist
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining(
Array.from(Array(5)).map((_e, i) =>
expect.objectContaining({
secretKey: `BULK-${secret.key}-${i + 1}`,
secretValue: "update-value",
type: SecretType.Shared
})
)
)
);
await Promise.all(
Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
);
});
test.each(secretTestCases)("Bulk delete secrets in path $path", async ({ secret, path }) => {
await Promise.all(
Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path }))
);
const deletedSharedSecRes = await testServer.inject({
method: "DELETE",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
secretPath: path,
secrets: Array.from(Array(5)).map((_e, i) => ({
secretKey: `BULK-${secret.key}-${i + 1}`
}))
}
});
expect(deletedSharedSecRes.statusCode).toBe(200);
const deletedSecretPayload = JSON.parse(deletedSharedSecRes.payload);
expect(deletedSecretPayload).toHaveProperty("secrets");
// bulk ones should no longer exist
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.not.arrayContaining(
Array.from(Array(5)).map((_e, i) =>
expect.objectContaining({
secretKey: `BULK-${secret.value}-${i + 1}`,
type: SecretType.Shared
})
)
)
);
});
}
);

File diff suppressed because it is too large.

View File

@ -1,73 +0,0 @@
type TFolder = {
id: string;
name: string;
};
export const createFolder = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
name: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
name: dto.name,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
return res.json().folder as TFolder;
};
export const deleteFolder = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
id: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/folders/${dto.id}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
return res.json().folder as TFolder;
};
export const listFolders = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "GET",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
return res.json().folders as TFolder[];
};

View File

@ -1,93 +0,0 @@
type TSecretImport = {
id: string;
importEnv: {
name: string;
slug: string;
id: string;
};
importPath: string;
};
export const createSecretImport = async (dto: {
workspaceId: string;
environmentSlug: string;
isReplication?: boolean;
secretPath: string;
importPath: string;
importEnv: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
isReplication: dto.isReplication,
path: dto.secretPath,
import: {
environment: dto.importEnv,
path: dto.importPath
}
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport as TSecretImport;
};
export const deleteSecretImport = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
authToken: string;
id: string;
}) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/secret-imports/${dto.id}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport as TSecretImport;
};
export const listSecretImport = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "GET",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
query: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImports");
return payload.secretImports as TSecretImport[];
};

View File

@ -1,130 +0,0 @@
import { SecretType } from "@app/db/schemas";
type TRawSecret = {
secretKey: string;
secretValue: string;
secretComment?: string;
version: number;
};
export const createSecretV2 = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
key: string;
value: string;
comment?: string;
authToken: string;
type?: SecretType;
}) => {
const createSecretReqBody = {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
type: dto.type || SecretType.Shared,
secretPath: dto.secretPath,
secretKey: dto.key,
secretValue: dto.value,
secretComment: dto.comment
};
const createSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/raw/${dto.key}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: createSecretReqBody
});
expect(createSecRes.statusCode).toBe(200);
const createdSecretPayload = JSON.parse(createSecRes.payload);
expect(createdSecretPayload).toHaveProperty("secret");
return createdSecretPayload.secret as TRawSecret;
};
export const deleteSecretV2 = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
key: string;
authToken: string;
}) => {
const deleteSecRes = await testServer.inject({
method: "DELETE",
url: `/api/v3/secrets/raw/${dto.key}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
secretPath: dto.secretPath
}
});
expect(deleteSecRes.statusCode).toBe(200);
const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
expect(updatedSecretPayload).toHaveProperty("secret");
return updatedSecretPayload.secret as TRawSecret;
};
export const getSecretByNameV2 = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
key: string;
authToken: string;
}) => {
const response = await testServer.inject({
method: "GET",
url: `/api/v3/secrets/raw/${dto.key}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
query: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
secretPath: dto.secretPath,
expandSecretReferences: "true",
include_imports: "true"
}
});
expect(response.statusCode).toBe(200);
const payload = JSON.parse(response.payload);
expect(payload).toHaveProperty("secret");
return payload.secret as TRawSecret;
};
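// Lists raw secrets at a path. expandSecretReferences resolves ${...} references server-side,
// include_imports returns secrets pulled in via secret imports, and recursive walks subfolders.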
export const getSecretsV2 = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
authToken: string;
recursive?: boolean;
}) => {
const getSecretsResponse = await testServer.inject({
method: "GET",
url: `/api/v3/secrets/raw`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
query: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
secretPath: dto.secretPath,
expandSecretReferences: "true",
include_imports: "true",
recursive: String(dto.recursive || false)
}
});
expect(getSecretsResponse.statusCode).toBe(200);
const getSecretsPayload = JSON.parse(getSecretsResponse.payload);
expect(getSecretsPayload).toHaveProperty("secrets");
expect(getSecretsPayload).toHaveProperty("imports");
return getSecretsPayload as {
secrets: TRawSecret[];
imports: {
secretPath: string;
environment: string;
folderId: string;
secrets: TRawSecret[];
}[];
};
};

View File

@ -1,111 +0,0 @@
// eslint-disable-next-line
import "ts-node/register";
import dotenv from "dotenv";
import jwt from "jsonwebtoken";
import path from "path";
import { seedData1 } from "@app/db/seed-data";
import { initEnvConfig } from "@app/lib/config/env";
import { initLogger } from "@app/lib/logger";
import { main } from "@app/server/app";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { mockSmtpServer } from "./mocks/smtp";
import { initDbConnection } from "@app/db";
import { queueServiceFactory } from "@app/queue";
import { keyStoreFactory } from "@app/keystore/keystore";
import { Redis } from "ioredis";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
name: "knex-env",
transformMode: "ssr",
async setup() {
const logger = await initLogger();
const cfg = initEnvConfig(logger);
const db = initDbConnection({
dbConnectionUri: cfg.DB_CONNECTION_URI,
dbRootCert: cfg.DB_ROOT_CERT
});
const redis = new Redis(cfg.REDIS_URL);
await redis.flushdb("SYNC");
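// start each test environment from a clean Redis and a freshly migrated, seeded database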
try {
await db.migrate.rollback(
{
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
tableName: "infisical_migrations"
},
true
);
await db.migrate.latest({
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
tableName: "infisical_migrations"
});
await db.seed.run({
directory: path.join(__dirname, "../src/db/seeds"),
extension: "ts"
});
const smtp = mockSmtpServer();
const queue = queueServiceFactory(cfg.REDIS_URL, { dbConnectionUrl: cfg.DB_CONNECTION_URI });
const keyStore = keyStoreFactory(cfg.REDIS_URL);
const hsmModule = initializeHsmModule();
hsmModule.initialize();
const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule(), redis });
// @ts-expect-error type
globalThis.testServer = server;
// @ts-expect-error type
globalThis.jwtAuthToken = jwt.sign(
{
authTokenType: AuthTokenType.ACCESS_TOKEN,
userId: seedData1.id,
tokenVersionId: seedData1.token.id,
authMethod: AuthMethod.EMAIL,
organizationId: seedData1.organization.id,
accessVersion: 1
},
cfg.AUTH_SECRET,
{ expiresIn: cfg.JWT_AUTH_LIFETIME }
);
} catch (error) {
// eslint-disable-next-line
console.log("[TEST] Error setting up environment", error);
await db.destroy();
throw error;
}
// custom setup
return {
async teardown() {
// @ts-expect-error type
await globalThis.testServer.close();
// @ts-expect-error type
delete globalThis.testServer;
// @ts-expect-error type
delete globalThis.jwtAuthToken;
// called after all tests with this env have been run
await db.migrate.rollback(
{
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
tableName: "infisical_migrations"
},
true
);
await redis.flushdb("ASYNC");
redis.disconnect();
await db.destroy();
}
};
}
};

46
backend/environment.d.ts vendored Normal file
View File

@ -0,0 +1,46 @@
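// Ambient type declarations for the environment variables the backend reads from process.env.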
export {};
declare global {
namespace NodeJS {
interface ProcessEnv {
PORT: string;
ENCRYPTION_KEY: string;
SALT_ROUNDS: string;
JWT_AUTH_LIFETIME: string;
JWT_AUTH_SECRET: string;
JWT_REFRESH_LIFETIME: string;
JWT_REFRESH_SECRET: string;
JWT_SERVICE_SECRET: string;
JWT_SIGNUP_LIFETIME: string;
JWT_SIGNUP_SECRET: string;
MONGO_URL: string;
NODE_ENV: "development" | "staging" | "testing" | "production";
VERBOSE_ERROR_OUTPUT: string;
LOKI_HOST: string;
CLIENT_ID_HEROKU: string;
CLIENT_ID_VERCEL: string;
CLIENT_ID_NETLIFY: string;
CLIENT_ID_GITHUB: string;
CLIENT_ID_GITLAB: string;
CLIENT_SECRET_HEROKU: string;
CLIENT_SECRET_VERCEL: string;
CLIENT_SECRET_NETLIFY: string;
CLIENT_SECRET_GITHUB: string;
CLIENT_SECRET_GITLAB: string;
CLIENT_SLUG_VERCEL: string;
POSTHOG_HOST: string;
POSTHOG_PROJECT_API_KEY: string;
SENTRY_DSN: string;
SITE_URL: string;
SMTP_HOST: string;
SMTP_SECURE: string;
SMTP_PORT: string;
SMTP_USERNAME: string;
SMTP_PASSWORD: string;
SMTP_FROM_ADDRESS: string;
SMTP_FROM_NAME: string;
TELEMETRY_ENABLED: string;
LICENSE_KEY: string;
}
}
}

24
backend/healthcheck.js Normal file
View File

@ -0,0 +1,24 @@
const http = require('http');
const PORT = process.env.PORT || 4000;
const options = {
host: 'localhost',
port: PORT,
timeout: 2000,
path: '/healthcheck'
};
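// probe the local /healthcheck route and map the HTTP status to a process exit code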
const healthCheck = http.request(options, (res) => {
console.log(`HEALTHCHECK STATUS: ${res.statusCode}`);
if (res.statusCode === 200) {
process.exit(0);
} else {
process.exit(1);
}
});
healthCheck.on('error', function (err) {
console.error(`HEALTH CHECK ERROR: ${err}`);
process.exit(1);
});
healthCheck.end();

BIN
backend/img/dashboard.png Normal file

Binary file not shown (added, 493 KiB).

9
backend/jest.config.ts Normal file
View File

@ -0,0 +1,9 @@
export default {
preset: "ts-jest",
testEnvironment: "node",
collectCoverageFrom: ["src/*.{js,ts}", "!**/node_modules/**"],
modulePaths: ["<rootDir>/src"],
testMatch: ["<rootDir>/tests/**/*.test.ts"],
setupFiles: ["<rootDir>/test-resources/env-vars.js"],
setupFilesAfterEnv: ["<rootDir>/tests/setupTests.ts"],
};

View File

@ -1,6 +1,6 @@
{
"watch": ["src"],
"ext": ".ts,.js",
"ignore": [],
"exec": "tsx ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine"
}
"watch": ["src"],
"ext": ".ts,.js",
"ignore": [],
"exec": "ts-node ./src/index.ts"
}

42972
backend/package-lock.json generated

File diff suppressed because it is too large.

View File

@ -1,225 +1,149 @@
{
"name": "backend",
"version": "1.0.0",
"description": "",
"main": "./dist/main.mjs",
"bin": "dist/main.js",
"pkg": {
"scripts": [
"dist/**/*.js",
"../frontend/node_modules/next/**/*.js",
"../frontend/.next/*/**/*.js",
"../frontend/node_modules/next/dist/server/**/*.js",
"../frontend/node_modules/@fortawesome/fontawesome-svg-core/**/*.js"
],
"assets": [
"dist/**",
"!dist/**/*.js",
"node_modules/**",
"../frontend/node_modules/**",
"../frontend/.next/**",
"!../frontend/node_modules/next/dist/server/**/*.js",
"../frontend/node_modules/@fortawesome/fontawesome-svg-core/**/*",
"../frontend/public/**"
],
"outputPath": "binary"
},
"scripts": {
"binary:build": "npm run binary:clean && npm run build:frontend && npm run build && npm run binary:babel-frontend && npm run binary:babel-backend && npm run binary:rename-imports",
"binary:package": "pkg --no-bytecode --public-packages \"*\" --public --target host .",
"binary:babel-backend": " babel ./dist -d ./dist",
"binary:babel-frontend": "babel --copy-files ../frontend/.next/server -d ../frontend/.next/server",
"binary:clean": "rm -rf ./dist && rm -rf ./binary",
"binary:rename-imports": "ts-node ./scripts/rename-mjs.ts",
"test": "echo \"Error: no test specified\" && exit 1",
"dev": "tsx watch --clear-screen=false ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine",
"dev:docker": "nodemon",
"build": "tsup --sourcemap",
"build:frontend": "npm run build --prefix ../frontend",
"start": "node --enable-source-maps dist/main.mjs",
"type:check": "tsc --noEmit",
"lint:fix": "eslint --fix --ext js,ts ./src",
"lint": "eslint 'src/**/*.ts'",
"test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1",
"test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
"generate:component": "tsx ./scripts/create-backend-file.ts",
"generate:schema": "tsx ./scripts/generate-schema-types.ts && eslint --fix --ext ts ./src/db/schemas",
"auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest",
"auditlog-migration:up": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:up",
"auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
"auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
"auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
"auditlog-migration:unlock": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:unlock",
"auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
"migration:new": "tsx ./scripts/create-migration.ts",
"migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
"migration:down": "npm run auditlog-migration:down && knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
"migration:list": "npm run auditlog-migration:list && knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
"migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./src/db/knexfile.ts migrate:unlock",
"migrate:org": "tsx ./scripts/migrate-organization.ts",
"seed:new": "tsx ./scripts/create-seed-file.ts",
"seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
},
"keywords": [],
"author": "",
"license": "ISC",
"devDependencies": {
"@babel/cli": "^7.18.10",
"@babel/core": "^7.18.10",
"@babel/plugin-syntax-import-attributes": "^7.24.7",
"@babel/preset-env": "^7.18.10",
"@babel/preset-react": "^7.24.7",
"@types/bcrypt": "^5.0.2",
"@types/jmespath": "^0.15.2",
"@types/jsonwebtoken": "^9.0.5",
"@types/jsrp": "^0.2.6",
"@types/libsodium-wrappers": "^0.7.13",
"@types/lodash.isequal": "^4.5.8",
"@types/node": "^20.9.5",
"@types/nodemailer": "^6.4.14",
"@types/passport-github": "^1.1.12",
"@types/passport-google-oauth20": "^2.0.14",
"@types/pg": "^8.10.9",
"@types/picomatch": "^2.3.3",
"@types/pkcs11js": "^1.0.4",
"@types/prompt-sync": "^4.2.3",
"@types/resolve": "^1.20.6",
"@types/safe-regex": "^1.1.6",
"@types/sjcl": "^1.0.34",
"@types/uuid": "^9.0.7",
"@typescript-eslint/eslint-plugin": "^6.20.0",
"@typescript-eslint/parser": "^6.20.0",
"@yao-pkg/pkg": "^5.12.0",
"babel-plugin-transform-import-meta": "^2.2.1",
"eslint": "^8.56.0",
"eslint-config-airbnb-base": "^15.0.0",
"eslint-config-airbnb-typescript": "^17.1.0",
"eslint-config-prettier": "^9.1.0",
"eslint-import-resolver-typescript": "^3.6.1",
"eslint-plugin-import": "^2.29.1",
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-simple-import-sort": "^10.0.0",
"nodemon": "^3.0.2",
"pino-pretty": "^10.2.3",
"prompt-sync": "^4.2.0",
"rimraf": "^5.0.5",
"ts-node": "^10.9.2",
"tsc-alias": "^1.8.8",
"tsconfig-paths": "^4.2.0",
"tsup": "^8.0.1",
"tsx": "^4.4.0",
"typescript": "^5.3.2",
"vitest": "^1.2.2"
},
"dependencies": {
"@aws-sdk/client-elasticache": "^3.637.0",
"@aws-sdk/client-iam": "^3.525.0",
"@aws-sdk/client-kms": "^3.609.0",
"@aws-sdk/client-secrets-manager": "^3.504.0",
"@aws-sdk/client-sts": "^3.600.0",
"@aws-sdk/client-secrets-manager": "^3.319.0",
"@casl/ability": "^6.5.0",
"@elastic/elasticsearch": "^8.15.0",
"@fastify/cookie": "^9.3.1",
"@fastify/cors": "^8.5.0",
"@fastify/etag": "^5.1.0",
"@fastify/formbody": "^7.4.0",
"@fastify/helmet": "^11.1.1",
"@fastify/multipart": "8.3.0",
"@fastify/passport": "^2.4.0",
"@fastify/rate-limit": "^9.0.0",
"@fastify/request-context": "^5.1.0",
"@fastify/session": "^10.7.0",
"@fastify/static": "^7.0.4",
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@google-cloud/kms": "^4.5.0",
"@casl/mongoose": "^7.2.1",
"@godaddy/terminus": "^4.12.0",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/auth-app": "^7.1.1",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@octopusdeploy/api-client": "^3.4.1",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/auto-instrumentations-node": "^0.53.0",
"@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
"@opentelemetry/exporter-prometheus": "^0.55.0",
"@opentelemetry/instrumentation": "^0.55.0",
"@opentelemetry/resources": "^1.28.0",
"@opentelemetry/sdk-metrics": "^1.28.0",
"@opentelemetry/semantic-conventions": "^1.27.0",
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/x509": "^1.12.1",
"@octokit/rest": "^19.0.5",
"@sentry/node": "^7.77.0",
"@sentry/tracing": "^7.48.0",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "1.1.0",
"@slack/oauth": "^3.0.1",
"@slack/web-api": "^7.3.4",
"@types/crypto-js": "^4.1.1",
"@types/libsodium-wrappers": "^0.7.10",
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.31.2",
"aws-sdk": "^2.1553.0",
"axios": "^1.6.7",
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"bullmq": "^5.4.2",
"cassandra-driver": "^4.7.2",
"connect-redis": "^7.1.1",
"cron": "^3.1.7",
"dotenv": "^16.4.1",
"fastify": "^4.28.1",
"fastify-plugin": "^4.5.1",
"google-auth-library": "^9.9.0",
"googleapis": "^137.1.0",
"handlebars": "^4.7.8",
"hdb": "^0.19.10",
"argon2": "^0.30.3",
"aws-sdk": "^2.1364.0",
"axios": "^1.6.0",
"axios-retry": "^3.4.0",
"bcrypt": "^5.1.0",
"bigint-conversion": "^2.4.0",
"cookie-parser": "^1.4.6",
"cors": "^2.8.5",
"crypto-js": "^4.2.0",
"dotenv": "^16.0.1",
"express": "^4.18.1",
"express-async-errors": "^3.1.1",
"express-rate-limit": "^6.7.0",
"express-validator": "^6.14.2",
"handlebars": "^4.7.7",
"helmet": "^5.1.1",
"infisical-node": "^1.2.1",
"ioredis": "^5.3.2",
"jmespath": "^0.16.0",
"jsonwebtoken": "^9.0.2",
"js-yaml": "^4.1.0",
"jsonwebtoken": "^9.0.0",
"jsrp": "^0.2.4",
"jwks-rsa": "^3.1.0",
"knex": "^3.0.1",
"ldapjs": "^3.0.7",
"ldif": "0.5.1",
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"mongodb": "^6.8.1",
"ms": "^2.1.3",
"mysql2": "^3.9.8",
"nanoid": "^3.3.4",
"nodemailer": "^6.9.9",
"odbc": "^2.4.9",
"openid-client": "^5.6.5",
"ora": "^7.0.1",
"oracledb": "^6.4.0",
"otplib": "^12.0.1",
"libsodium-wrappers": "^0.7.10",
"lodash": "^4.17.21",
"mongoose": "^7.4.1",
"mysql2": "^3.6.2",
"nanoid": "^3.3.6",
"node-cache": "^5.1.2",
"nodemailer": "^6.8.0",
"ora": "^5.4.1",
"passport": "^0.6.0",
"passport-github": "^1.1.0",
"passport-gitlab2": "^5.0.0",
"passport-google-oauth20": "^2.0.0",
"passport-ldapauth": "^3.0.1",
"pg": "^8.11.3",
"pg-boss": "^10.1.5",
"pg-query-stream": "^4.5.3",
"picomatch": "^3.0.1",
"pino": "^8.16.2",
"pkcs11js": "^2.1.6",
"pkijs": "^3.2.4",
"posthog-node": "^3.6.2",
"probot": "^13.3.8",
"safe-regex": "^2.1.1",
"scim-patch": "^0.8.3",
"scim2-parse-filter": "^0.2.10",
"sjcl": "^1.0.8",
"smee-client": "^2.0.0",
"snowflake-sdk": "^1.14.0",
"tedious": "^18.2.1",
"pino": "^8.16.1",
"pino-http": "^8.5.1",
"posthog-node": "^2.6.0",
"probot": "^12.3.3",
"query-string": "^7.1.3",
"rate-limit-mongo": "^2.3.2",
"rimraf": "^3.0.2",
"swagger-ui-express": "^4.6.2",
"tweetnacl": "^1.0.3",
"tweetnacl-util": "^0.15.1",
"uuid": "^9.0.1",
"zod": "^3.22.4",
"zod-to-json-schema": "^3.22.4"
"typescript": "^4.9.3",
"utility-types": "^3.10.0",
"zod": "^3.22.3"
},
"overrides": {
"rate-limit-mongo": {
"mongodb": "5.8.0"
}
},
"name": "infisical-api",
"version": "1.0.0",
"main": "src/index.js",
"scripts": {
"start": "node build/index.js",
"dev": "nodemon index.js",
"swagger-autogen": "node ./swagger/index.ts",
"build": "rimraf ./build && tsc && cp -R ./src/templates ./build && cp -R ./src/data ./build",
"lint": "eslint . --ext .ts",
"lint-and-fix": "eslint . --ext .ts --fix",
"lint-staged": "lint-staged",
"pretest": "docker compose -f test-resources/docker-compose.test.yml up -d",
"test": "cross-env NODE_ENV=test jest --verbose --testTimeout=10000 --detectOpenHandles; npm run posttest",
"test:ci": "npm test -- --watchAll=false --ci --reporters=default --reporters=jest-junit --reporters=github-actions --coverage --testLocationInResults --json --outputFile=coverage/report.json",
"posttest": "docker compose -f test-resources/docker-compose.test.yml down"
},
"repository": {
"type": "git",
"url": "git+https://github.com/Infisical/infisical-api.git"
},
"author": "",
"license": "ISC",
"bugs": {
"url": "https://github.com/Infisical/infisical-api/issues"
},
"homepage": "https://github.com/Infisical/infisical-api#readme",
"description": "",
"devDependencies": {
"@jest/globals": "^29.3.1",
"@posthog/plugin-scaffold": "^1.3.4",
"@swc/core": "^1.3.99",
"@swc/helpers": "^0.5.3",
"@types/bcrypt": "^5.0.0",
"@types/bcryptjs": "^2.4.2",
"@types/bull": "^4.10.0",
"@types/cookie-parser": "^1.4.3",
"@types/cors": "^2.8.12",
"@types/express": "^4.17.14",
"@types/jest": "^29.5.0",
"@types/jmespath": "^0.15.1",
"@types/jsonwebtoken": "^8.5.9",
"@types/lodash": "^4.14.191",
"@types/node": "^18.11.3",
"@types/nodemailer": "^6.4.6",
"@types/passport": "^1.0.12",
"@types/pg": "^8.10.7",
"@types/picomatch": "^2.3.0",
"@types/pino": "^7.0.5",
"@types/supertest": "^2.0.12",
"@types/swagger-jsdoc": "^6.0.1",
"@types/swagger-ui-express": "^4.1.3",
"@typescript-eslint/eslint-plugin": "^5.54.0",
"@typescript-eslint/parser": "^5.40.1",
"cross-env": "^7.0.3",
"eslint": "^8.26.0",
"eslint-plugin-unused-imports": "^2.0.0",
"install": "^0.13.0",
"jest": "^29.3.1",
"jest-junit": "^15.0.0",
"nodemon": "^2.0.19",
"npm": "^8.19.3",
"pino-pretty": "^10.2.3",
"regenerator-runtime": "^0.14.0",
"smee-client": "^1.2.3",
"supertest": "^6.3.3",
"swagger-autogen": "^2.23.5",
"ts-jest": "^29.0.3",
"ts-node": "^10.9.1"
},
"jest-junit": {
"outputDirectory": "reports",
"outputName": "jest-junit.xml",
"ancestorSeparator": " ",
"uniqueOutputName": "false",
"suiteNameTemplate": "{filepath}",
"classNameTemplate": "{classname}",
"titleTemplate": "{title}"
}
}

View File

@ -1,146 +0,0 @@
/* eslint-disable */
import { mkdirSync, writeFileSync } from "fs";
import path from "path";
import promptSync from "prompt-sync";
const prompt = promptSync({
sigint: true
});
type ComponentType = 1 | 2 | 3;
console.log(`
Component List
--------------
0. Exit
1. Service component
2. DAL component
3. Router component
`);
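// keep prompting until a valid component type is chosen (0 exits)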
function getComponentType(): ComponentType {
while (true) {
const input = prompt("Select a component (0-3): ");
const componentType = parseInt(input, 10);
if (componentType === 0) {
console.log("Exiting the program. Goodbye!");
process.exit(0);
} else if (componentType === 1 || componentType === 2 || componentType === 3) {
return componentType;
} else {
console.log("Invalid input. Please enter 0, 1, 2, or 3.");
}
}
}
const componentType = getComponentType();
if (componentType === 1) {
const componentName = prompt("Enter service name: ");
const dir = path.join(__dirname, `../src/services/${componentName}`);
const pascalCase = componentName
.split("-")
.map((el) => `${el[0].toUpperCase()}${el.slice(1)}`)
.join("");
const camelCase = componentName
.split("-")
.map((el, index) => (index === 0 ? el : `${el[0].toUpperCase()}${el.slice(1)}`))
.join("");
const dalTypeName = `T${pascalCase}DALFactory`;
const dalName = `${camelCase}DALFactory`;
const serviceTypeName = `T${pascalCase}ServiceFactory`;
const serviceName = `${camelCase}ServiceFactory`;
mkdirSync(dir);
writeFileSync(
path.join(dir, `${componentName}-dal.ts`),
`import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
export type ${dalTypeName} = ReturnType<typeof ${dalName}>;
export const ${dalName} = (db: TDbClient) => {
return { };
};
`
);
writeFileSync(
path.join(dir, `${componentName}-service.ts`),
`import { ${dalTypeName} } from "./${componentName}-dal";
type ${serviceTypeName}Dep = {
${camelCase}DAL: ${dalTypeName};
};
export type ${serviceTypeName} = ReturnType<typeof ${serviceName}>;
export const ${serviceName} = ({ ${camelCase}DAL }: ${serviceTypeName}Dep) => {
return {};
};
`
);
writeFileSync(path.join(dir, `${componentName}-types.ts`), "");
} else if (componentType === 2) {
const componentName = prompt("Enter service name: ");
const componentPath = prompt("Path wrt service folder: ");
const pascalCase = componentName
.split("-")
.map((el) => `${el[0].toUpperCase()}${el.slice(1)}`)
.join("");
const camelCase = componentName
.split("-")
.map((el, index) => (index === 0 ? el : `${el[0].toUpperCase()}${el.slice(1)}`))
.join("");
const dalTypeName = `T${pascalCase}DALFactory`;
const dalName = `${camelCase}DALFactory`;
writeFileSync(
path.join(__dirname, "../src/services", componentPath, `${componentName}-dal.ts`),
`import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
export type ${dalTypeName} = ReturnType<typeof ${dalName}>;
export const ${dalName} = (db: TDbClient) => {
return { };
};
`
);
} else if (componentType === 3) {
const name = prompt("Enter router name: ");
const version = prompt("Version number: ");
const pascalCase = name
.split("-")
.map((el) => `${el[0].toUpperCase()}${el.slice(1)}`)
.join("");
writeFileSync(
path.join(__dirname, `../src/server/routes/v${Number(version)}/${name}-router.ts`),
`import { z } from "zod";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { readLimit } from "@app/server/config/rateLimiter";
export const register${pascalCase}Router = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({}),
response: {
200: z.object({})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {}
});
};
`
);
}

View File

@ -1,17 +0,0 @@
/* eslint-disable */
import { execSync } from "child_process";
import path from "path";
import promptSync from "prompt-sync";
import slugify from "@sindresorhus/slugify"
const prompt = promptSync({ sigint: true });
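// slugify the entered name and pass it to `knex migrate:make` with the project knexfile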
const migrationName = prompt("Enter name for migration: ");
// Remove spaces from migration name and replace with hyphens
const formattedMigrationName = slugify(migrationName);
execSync(
`npx knex migrate:make --knexfile ${path.join(__dirname, "../src/db/knexfile.ts")} -x ts ${formattedMigrationName}`,
{ stdio: "inherit" }
);

View File

@ -1,16 +0,0 @@
/* eslint-disable */
import { execSync } from "child_process";
import { readdirSync } from "fs";
import path from "path";
import promptSync from "prompt-sync";
const prompt = promptSync({ sigint: true });
const migrationName = prompt("Enter name for seedfile: ");
const fileCounter = readdirSync(path.join(__dirname, "../src/db/seeds")).length || 1;
execSync(
`npx knex seed:make --knexfile ${path.join(__dirname, "../src/db/knexfile.ts")} -x ts ${
fileCounter + 1
}-${migrationName}`,
{ stdio: "inherit" }
);

View File

@ -1,164 +0,0 @@
/* eslint-disable */
import dotenv from "dotenv";
import path from "path";
import knex from "knex";
import { writeFileSync } from "fs";
dotenv.config({
path: path.join(__dirname, "../../.env.migration")
});
const db = knex({
client: "pg",
connection: process.env.DB_CONNECTION_URI
});
const getZodPrimitiveType = (type: string) => {
switch (type) {
case "uuid":
return "z.string().uuid()";
case "character varying":
return "z.string()";
case "ARRAY":
return "z.string().array()";
case "boolean":
return "z.boolean()";
case "jsonb":
return "z.unknown()";
case "json":
return "z.unknown()";
case "timestamp with time zone":
return "z.date()";
case "integer":
return "z.number()";
case "bigint":
return "z.coerce.number()";
case "text":
return "z.string()";
case "bytea":
return "zodBuffer";
default:
throw new Error(`Invalid type: ${type}`);
}
};
const getZodDefaultValue = (type: unknown, value: string | number | boolean | Object) => {
if (!value || value === "null") return;
switch (type) {
case "uuid":
return `.default("00000000-0000-0000-0000-000000000000")`;
case "character varying": {
if (value === "gen_random_uuid()") return;
if (typeof value === "string" && value.includes("::")) {
return `.default(${value.split("::")[0]})`;
}
return `.default(${value})`;
}
case "ARRAY":
return `.default(${value})`;
case "boolean":
return `.default(${value})`;
case "jsonb":
return "z.string()";
case "json":
return "z.string()";
case "timestamp with time zone": {
if (value === "CURRENT_TIMESTAMP") return;
return "z.string().datetime()";
}
case "integer": {
if ((value as string).includes("nextval")) return;
return `.default(${value})`;
}
case "bigint": {
if ((value as string).includes("nextval")) return;
return `.default(${parseInt((value as string).split("::")[0].slice(1, -1), 10)})`;
}
case "text":
if (typeof value === "string" && value.includes("::")) {
return `.default(${value.split("::")[0]})`;
}
return `.default(${value})`;
default:
throw new Error(`Invalid type: ${type}`);
}
};
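// introspect every table in the current schema (skipping migration and audit-log tables) and emit one zod schema file per table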
const main = async () => {
const tables = (
await db("information_schema.tables")
.whereRaw("table_schema = current_schema()")
.select<{ tableName: string }[]>("table_name as tableName")
.orderBy("table_name")
).filter(
(el) =>
!el.tableName.includes("_migrations") &&
!el.tableName.includes("audit_logs_") &&
el.tableName !== "intermediate_audit_logs"
);
for (let i = 0; i < tables.length; i += 1) {
const { tableName } = tables[i];
const columns = await db(tableName).columnInfo();
const columnNames = Object.keys(columns);
let schema = "";
const zodImportSet = new Set<string>();
for (let colNum = 0; colNum < columnNames.length; colNum++) {
const columnName = columnNames[colNum];
const colInfo = columns[columnName];
let ztype = getZodPrimitiveType(colInfo.type);
if (["zodBuffer"].includes(ztype)) {
zodImportSet.add(ztype);
}
// skip the default on id so it stays required in the insert type
if (colInfo.defaultValue && columnName !== "id") {
const { defaultValue } = colInfo;
const zSchema = getZodDefaultValue(colInfo.type, defaultValue);
if (zSchema) {
ztype = ztype.concat(zSchema);
}
}
if (colInfo.nullable) {
ztype = ztype.concat(".nullable().optional()");
}
schema = schema.concat(
`${!schema ? "\n" : ""} ${columnName}: ${ztype}${colNum === columnNames.length - 1 ? "" : ","}\n`
);
}
const dashcase = tableName.split("_").join("-");
const pascalCase = tableName
.split("_")
.reduce((prev, curr) => prev + `${curr.at(0)?.toUpperCase()}${curr.slice(1).toLowerCase()}`, "");
const zodImports = Array.from(zodImportSet);
// the insert and update types use the zod input type so column defaults still apply
writeFileSync(
path.join(__dirname, "../src/db/schemas", `${dashcase}.ts`),
`// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
${zodImports.length ? `import { ${zodImports.join(",")} } from \"@app/lib/zod\";` : ""}
import { TImmutableDBKeys } from "./models";
export const ${pascalCase}Schema = z.object({${schema}});
export type T${pascalCase} = z.infer<typeof ${pascalCase}Schema>;
export type T${pascalCase}Insert = Omit<z.input<typeof ${pascalCase}Schema>, TImmutableDBKeys>;
export type T${pascalCase}Update = Partial<Omit<z.input<typeof ${pascalCase}Schema>, TImmutableDBKeys>>;
`
);
}
process.exit(0);
};
main();

View File

@ -1,103 +0,0 @@
/* eslint-disable */
import promptSync from "prompt-sync";
import { execSync } from "child_process";
import path from "path";
import { existsSync } from "fs";
const prompt = promptSync({
sigint: true
});
const sanitizeInputParam = (value: string) => {
// Escape double quotes and wrap the entire value in double quotes
if (value) {
return `"${value.replace(/"/g, '\\"')}"`;
}
return '""';
};
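// dump the source database with pg_dump, excluding audit_log* and secret_sharing table data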
const exportDb = () => {
const exportHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate from: "));
const exportPort = sanitizeInputParam(
prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432"
);
const exportUser = sanitizeInputParam(
prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical"
);
const exportPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate from: "));
const exportDatabase = sanitizeInputParam(
prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical"
);
// we do not include the audit_log and secret_sharing entries
execSync(
`PGDATABASE=${exportDatabase} PGPASSWORD=${exportPassword} PGHOST=${exportHost} PGPORT=${exportPort} PGUSER=${exportUser} pg_dump -Fc infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
__dirname,
"../src/db/backup.dump"
)}`,
{ stdio: "inherit" }
);
};
const importDbForOrg = () => {
const importHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate to: "));
const importPort = sanitizeInputParam(prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432");
const importUser = sanitizeInputParam(
prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical"
);
const importPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate to: "));
const importDatabase = sanitizeInputParam(
prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical"
);
const orgId = sanitizeInputParam(prompt("Enter the organization ID to migrate: "));
if (!existsSync(path.join(__dirname, "../src/db/backup.dump"))) {
console.log("File not found, please export the database first.");
return;
}
execSync(
`PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} pg_restore -d ${importDatabase} --verbose ${path.join(
__dirname,
"../src/db/backup.dump"
)}`,
{ maxBuffer: 1024 * 1024 * 4096 }
);
execSync(
`PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
);
// delete global/instance-level resources not relevant to the organization to migrate
// users
execSync(
`PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
);
// identities
execSync(
`PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
);
// reset slack configuration in superAdmin
execSync(
`PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
);
console.log("Organization migrated successfully.");
};
const main = () => {
const action = prompt(
"Enter the action to perform\n 1. Export from existing instance.\n 2. Import org to instance.\n \n Action: "
);
if (action === "1") {
exportDb();
} else if (action === "2") {
importDbForOrg();
} else {
console.log("Invalid action");
}
};
main();

View File

@ -1,27 +0,0 @@
/* eslint-disable @typescript-eslint/no-shadow */
import fs from "node:fs";
import path from "node:path";
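// recursively rewrite ".mjs" references to ".js" in every file under the given directory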
function replaceMjsOccurrences(directory: string) {
fs.readdir(directory, (err, files) => {
if (err) throw err;
files.forEach((file) => {
const filePath = path.join(directory, file);
if (fs.statSync(filePath).isDirectory()) {
replaceMjsOccurrences(filePath);
} else {
fs.readFile(filePath, "utf8", (err, data) => {
if (err) throw err;
const result = data.replace(/\.mjs/g, ".js");
fs.writeFile(filePath, result, "utf8", (err) => {
if (err) throw err;
// eslint-disable-next-line no-console
console.log(`Updated: ${filePath}`);
});
});
}
});
});
}
replaceMjsOccurrences("dist");

8047
backend/spec.json Normal file

File diff suppressed because it is too large.

View File

@ -1,7 +0,0 @@
import "@fastify/request-context";
declare module "@fastify/request-context" {
interface RequestContextData {
reqId: string;
}
}

View File

@ -1,18 +0,0 @@
import { FastifyInstance, RawReplyDefaultExpression, RawRequestDefaultExpression, RawServerDefault } from "fastify";
import { CustomLogger } from "@app/lib/logger/logger";
import { ZodTypeProvider } from "@app/server/plugins/fastify-zod";
declare global {
type FastifyZodProvider = FastifyInstance<
RawServerDefault,
RawRequestDefaultExpression<RawServerDefault>,
RawReplyDefaultExpression<RawServerDefault>,
Readonly<CustomLogger>,
ZodTypeProvider
>;
// used only for testing
const testServer: FastifyZodProvider;
const jwtAuthToken: string;
}

View File

@ -1,222 +0,0 @@
import "fastify";
import { Redis } from "ioredis";
import { TUsers } from "@app/db/schemas";
import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-service";
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { TIdentityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
import { RateLimitConfiguration } from "@app/ee/services/rate-limit/rate-limit-types";
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service";
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
import { TSshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
import { TAuthMode } from "@app/server/plugins/auth/inject-identity";
import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
import { TAuthLoginFactory } from "@app/services/auth/auth-login-service";
import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
import { TExternalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { THsmServiceFactory } from "@app/services/hsm/hsm-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
import { TIdentityJwtAuthServiceFactory } from "@app/services/identity-jwt-auth/identity-jwt-auth-service";
import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
import { TIdentityTokenAuthServiceFactory } from "@app/services/identity-token-auth/identity-token-auth-service";
import { TIdentityUaServiceFactory } from "@app/services/identity-ua/identity-ua-service";
import { TIntegrationServiceFactory } from "@app/services/integration/integration-service";
import { TIntegrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service";
import { TOrgRoleServiceFactory } from "@app/services/org/org-role-service";
import { TOrgServiceFactory } from "@app/services/org/org-service";
import { TOrgAdminServiceFactory } from "@app/services/org-admin/org-admin-service";
import { TPkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-service";
import { TPkiCollectionServiceFactory } from "@app/services/pki-collection/pki-collection-service";
import { TProjectServiceFactory } from "@app/services/project/project-service";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TProjectEnvServiceFactory } from "@app/services/project-env/project-env-service";
import { TProjectKeyServiceFactory } from "@app/services/project-key/project-key-service";
import { TProjectMembershipServiceFactory } from "@app/services/project-membership/project-membership-service";
import { TProjectRoleServiceFactory } from "@app/services/project-role/project-role-service";
import { TSecretServiceFactory } from "@app/services/secret/secret-service";
import { TSecretBlindIndexServiceFactory } from "@app/services/secret-blind-index/secret-blind-index-service";
import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-folder-service";
import { TSecretImportServiceFactory } from "@app/services/secret-import/secret-import-service";
import { TSecretReplicationServiceFactory } from "@app/services/secret-replication/secret-replication-service";
import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
import { TSecretSyncServiceFactory } from "@app/services/secret-sync/secret-sync-service";
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
import { TSlackServiceFactory } from "@app/services/slack/slack-service";
import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
import { TTotpServiceFactory } from "@app/services/totp/totp-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TUserServiceFactory } from "@app/services/user/user-service";
import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";
declare module "fastify" {
interface Session {
callbackPort: string;
}
interface FastifyRequest {
realIp: string;
// used for mfa session authentication
mfa: {
userId: string;
orgId?: string;
user: TUsers;
};
// identity injection. depending on the kind of token, the corresponding information is filled into auth
auth: TAuthMode;
permission: {
authMethod: ActorAuthMethod;
type: ActorType;
id: string;
orgId: string;
};
rateLimits: RateLimitConfiguration;
// passport data
passportUser: {
isUserCompleted: string;
providerAuthToken: string;
};
auditLogInfo: Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>>;
}
interface FastifyInstance {
redis: Redis;
services: {
login: TAuthLoginFactory;
password: TAuthPasswordFactory;
signup: TAuthSignupFactory;
authToken: TAuthTokenServiceFactory;
permission: TPermissionServiceFactory;
org: TOrgServiceFactory;
orgRole: TOrgRoleServiceFactory;
oidc: TOidcConfigServiceFactory;
superAdmin: TSuperAdminServiceFactory;
user: TUserServiceFactory;
group: TGroupServiceFactory;
groupProject: TGroupProjectServiceFactory;
apiKey: TApiKeyServiceFactory;
pkiAlert: TPkiAlertServiceFactory;
project: TProjectServiceFactory;
projectMembership: TProjectMembershipServiceFactory;
projectEnv: TProjectEnvServiceFactory;
projectKey: TProjectKeyServiceFactory;
projectRole: TProjectRoleServiceFactory;
secret: TSecretServiceFactory;
secretReplication: TSecretReplicationServiceFactory;
secretTag: TSecretTagServiceFactory;
secretImport: TSecretImportServiceFactory;
projectBot: TProjectBotServiceFactory;
folder: TSecretFolderServiceFactory;
integration: TIntegrationServiceFactory;
integrationAuth: TIntegrationAuthServiceFactory;
webhook: TWebhookServiceFactory;
serviceToken: TServiceTokenServiceFactory;
identity: TIdentityServiceFactory;
identityAccessToken: TIdentityAccessTokenServiceFactory;
identityProject: TIdentityProjectServiceFactory;
identityTokenAuth: TIdentityTokenAuthServiceFactory;
identityUa: TIdentityUaServiceFactory;
identityKubernetesAuth: TIdentityKubernetesAuthServiceFactory;
identityGcpAuth: TIdentityGcpAuthServiceFactory;
identityAwsAuth: TIdentityAwsAuthServiceFactory;
identityAzureAuth: TIdentityAzureAuthServiceFactory;
identityOidcAuth: TIdentityOidcAuthServiceFactory;
identityJwtAuth: TIdentityJwtAuthServiceFactory;
accessApprovalPolicy: TAccessApprovalPolicyServiceFactory;
accessApprovalRequest: TAccessApprovalRequestServiceFactory;
secretApprovalPolicy: TSecretApprovalPolicyServiceFactory;
secretApprovalRequest: TSecretApprovalRequestServiceFactory;
secretRotation: TSecretRotationServiceFactory;
snapshot: TSecretSnapshotServiceFactory;
saml: TSamlConfigServiceFactory;
scim: TScimServiceFactory;
ldap: TLdapConfigServiceFactory;
auditLog: TAuditLogServiceFactory;
auditLogStream: TAuditLogStreamServiceFactory;
certificate: TCertificateServiceFactory;
certificateTemplate: TCertificateTemplateServiceFactory;
sshCertificateAuthority: TSshCertificateAuthorityServiceFactory;
sshCertificateTemplate: TSshCertificateTemplateServiceFactory;
certificateAuthority: TCertificateAuthorityServiceFactory;
certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
certificateEst: TCertificateEstServiceFactory;
pkiCollection: TPkiCollectionServiceFactory;
secretScanning: TSecretScanningServiceFactory;
license: TLicenseServiceFactory;
trustedIp: TTrustedIpServiceFactory;
secretBlindIndex: TSecretBlindIndexServiceFactory;
telemetry: TTelemetryServiceFactory;
dynamicSecret: TDynamicSecretServiceFactory;
dynamicSecretLease: TDynamicSecretLeaseServiceFactory;
projectUserAdditionalPrivilege: TProjectUserAdditionalPrivilegeServiceFactory;
identityProjectAdditionalPrivilege: TIdentityProjectAdditionalPrivilegeServiceFactory;
identityProjectAdditionalPrivilegeV2: TIdentityProjectAdditionalPrivilegeV2ServiceFactory;
secretSharing: TSecretSharingServiceFactory;
rateLimit: TRateLimitServiceFactory;
userEngagement: TUserEngagementServiceFactory;
externalKms: TExternalKmsServiceFactory;
hsm: THsmServiceFactory;
orgAdmin: TOrgAdminServiceFactory;
slack: TSlackServiceFactory;
workflowIntegration: TWorkflowIntegrationServiceFactory;
cmek: TCmekServiceFactory;
migration: TExternalMigrationServiceFactory;
externalGroupOrgRoleMapping: TExternalGroupOrgRoleMappingServiceFactory;
projectTemplate: TProjectTemplateServiceFactory;
totp: TTotpServiceFactory;
appConnection: TAppConnectionServiceFactory;
secretSync: TSecretSyncServiceFactory;
};
// for exclusive use by middlewares that need to inject data;
// everywhere else, access data through the service layer
store: {
user: Pick<TUserDALFactory, "findById">;
};
}
}

View File

@ -1,4 +0,0 @@
declare module "hdb" {
// eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, the function returns `any`.
function createClient(options): any;
}

View File

@ -1,906 +0,0 @@
import { Knex as KnexOriginal } from "knex";
import {
TableName,
TAccessApprovalPolicies,
TAccessApprovalPoliciesApprovers,
TAccessApprovalPoliciesApproversInsert,
TAccessApprovalPoliciesApproversUpdate,
TAccessApprovalPoliciesInsert,
TAccessApprovalPoliciesUpdate,
TAccessApprovalRequests,
TAccessApprovalRequestsInsert,
TAccessApprovalRequestsReviewers,
TAccessApprovalRequestsReviewersInsert,
TAccessApprovalRequestsReviewersUpdate,
TAccessApprovalRequestsUpdate,
TApiKeys,
TApiKeysInsert,
TApiKeysUpdate,
TAuditLogs,
TAuditLogsInsert,
TAuditLogStreams,
TAuditLogStreamsInsert,
TAuditLogStreamsUpdate,
TAuditLogsUpdate,
TAuthTokens,
TAuthTokenSessions,
TAuthTokenSessionsInsert,
TAuthTokenSessionsUpdate,
TAuthTokensInsert,
TAuthTokensUpdate,
TBackupPrivateKey,
TBackupPrivateKeyInsert,
TBackupPrivateKeyUpdate,
TCertificateAuthorities,
TCertificateAuthoritiesInsert,
TCertificateAuthoritiesUpdate,
TCertificateAuthorityCerts,
TCertificateAuthorityCertsInsert,
TCertificateAuthorityCertsUpdate,
TCertificateAuthorityCrl,
TCertificateAuthorityCrlInsert,
TCertificateAuthorityCrlUpdate,
TCertificateAuthoritySecret,
TCertificateAuthoritySecretInsert,
TCertificateAuthoritySecretUpdate,
TCertificateBodies,
TCertificateBodiesInsert,
TCertificateBodiesUpdate,
TCertificates,
TCertificateSecrets,
TCertificateSecretsInsert,
TCertificateSecretsUpdate,
TCertificatesInsert,
TCertificatesUpdate,
TCertificateTemplateEstConfigs,
TCertificateTemplateEstConfigsInsert,
TCertificateTemplateEstConfigsUpdate,
TCertificateTemplates,
TCertificateTemplatesInsert,
TCertificateTemplatesUpdate,
TDynamicSecretLeases,
TDynamicSecretLeasesInsert,
TDynamicSecretLeasesUpdate,
TDynamicSecrets,
TDynamicSecretsInsert,
TDynamicSecretsUpdate,
TExternalKms,
TExternalKmsInsert,
TExternalKmsUpdate,
TGitAppInstallSessions,
TGitAppInstallSessionsInsert,
TGitAppInstallSessionsUpdate,
TGitAppOrg,
TGitAppOrgInsert,
TGitAppOrgUpdate,
TGroupProjectMembershipRoles,
TGroupProjectMembershipRolesInsert,
TGroupProjectMembershipRolesUpdate,
TGroupProjectMemberships,
TGroupProjectMembershipsInsert,
TGroupProjectMembershipsUpdate,
TGroups,
TGroupsInsert,
TGroupsUpdate,
TIdentities,
TIdentitiesInsert,
TIdentitiesUpdate,
TIdentityAccessTokens,
TIdentityAccessTokensInsert,
TIdentityAccessTokensUpdate,
TIdentityAwsAuths,
TIdentityAwsAuthsInsert,
TIdentityAwsAuthsUpdate,
TIdentityAzureAuths,
TIdentityAzureAuthsInsert,
TIdentityAzureAuthsUpdate,
TIdentityGcpAuths,
TIdentityGcpAuthsInsert,
TIdentityGcpAuthsUpdate,
TIdentityJwtAuths,
TIdentityJwtAuthsInsert,
TIdentityJwtAuthsUpdate,
TIdentityKubernetesAuths,
TIdentityKubernetesAuthsInsert,
TIdentityKubernetesAuthsUpdate,
TIdentityMetadata,
TIdentityMetadataInsert,
TIdentityMetadataUpdate,
TIdentityOidcAuths,
TIdentityOidcAuthsInsert,
TIdentityOidcAuthsUpdate,
TIdentityOrgMemberships,
TIdentityOrgMembershipsInsert,
TIdentityOrgMembershipsUpdate,
TIdentityProjectAdditionalPrivilege,
TIdentityProjectAdditionalPrivilegeInsert,
TIdentityProjectAdditionalPrivilegeUpdate,
TIdentityProjectMembershipRole,
TIdentityProjectMembershipRoleInsert,
TIdentityProjectMembershipRoleUpdate,
TIdentityProjectMemberships,
TIdentityProjectMembershipsInsert,
TIdentityProjectMembershipsUpdate,
TIdentityTokenAuths,
TIdentityTokenAuthsInsert,
TIdentityTokenAuthsUpdate,
TIdentityUaClientSecrets,
TIdentityUaClientSecretsInsert,
TIdentityUaClientSecretsUpdate,
TIdentityUniversalAuths,
TIdentityUniversalAuthsInsert,
TIdentityUniversalAuthsUpdate,
TIncidentContacts,
TIncidentContactsInsert,
TIncidentContactsUpdate,
TIntegrationAuths,
TIntegrationAuthsInsert,
TIntegrationAuthsUpdate,
TIntegrations,
TIntegrationsInsert,
TIntegrationsUpdate,
TInternalKms,
TInternalKmsInsert,
TInternalKmsUpdate,
TKmsKeys,
TKmsKeysInsert,
TKmsKeysUpdate,
TKmsKeyVersions,
TKmsKeyVersionsInsert,
TKmsKeyVersionsUpdate,
TKmsRootConfig,
TKmsRootConfigInsert,
TKmsRootConfigUpdate,
TLdapConfigs,
TLdapConfigsInsert,
TLdapConfigsUpdate,
TLdapGroupMaps,
TLdapGroupMapsInsert,
TLdapGroupMapsUpdate,
TOidcConfigs,
TOidcConfigsInsert,
TOidcConfigsUpdate,
TOrganizations,
TOrganizationsInsert,
TOrganizationsUpdate,
TOrgBots,
TOrgBotsInsert,
TOrgBotsUpdate,
TOrgMemberships,
TOrgMembershipsInsert,
TOrgMembershipsUpdate,
TOrgRoles,
TOrgRolesInsert,
TOrgRolesUpdate,
TPkiAlerts,
TPkiAlertsInsert,
TPkiAlertsUpdate,
TPkiCollectionItems,
TPkiCollectionItemsInsert,
TPkiCollectionItemsUpdate,
TPkiCollections,
TPkiCollectionsInsert,
TPkiCollectionsUpdate,
TProjectBots,
TProjectBotsInsert,
TProjectBotsUpdate,
TProjectEnvironments,
TProjectEnvironmentsInsert,
TProjectEnvironmentsUpdate,
TProjectKeys,
TProjectKeysInsert,
TProjectKeysUpdate,
TProjectMemberships,
TProjectMembershipsInsert,
TProjectMembershipsUpdate,
TProjectRoles,
TProjectRolesInsert,
TProjectRolesUpdate,
TProjects,
TProjectsInsert,
TProjectSlackConfigs,
TProjectSlackConfigsInsert,
TProjectSlackConfigsUpdate,
TProjectSplitBackfillIds,
TProjectSplitBackfillIdsInsert,
TProjectSplitBackfillIdsUpdate,
TProjectsUpdate,
TProjectTemplates,
TProjectTemplatesInsert,
TProjectTemplatesUpdate,
TProjectUserAdditionalPrivilege,
TProjectUserAdditionalPrivilegeInsert,
TProjectUserAdditionalPrivilegeUpdate,
TProjectUserMembershipRoles,
TProjectUserMembershipRolesInsert,
TProjectUserMembershipRolesUpdate,
TRateLimit,
TRateLimitInsert,
TRateLimitUpdate,
TResourceMetadata,
TResourceMetadataInsert,
TResourceMetadataUpdate,
TSamlConfigs,
TSamlConfigsInsert,
TSamlConfigsUpdate,
TScimTokens,
TScimTokensInsert,
TScimTokensUpdate,
TSecretApprovalPolicies,
TSecretApprovalPoliciesApprovers,
TSecretApprovalPoliciesApproversInsert,
TSecretApprovalPoliciesApproversUpdate,
TSecretApprovalPoliciesInsert,
TSecretApprovalPoliciesUpdate,
TSecretApprovalRequests,
TSecretApprovalRequestSecretTags,
TSecretApprovalRequestSecretTagsInsert,
TSecretApprovalRequestSecretTagsUpdate,
TSecretApprovalRequestSecretTagsV2,
TSecretApprovalRequestSecretTagsV2Insert,
TSecretApprovalRequestSecretTagsV2Update,
TSecretApprovalRequestsInsert,
TSecretApprovalRequestsReviewers,
TSecretApprovalRequestsReviewersInsert,
TSecretApprovalRequestsReviewersUpdate,
TSecretApprovalRequestsSecrets,
TSecretApprovalRequestsSecretsInsert,
TSecretApprovalRequestsSecretsUpdate,
TSecretApprovalRequestsSecretsV2,
TSecretApprovalRequestsSecretsV2Insert,
TSecretApprovalRequestsSecretsV2Update,
TSecretApprovalRequestsUpdate,
TSecretBlindIndexes,
TSecretBlindIndexesInsert,
TSecretBlindIndexesUpdate,
TSecretFolders,
TSecretFoldersInsert,
TSecretFoldersUpdate,
TSecretFolderVersions,
TSecretFolderVersionsInsert,
TSecretFolderVersionsUpdate,
TSecretImports,
TSecretImportsInsert,
TSecretImportsUpdate,
TSecretReferences,
TSecretReferencesInsert,
TSecretReferencesUpdate,
TSecretReferencesV2,
TSecretReferencesV2Insert,
TSecretReferencesV2Update,
TSecretRotationOutputs,
TSecretRotationOutputsInsert,
TSecretRotationOutputsUpdate,
TSecretRotationOutputV2,
TSecretRotationOutputV2Insert,
TSecretRotationOutputV2Update,
TSecretRotations,
TSecretRotationsInsert,
TSecretRotationsUpdate,
TSecrets,
TSecretScanningGitRisks,
TSecretScanningGitRisksInsert,
TSecretScanningGitRisksUpdate,
TSecretSharing,
TSecretSharingInsert,
TSecretSharingUpdate,
TSecretsInsert,
TSecretSnapshotFolders,
TSecretSnapshotFoldersInsert,
TSecretSnapshotFoldersUpdate,
TSecretSnapshots,
TSecretSnapshotSecrets,
TSecretSnapshotSecretsInsert,
TSecretSnapshotSecretsUpdate,
TSecretSnapshotSecretsV2,
TSecretSnapshotSecretsV2Insert,
TSecretSnapshotSecretsV2Update,
TSecretSnapshotsInsert,
TSecretSnapshotsUpdate,
TSecretsUpdate,
TSecretTagJunction,
TSecretTagJunctionInsert,
TSecretTagJunctionUpdate,
TSecretTags,
TSecretTagsInsert,
TSecretTagsUpdate,
TSecretVersions,
TSecretVersionsInsert,
TSecretVersionsUpdate,
TSecretVersionTagJunction,
TSecretVersionTagJunctionInsert,
TSecretVersionTagJunctionUpdate,
TSecretVersionV2TagJunction,
TSecretVersionV2TagJunctionInsert,
TSecretVersionV2TagJunctionUpdate,
TServiceTokens,
TServiceTokensInsert,
TServiceTokensUpdate,
TSlackIntegrations,
TSlackIntegrationsInsert,
TSlackIntegrationsUpdate,
TSshCertificateAuthorities,
TSshCertificateAuthoritiesInsert,
TSshCertificateAuthoritiesUpdate,
TSshCertificateAuthoritySecrets,
TSshCertificateAuthoritySecretsInsert,
TSshCertificateAuthoritySecretsUpdate,
TSshCertificateBodies,
TSshCertificateBodiesInsert,
TSshCertificateBodiesUpdate,
TSshCertificates,
TSshCertificatesInsert,
TSshCertificatesUpdate,
TSshCertificateTemplates,
TSshCertificateTemplatesInsert,
TSshCertificateTemplatesUpdate,
TSuperAdmin,
TSuperAdminInsert,
TSuperAdminUpdate,
TTotpConfigs,
TTotpConfigsInsert,
TTotpConfigsUpdate,
TTrustedIps,
TTrustedIpsInsert,
TTrustedIpsUpdate,
TUserActions,
TUserActionsInsert,
TUserActionsUpdate,
TUserAliases,
TUserAliasesInsert,
TUserAliasesUpdate,
TUserEncryptionKeys,
TUserEncryptionKeysInsert,
TUserEncryptionKeysUpdate,
TUserGroupMembership,
TUserGroupMembershipInsert,
TUserGroupMembershipUpdate,
TUsers,
TUsersInsert,
TUsersUpdate,
TWebhooks,
TWebhooksInsert,
TWebhooksUpdate,
TWorkflowIntegrations,
TWorkflowIntegrationsInsert,
TWorkflowIntegrationsUpdate
} from "@app/db/schemas";
import { TAppConnections, TAppConnectionsInsert, TAppConnectionsUpdate } from "@app/db/schemas/app-connections";
import {
TExternalGroupOrgRoleMappings,
TExternalGroupOrgRoleMappingsInsert,
TExternalGroupOrgRoleMappingsUpdate
} from "@app/db/schemas/external-group-org-role-mappings";
import { TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate } from "@app/db/schemas/secret-syncs";
import {
TSecretV2TagJunction,
TSecretV2TagJunctionInsert,
TSecretV2TagJunctionUpdate
} from "@app/db/schemas/secret-v2-tag-junction";
import {
TSecretVersionsV2,
TSecretVersionsV2Insert,
TSecretVersionsV2Update
} from "@app/db/schemas/secret-versions-v2";
import { TSecretsV2, TSecretsV2Insert, TSecretsV2Update } from "@app/db/schemas/secrets-v2";
declare module "knex" {
namespace Knex {
interface QueryInterface {
primaryNode(): KnexOriginal;
replicaNode(): KnexOriginal;
}
}
}
declare module "knex/types/tables" {
interface Tables {
[TableName.Users]: KnexOriginal.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
[TableName.Groups]: KnexOriginal.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
[TableName.SshCertificateAuthority]: KnexOriginal.CompositeTableType<
TSshCertificateAuthorities,
TSshCertificateAuthoritiesInsert,
TSshCertificateAuthoritiesUpdate
>;
[TableName.SshCertificateAuthoritySecret]: KnexOriginal.CompositeTableType<
TSshCertificateAuthoritySecrets,
TSshCertificateAuthoritySecretsInsert,
TSshCertificateAuthoritySecretsUpdate
>;
[TableName.SshCertificateTemplate]: KnexOriginal.CompositeTableType<
TSshCertificateTemplates,
TSshCertificateTemplatesInsert,
TSshCertificateTemplatesUpdate
>;
[TableName.SshCertificate]: KnexOriginal.CompositeTableType<
TSshCertificates,
TSshCertificatesInsert,
TSshCertificatesUpdate
>;
[TableName.SshCertificateBody]: KnexOriginal.CompositeTableType<
TSshCertificateBodies,
TSshCertificateBodiesInsert,
TSshCertificateBodiesUpdate
>;
[TableName.CertificateAuthority]: KnexOriginal.CompositeTableType<
TCertificateAuthorities,
TCertificateAuthoritiesInsert,
TCertificateAuthoritiesUpdate
>;
[TableName.CertificateAuthorityCert]: KnexOriginal.CompositeTableType<
TCertificateAuthorityCerts,
TCertificateAuthorityCertsInsert,
TCertificateAuthorityCertsUpdate
>;
[TableName.CertificateAuthoritySecret]: KnexOriginal.CompositeTableType<
TCertificateAuthoritySecret,
TCertificateAuthoritySecretInsert,
TCertificateAuthoritySecretUpdate
>;
[TableName.CertificateAuthorityCrl]: KnexOriginal.CompositeTableType<
TCertificateAuthorityCrl,
TCertificateAuthorityCrlInsert,
TCertificateAuthorityCrlUpdate
>;
[TableName.Certificate]: KnexOriginal.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
[TableName.CertificateTemplate]: KnexOriginal.CompositeTableType<
TCertificateTemplates,
TCertificateTemplatesInsert,
TCertificateTemplatesUpdate
>;
[TableName.CertificateTemplateEstConfig]: KnexOriginal.CompositeTableType<
TCertificateTemplateEstConfigs,
TCertificateTemplateEstConfigsInsert,
TCertificateTemplateEstConfigsUpdate
>;
[TableName.CertificateBody]: KnexOriginal.CompositeTableType<
TCertificateBodies,
TCertificateBodiesInsert,
TCertificateBodiesUpdate
>;
[TableName.CertificateSecret]: KnexOriginal.CompositeTableType<
TCertificateSecrets,
TCertificateSecretsInsert,
TCertificateSecretsUpdate
>;
[TableName.PkiAlert]: KnexOriginal.CompositeTableType<TPkiAlerts, TPkiAlertsInsert, TPkiAlertsUpdate>;
[TableName.PkiCollection]: KnexOriginal.CompositeTableType<
TPkiCollections,
TPkiCollectionsInsert,
TPkiCollectionsUpdate
>;
[TableName.PkiCollectionItem]: KnexOriginal.CompositeTableType<
TPkiCollectionItems,
TPkiCollectionItemsInsert,
TPkiCollectionItemsUpdate
>;
[TableName.UserGroupMembership]: KnexOriginal.CompositeTableType<
TUserGroupMembership,
TUserGroupMembershipInsert,
TUserGroupMembershipUpdate
>;
[TableName.GroupProjectMembership]: KnexOriginal.CompositeTableType<
TGroupProjectMemberships,
TGroupProjectMembershipsInsert,
TGroupProjectMembershipsUpdate
>;
[TableName.GroupProjectMembershipRole]: KnexOriginal.CompositeTableType<
TGroupProjectMembershipRoles,
TGroupProjectMembershipRolesInsert,
TGroupProjectMembershipRolesUpdate
>;
[TableName.UserAliases]: KnexOriginal.CompositeTableType<TUserAliases, TUserAliasesInsert, TUserAliasesUpdate>;
[TableName.UserEncryptionKey]: KnexOriginal.CompositeTableType<
TUserEncryptionKeys,
TUserEncryptionKeysInsert,
TUserEncryptionKeysUpdate
>;
[TableName.AuthTokens]: KnexOriginal.CompositeTableType<TAuthTokens, TAuthTokensInsert, TAuthTokensUpdate>;
[TableName.AuthTokenSession]: KnexOriginal.CompositeTableType<
TAuthTokenSessions,
TAuthTokenSessionsInsert,
TAuthTokenSessionsUpdate
>;
[TableName.BackupPrivateKey]: KnexOriginal.CompositeTableType<
TBackupPrivateKey,
TBackupPrivateKeyInsert,
TBackupPrivateKeyUpdate
>;
[TableName.Organization]: KnexOriginal.CompositeTableType<
TOrganizations,
TOrganizationsInsert,
TOrganizationsUpdate
>;
[TableName.OrgMembership]: KnexOriginal.CompositeTableType<
TOrgMemberships,
TOrgMembershipsInsert,
TOrgMembershipsUpdate
>;
[TableName.OrgRoles]: KnexOriginal.CompositeTableType<TOrgRoles, TOrgRolesInsert, TOrgRolesUpdate>;
[TableName.IncidentContact]: KnexOriginal.CompositeTableType<
TIncidentContacts,
TIncidentContactsInsert,
TIncidentContactsUpdate
>;
[TableName.UserAction]: KnexOriginal.CompositeTableType<TUserActions, TUserActionsInsert, TUserActionsUpdate>;
[TableName.SuperAdmin]: KnexOriginal.CompositeTableType<TSuperAdmin, TSuperAdminInsert, TSuperAdminUpdate>;
[TableName.ApiKey]: KnexOriginal.CompositeTableType<TApiKeys, TApiKeysInsert, TApiKeysUpdate>;
[TableName.Project]: KnexOriginal.CompositeTableType<TProjects, TProjectsInsert, TProjectsUpdate>;
[TableName.ProjectMembership]: KnexOriginal.CompositeTableType<
TProjectMemberships,
TProjectMembershipsInsert,
TProjectMembershipsUpdate
>;
[TableName.Environment]: KnexOriginal.CompositeTableType<
TProjectEnvironments,
TProjectEnvironmentsInsert,
TProjectEnvironmentsUpdate
>;
[TableName.ProjectBot]: KnexOriginal.CompositeTableType<TProjectBots, TProjectBotsInsert, TProjectBotsUpdate>;
[TableName.ProjectUserMembershipRole]: KnexOriginal.CompositeTableType<
TProjectUserMembershipRoles,
TProjectUserMembershipRolesInsert,
TProjectUserMembershipRolesUpdate
>;
[TableName.ProjectRoles]: KnexOriginal.CompositeTableType<TProjectRoles, TProjectRolesInsert, TProjectRolesUpdate>;
[TableName.ProjectUserAdditionalPrivilege]: KnexOriginal.CompositeTableType<
TProjectUserAdditionalPrivilege,
TProjectUserAdditionalPrivilegeInsert,
TProjectUserAdditionalPrivilegeUpdate
>;
[TableName.ProjectKeys]: KnexOriginal.CompositeTableType<TProjectKeys, TProjectKeysInsert, TProjectKeysUpdate>;
[TableName.Secret]: KnexOriginal.CompositeTableType<TSecrets, TSecretsInsert, TSecretsUpdate>;
[TableName.SecretReference]: KnexOriginal.CompositeTableType<
TSecretReferences,
TSecretReferencesInsert,
TSecretReferencesUpdate
>;
[TableName.SecretBlindIndex]: KnexOriginal.CompositeTableType<
TSecretBlindIndexes,
TSecretBlindIndexesInsert,
TSecretBlindIndexesUpdate
>;
[TableName.SecretVersion]: KnexOriginal.CompositeTableType<
TSecretVersions,
TSecretVersionsInsert,
TSecretVersionsUpdate
>;
[TableName.SecretFolder]: KnexOriginal.CompositeTableType<
TSecretFolders,
TSecretFoldersInsert,
TSecretFoldersUpdate
>;
[TableName.SecretFolderVersion]: KnexOriginal.CompositeTableType<
TSecretFolderVersions,
TSecretFolderVersionsInsert,
TSecretFolderVersionsUpdate
>;
[TableName.SecretSharing]: KnexOriginal.CompositeTableType<
TSecretSharing,
TSecretSharingInsert,
TSecretSharingUpdate
>;
[TableName.RateLimit]: KnexOriginal.CompositeTableType<TRateLimit, TRateLimitInsert, TRateLimitUpdate>;
[TableName.SecretTag]: KnexOriginal.CompositeTableType<TSecretTags, TSecretTagsInsert, TSecretTagsUpdate>;
[TableName.SecretImport]: KnexOriginal.CompositeTableType<
TSecretImports,
TSecretImportsInsert,
TSecretImportsUpdate
>;
[TableName.Integration]: KnexOriginal.CompositeTableType<TIntegrations, TIntegrationsInsert, TIntegrationsUpdate>;
[TableName.Webhook]: KnexOriginal.CompositeTableType<TWebhooks, TWebhooksInsert, TWebhooksUpdate>;
[TableName.ServiceToken]: KnexOriginal.CompositeTableType<
TServiceTokens,
TServiceTokensInsert,
TServiceTokensUpdate
>;
[TableName.IntegrationAuth]: KnexOriginal.CompositeTableType<
TIntegrationAuths,
TIntegrationAuthsInsert,
TIntegrationAuthsUpdate
>;
[TableName.Identity]: KnexOriginal.CompositeTableType<TIdentities, TIdentitiesInsert, TIdentitiesUpdate>;
[TableName.IdentityTokenAuth]: KnexOriginal.CompositeTableType<
TIdentityTokenAuths,
TIdentityTokenAuthsInsert,
TIdentityTokenAuthsUpdate
>;
[TableName.IdentityUniversalAuth]: KnexOriginal.CompositeTableType<
TIdentityUniversalAuths,
TIdentityUniversalAuthsInsert,
TIdentityUniversalAuthsUpdate
>;
[TableName.IdentityMetadata]: KnexOriginal.CompositeTableType<
TIdentityMetadata,
TIdentityMetadataInsert,
TIdentityMetadataUpdate
>;
[TableName.IdentityKubernetesAuth]: KnexOriginal.CompositeTableType<
TIdentityKubernetesAuths,
TIdentityKubernetesAuthsInsert,
TIdentityKubernetesAuthsUpdate
>;
[TableName.IdentityGcpAuth]: KnexOriginal.CompositeTableType<
TIdentityGcpAuths,
TIdentityGcpAuthsInsert,
TIdentityGcpAuthsUpdate
>;
[TableName.IdentityAwsAuth]: KnexOriginal.CompositeTableType<
TIdentityAwsAuths,
TIdentityAwsAuthsInsert,
TIdentityAwsAuthsUpdate
>;
[TableName.IdentityAzureAuth]: KnexOriginal.CompositeTableType<
TIdentityAzureAuths,
TIdentityAzureAuthsInsert,
TIdentityAzureAuthsUpdate
>;
[TableName.IdentityOidcAuth]: KnexOriginal.CompositeTableType<
TIdentityOidcAuths,
TIdentityOidcAuthsInsert,
TIdentityOidcAuthsUpdate
>;
[TableName.IdentityJwtAuth]: KnexOriginal.CompositeTableType<
TIdentityJwtAuths,
TIdentityJwtAuthsInsert,
TIdentityJwtAuthsUpdate
>;
[TableName.IdentityUaClientSecret]: KnexOriginal.CompositeTableType<
TIdentityUaClientSecrets,
TIdentityUaClientSecretsInsert,
TIdentityUaClientSecretsUpdate
>;
[TableName.IdentityAccessToken]: KnexOriginal.CompositeTableType<
TIdentityAccessTokens,
TIdentityAccessTokensInsert,
TIdentityAccessTokensUpdate
>;
[TableName.IdentityOrgMembership]: KnexOriginal.CompositeTableType<
TIdentityOrgMemberships,
TIdentityOrgMembershipsInsert,
TIdentityOrgMembershipsUpdate
>;
[TableName.IdentityProjectMembership]: KnexOriginal.CompositeTableType<
TIdentityProjectMemberships,
TIdentityProjectMembershipsInsert,
TIdentityProjectMembershipsUpdate
>;
[TableName.IdentityProjectMembershipRole]: KnexOriginal.CompositeTableType<
TIdentityProjectMembershipRole,
TIdentityProjectMembershipRoleInsert,
TIdentityProjectMembershipRoleUpdate
>;
[TableName.IdentityProjectAdditionalPrivilege]: KnexOriginal.CompositeTableType<
TIdentityProjectAdditionalPrivilege,
TIdentityProjectAdditionalPrivilegeInsert,
TIdentityProjectAdditionalPrivilegeUpdate
>;
[TableName.AccessApprovalPolicy]: KnexOriginal.CompositeTableType<
TAccessApprovalPolicies,
TAccessApprovalPoliciesInsert,
TAccessApprovalPoliciesUpdate
>;
[TableName.AccessApprovalPolicyApprover]: KnexOriginal.CompositeTableType<
TAccessApprovalPoliciesApprovers,
TAccessApprovalPoliciesApproversInsert,
TAccessApprovalPoliciesApproversUpdate
>;
[TableName.AccessApprovalRequest]: KnexOriginal.CompositeTableType<
TAccessApprovalRequests,
TAccessApprovalRequestsInsert,
TAccessApprovalRequestsUpdate
>;
[TableName.AccessApprovalRequestReviewer]: KnexOriginal.CompositeTableType<
TAccessApprovalRequestsReviewers,
TAccessApprovalRequestsReviewersInsert,
TAccessApprovalRequestsReviewersUpdate
>;
[TableName.ScimToken]: KnexOriginal.CompositeTableType<TScimTokens, TScimTokensInsert, TScimTokensUpdate>;
[TableName.SecretApprovalPolicy]: KnexOriginal.CompositeTableType<
TSecretApprovalPolicies,
TSecretApprovalPoliciesInsert,
TSecretApprovalPoliciesUpdate
>;
[TableName.SecretApprovalPolicyApprover]: KnexOriginal.CompositeTableType<
TSecretApprovalPoliciesApprovers,
TSecretApprovalPoliciesApproversInsert,
TSecretApprovalPoliciesApproversUpdate
>;
[TableName.SecretApprovalRequest]: KnexOriginal.CompositeTableType<
TSecretApprovalRequests,
TSecretApprovalRequestsInsert,
TSecretApprovalRequestsUpdate
>;
[TableName.SecretApprovalRequestReviewer]: KnexOriginal.CompositeTableType<
TSecretApprovalRequestsReviewers,
TSecretApprovalRequestsReviewersInsert,
TSecretApprovalRequestsReviewersUpdate
>;
[TableName.SecretApprovalRequestSecret]: KnexOriginal.CompositeTableType<
TSecretApprovalRequestsSecrets,
TSecretApprovalRequestsSecretsInsert,
TSecretApprovalRequestsSecretsUpdate
>;
[TableName.SecretApprovalRequestSecretTag]: KnexOriginal.CompositeTableType<
TSecretApprovalRequestSecretTags,
TSecretApprovalRequestSecretTagsInsert,
TSecretApprovalRequestSecretTagsUpdate
>;
[TableName.SecretRotation]: KnexOriginal.CompositeTableType<
TSecretRotations,
TSecretRotationsInsert,
TSecretRotationsUpdate
>;
[TableName.SecretRotationOutput]: KnexOriginal.CompositeTableType<
TSecretRotationOutputs,
TSecretRotationOutputsInsert,
TSecretRotationOutputsUpdate
>;
[TableName.Snapshot]: KnexOriginal.CompositeTableType<
TSecretSnapshots,
TSecretSnapshotsInsert,
TSecretSnapshotsUpdate
>;
[TableName.SnapshotSecret]: KnexOriginal.CompositeTableType<
TSecretSnapshotSecrets,
TSecretSnapshotSecretsInsert,
TSecretSnapshotSecretsUpdate
>;
[TableName.SnapshotFolder]: KnexOriginal.CompositeTableType<
TSecretSnapshotFolders,
TSecretSnapshotFoldersInsert,
TSecretSnapshotFoldersUpdate
>;
[TableName.DynamicSecret]: KnexOriginal.CompositeTableType<
TDynamicSecrets,
TDynamicSecretsInsert,
TDynamicSecretsUpdate
>;
[TableName.DynamicSecretLease]: KnexOriginal.CompositeTableType<
TDynamicSecretLeases,
TDynamicSecretLeasesInsert,
TDynamicSecretLeasesUpdate
>;
[TableName.SamlConfig]: KnexOriginal.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
[TableName.OidcConfig]: KnexOriginal.CompositeTableType<TOidcConfigs, TOidcConfigsInsert, TOidcConfigsUpdate>;
[TableName.LdapConfig]: KnexOriginal.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
[TableName.LdapGroupMap]: KnexOriginal.CompositeTableType<
TLdapGroupMaps,
TLdapGroupMapsInsert,
TLdapGroupMapsUpdate
>;
[TableName.OrgBot]: KnexOriginal.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
[TableName.AuditLog]: KnexOriginal.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
[TableName.AuditLogStream]: KnexOriginal.CompositeTableType<
TAuditLogStreams,
TAuditLogStreamsInsert,
TAuditLogStreamsUpdate
>;
[TableName.GitAppInstallSession]: KnexOriginal.CompositeTableType<
TGitAppInstallSessions,
TGitAppInstallSessionsInsert,
TGitAppInstallSessionsUpdate
>;
[TableName.GitAppOrg]: KnexOriginal.CompositeTableType<TGitAppOrg, TGitAppOrgInsert, TGitAppOrgUpdate>;
[TableName.SecretScanningGitRisk]: KnexOriginal.CompositeTableType<
TSecretScanningGitRisks,
TSecretScanningGitRisksInsert,
TSecretScanningGitRisksUpdate
>;
[TableName.TrustedIps]: KnexOriginal.CompositeTableType<TTrustedIps, TTrustedIpsInsert, TTrustedIpsUpdate>;
[TableName.SecretV2]: KnexOriginal.CompositeTableType<TSecretsV2, TSecretsV2Insert, TSecretsV2Update>;
[TableName.SecretVersionV2]: KnexOriginal.CompositeTableType<
TSecretVersionsV2,
TSecretVersionsV2Insert,
TSecretVersionsV2Update
>;
[TableName.SecretReferenceV2]: KnexOriginal.CompositeTableType<
TSecretReferencesV2,
TSecretReferencesV2Insert,
TSecretReferencesV2Update
>;
// Junction tables
[TableName.SecretV2JnTag]: KnexOriginal.CompositeTableType<
TSecretV2TagJunction,
TSecretV2TagJunctionInsert,
TSecretV2TagJunctionUpdate
>;
[TableName.JnSecretTag]: KnexOriginal.CompositeTableType<
TSecretTagJunction,
TSecretTagJunctionInsert,
TSecretTagJunctionUpdate
>;
[TableName.SecretVersionTag]: KnexOriginal.CompositeTableType<
TSecretVersionTagJunction,
TSecretVersionTagJunctionInsert,
TSecretVersionTagJunctionUpdate
>;
[TableName.SecretVersionV2Tag]: KnexOriginal.CompositeTableType<
TSecretVersionV2TagJunction,
TSecretVersionV2TagJunctionInsert,
TSecretVersionV2TagJunctionUpdate
>;
[TableName.SnapshotSecretV2]: KnexOriginal.CompositeTableType<
TSecretSnapshotSecretsV2,
TSecretSnapshotSecretsV2Insert,
TSecretSnapshotSecretsV2Update
>;
[TableName.SecretApprovalRequestSecretV2]: KnexOriginal.CompositeTableType<
TSecretApprovalRequestsSecretsV2,
TSecretApprovalRequestsSecretsV2Insert,
TSecretApprovalRequestsSecretsV2Update
>;
[TableName.SecretApprovalRequestSecretTagV2]: KnexOriginal.CompositeTableType<
TSecretApprovalRequestSecretTagsV2,
TSecretApprovalRequestSecretTagsV2Insert,
TSecretApprovalRequestSecretTagsV2Update
>;
[TableName.SecretRotationOutputV2]: KnexOriginal.CompositeTableType<
TSecretRotationOutputV2,
TSecretRotationOutputV2Insert,
TSecretRotationOutputV2Update
>;
// KMS service
[TableName.KmsServerRootConfig]: KnexOriginal.CompositeTableType<
TKmsRootConfig,
TKmsRootConfigInsert,
TKmsRootConfigUpdate
>;
[TableName.InternalKms]: KnexOriginal.CompositeTableType<TInternalKms, TInternalKmsInsert, TInternalKmsUpdate>;
[TableName.ExternalKms]: KnexOriginal.CompositeTableType<TExternalKms, TExternalKmsInsert, TExternalKmsUpdate>;
[TableName.KmsKey]: KnexOriginal.CompositeTableType<TKmsKeys, TKmsKeysInsert, TKmsKeysUpdate>;
[TableName.KmsKeyVersion]: KnexOriginal.CompositeTableType<
TKmsKeyVersions,
TKmsKeyVersionsInsert,
TKmsKeyVersionsUpdate
>;
[TableName.SlackIntegrations]: KnexOriginal.CompositeTableType<
TSlackIntegrations,
TSlackIntegrationsInsert,
TSlackIntegrationsUpdate
>;
[TableName.ProjectSlackConfigs]: KnexOriginal.CompositeTableType<
TProjectSlackConfigs,
TProjectSlackConfigsInsert,
TProjectSlackConfigsUpdate
>;
[TableName.WorkflowIntegrations]: KnexOriginal.CompositeTableType<
TWorkflowIntegrations,
TWorkflowIntegrationsInsert,
TWorkflowIntegrationsUpdate
>;
[TableName.ExternalGroupOrgRoleMapping]: KnexOriginal.CompositeTableType<
TExternalGroupOrgRoleMappings,
TExternalGroupOrgRoleMappingsInsert,
TExternalGroupOrgRoleMappingsUpdate
>;
[TableName.ProjectTemplates]: KnexOriginal.CompositeTableType<
TProjectTemplates,
TProjectTemplatesInsert,
TProjectTemplatesUpdate
>;
[TableName.TotpConfig]: KnexOriginal.CompositeTableType<TTotpConfigs, TTotpConfigsInsert, TTotpConfigsUpdate>;
[TableName.ProjectSplitBackfillIds]: KnexOriginal.CompositeTableType<
TProjectSplitBackfillIds,
TProjectSplitBackfillIdsInsert,
TProjectSplitBackfillIdsUpdate
>;
[TableName.ResourceMetadata]: KnexOriginal.CompositeTableType<
TResourceMetadata,
TResourceMetadataInsert,
TResourceMetadataUpdate
>;
[TableName.AppConnection]: KnexOriginal.CompositeTableType<
TAppConnections,
TAppConnectionsInsert,
TAppConnectionsUpdate
>;
[TableName.SecretSync]: KnexOriginal.CompositeTableType<TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate>;
}
}
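Each entry above registers a table key together with its row, insert, and update shapes, so Knex can infer types end to end. A minimal sketch of the payoff, assuming a configured Knex instance bound to this augmentation and that TProjects exposes a `name` column (the import path is illustrative):

import { Knex } from "knex";
import { TableName } from "./schemas"; // illustrative path

// Because TableName.Project maps to CompositeTableType<TProjects, ...> above, the
// row type of this query is inferred, and selecting a column that TProjects does
// not declare fails at compile time.
export const listProjectNames = async (db: Knex): Promise<string[]> => {
  const rows = await db(TableName.Project).select("name");
  return rows.map((row) => row.name);
};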


@ -1,4 +0,0 @@
declare module "ldif" {
// eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, the function returns `any`.
function parse(input: string, ...args: any[]): any;
}


@ -1 +0,0 @@
declare module "passport-gitlab2";

backend/src/bootstrap.ts Normal file

@ -0,0 +1,43 @@
import ora from "ora";
import nodemailer from "nodemailer";
import { getSmtpHost, getSmtpPort } from "./config";
import { logger } from "./utils/logging";
import mongoose from "mongoose";
import { redisClient } from "./services/RedisService";
type BootstrapOpt = {
transporter: nodemailer.Transporter;
};
export const bootstrap = async ({ transporter }: BootstrapOpt) => {
const spinner = ora().start();
spinner.info("Checking configurations...");
spinner.info("Testing smtp connection");
await transporter
.verify()
.then(async () => {
spinner.succeed("SMTP successfully connected");
})
.catch(async (err) => {
spinner.fail(`SMTP - Failed to connect to ${await getSmtpHost()}:${await getSmtpPort()}`);
logger.error(err);
});
spinner.info("Testing mongodb connection");
if (mongoose.connection.readyState !== mongoose.ConnectionStates.connected) {
spinner.fail("Mongo DB - Failed to connect");
} else {
spinner.succeed("Mongodb successfully connected");
}
spinner.info("Testing redis connection");
const redisPing = await redisClient?.ping();
if (!redisPing) {
spinner.fail("Redis - Failed to connect");
} else {
spinner.succeed("Redis successfully connected");
}
spinner.stop();
};
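A minimal sketch of invoking these startup checks, assuming a sibling entry file and an SMTP transporter built from the config getters defined alongside it (the transport options are illustrative):

import nodemailer from "nodemailer";
import { bootstrap } from "./bootstrap";
import { getSmtpHost, getSmtpPassword, getSmtpPort, getSmtpUsername } from "./config";

const start = async () => {
  // Build the transporter whose connectivity bootstrap() verifies; auth options are illustrative.
  const transporter = nodemailer.createTransport({
    host: await getSmtpHost(),
    port: await getSmtpPort(),
    auth: { user: await getSmtpUsername(), pass: await getSmtpPassword() }
  });
  await bootstrap({ transporter });
};

void start();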

backend/src/config/index.ts Normal file

@ -0,0 +1,173 @@
import { GITLAB_URL } from "../variables";
import InfisicalClient from "infisical-node";
export const client = new InfisicalClient({
token: process.env.INFISICAL_TOKEN!
});
export const getPort = async () => (await client.getSecret("PORT")).secretValue || 4000;
export const getEncryptionKey = async () => {
const secretValue = (await client.getSecret("ENCRYPTION_KEY")).secretValue;
return secretValue === "" ? undefined : secretValue;
};
export const getRootEncryptionKey = async () => {
const secretValue = (await client.getSecret("ROOT_ENCRYPTION_KEY")).secretValue;
return secretValue === "" ? undefined : secretValue;
};
export const getInviteOnlySignup = async () =>
(await client.getSecret("INVITE_ONLY_SIGNUP")).secretValue === "true";
export const getSaltRounds = async () =>
parseInt((await client.getSecret("SALT_ROUNDS")).secretValue) || 10;
export const getAuthSecret = async () =>
(await client.getSecret("JWT_AUTH_SECRET")).secretValue ??
(await client.getSecret("AUTH_SECRET")).secretValue;
export const getJwtAuthLifetime = async () =>
(await client.getSecret("JWT_AUTH_LIFETIME")).secretValue || "10d";
export const getJwtMfaLifetime = async () =>
(await client.getSecret("JWT_MFA_LIFETIME")).secretValue || "5m";
export const getJwtRefreshLifetime = async () =>
(await client.getSecret("JWT_REFRESH_LIFETIME")).secretValue || "90d";
export const getJwtServiceSecret = async () =>
(await client.getSecret("JWT_SERVICE_SECRET")).secretValue; // TODO: deprecate (related to ST V1)
export const getJwtSignupLifetime = async () =>
(await client.getSecret("JWT_SIGNUP_LIFETIME")).secretValue || "15m";
export const getJwtProviderAuthLifetime = async () =>
(await client.getSecret("JWT_PROVIDER_AUTH_LIFETIME")).secretValue || "15m";
export const getMongoURL = async () => (await client.getSecret("MONGO_URL")).secretValue;
export const getNodeEnv = async () =>
(await client.getSecret("NODE_ENV")).secretValue || "production";
export const getVerboseErrorOutput = async () =>
(await client.getSecret("VERBOSE_ERROR_OUTPUT")).secretValue === "true" && true;
export const getLokiHost = async () => (await client.getSecret("LOKI_HOST")).secretValue;
export const getClientIdAzure = async () => (await client.getSecret("CLIENT_ID_AZURE")).secretValue;
export const getClientIdHeroku = async () =>
(await client.getSecret("CLIENT_ID_HEROKU")).secretValue;
export const getClientIdVercel = async () =>
(await client.getSecret("CLIENT_ID_VERCEL")).secretValue;
export const getClientIdNetlify = async () =>
(await client.getSecret("CLIENT_ID_NETLIFY")).secretValue;
export const getClientIdGitHub = async () =>
(await client.getSecret("CLIENT_ID_GITHUB")).secretValue;
export const getClientIdGitLab = async () =>
(await client.getSecret("CLIENT_ID_GITLAB")).secretValue;
export const getClientIdBitBucket = async () =>
(await client.getSecret("CLIENT_ID_BITBUCKET")).secretValue;
export const getClientIdGCPSecretManager = async () =>
(await client.getSecret("CLIENT_ID_GCP_SECRET_MANAGER")).secretValue;
export const getClientSecretAzure = async () =>
(await client.getSecret("CLIENT_SECRET_AZURE")).secretValue;
export const getClientSecretHeroku = async () =>
(await client.getSecret("CLIENT_SECRET_HEROKU")).secretValue;
export const getClientSecretVercel = async () =>
(await client.getSecret("CLIENT_SECRET_VERCEL")).secretValue;
export const getClientSecretNetlify = async () =>
(await client.getSecret("CLIENT_SECRET_NETLIFY")).secretValue;
export const getClientSecretGitHub = async () =>
(await client.getSecret("CLIENT_SECRET_GITHUB")).secretValue;
export const getClientSecretGitLab = async () =>
(await client.getSecret("CLIENT_SECRET_GITLAB")).secretValue;
export const getClientSecretBitBucket = async () =>
(await client.getSecret("CLIENT_SECRET_BITBUCKET")).secretValue;
export const getClientSecretGCPSecretManager = async () =>
(await client.getSecret("CLIENT_SECRET_GCP_SECRET_MANAGER")).secretValue;
export const getClientSlugVercel = async () =>
(await client.getSecret("CLIENT_SLUG_VERCEL")).secretValue;
export const getClientIdGoogleLogin = async () =>
(await client.getSecret("CLIENT_ID_GOOGLE_LOGIN")).secretValue;
export const getClientSecretGoogleLogin = async () =>
(await client.getSecret("CLIENT_SECRET_GOOGLE_LOGIN")).secretValue;
export const getClientIdGitHubLogin = async () =>
(await client.getSecret("CLIENT_ID_GITHUB_LOGIN")).secretValue;
export const getClientSecretGitHubLogin = async () =>
(await client.getSecret("CLIENT_SECRET_GITHUB_LOGIN")).secretValue;
export const getClientIdGitLabLogin = async () =>
(await client.getSecret("CLIENT_ID_GITLAB_LOGIN")).secretValue;
export const getClientSecretGitLabLogin = async () =>
(await client.getSecret("CLIENT_SECRET_GITLAB_LOGIN")).secretValue;
export const getUrlGitLabLogin = async () =>
(await client.getSecret("URL_GITLAB_LOGIN")).secretValue || GITLAB_URL;
export const getAwsCloudWatchLog = async () => {
const logGroupName =
(await client.getSecret("AWS_CLOUDWATCH_LOG_GROUP_NAME")).secretValue || "infisical-log-stream";
const region = (await client.getSecret("AWS_CLOUDWATCH_LOG_REGION")).secretValue;
const accessKeyId = (await client.getSecret("AWS_CLOUDWATCH_LOG_ACCESS_KEY_ID")).secretValue;
const accessKeySecret = (await client.getSecret("AWS_CLOUDWATCH_LOG_ACCESS_KEY_SECRET"))
.secretValue;
const interval = parseInt(
(await client.getSecret("AWS_CLOUDWATCH_LOG_INTERVAL")).secretValue || 1000,
10
);
if (!region || !accessKeyId || !accessKeySecret) return;
return { logGroupName, region, accessKeySecret, accessKeyId, interval };
};
export const getPostHogHost = async () =>
(await client.getSecret("POSTHOG_HOST")).secretValue || "https://app.posthog.com";
export const getPostHogProjectApiKey = async () =>
(await client.getSecret("POSTHOG_PROJECT_API_KEY")).secretValue ||
"phc_nSin8j5q2zdhpFDI1ETmFNUIuTG4DwKVyIigrY10XiE";
export const getSentryDSN = async () => (await client.getSecret("SENTRY_DSN")).secretValue;
export const getSiteURL = async () => (await client.getSecret("SITE_URL")).secretValue;
export const getSmtpHost = async () => (await client.getSecret("SMTP_HOST")).secretValue;
export const getSmtpSecure = async () =>
(await client.getSecret("SMTP_SECURE")).secretValue === "true" || false;
export const getSmtpPort = async () =>
parseInt((await client.getSecret("SMTP_PORT")).secretValue) || 587;
export const getSmtpUsername = async () => (await client.getSecret("SMTP_USERNAME")).secretValue;
export const getSmtpPassword = async () => (await client.getSecret("SMTP_PASSWORD")).secretValue;
export const getSmtpFromAddress = async () =>
(await client.getSecret("SMTP_FROM_ADDRESS")).secretValue;
export const getSmtpFromName = async () =>
(await client.getSecret("SMTP_FROM_NAME")).secretValue || "Infisical";
export const getSecretScanningWebhookProxy = async () =>
(await client.getSecret("SECRET_SCANNING_WEBHOOK_PROXY")).secretValue;
export const getSecretScanningWebhookSecret = async () =>
(await client.getSecret("SECRET_SCANNING_WEBHOOK_SECRET")).secretValue;
export const getSecretScanningGitAppId = async () =>
(await client.getSecret("SECRET_SCANNING_GIT_APP_ID")).secretValue;
export const getSecretScanningPrivateKey = async () =>
(await client.getSecret("SECRET_SCANNING_PRIVATE_KEY")).secretValue;
export const getRedisUrl = async () => (await client.getSecret("REDIS_URL")).secretValue;
export const getIsInfisicalCloud = async () =>
(await client.getSecret("INFISICAL_CLOUD")).secretValue === "true";
export const getLicenseKey = async () => {
const secretValue = (await client.getSecret("LICENSE_KEY")).secretValue;
return secretValue === "" ? undefined : secretValue;
};
export const getLicenseServerKey = async () => {
const secretValue = (await client.getSecret("LICENSE_SERVER_KEY")).secretValue;
return secretValue === "" ? undefined : secretValue;
};
export const getLicenseServerUrl = async () =>
(await client.getSecret("LICENSE_SERVER_URL")).secretValue || "https://portal.infisical.com";
export const getTelemetryEnabled = async () =>
(await client.getSecret("TELEMETRY_ENABLED")).secretValue !== "false" && true;
export const getLoopsApiKey = async () => (await client.getSecret("LOOPS_API_KEY")).secretValue;
export const getSmtpConfigured = async () =>
(await client.getSecret("SMTP_HOST")).secretValue == "" ||
(await client.getSecret("SMTP_HOST")).secretValue == undefined
? false
: true;
export const getHttpsEnabled = async () => {
if ((await getNodeEnv()) != "production") {
// no https for anything other than prod
return false;
}
if (
(await client.getSecret("HTTPS_ENABLED")).secretValue == undefined ||
(await client.getSecret("HTTPS_ENABLED")).secretValue == ""
) {
// default when no value present
return true;
}
return (await client.getSecret("HTTPS_ENABLED")).secretValue === "true" && true;
};
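Every getter above resolves its value through the Infisical client, so callers must await them. A short illustrative sketch of reading a few of them once at startup (the Express wiring is an assumption, not part of this file):

import express from "express";
import { getPort, getSiteURL, getSmtpConfigured } from "./config";

const startServer = async () => {
  const app = express();
  // Resolve async config once at startup rather than on every request.
  const port = Number(await getPort());
  const siteUrl = await getSiteURL();
  if (!(await getSmtpConfigured())) {
    console.warn("SMTP is not configured; email features will be disabled.");
  }
  app.listen(port, () => console.log(`Server listening on port ${port} (site: ${siteUrl})`));
};

void startServer();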


@ -0,0 +1,124 @@
import axios from "axios";
import axiosRetry from "axios-retry";
import {
getLicenseKeyAuthToken,
getLicenseServerKeyAuthToken,
setLicenseKeyAuthToken,
setLicenseServerKeyAuthToken,
} from "./storage";
import {
getLicenseKey,
getLicenseServerKey,
getLicenseServerUrl,
} from "./index";
// should have JWT to interact with the license server
export const licenseServerKeyRequest = axios.create();
export const licenseKeyRequest = axios.create();
export const standardRequest = axios.create();
// add retry functionality to the axios instance
axiosRetry(standardRequest, {
retries: 3,
retryDelay: axiosRetry.exponentialDelay, // exponential back-off delay between retries
retryCondition: (error) => {
// only retry if the error is a network error or a 5xx server error
return axiosRetry.isNetworkError(error) || axiosRetry.isRetryableError(error);
},
});
export const refreshLicenseServerKeyToken = async () => {
const licenseServerKey = await getLicenseServerKey();
const licenseServerUrl = await getLicenseServerUrl();
const { data: { token } } = await standardRequest.post(
`${licenseServerUrl}/api/auth/v1/license-server-login`, {},
{
headers: {
"X-API-KEY": licenseServerKey,
},
}
);
setLicenseServerKeyAuthToken(token);
return token;
}
export const refreshLicenseKeyToken = async () => {
const licenseKey = await getLicenseKey();
const licenseServerUrl = await getLicenseServerUrl();
const { data: { token } } = await standardRequest.post(
`${licenseServerUrl}/api/auth/v1/license-login`, {},
{
headers: {
"X-API-KEY": licenseKey,
},
}
);
setLicenseKeyAuthToken(token);
return token;
}
licenseServerKeyRequest.interceptors.request.use((config) => {
const token = getLicenseServerKeyAuthToken();
if (token && config.headers) {
// eslint-disable-next-line no-param-reassign
config.headers.Authorization = `Bearer ${token}`;
}
return config;
}, (err) => {
return Promise.reject(err);
});
licenseServerKeyRequest.interceptors.response.use((response) => {
return response
}, async function (err) {
const originalRequest = err.config;
if (err.response.status === 401 && !originalRequest._retry) {
originalRequest._retry = true;
// refresh
const token = await refreshLicenseServerKeyToken();
axios.defaults.headers.common["Authorization"] = "Bearer " + token;
return licenseServerKeyRequest(originalRequest);
}
return Promise.reject(err);
});
licenseKeyRequest.interceptors.request.use((config) => {
const token = getLicenseKeyAuthToken();
if (token && config.headers) {
// eslint-disable-next-line no-param-reassign
config.headers.Authorization = `Bearer ${token}`;
}
return config;
}, (err) => {
return Promise.reject(err);
});
licenseKeyRequest.interceptors.response.use((response) => {
return response
}, async function (err) {
const originalRequest = err.config;
if (err.response.status === 401 && !originalRequest._retry) {
originalRequest._retry = true;
// refresh
const token = await refreshLicenseKeyToken();
axios.defaults.headers.common["Authorization"] = "Bearer " + token;
return licenseKeyRequest(originalRequest);
}
return Promise.reject(err);
});
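The interceptors above attach the cached token to each request and retry once after refreshing it on a 401, so callers simply issue plain requests. A sketch under the assumption of an arbitrary authenticated license-server route (the path and the "./request" import location are illustrative):

import { licenseServerKeyRequest } from "./request"; // illustrative path
import { getLicenseServerUrl } from "./index";

export const fetchLicenseServerStatus = async () => {
  const licenseServerUrl = await getLicenseServerUrl();
  // Any authenticated route works the same way; this path is illustrative only.
  const { data } = await licenseServerKeyRequest.get(`${licenseServerUrl}/api/v1/status`);
  return data;
};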


@ -0,0 +1,24 @@
import { IServerConfig, ServerConfig } from "../models/serverConfig";
let serverConfig: IServerConfig;
export const serverConfigInit = async () => {
const cfg = await ServerConfig.findOne({});
if (!cfg) {
const cfg = new ServerConfig();
await cfg.save();
serverConfig = cfg;
} else {
serverConfig = cfg;
}
return serverConfig;
};
export const getServerConfig = () => serverConfig;
export const updateServerConfig = async (data: Partial<IServerConfig>) => {
const cfg = await ServerConfig.findByIdAndUpdate(serverConfig._id, data, { new: true });
if (!cfg) throw new Error("Failed to update server config");
serverConfig = cfg;
return serverConfig;
};
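A short illustrative sketch of the intended lifecycle: initialize the cached singleton once after the database connection is up, then read or mutate it without re-querying. The allowSignUp field is an assumption based on how the admin controller below uses this config.

import { getServerConfig, serverConfigInit, updateServerConfig } from "./serverConfig"; // illustrative path

const example = async () => {
  // Populate the in-memory singleton once (creates the document if missing).
  await serverConfigInit();

  // Subsequent reads are synchronous; updates write through and refresh the cache.
  if (getServerConfig().allowSignUp) {
    await updateServerConfig({ allowSignUp: false });
  }
};

void example();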


@ -0,0 +1,30 @@
const MemoryLicenseServerKeyTokenStorage = () => {
let authToken: string;
return {
setToken: (token: string) => {
authToken = token;
},
getToken: () => authToken,
};
};
const MemoryLicenseKeyTokenStorage = () => {
let authToken: string;
return {
setToken: (token: string) => {
authToken = token;
},
getToken: () => authToken,
};
};
const licenseServerTokenStorage = MemoryLicenseServerKeyTokenStorage();
const licenseTokenStorage = MemoryLicenseKeyTokenStorage();
export const getLicenseServerKeyAuthToken = licenseServerTokenStorage.getToken;
export const setLicenseServerKeyAuthToken = licenseServerTokenStorage.setToken;
export const getLicenseKeyAuthToken = licenseTokenStorage.getToken;
export const setLicenseKeyAuthToken = licenseTokenStorage.setToken;
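These closures memoize each token at module scope, so every importer shares one cached value for the lifetime of the process. For illustration only (the literal token is a placeholder; in practice refreshLicenseKeyToken() stores the real JWT):

import { getLicenseKeyAuthToken, setLicenseKeyAuthToken } from "./storage"; // illustrative path

setLicenseKeyAuthToken("example.jwt.token"); // placeholder value
console.log(getLicenseKeyAuthToken()); // -> "example.jwt.token"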


@ -0,0 +1,100 @@
import { Request, Response } from "express";
import { getHttpsEnabled } from "../../config";
import { getServerConfig, updateServerConfig as setServerConfig } from "../../config/serverConfig";
import { initializeDefaultOrg, issueAuthTokens } from "../../helpers";
import { validateRequest } from "../../helpers/validation";
import { User } from "../../models";
import { TelemetryService } from "../../services";
import { BadRequestError, UnauthorizedRequestError } from "../../utils/errors";
import * as reqValidator from "../../validation/admin";
export const getServerConfigInfo = (_req: Request, res: Response) => {
const config = getServerConfig();
return res.send({ config });
};
export const updateServerConfig = async (req: Request, res: Response) => {
const {
body: { allowSignUp }
} = await validateRequest(reqValidator.UpdateServerConfigV1, req);
const config = await setServerConfig({ allowSignUp });
return res.send({ config });
};
export const adminSignUp = async (req: Request, res: Response) => {
const cfg = getServerConfig();
if (cfg.initialized) throw UnauthorizedRequestError({ message: "Admin has been created" });
const {
body: {
email,
publicKey,
salt,
lastName,
verifier,
firstName,
protectedKey,
protectedKeyIV,
protectedKeyTag,
encryptedPrivateKey,
encryptedPrivateKeyIV,
encryptedPrivateKeyTag
}
} = await validateRequest(reqValidator.SignupV1, req);
let user = await User.findOne({ email });
if (user) throw BadRequestError({ message: "User already exists" });
user = new User({
email,
firstName,
lastName,
encryptionVersion: 2,
protectedKey,
protectedKeyIV,
protectedKeyTag,
publicKey,
encryptedPrivateKey,
iv: encryptedPrivateKeyIV,
tag: encryptedPrivateKeyTag,
salt,
verifier,
superAdmin: true
});
await user.save();
await initializeDefaultOrg({ organizationName: "Admin Org", user });
await setServerConfig({ initialized: true });
// issue tokens
const tokens = await issueAuthTokens({
userId: user._id,
ip: req.realIP,
userAgent: req.headers["user-agent"] ?? ""
});
const token = tokens.token;
const postHogClient = await TelemetryService.getPostHogClient();
if (postHogClient) {
postHogClient.capture({
event: "admin initialization",
properties: {
email: user.email,
lastName,
firstName
}
});
}
// store (refresh) token in httpOnly cookie
res.cookie("jid", tokens.refreshToken, {
httpOnly: true,
path: "/",
sameSite: "strict",
secure: await getHttpsEnabled()
});
return res.status(200).send({
message: "Successfully set up admin account",
user,
token
});
};


@ -0,0 +1,277 @@
import { Request, Response } from "express";
import jwt from "jsonwebtoken";
import * as bigintConversion from "bigint-conversion";
// eslint-disable-next-line @typescript-eslint/no-var-requires
const jsrp = require("jsrp");
import {
LoginSRPDetail,
TokenVersion,
User
} from "../../models";
import { clearTokens, createToken, issueAuthTokens } from "../../helpers/auth";
import { checkUserDevice } from "../../helpers/user";
import { AuthTokenType } from "../../variables";
import {
BadRequestError,
UnauthorizedRequestError
} from "../../utils/errors";
import {
getAuthSecret,
getHttpsEnabled,
getJwtAuthLifetime,
} from "../../config";
import { ActorType } from "../../ee/models";
import { validateRequest } from "../../helpers/validation";
import * as reqValidator from "../../validation/auth";
declare module "jsonwebtoken" {
export interface AuthnJwtPayload extends jwt.JwtPayload {
authTokenType: AuthTokenType;
}
export interface UserIDJwtPayload extends jwt.JwtPayload {
userId: string;
refreshVersion?: number;
}
export interface IdentityAccessTokenJwtPayload extends jwt.JwtPayload {
_id: string;
clientSecretId: string;
identityAccessTokenId: string;
authTokenType: string;
}
}
/**
* Log in user step 1: Return [salt] and [serverPublicKey] as part of step 1 of SRP protocol
* @param req
* @param res
* @returns
*/
export const login1 = async (req: Request, res: Response) => {
const {
body: { email, clientPublicKey }
} = await validateRequest(reqValidator.Login1V1, req);
const user = await User.findOne({
email
}).select("+salt +verifier");
if (!user) throw new Error("Failed to find user");
const server = new jsrp.server();
server.init(
{
salt: user.salt,
verifier: user.verifier
},
async () => {
// generate server-side public key
const serverPublicKey = server.getPublicKey();
await LoginSRPDetail.findOneAndReplace(
{ email: email },
{
email: email,
clientPublicKey: clientPublicKey,
serverBInt: bigintConversion.bigintToBuf(server.bInt)
},
{ upsert: true, returnNewDocument: false }
);
return res.status(200).send({
serverPublicKey,
salt: user.salt
});
}
);
};
/**
* Log in user step 2: complete step 2 of SRP protocol and return token and their (encrypted)
* private key
* @param req
* @param res
* @returns
*/
export const login2 = async (req: Request, res: Response) => {
const {
body: { email, clientProof }
} = await validateRequest(reqValidator.Login2V1, req);
const user = await User.findOne({
email
}).select("+salt +verifier +publicKey +encryptedPrivateKey +iv +tag");
if (!user) throw new Error("Failed to find user");
const loginSRPDetailFromDB = await LoginSRPDetail.findOneAndDelete({ email: email });
if (!loginSRPDetailFromDB) {
return BadRequestError(
Error(
"It looks like some details from the first login are not found. Please try login one again"
)
);
}
const server = new jsrp.server();
server.init(
{
salt: user.salt,
verifier: user.verifier,
b: loginSRPDetailFromDB.serverBInt
},
async () => {
server.setClientPublicKey(loginSRPDetailFromDB.clientPublicKey);
// compare server and client shared keys
if (server.checkClientProof(clientProof)) {
// issue tokens
await checkUserDevice({
user,
ip: req.realIP,
userAgent: req.headers["user-agent"] ?? ""
});
const tokens = await issueAuthTokens({
userId: user._id,
ip: req.realIP,
userAgent: req.headers["user-agent"] ?? ""
});
// store (refresh) token in httpOnly cookie
res.cookie("jid", tokens.refreshToken, {
httpOnly: true,
path: "/",
sameSite: "strict",
secure: await getHttpsEnabled()
});
// return (access) token in response
return res.status(200).send({
token: tokens.token,
publicKey: user.publicKey,
encryptedPrivateKey: user.encryptedPrivateKey,
iv: user.iv,
tag: user.tag
});
}
return res.status(400).send({
message: "Failed to authenticate. Try again?"
});
}
);
};
/**
* Log out user
* @param req
* @param res
* @returns
*/
export const logout = async (req: Request, res: Response) => {
if (req.authData.actor.type === ActorType.USER && req.authData.tokenVersionId) {
await clearTokens(req.authData.tokenVersionId);
}
// clear httpOnly cookie
res.cookie("jid", "", {
httpOnly: true,
path: "/",
sameSite: "strict",
secure: (await getHttpsEnabled()) as boolean
});
return res.status(200).send({
message: "Successfully logged out."
});
};
export const revokeAllSessions = async (req: Request, res: Response) => {
await TokenVersion.updateMany(
{
user: req.user._id
},
{
$inc: {
refreshVersion: 1,
accessVersion: 1
}
}
);
return res.status(200).send({
message: "Successfully revoked all sessions."
});
};
/**
* Return user is authenticated
* @param req
* @param res
* @returns
*/
export const checkAuth = async (req: Request, res: Response) => {
return res.status(200).send({
message: "Authenticated"
});
};
/**
* Return new JWT access token by first validating the refresh token
* @param req
* @param res
* @returns
*/
export const getNewToken = async (req: Request, res: Response) => {
const refreshToken = req.cookies.jid;
if (!refreshToken)
throw BadRequestError({
message: "Failed to find refresh token in request cookies"
});
const decodedToken = <jwt.UserIDJwtPayload>jwt.verify(refreshToken, await getAuthSecret());
if (decodedToken.authTokenType !== AuthTokenType.REFRESH_TOKEN) throw UnauthorizedRequestError();
const user = await User.findOne({
_id: decodedToken.userId
}).select("+publicKey +refreshVersion +accessVersion");
if (!user) throw new Error("Failed to authenticate unfound user");
if (!user?.publicKey) throw new Error("Failed to authenticate not fully set up account");
const tokenVersion = await TokenVersion.findById(decodedToken.tokenVersionId);
if (!tokenVersion)
throw UnauthorizedRequestError({
message: "Failed to validate refresh token"
});
if (decodedToken.refreshVersion !== tokenVersion.refreshVersion)
throw BadRequestError({
message: "Failed to validate refresh token"
});
const token = createToken({
payload: {
authTokenType: AuthTokenType.ACCESS_TOKEN,
userId: decodedToken.userId,
tokenVersionId: tokenVersion._id.toString(),
accessVersion: tokenVersion.refreshVersion
},
expiresIn: await getJwtAuthLifetime(),
secret: await getAuthSecret()
});
return res.status(200).send({
token
});
};
export const handleAuthProviderCallback = (req: Request, res: Response) => {
res.redirect(`/login/provider/success?token=${encodeURIComponent(req.providerAuthToken)}`);
};
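For context on the SRP handshake handled by login1 and login2 above, here is a hedged client-side sketch: the jsrp calls mirror the server-side usage, while the fetch paths and response shapes are assumptions inferred from the controller code.

// eslint-disable-next-line @typescript-eslint/no-var-requires
const jsrp = require("jsrp");

// Minimal JSON POST helper; endpoint paths below are assumptions.
const post = async (path: string, body: unknown) => {
  const res = await fetch(path, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body)
  });
  return res.json();
};

export const srpLogin = async (email: string, password: string) => {
  const client = new jsrp.client();
  await new Promise<void>((resolve) => client.init({ username: email, password }, resolve));

  // Step 1: send the client public key, receive the server public key and the user's salt.
  const clientPublicKey = client.getPublicKey();
  const { serverPublicKey, salt } = await post("/api/v1/auth/login1", { email, clientPublicKey });

  // Step 2: derive the shared key and prove knowledge of the password.
  client.setSalt(salt);
  client.setServerPublicKey(serverPublicKey);
  const clientProof = client.getProof();
  return post("/api/v1/auth/login2", { email, clientProof });
};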


@ -0,0 +1,135 @@
import { Request, Response } from "express";
import { Types } from "mongoose";
import { Bot, BotKey } from "../../models";
import { createBot } from "../../helpers/bot";
import { validateRequest } from "../../helpers/validation";
import * as reqValidator from "../../validation/bot";
import {
ProjectPermissionActions,
ProjectPermissionSub,
getAuthDataProjectPermissions
} from "../../ee/services/ProjectRoleService";
import { ForbiddenError } from "@casl/ability";
import { BadRequestError } from "../../utils/errors";
interface BotKey {
encryptedKey: string;
nonce: string;
}
/**
* Return bot for workspace with id [workspaceId]. If a workspace bot doesn't exist,
* then create and return a new bot.
* @param req
* @param res
* @returns
*/
export const getBotByWorkspaceId = async (req: Request, res: Response) => {
const {
params: { workspaceId }
} = await validateRequest(reqValidator.GetBotByWorkspaceIdV1, req);
const { permission } = await getAuthDataProjectPermissions({
authData: req.authData,
workspaceId: new Types.ObjectId(workspaceId)
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
ProjectPermissionSub.Integrations
);
let bot = await Bot.findOne({
workspace: workspaceId
});
if (!bot) {
// case: bot doesn't exist for workspace with id [workspaceId]
// -> create a new bot and return it
bot = await createBot({
name: "Infisical Bot",
workspaceId: new Types.ObjectId(workspaceId)
});
}
return res.status(200).send({
bot
});
};
/**
* Return bot with id [req.bot._id] with active state set to [isActive].
* @param req
* @param res
* @returns
*/
export const setBotActiveState = async (req: Request, res: Response) => {
const {
body: { botKey, isActive },
params: { botId }
} = await validateRequest(reqValidator.SetBotActiveStateV1, req);
const bot = await Bot.findById(botId);
if (!bot) {
throw BadRequestError({ message: "Bot not found" });
}
const userId = req.user._id;
const { permission } = await getAuthDataProjectPermissions({
authData: req.authData,
workspaceId: bot.workspace
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
ProjectPermissionSub.Integrations
);
if (isActive) {
// bot state set to active -> share workspace key with bot
if (!botKey?.encryptedKey || !botKey?.nonce) {
return res.status(400).send({
message: "Failed to set bot state to active - missing bot key"
});
}
await BotKey.findOneAndUpdate(
{
workspace: bot.workspace
},
{
encryptedKey: botKey.encryptedKey,
nonce: botKey.nonce,
sender: userId,
bot: bot._id,
workspace: bot.workspace
},
{
upsert: true,
new: true
}
);
} else {
// case: bot state set to inactive -> delete bot's workspace key
await BotKey.deleteOne({
bot: bot._id
});
}
const updatedBot = await Bot.findOneAndUpdate(
{
_id: bot._id
},
{
isActive
},
{
new: true
}
);
if (!updatedBot) throw new Error("Failed to update bot active state");
return res.status(200).send({
bot: updatedBot
});
};


@ -0,0 +1,45 @@
import * as authController from "./authController";
import * as universalAuthController from "./universalAuthController";
import * as ldapController from "./ldapController";
import * as botController from "./botController";
import * as integrationAuthController from "./integrationAuthController";
import * as integrationController from "./integrationController";
import * as keyController from "./keyController";
import * as membershipController from "./membershipController";
import * as membershipOrgController from "./membershipOrgController";
import * as organizationController from "./organizationController";
import * as passwordController from "./passwordController";
import * as secretController from "./secretController";
import * as serviceTokenController from "./serviceTokenController";
import * as signupController from "./signupController";
import * as userActionController from "./userActionController";
import * as userController from "./userController";
import * as workspaceController from "./workspaceController";
import * as secretScanningController from "./secretScanningController";
import * as webhookController from "./webhookController";
import * as secretImpsController from "./secretImpsController";
import * as adminController from "./adminController";
export {
authController,
universalAuthController,
ldapController,
botController,
integrationAuthController,
integrationController,
keyController,
membershipController,
membershipOrgController,
organizationController,
passwordController,
secretController,
serviceTokenController,
signupController,
userActionController,
userController,
workspaceController,
secretScanningController,
webhookController,
secretImpsController,
adminController
};

File diff suppressed because it is too large


@ -0,0 +1,322 @@
import { Request, Response } from "express";
import { Types } from "mongoose";
import { Folder, IWorkspace, Integration, IntegrationAuth } from "../../models";
import { EventService } from "../../services";
import { eventStartIntegration } from "../../events";
import { getFolderByPath } from "../../services/FolderService";
import { BadRequestError } from "../../utils/errors";
import { EEAuditLogService } from "../../ee/services";
import { EventType } from "../../ee/models";
import { syncSecretsToActiveIntegrationsQueue } from "../../queues/integrations/syncSecretsToThirdPartyServices";
import { validateRequest } from "../../helpers/validation";
import * as reqValidator from "../../validation/integration";
import {
ProjectPermissionActions,
ProjectPermissionSub,
getAuthDataProjectPermissions
} from "../../ee/services/ProjectRoleService";
import { ForbiddenError } from "@casl/ability";
/**
* Create/initialize an (empty) integration for integration authorization
* @param req
* @param res
* @returns
*/
export const createIntegration = async (req: Request, res: Response) => {
const {
body: {
isActive,
sourceEnvironment,
secretPath,
app,
path,
appId,
owner,
region,
scope,
targetService,
targetServiceId,
integrationAuthId,
targetEnvironment,
targetEnvironmentId,
metadata
}
} = await validateRequest(reqValidator.CreateIntegrationV1, req);
const integrationAuth = await IntegrationAuth.findById(integrationAuthId)
.populate<{ workspace: IWorkspace }>("workspace")
.select(
"+refreshCiphertext +refreshIV +refreshTag +accessCiphertext +accessIV +accessTag +accessExpiresAt"
);
if (!integrationAuth) throw BadRequestError({ message: "Integration auth not found" });
const { permission } = await getAuthDataProjectPermissions({
authData: req.authData,
workspaceId: integrationAuth.workspace._id
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
ProjectPermissionSub.Integrations
);
const folders = await Folder.findOne({
workspace: integrationAuth.workspace._id,
environment: sourceEnvironment
});
if (folders) {
const folder = getFolderByPath(folders.nodes, secretPath);
if (!folder) {
throw BadRequestError({
message: "Folder path doesn't exist"
});
}
}
// TODO: validate [sourceEnvironment] and [targetEnvironment]
// initialize new integration after saving integration access token
const integration = await new Integration({
workspace: integrationAuth.workspace._id,
environment: sourceEnvironment,
isActive,
app,
appId,
targetEnvironment,
targetEnvironmentId,
targetService,
targetServiceId,
owner,
path,
region,
scope,
secretPath,
integration: integrationAuth.integration,
integrationAuth: new Types.ObjectId(integrationAuthId),
metadata
}).save();
if (integration) {
// trigger event - push secrets
EventService.handleEvent({
event: eventStartIntegration({
workspaceId: integration.workspace,
environment: sourceEnvironment
})
});
}
await EEAuditLogService.createAuditLog(
req.authData,
{
type: EventType.CREATE_INTEGRATION,
metadata: {
integrationId: integration._id.toString(),
integration: integration.integration,
environment: integration.environment,
secretPath,
url: integration.url,
app: integration.app,
appId: integration.appId,
targetEnvironment: integration.targetEnvironment,
targetEnvironmentId: integration.targetEnvironmentId,
targetService: integration.targetService,
targetServiceId: integration.targetServiceId,
path: integration.path,
region: integration.region
}
},
{
workspaceId: integration.workspace
}
);
return res.status(200).send({
integration
});
};
/**
* Change environment or name of integration with id [integrationId]
* @param req
* @param res
* @returns
*/
export const updateIntegration = async (req: Request, res: Response) => {
// TODO: add integration-specific validation to ensure that each
// integration has the correct fields populated in [Integration]
const {
body: {
environment,
isActive,
app,
appId,
targetEnvironment,
owner, // github-specific integration param
secretPath
},
params: { integrationId }
} = await validateRequest(reqValidator.UpdateIntegrationV1, req);
const integration = await Integration.findById(integrationId);
if (!integration) throw BadRequestError({ message: "Integration not found" });
const { permission } = await getAuthDataProjectPermissions({
authData: req.authData,
workspaceId: integration.workspace
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
ProjectPermissionSub.Integrations
);
const folders = await Folder.findOne({
workspace: integration.workspace,
environment
});
if (folders) {
const folder = getFolderByPath(folders.nodes, secretPath);
if (!folder) {
throw BadRequestError({
message: "Path for service token does not exist"
});
}
}
const updatedIntegration = await Integration.findOneAndUpdate(
{
_id: integration._id
},
{
environment,
isActive,
app,
appId,
targetEnvironment,
owner,
secretPath
},
{
new: true
}
);
if (updatedIntegration) {
// trigger event - push secrets
EventService.handleEvent({
event: eventStartIntegration({
workspaceId: updatedIntegration.workspace,
environment
})
});
}
return res.status(200).send({
integration: updatedIntegration
});
};
/**
* Delete integration with id [integrationId]
* @param req
* @param res
* @returns
*/
export const deleteIntegration = async (req: Request, res: Response) => {
const {
params: { integrationId }
} = await validateRequest(reqValidator.DeleteIntegrationV1, req);
const integration = await Integration.findById(integrationId);
if (!integration) throw BadRequestError({ message: "Integration not found" });
const { permission } = await getAuthDataProjectPermissions({
authData: req.authData,
workspaceId: integration.workspace
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Delete,
ProjectPermissionSub.Integrations
);
const deletedIntegration = await Integration.findOneAndDelete({
_id: integrationId
});
if (!deletedIntegration) throw new Error("Failed to find integration");
const numOtherIntegrationsUsingSameAuth = await Integration.countDocuments({
integrationAuth: deletedIntegration.integrationAuth,
_id: {
$nin: [deletedIntegration._id]
}
});
if (numOtherIntegrationsUsingSameAuth === 0) {
// no other integrations are using the same integration auth
// -> delete integration auth associated with the integration being deleted
await IntegrationAuth.deleteOne({
_id: deletedIntegration.integrationAuth
});
}
await EEAuditLogService.createAuditLog(
req.authData,
{
type: EventType.DELETE_INTEGRATION,
metadata: {
integrationId: integration._id.toString(),
integration: integration.integration,
environment: integration.environment,
secretPath: integration.secretPath,
url: integration.url,
app: integration.app,
appId: integration.appId,
targetEnvironment: integration.targetEnvironment,
targetEnvironmentId: integration.targetEnvironmentId,
targetService: integration.targetService,
targetServiceId: integration.targetServiceId,
path: integration.path,
region: integration.region
}
},
{
workspaceId: integration.workspace
}
);
return res.status(200).send({
integration
});
};
// Will trigger sync for all integrations within the given env and workspace id
export const manualSync = async (req: Request, res: Response) => {
const {
body: { workspaceId, environment }
} = await validateRequest(reqValidator.ManualSyncV1, req);
const { permission } = await getAuthDataProjectPermissions({
authData: req.authData,
workspaceId: new Types.ObjectId(workspaceId)
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
ProjectPermissionSub.Integrations
);
syncSecretsToActiveIntegrationsQueue({
workspaceId,
environment
});
res.status(200).send();
};


@ -0,0 +1,101 @@
import { Types } from "mongoose";
import { Request, Response } from "express";
import { Key } from "../../models";
import { findMembership } from "../../helpers/membership";
import { EventType } from "../../ee/models";
import { EEAuditLogService } from "../../ee/services";
import { validateRequest } from "../../helpers/validation";
import * as reqValidator from "../../validation/key";
import {
ProjectPermissionActions,
ProjectPermissionSub,
getAuthDataProjectPermissions
} from "../../ee/services/ProjectRoleService";
import { ForbiddenError } from "@casl/ability";
/**
* Add (encrypted) copy of workspace key for workspace with id [workspaceId] for user with
* id [key.userId]
* @param req
* @param res
* @returns
*/
export const uploadKey = async (req: Request, res: Response) => {
const {
params: { workspaceId },
body: { key }
} = await validateRequest(reqValidator.UploadKeyV1, req);
const { permission } = await getAuthDataProjectPermissions({
authData: req.authData,
workspaceId: new Types.ObjectId(workspaceId)
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
ProjectPermissionSub.Member
);
// validate membership of receiver
const receiverMembership = await findMembership({
user: key.userId,
workspace: workspaceId
});
if (!receiverMembership) {
throw new Error("Failed receiver membership validation for workspace");
}
await new Key({
encryptedKey: key.encryptedKey,
nonce: key.nonce,
sender: req.user._id,
receiver: key.userId,
workspace: workspaceId
}).save();
return res.status(200).send({
message: "Successfully uploaded key to workspace"
});
};
/**
* Return latest (encrypted) copy of workspace key for user
* @param req
* @param res
* @returns
*/
export const getLatestKey = async (req: Request, res: Response) => {
const {
params: { workspaceId }
} = await validateRequest(reqValidator.GetLatestKeyV1, req);
// get latest key
const latestKey = await Key.find({
workspace: workspaceId,
receiver: req.user._id
})
.sort({ createdAt: -1 })
.limit(1)
.populate("sender", "+publicKey");
const resObj: any = {};
if (latestKey.length > 0) {
resObj["latestKey"] = latestKey[0];
await EEAuditLogService.createAuditLog(
req.authData,
{
type: EventType.GET_WORKSPACE_KEY,
metadata: {
keyId: latestKey[0]._id.toString()
}
},
{
workspaceId: new Types.ObjectId(workspaceId)
}
);
}
return res.status(200).send(resObj);
};
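The key uploaded above is produced client-side: the sender encrypts the workspace key to the receiver's public key, so only ciphertext and a nonce reach the server. A sketch of that step, assuming a tweetnacl-based client (the library choice and base64 helpers are assumptions; they are not part of this diff):

import nacl from "tweetnacl";
import { decodeBase64, decodeUTF8, encodeBase64 } from "tweetnacl-util";

// Produces the { encryptedKey, nonce } fields persisted by uploadKey above.
export const encryptWorkspaceKeyForReceiver = (
  plaintextWorkspaceKey: string,
  receiverPublicKey: string, // base64-encoded
  senderPrivateKey: string // base64-encoded
) => {
  const nonce = nacl.randomBytes(nacl.box.nonceLength);
  const ciphertext = nacl.box(
    decodeUTF8(plaintextWorkspaceKey),
    nonce,
    decodeBase64(receiverPublicKey),
    decodeBase64(senderPrivateKey)
  );
  return { encryptedKey: encodeBase64(ciphertext), nonce: encodeBase64(nonce) };
};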

Some files were not shown because too many files have changed in this diff