mirror of https://github.com/Infisical/infisical.git
synced 2025-03-19 17:33:26 +00:00

Compare commits: 7 commits (maidul-dig...sem-ver-gh)

Commits (SHA1):
84616d063d
344dc93d3e
560f3b384a
d497c2a219
dea064e7fa
1a103715f6
8d03869ee4
@@ -1,10 +0,0 @@
backend/node_modules
frontend/node_modules
backend/frontend-build
**/node_modules
**/.next
.dockerignore
.git
README.md
.dockerignore
**/Dockerfile
54 .env.example

@@ -1,23 +1,31 @@
# Keys
# Required key for platform encryption/decryption ops
# THIS IS A SAMPLE ENCRYPTION KEY AND SHOULD NEVER BE USED FOR PRODUCTION
# THIS IS A SAMPLE ENCRYPTION KEY AND SHOULD NOT BE USED FOR PRODUCTION
ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218

# JWT
# Required secrets to sign JWT tokens
# THIS IS A SAMPLE AUTH_SECRET KEY AND SHOULD NEVER BE USED FOR PRODUCTION
AUTH_SECRET=5lrMXKKWCVocS/uerPsl7V+TX/aaUaI7iDkgl3tSmLE=
JWT_SIGNUP_SECRET=3679e04ca949f914c03332aaaeba805a
JWT_REFRESH_SECRET=5f2f3c8f0159068dc2bbb3a652a716ff
JWT_AUTH_SECRET=4be6ba5602e0fa0ac6ac05c3cd4d247f
JWT_SERVICE_SECRET=f32f716d70a42c5703f4656015e76200

# Postgres creds
POSTGRES_PASSWORD=infisical
POSTGRES_USER=infisical
POSTGRES_DB=infisical

# JWT lifetime
# Optional lifetimes for JWT tokens expressed in seconds or a string
# describing a time span (e.g. 60, "2 days", "10h", "7d")
JWT_AUTH_LIFETIME=
JWT_REFRESH_LIFETIME=
JWT_SIGNUP_LIFETIME=

# MongoDB
# Backend will connect to the MongoDB instance at connection string MONGO_URL which can either be a ref
# to the MongoDB container instance or Mongo Cloud
# Required
DB_CONNECTION_URI=postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB}
MONGO_URL=mongodb://root:example@mongo:27017/?authSource=admin

# Redis
REDIS_URL=redis://redis:6379
# Optional credentials for MongoDB container instance and Mongo-Express
MONGO_USERNAME=root
MONGO_PASSWORD=example

# Website URL
# Required

@@ -37,13 +45,11 @@ CLIENT_ID_VERCEL=
CLIENT_ID_NETLIFY=
CLIENT_ID_GITHUB=
CLIENT_ID_GITLAB=
CLIENT_ID_BITBUCKET=
CLIENT_SECRET_HEROKU=
CLIENT_SECRET_VERCEL=
CLIENT_SECRET_NETLIFY=
CLIENT_SECRET_GITHUB=
CLIENT_SECRET_GITLAB=
CLIENT_SECRET_BITBUCKET=
CLIENT_SLUG_VERCEL=

# Sentry (optional) for monitoring errors

@@ -53,20 +59,10 @@ SENTRY_DSN=
# Ignore - Not applicable for self-hosted version
POSTHOG_HOST=
POSTHOG_PROJECT_API_KEY=

# SSO-specific variables
CLIENT_ID_GOOGLE_LOGIN=
CLIENT_SECRET_GOOGLE_LOGIN=

CLIENT_ID_GITHUB_LOGIN=
CLIENT_SECRET_GITHUB_LOGIN=

CLIENT_ID_GITLAB_LOGIN=
CLIENT_SECRET_GITLAB_LOGIN=

CAPTCHA_SECRET=

NEXT_PUBLIC_CAPTCHA_SITE_KEY=

PLAIN_API_KEY=
PLAIN_WISH_LABEL_IDS=
STRIPE_SECRET_KEY=
STRIPE_PUBLISHABLE_KEY=
STRIPE_WEBHOOK_SECRET=
STRIPE_PRODUCT_STARTER=
STRIPE_PRODUCT_TEAM=
STRIPE_PRODUCT_PRO=
NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=

@@ -1 +0,0 @@
DB_CONNECTION_URI=

@@ -1,4 +0,0 @@
REDIS_URL=redis://localhost:6379
DB_CONNECTION_URI=postgres://infisical:infisical@localhost/infisical?sslmode=disable
AUTH_SECRET=4bnfe4e407b8921c104518903515b218
ENCRYPTION_KEY=4bnfe4e407b8921c104518903515b218
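
The JWT lifetime values above accept either a plain number of seconds or a time-span string such as "2 days", "10h", or "7d". For illustration only (this helper is not part of the repository, and the unit table is an assumption rather than the backend's actual parser), a minimal Python sketch of converting such a value to seconds:

import re

# Hypothetical helper: convert values like 60, "2 days", "10h", "7d" to seconds.
# The unit table below is an assumption for illustration only.
_UNITS = {
    "s": 1, "second": 1, "seconds": 1,
    "m": 60, "minute": 60, "minutes": 60,
    "h": 3600, "hour": 3600, "hours": 3600,
    "d": 86400, "day": 86400, "days": 86400,
}

def lifetime_to_seconds(value: str) -> int:
    value = value.strip().strip('"')
    if value.isdigit():  # plain number of seconds, e.g. "60"
        return int(value)
    match = re.fullmatch(r"(\d+)\s*([a-zA-Z]+)", value)
    if not match or match.group(2).lower() not in _UNITS:
        raise ValueError(f"Unrecognized lifetime: {value!r}")
    return int(match.group(1)) * _UNITS[match.group(2).lower()]

# Example usage:
assert lifetime_to_seconds("60") == 60
assert lifetime_to_seconds("10h") == 36000
assert lifetime_to_seconds('"2 days"') == 172800
assert lifetime_to_seconds("7d") == 604800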
2 .github/ISSUE_TEMPLATE/feature_request.md (vendored)
@@ -8,7 +8,7 @@ assignees: ''
---

### Feature description
A clear and concise description of what the feature should be.
A clear and concise description of what the the feature should be.

### Why would it be useful?
Why would this feature be useful for Infisical users?
BIN .github/images/Deploy to AWS.png (vendored): Binary file not shown. Before: 2.3 KiB
BIN .github/images/deploy-aws-button.png (vendored): Binary file not shown. Before: 19 KiB
BIN .github/images/deploy-to-aws.png (vendored): Binary file not shown. Before: 2.8 KiB
BIN .github/images/do-k8-install-btn.png (vendored): Binary file not shown. Before: 28 KiB
6 .github/pull_request_template.md (vendored)
@@ -1,6 +1,6 @@
# Description 📣

<!-- Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change. Here's how we expect a pull request to be : https://infisical.com/docs/contributing/getting-started/pull-requests -->
<!-- Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change. -->

## Type ✨

@@ -19,6 +19,4 @@

---

- [ ] I have read the [contributing guide](https://infisical.com/docs/contributing/getting-started/overview), agreed and acknowledged the [code of conduct](https://infisical.com/docs/contributing/getting-started/code-of-conduct). 📝

<!-- If you have any questions regarding contribution, here's the FAQ : https://infisical.com/docs/contributing/getting-started/faq -->
- [ ] I have read the [contributing guide](https://infisical.com/docs/contributing/overview), agreed and acknowledged the [code of conduct](https://infisical.com/docs/contributing/code-of-conduct). 📝
190 .github/resources/changelog-generator.py (vendored)
@ -1,190 +0,0 @@
|
||||
# inspired by https://www.photoroom.com/inside-photoroom/how-we-automated-our-changelog-thanks-to-chatgpt
|
||||
import os
|
||||
import requests
|
||||
import re
|
||||
from openai import OpenAI
|
||||
import subprocess
|
||||
from datetime import datetime
|
||||
|
||||
import uuid
|
||||
|
||||
# Constants
|
||||
REPO_OWNER = "infisical"
|
||||
REPO_NAME = "infisical"
|
||||
TOKEN = os.environ["GITHUB_TOKEN"]
|
||||
SLACK_WEBHOOK_URL = os.environ["SLACK_WEBHOOK_URL"]
|
||||
OPENAI_API_KEY = os.environ["OPENAI_API_KEY"]
|
||||
SLACK_MSG_COLOR = "#36a64f"
|
||||
|
||||
headers = {
|
||||
"Authorization": f"Bearer {TOKEN}",
|
||||
"Accept": "application/vnd.github+json",
|
||||
"X-GitHub-Api-Version": "2022-11-28",
|
||||
}
|
||||
|
||||
|
||||
def set_multiline_output(name, value):
|
||||
with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
|
||||
delimiter = uuid.uuid1()
|
||||
print(f'{name}<<{delimiter}', file=fh)
|
||||
print(value, file=fh)
|
||||
print(delimiter, file=fh)
|
||||
|
||||
def post_changelog_to_slack(changelog, tag):
|
||||
slack_payload = {
|
||||
"text": "Hey team, it's changelog time! :wave:",
|
||||
"attachments": [
|
||||
{
|
||||
"color": SLACK_MSG_COLOR,
|
||||
"title": f"🗓️Infisical Changelog - {tag}",
|
||||
"text": changelog,
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
response = requests.post(SLACK_WEBHOOK_URL, json=slack_payload)
|
||||
|
||||
if response.status_code != 200:
|
||||
raise Exception("Failed to post changelog to Slack.")
|
||||
|
||||
def find_previous_release_tag(release_tag:str):
|
||||
previous_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0", f"{release_tag}^"]).decode("utf-8").strip()
|
||||
while not(previous_tag.startswith("infisical/")):
|
||||
previous_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0", f"{previous_tag}^"]).decode("utf-8").strip()
|
||||
return previous_tag
|
||||
|
||||
def get_tag_creation_date(tag_name):
|
||||
url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/git/refs/tags/{tag_name}"
|
||||
response = requests.get(url, headers=headers)
|
||||
response.raise_for_status()
|
||||
commit_sha = response.json()['object']['sha']
|
||||
|
||||
commit_url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/commits/{commit_sha}"
|
||||
commit_response = requests.get(commit_url, headers=headers)
|
||||
commit_response.raise_for_status()
|
||||
creation_date = commit_response.json()['commit']['author']['date']
|
||||
|
||||
return datetime.strptime(creation_date, '%Y-%m-%dT%H:%M:%SZ')
|
||||
|
||||
|
||||
def fetch_prs_between_tags(previous_tag_date:datetime, release_tag_date:datetime):
|
||||
# Use GitHub API to fetch PRs merged between the commits
|
||||
url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/pulls?state=closed&merged=true"
|
||||
response = requests.get(url, headers=headers)
|
||||
|
||||
if response.status_code != 200:
|
||||
raise Exception("Error fetching PRs from GitHub API!")
|
||||
|
||||
prs = []
|
||||
for pr in response.json():
|
||||
# the idea is as tags happen recently we get last 100 closed PRs and then filter by tag creation date
|
||||
if pr["merged_at"] and datetime.strptime(pr["merged_at"],'%Y-%m-%dT%H:%M:%SZ') < release_tag_date and datetime.strptime(pr["merged_at"],'%Y-%m-%dT%H:%M:%SZ') > previous_tag_date:
|
||||
prs.append(pr)
|
||||
|
||||
return prs
|
||||
|
||||
|
||||
def extract_commit_details_from_prs(prs):
|
||||
commit_details = []
|
||||
for pr in prs:
|
||||
commit_message = pr["title"]
|
||||
commit_url = pr["html_url"]
|
||||
pr_number = pr["number"]
|
||||
branch_name = pr["head"]["ref"]
|
||||
issue_numbers = re.findall(r"(www-\d+|web-\d+)", branch_name)
|
||||
|
||||
# If no issue numbers are found, add the PR details without issue numbers and URLs
|
||||
if not issue_numbers:
|
||||
commit_details.append(
|
||||
{
|
||||
"message": commit_message,
|
||||
"pr_number": pr_number,
|
||||
"pr_url": commit_url,
|
||||
"issue_number": None,
|
||||
"issue_url": None,
|
||||
}
|
||||
)
|
||||
continue
|
||||
|
||||
for issue in issue_numbers:
|
||||
commit_details.append(
|
||||
{
|
||||
"message": commit_message,
|
||||
"pr_number": pr_number,
|
||||
"pr_url": commit_url,
|
||||
"issue_number": issue,
|
||||
}
|
||||
)
|
||||
|
||||
return commit_details
|
||||
|
||||
# Function to generate changelog using OpenAI
|
||||
def generate_changelog_with_openai(commit_details):
|
||||
commit_messages = []
|
||||
for details in commit_details:
|
||||
base_message = f"{details['pr_url']} - {details['message']}"
|
||||
# Add the issue URL if available
|
||||
# if details["issue_url"]:
|
||||
# base_message += f" (Linear Issue: {details['issue_url']})"
|
||||
commit_messages.append(base_message)
|
||||
|
||||
commit_list = "\n".join(commit_messages)
|
||||
prompt = """
|
||||
Generate a changelog for Infisical, opensource secretops
|
||||
The changelog should:
|
||||
1. Be Informative: Using the provided list of GitHub commits, break them down into categories such as Features, Fixes & Improvements, and Technical Updates. Summarize each commit concisely, ensuring the key points are highlighted.
|
||||
2. Have a Professional yet Friendly tone: The tone should be balanced, not too corporate or too informal.
|
||||
3. Celebratory Introduction and Conclusion: Start the changelog with a celebratory note acknowledging the team's hard work and progress. End with a shoutout to the team and wishes for a pleasant weekend.
|
||||
4. Formatting: you cannot use Markdown formatting, and you can only use emojis for the introductory paragraph or the conclusion paragraph, nowhere else.
|
||||
5. Links: the syntax to create links is the following: `<http://www.example.com|This message is a link>`.
|
||||
6. Linear Links: note that the Linear link is optional, include it only if provided.
|
||||
7. Do not wrap your answer in a codeblock. Just output the text, nothing else
|
||||
Here's a good example to follow, please try to match the formatting as closely as possible, only changing the content of the changelog and have some liberty with the introduction. Notice the importance of the formatting of a changelog item:
|
||||
- <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>))
|
||||
And here's an example of the full changelog:
|
||||
|
||||
*Features*
|
||||
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
|
||||
*Fixes & Improvements*
|
||||
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
|
||||
*Technical Updates*
|
||||
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
|
||||
|
||||
Stay tuned for more exciting updates coming soon!
|
||||
And here are the commits:
|
||||
{}
|
||||
""".format(
|
||||
commit_list
|
||||
)
|
||||
|
||||
client = OpenAI(api_key=OPENAI_API_KEY)
|
||||
messages = [{"role": "user", "content": prompt}]
|
||||
response = client.chat.completions.create(model="gpt-3.5-turbo", messages=messages)
|
||||
|
||||
if "error" in response.choices[0].message:
|
||||
raise Exception("Error generating changelog with OpenAI!")
|
||||
|
||||
return response.choices[0].message.content.strip()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
# Get the latest and previous release tags
|
||||
latest_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0"]).decode("utf-8").strip()
|
||||
previous_tag = find_previous_release_tag(latest_tag)
|
||||
|
||||
latest_tag_date = get_tag_creation_date(latest_tag)
|
||||
previous_tag_date = get_tag_creation_date(previous_tag)
|
||||
|
||||
prs = fetch_prs_between_tags(previous_tag_date,latest_tag_date)
|
||||
pr_details = extract_commit_details_from_prs(prs)
|
||||
|
||||
# Generate changelog
|
||||
changelog = generate_changelog_with_openai(pr_details)
|
||||
|
||||
post_changelog_to_slack(changelog,latest_tag)
|
||||
# Print or post changelog to Slack
|
||||
# set_multiline_output("changelog", changelog)
|
||||
|
||||
except Exception as e:
|
||||
print(str(e))
|
3 .github/resources/docker-compose.be-test.yml (vendored)
@@ -6,14 +6,13 @@ services:
    restart: unless-stopped
    depends_on:
      - mongo
    image: infisical/infisical:test
    image: infisical/backend:test
    command: npm run start
    environment:
      - NODE_ENV=production
      - MONGO_URL=mongodb://test:example@mongo:27017/?authSource=admin
      - MONGO_USERNAME=test
      - MONGO_PASSWORD=example
      - ENCRYPTION_KEY=a984ecdf82ec779e55dbcc21303a900f
    networks:
      - infisical-test
26 .github/resources/rename_migration_files.py (vendored)
@@ -1,26 +0,0 @@
import os
from datetime import datetime, timedelta

def rename_migrations():
    migration_folder = "./backend/src/db/migrations"
    with open("added_files.txt", "r") as file:
        changed_files = file.readlines()

    # Find the latest file among the changed files
    latest_timestamp = datetime.now()  # utc time
    for file_path in changed_files:
        file_path = file_path.strip()
        # each new file bump by 1s
        latest_timestamp = latest_timestamp + timedelta(seconds=1)

        new_filename = os.path.join(migration_folder, latest_timestamp.strftime("%Y%m%d%H%M%S") + f"_{file_path.split('_')[1]}")
        old_filename = os.path.join(migration_folder, file_path)
        os.rename(old_filename, new_filename)
        print(f"Renamed {old_filename} to {new_filename}")

    if len(changed_files) == 0:
        print("No new files added to migration folder")

if __name__ == "__main__":
    rename_migrations()
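
For illustration only (not part of the diff): a dry run of the renaming scheme above. The sample filenames are made up; the sketch only prints the new names the script would produce, bumping the timestamp by one second per file so the lexicographic order matches the list order, instead of renaming anything on disk.

from datetime import datetime, timedelta

# Made-up example of newly added migration files (hypothetical names).
added_files = [
    "20240101000000_add-users-table.ts",
    "20240101000000_add-secrets-table.ts",
]

stamp = datetime.now()
for name in added_files:
    stamp += timedelta(seconds=1)   # each new file bumped by 1s, as in the script
    suffix = name.split("_", 1)[1]  # keep everything after the first underscore
    print(stamp.strftime("%Y%m%d%H%M%S") + f"_{suffix}")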
108 .github/values.yaml (vendored)
@ -1,57 +1,71 @@
|
||||
## @section Common parameters
|
||||
##
|
||||
|
||||
## @param nameOverride Override release name
|
||||
##
|
||||
nameOverride: ""
|
||||
## @param fullnameOverride Override release fullname
|
||||
##
|
||||
fullnameOverride: ""
|
||||
|
||||
## @section Infisical backend parameters
|
||||
## Documentation : https://infisical.com/docs/self-hosting/deployments/kubernetes
|
||||
##
|
||||
|
||||
infisical:
|
||||
autoDatabaseSchemaMigration: false
|
||||
|
||||
enabled: false
|
||||
|
||||
name: infisical
|
||||
replicaCount: 3
|
||||
image:
|
||||
repository: infisical/staging_infisical
|
||||
tag: "latest"
|
||||
pullPolicy: Always
|
||||
|
||||
frontend:
|
||||
enabled: true
|
||||
name: frontend
|
||||
podAnnotations: {}
|
||||
deploymentAnnotations:
|
||||
secrets.infisical.com/auto-reload: "true"
|
||||
replicaCount: 2
|
||||
image:
|
||||
repository: infisical/frontend
|
||||
tag: "latest"
|
||||
pullPolicy: Always
|
||||
kubeSecretRef: managed-secret-frontend
|
||||
service:
|
||||
annotations: {}
|
||||
type: ClusterIP
|
||||
nodePort: ""
|
||||
|
||||
kubeSecretRef: "managed-secret"
|
||||
frontendEnvironmentVariables: null
|
||||
|
||||
backend:
|
||||
enabled: true
|
||||
name: backend
|
||||
podAnnotations: {}
|
||||
deploymentAnnotations:
|
||||
secrets.infisical.com/auto-reload: "true"
|
||||
replicaCount: 2
|
||||
image:
|
||||
repository: infisical/backend
|
||||
tag: "latest"
|
||||
pullPolicy: Always
|
||||
kubeSecretRef: managed-backend-secret
|
||||
service:
|
||||
annotations: {}
|
||||
type: ClusterIP
|
||||
nodePort: ""
|
||||
|
||||
backendEnvironmentVariables: null
|
||||
|
||||
## Mongo DB persistence
|
||||
mongodb:
|
||||
enabled: true
|
||||
persistence:
|
||||
enabled: false
|
||||
|
||||
## By default the backend will be connected to a Mongo instance within the cluster
|
||||
## However, it is recommended to add a managed document DB connection string for production-use (DBaaS)
|
||||
## Learn about connection string type here https://www.mongodb.com/docs/manual/reference/connection-string/
|
||||
## e.g. "mongodb://<user>:<pass>@<host>:<port>/<database-name>"
|
||||
mongodbConnection:
|
||||
externalMongoDBConnectionString: ""
|
||||
|
||||
ingress:
|
||||
## @param ingress.enabled Enable ingress
|
||||
##
|
||||
enabled: true
|
||||
## @param ingress.ingressClassName Ingress class name
|
||||
##
|
||||
ingressClassName: nginx
|
||||
## @param ingress.nginx.enabled Ingress controller
|
||||
##
|
||||
# nginx:
|
||||
# enabled: true
|
||||
## @param ingress.annotations Ingress annotations
|
||||
##
|
||||
annotations:
|
||||
cert-manager.io/cluster-issuer: "letsencrypt-prod"
|
||||
hostName: "gamma.infisical.com"
|
||||
kubernetes.io/ingress.class: "nginx"
|
||||
# cert-manager.io/issuer: letsencrypt-nginx
|
||||
hostName: gamma.infisical.com ## <- Replace with your own domain
|
||||
frontend:
|
||||
path: /
|
||||
pathType: Prefix
|
||||
backend:
|
||||
path: /api
|
||||
pathType: Prefix
|
||||
tls:
|
||||
- secretName: letsencrypt-prod
|
||||
hosts:
|
||||
- gamma.infisical.com
|
||||
[]
|
||||
# - secretName: letsencrypt-nginx
|
||||
# hosts:
|
||||
# - infisical.local
|
||||
|
||||
postgresql:
|
||||
enabled: false
|
||||
|
||||
redis:
|
||||
mailhog:
|
||||
enabled: false
|
||||
|
104 .github/workflows/build-binaries.yml (vendored)
@ -1,104 +0,0 @@
|
||||
name: Build Binaries and Deploy
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
version:
|
||||
description: "Version number"
|
||||
required: true
|
||||
type: string
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./backend
|
||||
|
||||
jobs:
|
||||
build-and-deploy:
|
||||
strategy:
|
||||
matrix:
|
||||
arch: [x64, arm64]
|
||||
os: [linux, win]
|
||||
include:
|
||||
- os: linux
|
||||
target: node20-linux
|
||||
- os: win
|
||||
target: node20-win
|
||||
runs-on: ${{ (matrix.arch == 'arm64' && matrix.os == 'linux') && 'ubuntu24-arm64' || 'ubuntu-latest' }}
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- name: Install pkg
|
||||
run: npm install -g @yao-pkg/pkg
|
||||
|
||||
- name: Install dependencies (backend)
|
||||
run: npm install
|
||||
|
||||
- name: Install dependencies (frontend)
|
||||
run: npm install --prefix ../frontend
|
||||
|
||||
- name: Prerequisites for pkg
|
||||
run: npm run binary:build
|
||||
|
||||
- name: Package into node binary
|
||||
run: |
|
||||
if [ "${{ matrix.os }}" != "linux" ]; then
|
||||
pkg --no-bytecode --public-packages "*" --public --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }} .
|
||||
else
|
||||
pkg --no-bytecode --public-packages "*" --public --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core .
|
||||
fi
|
||||
|
||||
# Set up .deb package structure (Debian/Ubuntu only)
|
||||
- name: Set up .deb package structure
|
||||
if: matrix.os == 'linux'
|
||||
run: |
|
||||
mkdir -p infisical-core/DEBIAN
|
||||
mkdir -p infisical-core/usr/local/bin
|
||||
cp ./binary/infisical-core infisical-core/usr/local/bin/
|
||||
chmod +x infisical-core/usr/local/bin/infisical-core
|
||||
|
||||
- name: Create control file
|
||||
if: matrix.os == 'linux'
|
||||
run: |
|
||||
cat <<EOF > infisical-core/DEBIAN/control
|
||||
Package: infisical-core
|
||||
Version: ${{ github.event.inputs.version }}
|
||||
Section: base
|
||||
Priority: optional
|
||||
Architecture: ${{ matrix.arch == 'x64' && 'amd64' || matrix.arch }}
|
||||
Maintainer: Infisical <daniel@infisical.com>
|
||||
Description: Infisical Core standalone executable (app.infisical.com)
|
||||
EOF
|
||||
|
||||
# Build .deb file (Debian/Ubuntu only)
|
||||
- name: Build .deb package
|
||||
if: matrix.os == 'linux'
|
||||
run: |
|
||||
dpkg-deb --build infisical-core
|
||||
mv infisical-core.deb ./binary/infisical-core-${{matrix.arch}}.deb
|
||||
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.x" # Specify the Python version you need
|
||||
- name: Install Python dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install --upgrade cloudsmith-cli
|
||||
|
||||
# Publish .deb file to Cloudsmith (Debian/Ubuntu only)
|
||||
- name: Publish to Cloudsmith (Debian/Ubuntu)
|
||||
if: matrix.os == 'linux'
|
||||
working-directory: ./backend
|
||||
run: cloudsmith push deb --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.deb
|
||||
|
||||
# Publish .exe file to Cloudsmith (Windows only)
|
||||
- name: Publish to Cloudsmith (Windows)
|
||||
if: matrix.os == 'win'
|
||||
working-directory: ./backend
|
||||
run: cloudsmith push raw infisical/infisical-core ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }}.exe --republish --no-wait-for-sync --version ${{ github.event.inputs.version }} --api-key ${{ secrets.CLOUDSMITH_API_KEY }}
|
123 .github/workflows/build-docker-image-to-prod.yml (vendored)
@ -1,123 +0,0 @@
|
||||
name: Release production images (frontend, backend)
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "infisical/v*.*.*"
|
||||
- "!infisical/v*.*.*-postgres"
|
||||
|
||||
jobs:
|
||||
backend-image:
|
||||
name: Build backend image
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Extract version from tag
|
||||
id: extract_version
|
||||
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
- name: 📦 Install dependencies to test all dependencies
|
||||
run: npm ci --only-production
|
||||
working-directory: backend
|
||||
# - name: 🧪 Run tests
|
||||
# run: npm run test:ci
|
||||
# working-directory: backend
|
||||
- name: Save commit hashes for tag
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v2
|
||||
- name: 🔧 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@v1
|
||||
- name: 📦 Build backend and export to Docker
|
||||
uses: depot/build-push-action@v1
|
||||
with:
|
||||
project: 64mmf0n610
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
load: true
|
||||
context: backend
|
||||
tags: infisical/infisical:test
|
||||
platforms: linux/amd64,linux/arm64
|
||||
- name: ⏻ Spawn backend container and dependencies
|
||||
run: |
|
||||
docker compose -f .github/resources/docker-compose.be-test.yml up --wait --quiet-pull
|
||||
- name: 🧪 Test backend image
|
||||
run: |
|
||||
./.github/resources/healthcheck.sh infisical-backend-test
|
||||
- name: ⏻ Shut down backend container and dependencies
|
||||
run: |
|
||||
docker compose -f .github/resources/docker-compose.be-test.yml down
|
||||
- name: 🏗️ Build backend and push
|
||||
uses: depot/build-push-action@v1
|
||||
with:
|
||||
project: 64mmf0n610
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
push: true
|
||||
context: backend
|
||||
tags: |
|
||||
infisical/backend:${{ steps.commit.outputs.short }}
|
||||
infisical/backend:latest
|
||||
infisical/backend:${{ steps.extract_version.outputs.version }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
|
||||
frontend-image:
|
||||
name: Build frontend image
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Extract version from tag
|
||||
id: extract_version
|
||||
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
- name: Save commit hashes for tag
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v2
|
||||
- name: 🔧 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@v1
|
||||
- name: 📦 Build frontend and export to Docker
|
||||
uses: depot/build-push-action@v1
|
||||
with:
|
||||
load: true
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
project: 64mmf0n610
|
||||
context: frontend
|
||||
tags: infisical/frontend:test
|
||||
platforms: linux/amd64,linux/arm64
|
||||
build-args: |
|
||||
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
|
||||
NEXT_INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
|
||||
- name: ⏻ Spawn frontend container
|
||||
run: |
|
||||
docker run -d --rm --name infisical-frontend-test infisical/frontend:test
|
||||
- name: 🧪 Test frontend image
|
||||
run: |
|
||||
./.github/resources/healthcheck.sh infisical-frontend-test
|
||||
- name: ⏻ Shut down frontend container
|
||||
run: |
|
||||
docker stop infisical-frontend-test
|
||||
- name: 🏗️ Build frontend and push
|
||||
uses: depot/build-push-action@v1
|
||||
with:
|
||||
project: 64mmf0n610
|
||||
push: true
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
context: frontend
|
||||
tags: |
|
||||
infisical/frontend:${{ steps.commit.outputs.short }}
|
||||
infisical/frontend:latest
|
||||
infisical/frontend:${{ steps.extract_version.outputs.version }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
build-args: |
|
||||
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
|
||||
NEXT_INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
|
38 .github/workflows/build-patroni-docker-img.yml (vendored)
@ -1,38 +0,0 @@
|
||||
name: Build patroni
|
||||
on: [workflow_dispatch]
|
||||
|
||||
jobs:
|
||||
patroni-image:
|
||||
name: Build patroni
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
repository: 'zalando/patroni'
|
||||
- name: Save commit hashes for tag
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v2
|
||||
- name: 🔧 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@v1
|
||||
- name: 🏗️ Build backend and push to docker hub
|
||||
uses: depot/build-push-action@v1
|
||||
with:
|
||||
project: 64mmf0n610
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
push: true
|
||||
context: .
|
||||
file: Dockerfile
|
||||
tags: |
|
||||
infisical/patroni:${{ steps.commit.outputs.short }}
|
||||
infisical/patroni:latest
|
||||
platforms: linux/amd64,linux/arm64
|
||||
|
||||
|
154 .github/workflows/build-staging-and-deploy-aws.yml (vendored)
@ -1,154 +0,0 @@
|
||||
name: Deployment pipeline
|
||||
on: [workflow_dispatch]
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
infisical-image:
|
||||
name: Build backend image
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
- name: 📦 Install dependencies to test all dependencies
|
||||
run: npm ci --only-production
|
||||
working-directory: backend
|
||||
- name: Save commit hashes for tag
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v2
|
||||
- name: 🔧 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@v1
|
||||
- name: 🏗️ Build backend and push to docker hub
|
||||
uses: depot/build-push-action@v1
|
||||
with:
|
||||
project: 64mmf0n610
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
push: true
|
||||
context: .
|
||||
file: Dockerfile.standalone-infisical
|
||||
tags: |
|
||||
infisical/staging_infisical:${{ steps.commit.outputs.short }}
|
||||
infisical/staging_infisical:latest
|
||||
platforms: linux/amd64,linux/arm64
|
||||
build-args: |
|
||||
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
|
||||
INFISICAL_PLATFORM_VERSION=${{ steps.commit.outputs.short }}
|
||||
|
||||
gamma-deployment:
|
||||
name: Deploy to gamma
|
||||
runs-on: ubuntu-latest
|
||||
needs: [infisical-image]
|
||||
environment:
|
||||
name: Gamma
|
||||
steps:
|
||||
- uses: twingate/github-action@v1
|
||||
with:
|
||||
# The Twingate Service Key used to connect Twingate to the proper service
|
||||
# Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
|
||||
#
|
||||
# Required
|
||||
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
- name: Setup Node.js environment
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: "20"
|
||||
- name: Change directory to backend and install dependencies
|
||||
env:
|
||||
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
|
||||
run: |
|
||||
cd backend
|
||||
npm install
|
||||
npm run migration:latest
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
audience: sts.amazonaws.com
|
||||
aws-region: us-east-1
|
||||
role-to-assume: arn:aws:iam::905418227878:role/deploy-new-ecs-img
|
||||
- name: Save commit hashes for tag
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v2
|
||||
- name: Download task definition
|
||||
run: |
|
||||
aws ecs describe-task-definition --task-definition infisical-core-gamma-stage --query taskDefinition > task-definition.json
|
||||
- name: Render Amazon ECS task definition
|
||||
id: render-web-container
|
||||
uses: aws-actions/amazon-ecs-render-task-definition@v1
|
||||
with:
|
||||
task-definition: task-definition.json
|
||||
container-name: infisical-core
|
||||
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
|
||||
environment-variables: "LOG_LEVEL=info"
|
||||
- name: Deploy to Amazon ECS service
|
||||
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
|
||||
with:
|
||||
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
|
||||
service: infisical-core-gamma-stage
|
||||
cluster: infisical-gamma-stage
|
||||
wait-for-service-stability: true
|
||||
|
||||
production-postgres-deployment:
|
||||
name: Deploy to production
|
||||
runs-on: ubuntu-latest
|
||||
needs: [gamma-deployment]
|
||||
environment:
|
||||
name: Production
|
||||
steps:
|
||||
- uses: twingate/github-action@v1
|
||||
with:
|
||||
# The Twingate Service Key used to connect Twingate to the proper service
|
||||
# Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
|
||||
#
|
||||
# Required
|
||||
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
- name: Setup Node.js environment
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: "20"
|
||||
- name: Change directory to backend and install dependencies
|
||||
env:
|
||||
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
|
||||
run: |
|
||||
cd backend
|
||||
npm install
|
||||
npm run migration:latest
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
audience: sts.amazonaws.com
|
||||
aws-region: us-east-1
|
||||
role-to-assume: arn:aws:iam::381492033652:role/gha-make-prod-deployment
|
||||
- name: Save commit hashes for tag
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v2
|
||||
- name: Download task definition
|
||||
run: |
|
||||
aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
|
||||
- name: Render Amazon ECS task definition
|
||||
id: render-web-container
|
||||
uses: aws-actions/amazon-ecs-render-task-definition@v1
|
||||
with:
|
||||
task-definition: task-definition.json
|
||||
container-name: infisical-core-platform
|
||||
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
|
||||
environment-variables: "LOG_LEVEL=info"
|
||||
- name: Deploy to Amazon ECS service
|
||||
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
|
||||
with:
|
||||
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
|
||||
service: infisical-core-platform
|
||||
cluster: infisical-core-platform
|
||||
wait-for-service-stability: true
|
@ -1,77 +0,0 @@
|
||||
name: "Check API For Breaking Changes"
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize]
|
||||
paths:
|
||||
- "backend/src/server/routes/**"
|
||||
- "backend/src/ee/routes/**"
|
||||
|
||||
jobs:
|
||||
check-be-api-changes:
|
||||
name: Check API Changes
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
steps:
|
||||
- name: Checkout source
|
||||
uses: actions/checkout@v3
|
||||
# - name: Setup Node 20
|
||||
# uses: actions/setup-node@v3
|
||||
# with:
|
||||
# node-version: "20"
|
||||
# uncomment this when testing locally using nektos/act
|
||||
- uses: KengoTODA/actions-setup-docker-compose@v1
|
||||
if: ${{ env.ACT }}
|
||||
name: Install `docker-compose` for local simulations
|
||||
with:
|
||||
version: "2.14.2"
|
||||
- name: 📦Build the latest image
|
||||
run: docker build --tag infisical-api .
|
||||
working-directory: backend
|
||||
- name: Start postgres and redis
|
||||
run: touch .env && docker-compose -f docker-compose.dev.yml up -d db redis
|
||||
- name: Start the server
|
||||
run: |
|
||||
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
|
||||
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
|
||||
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
|
||||
docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
|
||||
env:
|
||||
REDIS_URL: redis://172.17.0.1:6379
|
||||
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
|
||||
JWT_AUTH_SECRET: something-random
|
||||
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '1.21.5'
|
||||
- name: Wait for container to be stable and check logs
|
||||
run: |
|
||||
SECONDS=0
|
||||
HEALTHY=0
|
||||
while [ $SECONDS -lt 60 ]; do
|
||||
if docker ps | grep infisical-api | grep -q healthy; then
|
||||
echo "Container is healthy."
|
||||
HEALTHY=1
|
||||
break
|
||||
fi
|
||||
echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"
|
||||
|
||||
docker logs infisical-api
|
||||
|
||||
sleep 2
|
||||
SECONDS=$((SECONDS+2))
|
||||
done
|
||||
|
||||
if [ $HEALTHY -ne 1 ]; then
|
||||
echo "Container did not become healthy in time"
|
||||
exit 1
|
||||
fi
|
||||
- name: Install openapi-diff
|
||||
run: go install github.com/tufin/oasdiff@latest
|
||||
- name: Running OpenAPI Spec diff action
|
||||
run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
|
||||
- name: cleanup
|
||||
run: |
|
||||
docker-compose -f "docker-compose.dev.yml" down
|
||||
docker stop infisical-api
|
||||
docker remove infisical-api
|
42 .github/workflows/check-be-pull-request.yml (vendored, Normal file)
@ -0,0 +1,42 @@
|
||||
name: "Check Backend Pull Request"
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize]
|
||||
paths:
|
||||
- "backend/**"
|
||||
- "!backend/README.md"
|
||||
- "!backend/.*"
|
||||
- "backend/.eslintrc.js"
|
||||
|
||||
jobs:
|
||||
check-be-pr:
|
||||
name: Check
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
- name: 🔧 Setup Node 16
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: "16"
|
||||
cache: "npm"
|
||||
cache-dependency-path: backend/package-lock.json
|
||||
- name: 📦 Install dependencies
|
||||
run: npm ci --only-production
|
||||
working-directory: backend
|
||||
- name: 🧪 Run tests
|
||||
run: npm run test:ci
|
||||
working-directory: backend
|
||||
- name: 📁 Upload test results
|
||||
uses: actions/upload-artifact@v3
|
||||
if: always()
|
||||
with:
|
||||
name: be-test-results
|
||||
path: |
|
||||
./backend/reports
|
||||
./backend/coverage
|
||||
- name: 🏗️ Run build
|
||||
run: npm run build
|
||||
working-directory: backend
|
35 .github/workflows/check-be-ts-and-lint.yml (vendored)
@ -1,35 +0,0 @@
|
||||
name: "Check Backend PR types and lint"
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize]
|
||||
paths:
|
||||
- "backend/**"
|
||||
- "!backend/README.md"
|
||||
- "!backend/.*"
|
||||
- "backend/.eslintrc.js"
|
||||
|
||||
jobs:
|
||||
check-be-pr:
|
||||
name: Check TS and Lint
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
|
||||
steps:
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
- name: 🔧 Setup Node 20
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: "20"
|
||||
cache: "npm"
|
||||
cache-dependency-path: backend/package-lock.json
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
working-directory: backend
|
||||
- name: Run type check
|
||||
run: npm run type:check
|
||||
working-directory: backend
|
||||
- name: Run lint check
|
||||
run: npm run lint
|
||||
working-directory: backend
|
41 .github/workflows/check-fe-pull-request.yml (vendored, Normal file)
@ -0,0 +1,41 @@
|
||||
name: Check Frontend Pull Request
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [ opened, synchronize ]
|
||||
paths:
|
||||
- 'frontend/**'
|
||||
- '!frontend/README.md'
|
||||
- '!frontend/.*'
|
||||
- 'frontend/.eslintrc.js'
|
||||
|
||||
|
||||
jobs:
|
||||
|
||||
check-fe-pr:
|
||||
name: Check
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
-
|
||||
name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
-
|
||||
name: 🔧 Setup Node 16
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: '16'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: frontend/package-lock.json
|
||||
-
|
||||
name: 📦 Install dependencies
|
||||
run: npm ci --only-production --ignore-scripts
|
||||
working-directory: frontend
|
||||
# -
|
||||
# name: 🧪 Run tests
|
||||
# run: npm run test:ci
|
||||
# working-directory: frontend
|
||||
-
|
||||
name: 🏗️ Run build
|
||||
run: npm run build
|
||||
working-directory: frontend
|
35 .github/workflows/check-fe-ts-and-lint.yml (vendored)
@ -1,35 +0,0 @@
|
||||
name: Check Frontend Type and Lint check
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize]
|
||||
paths:
|
||||
- "frontend/**"
|
||||
- "!frontend/README.md"
|
||||
- "!frontend/.*"
|
||||
- "frontend/.eslintrc.js"
|
||||
|
||||
jobs:
|
||||
check-fe-ts-lint:
|
||||
name: Check Frontend Type and Lint check
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
|
||||
steps:
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
- name: 🔧 Setup Node 16
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: "16"
|
||||
cache: "npm"
|
||||
cache-dependency-path: frontend/package-lock.json
|
||||
- name: 📦 Install dependencies
|
||||
run: npm install
|
||||
working-directory: frontend
|
||||
- name: 🏗️ Run Type check
|
||||
run: npm run type:check
|
||||
working-directory: frontend
|
||||
- name: 🏗️ Run Lint check
|
||||
run: npm run lint:fix
|
||||
working-directory: frontend
|
@ -1,25 +0,0 @@
|
||||
name: Check migration file edited
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize]
|
||||
paths:
|
||||
- 'backend/src/db/migrations/**'
|
||||
|
||||
jobs:
|
||||
rename:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Check any migration files are modified, renamed or duplicated.
|
||||
run: |
|
||||
git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^M\|^R\|^C' || true | cut -f2 | xargs -r -n1 basename > edited_files.txt
|
||||
if [ -s edited_files.txt ]; then
|
||||
echo "Exiting migration files cannot be modified."
|
||||
cat edited_files.txt
|
||||
exit 1
|
||||
fi
|
155 .github/workflows/docker-image.yml (vendored, Normal file)
@ -0,0 +1,155 @@
|
||||
name: Build, Publish and Deploy to Gamma
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "infisical/v*.*.*"
|
||||
|
||||
jobs:
|
||||
backend-image:
|
||||
name: Build backend image
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Extract version from tag
|
||||
id: extract_version
|
||||
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
- name: 📦 Install dependencies to test all dependencies
|
||||
run: npm ci --only-production
|
||||
working-directory: backend
|
||||
- name: 🧪 Run tests
|
||||
run: npm run test:ci
|
||||
working-directory: backend
|
||||
- name: Save commit hashes for tag
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v2
|
||||
- name: 🔧 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@v1
|
||||
- name: 📦 Build backend and export to Docker
|
||||
uses: depot/build-push-action@v1
|
||||
with:
|
||||
project: 64mmf0n610
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
load: true
|
||||
context: backend
|
||||
tags: infisical/backend:test
|
||||
- name: ⏻ Spawn backend container and dependencies
|
||||
run: |
|
||||
docker compose -f .github/resources/docker-compose.be-test.yml up --wait --quiet-pull
|
||||
- name: 🧪 Test backend image
|
||||
run: |
|
||||
./.github/resources/healthcheck.sh infisical-backend-test
|
||||
- name: ⏻ Shut down backend container and dependencies
|
||||
run: |
|
||||
docker compose -f .github/resources/docker-compose.be-test.yml down
|
||||
- name: 🏗️ Build backend and push
|
||||
uses: depot/build-push-action@v1
|
||||
with:
|
||||
project: 64mmf0n610
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
push: true
|
||||
context: backend
|
||||
tags: |
|
||||
infisical/backend:${{ steps.commit.outputs.short }}
|
||||
infisical/backend:latest
|
||||
infisical/backend:${{ steps.extract_version.outputs.version }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
|
||||
frontend-image:
|
||||
name: Build frontend image
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Extract version from tag
|
||||
id: extract_version
|
||||
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
- name: Save commit hashes for tag
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v2
|
||||
- name: 🔧 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@v1
|
||||
- name: 📦 Build frontend and export to Docker
|
||||
uses: depot/build-push-action@v1
|
||||
with:
|
||||
load: true
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
project: 64mmf0n610
|
||||
context: frontend
|
||||
tags: infisical/frontend:test
|
||||
build-args: |
|
||||
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
|
||||
- name: ⏻ Spawn frontend container
|
||||
run: |
|
||||
docker run -d --rm --name infisical-frontend-test infisical/frontend:test
|
||||
- name: 🧪 Test frontend image
|
||||
run: |
|
||||
./.github/resources/healthcheck.sh infisical-frontend-test
|
||||
- name: ⏻ Shut down frontend container
|
||||
run: |
|
||||
docker stop infisical-frontend-test
|
||||
- name: 🏗️ Build frontend and push
|
||||
uses: depot/build-push-action@v1
|
||||
with:
|
||||
project: 64mmf0n610
|
||||
push: true
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
context: frontend
|
||||
tags: |
|
||||
infisical/frontend:${{ steps.commit.outputs.short }}
|
||||
infisical/frontend:latest
|
||||
infisical/frontend:${{ steps.extract_version.outputs.version }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
build-args: |
|
||||
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
|
||||
gamma-deployment:
|
||||
name: Deploy to gamma
|
||||
runs-on: ubuntu-latest
|
||||
needs: [frontend-image, backend-image]
|
||||
steps:
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
- name: Install Helm
|
||||
uses: azure/setup-helm@v3
|
||||
with:
|
||||
version: v3.10.0
|
||||
- name: Install infisical helm chart
|
||||
run: |
|
||||
helm repo add infisical-helm-charts 'https://dl.cloudsmith.io/public/infisical/helm-charts/helm/charts/'
|
||||
helm repo update
|
||||
- name: Install kubectl
|
||||
uses: azure/setup-kubectl@v3
|
||||
- name: Install doctl
|
||||
uses: digitalocean/action-doctl@v2
|
||||
with:
|
||||
token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }}
|
||||
- name: Save DigitalOcean kubeconfig with short-lived credentials
|
||||
run: doctl kubernetes cluster kubeconfig save --expiry-seconds 600 k8s-1-25-4-do-0-nyc1-1670645170179
|
||||
- name: switch to gamma namespace
|
||||
run: kubectl config set-context --current --namespace=gamma
|
||||
- name: test kubectl
|
||||
run: kubectl get ingress
|
||||
- name: Download helm values to file and upgrade gamma deploy
|
||||
run: |
|
||||
wget https://raw.githubusercontent.com/Infisical/infisical/main/.github/values.yaml
|
||||
helm upgrade infisical infisical-helm-charts/infisical --values values.yaml --recreate-pods
|
||||
if [[ $(helm status infisical) == *"FAILED"* ]]; then
|
||||
echo "Helm upgrade failed"
|
||||
exit 1
|
||||
else
|
||||
echo "Helm upgrade was successful"
|
||||
fi
|
34 .github/workflows/generate-release-changelog.yml (vendored)
@ -1,34 +0,0 @@
|
||||
name: Generate Changelog
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
tags:
|
||||
- "infisical/v*.*.*-postgres"
|
||||
|
||||
jobs:
|
||||
generate_changelog:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-tags: true
|
||||
fetch-depth: 0
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.12.0"
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install requests openai
|
||||
- name: Generate Changelog and Post to Slack
|
||||
id: gen-changelog
|
||||
run: python .github/resources/changelog-generator.py
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
|
||||
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
|
@ -1,62 +0,0 @@
|
||||
name: Release standalone docker image
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "infisical/v*.*.*-postgres"
|
||||
|
||||
jobs:
|
||||
infisical-tests:
|
||||
name: Run tests before deployment
|
||||
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
|
||||
uses: ./.github/workflows/run-backend-tests.yml
|
||||
infisical-standalone:
|
||||
name: Build infisical standalone image postgres
|
||||
runs-on: ubuntu-latest
|
||||
needs: [infisical-tests]
|
||||
steps:
|
||||
- name: Extract version from tag
|
||||
id: extract_version
|
||||
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: 📦 Install dependencies to test all dependencies
|
||||
run: npm ci --only-production
|
||||
working-directory: backend
|
||||
- name: version output
|
||||
run: |
|
||||
echo "Output Value: ${{ steps.version.outputs.major }}"
|
||||
echo "Output Value: ${{ steps.version.outputs.minor }}"
|
||||
echo "Output Value: ${{ steps.version.outputs.patch }}"
|
||||
echo "Output Value: ${{ steps.version.outputs.version }}"
|
||||
echo "Output Value: ${{ steps.version.outputs.version_type }}"
|
||||
echo "Output Value: ${{ steps.version.outputs.increment }}"
|
||||
- name: Save commit hashes for tag
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v2
|
||||
- name: 🔧 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@v1
|
||||
- name: 📦 Build backend and export to Docker
|
||||
uses: depot/build-push-action@v1
|
||||
with:
|
||||
project: 64mmf0n610
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
push: true
|
||||
context: .
|
||||
tags: |
|
||||
infisical/infisical:latest-postgres
|
||||
infisical/infisical:${{ steps.commit.outputs.short }}
|
||||
infisical/infisical:${{ steps.extract_version.outputs.version }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
file: Dockerfile.standalone-infisical
|
||||
build-args: |
|
||||
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
|
||||
INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
|
69 .github/workflows/release-standalone-docker-img.yml (vendored, Normal file)
@ -0,0 +1,69 @@
|
||||
name: Release standalone docker image
|
||||
on: [workflow_dispatch]
|
||||
|
||||
jobs:
|
||||
infisical-standalone:
|
||||
name: Build infisical standalone image
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# - name: 📦 Install dependencies to test all dependencies
|
||||
# run: npm ci --only-production
|
||||
# working-directory: backend
|
||||
- uses: paulhatch/semantic-version@v5.0.2
|
||||
id: version
|
||||
with:
|
||||
# The prefix to use to identify tags
|
||||
tag_prefix: "infisical-standalone/v"
|
||||
# A string which, if present in a git commit, indicates that a change represents a
|
||||
# major (breaking) change, supports regular expressions wrapped with '/'
|
||||
major_pattern: "(MAJOR)"
|
||||
# Same as above except indicating a minor change, supports regular expressions wrapped with '/'
|
||||
minor_pattern: "(MINOR)"
|
||||
# A string to determine the format of the version output
|
||||
version_format: "${major}.${minor}.${patch}-${increment}"
|
||||
# Optional path to check for changes. If any changes are detected in the path the
|
||||
# 'changed' output will true. Enter multiple paths separated by spaces.
|
||||
change_path: "backend/ frontend/"
|
||||
# Prevents pre-v1.0.0 version from automatically incrementing the major version.
|
||||
# If enabled, when the major version is 0, major releases will be treated as minor and minor as patch. Note that the version_type output is unchanged.
|
||||
enable_prerelease_mode: true
|
||||
# - name: 🧪 Run tests
|
||||
# run: npm run test:ci
|
||||
# working-directory: backend
|
||||
- name: version output
|
||||
run: |
|
||||
echo "Output Value: ${{ steps.version.outputs.major }}"
|
||||
echo "Output Value: ${{ steps.version.outputs.minor }}"
|
||||
echo "Output Value: ${{ steps.version.outputs.patch }}"
|
||||
echo "Output Value: ${{ steps.version.outputs.version }}"
|
||||
echo "Output Value: ${{ steps.version.outputs.version_type }}"
|
||||
echo "Output Value: ${{ steps.version.outputs.increment }}"
|
||||
echo "Output Value: ${{ steps.version.outputs.changed }}"
|
||||
# - name: Save commit hashes for tag
|
||||
# id: commit
|
||||
# uses: pr-mpt/actions-commit-hash@v2
|
||||
# - name: 🔧 Set up Docker Buildx
|
||||
# uses: docker/setup-buildx-action@v2
|
||||
# - name: 🐋 Login to Docker Hub
|
||||
# uses: docker/login-action@v2
|
||||
# with:
|
||||
# username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
# password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
# - name: Set up Depot CLI
|
||||
# uses: depot/setup-action@v1
|
||||
# - name: 📦 Build backend and export to Docker
|
||||
# uses: depot/build-push-action@v1
|
||||
# with:
|
||||
# project: 64mmf0n610
|
||||
# token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
# push: true
|
||||
# context: .
|
||||
# tags: |
|
||||
# infisical/infisical:latest
|
||||
# infisical/infisical:${{ steps.commit.outputs.short }}
|
||||
# platforms: linux/amd64,linux/arm64
|
||||
# file: Dockerfile.standalone-infisical
|
57 .github/workflows/release_build.yml (vendored, Normal file)
@ -0,0 +1,57 @@
|
||||
name: Build and release CLI
|
||||
|
||||
on:
|
||||
push:
|
||||
# run only against tags
|
||||
tags:
|
||||
- "infisical-cli/v*.*.*"
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
# packages: write
|
||||
# issues: write
|
||||
|
||||
jobs:
|
||||
goreleaser:
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- run: git fetch --force --tags
|
||||
- run: echo "Ref name ${{github.ref_name}}"
|
||||
- uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: ">=1.19.3"
|
||||
cache: true
|
||||
cache-dependency-path: cli/go.sum
|
||||
- name: libssl1.1 => libssl1.0-dev for OSXCross
|
||||
run: |
|
||||
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
|
||||
sudo apt update && apt-cache policy libssl1.0-dev
|
||||
sudo apt-get install libssl1.0-dev
|
||||
- name: OSXCross for CGO Support
|
||||
run: |
|
||||
mkdir ../../osxcross
|
||||
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
|
||||
- uses: goreleaser/goreleaser-action@v4
|
||||
with:
|
||||
distribution: goreleaser-pro
|
||||
version: latest
|
||||
args: release --clean
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
|
||||
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
|
||||
AUR_KEY: ${{ secrets.AUR_KEY }}
|
||||
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
|
||||
- uses: actions/setup-python@v4
|
||||
- run: pip install --upgrade cloudsmith-cli
|
||||
- name: Publish to CloudSmith
|
||||
run: sh cli/upload_to_cloudsmith.sh
|
||||
env:
|
||||
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
|
@ -1,75 +0,0 @@
|
||||
name: Build and release CLI
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
push:
|
||||
# run only against tags
|
||||
tags:
|
||||
- "infisical-cli/v*.*.*"
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
# packages: write
|
||||
# issues: write
|
||||
jobs:
|
||||
cli-integration-tests:
|
||||
name: Run tests before deployment
|
||||
uses: ./.github/workflows/run-cli-tests.yml
|
||||
secrets:
|
||||
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
|
||||
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
|
||||
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
|
||||
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
|
||||
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
|
||||
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
|
||||
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
|
||||
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
|
||||
|
||||
goreleaser:
|
||||
runs-on: ubuntu-20.04
|
||||
needs: [cli-integration-tests]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: 🔧 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- run: git fetch --force --tags
|
||||
- run: echo "Ref name ${{github.ref_name}}"
|
||||
- uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: ">=1.19.3"
|
||||
cache: true
|
||||
cache-dependency-path: cli/go.sum
|
||||
- name: libssl1.1 => libssl1.0-dev for OSXCross
|
||||
run: |
|
||||
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
|
||||
sudo apt update && apt-cache policy libssl1.0-dev
|
||||
sudo apt-get install libssl1.0-dev
|
||||
- name: OSXCross for CGO Support
|
||||
run: |
|
||||
mkdir ../../osxcross
|
||||
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
|
||||
- uses: goreleaser/goreleaser-action@v4
|
||||
with:
|
||||
distribution: goreleaser-pro
|
||||
version: v1.26.2-pro
|
||||
args: release --clean
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
|
||||
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
|
||||
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
|
||||
AUR_KEY: ${{ secrets.AUR_KEY }}
|
||||
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
|
||||
- uses: actions/setup-python@v4
|
||||
- run: pip install --upgrade cloudsmith-cli
|
||||
- name: Publish to CloudSmith
|
||||
run: sh cli/upload_to_cloudsmith.sh
|
||||
env:
|
||||
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
|
@ -1,16 +1,10 @@
|
||||
name: Release Docker image for K8 operator
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "infisical-k8-operator/v*.*.*"
|
||||
name: Release Docker image for K8 operator
|
||||
on: [workflow_dispatch]
|
||||
|
||||
jobs:
|
||||
release:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Extract version from tag
|
||||
id: extract_version
|
||||
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: 🔧 Set up QEMU
|
||||
@ -32,6 +26,4 @@ jobs:
|
||||
context: k8-operator
|
||||
push: true
|
||||
platforms: linux/amd64,linux/arm64
|
||||
tags: |
|
||||
infisical/kubernetes-operator:latest
|
||||
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
|
||||
tags: infisical/kubernetes-operator:latest
|
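The "Extract version from tag" step above uses shell parameter expansion (`${GITHUB_REF_NAME#infisical-k8-operator/}`) to turn a tag such as `infisical-k8-operator/v0.3.0` into `v0.3.0`. For readers less familiar with that shell syntax, here is a tiny TypeScript equivalent of the prefix strip (the tag value is only an example):

```typescript
// Equivalent of ${GITHUB_REF_NAME#infisical-k8-operator/}: drop the prefix when present.
function extractVersion(refName: string, prefix = "infisical-k8-operator/"): string {
  return refName.startsWith(prefix) ? refName.slice(prefix.length) : refName;
}

console.log(extractVersion("infisical-k8-operator/v0.3.0")); // "v0.3.0"
```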
47
.github/workflows/run-backend-tests.yml
vendored
@ -1,47 +0,0 @@
|
||||
name: "Run backend tests"
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize]
|
||||
paths:
|
||||
- "backend/**"
|
||||
- "!backend/README.md"
|
||||
- "!backend/.*"
|
||||
- "backend/.eslintrc.js"
|
||||
workflow_call:
|
||||
|
||||
jobs:
|
||||
check-be-pr:
|
||||
name: Run integration test
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
steps:
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
- uses: KengoTODA/actions-setup-docker-compose@v1
|
||||
if: ${{ env.ACT }}
|
||||
name: Install `docker-compose` for local simulations
|
||||
with:
|
||||
version: "2.14.2"
|
||||
- name: 🔧 Setup Node 20
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: "20"
|
||||
cache: "npm"
|
||||
cache-dependency-path: backend/package-lock.json
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
working-directory: backend
|
||||
- name: Start postgres and redis
|
||||
run: touch .env && docker-compose -f docker-compose.dev.yml up -d db redis
|
||||
- name: Start integration test
|
||||
run: npm run test:e2e
|
||||
working-directory: backend
|
||||
env:
|
||||
REDIS_URL: redis://172.17.0.1:6379
|
||||
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
|
||||
AUTH_SECRET: something-random
|
||||
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
|
||||
- name: cleanup
|
||||
run: |
|
||||
docker-compose -f "docker-compose.dev.yml" down
|
55
.github/workflows/run-cli-tests.yml
vendored
@ -1,55 +0,0 @@
|
||||
name: Go CLI Tests
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize]
|
||||
paths:
|
||||
- "cli/**"
|
||||
|
||||
workflow_dispatch:
|
||||
|
||||
workflow_call:
|
||||
secrets:
|
||||
CLI_TESTS_UA_CLIENT_ID:
|
||||
required: true
|
||||
CLI_TESTS_UA_CLIENT_SECRET:
|
||||
required: true
|
||||
CLI_TESTS_SERVICE_TOKEN:
|
||||
required: true
|
||||
CLI_TESTS_PROJECT_ID:
|
||||
required: true
|
||||
CLI_TESTS_ENV_SLUG:
|
||||
required: true
|
||||
CLI_TESTS_USER_EMAIL:
|
||||
required: true
|
||||
CLI_TESTS_USER_PASSWORD:
|
||||
required: true
|
||||
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE:
|
||||
required: true
|
||||
jobs:
|
||||
test:
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./cli
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v4
|
||||
with:
|
||||
go-version: "1.21.x"
|
||||
- name: Install dependencies
|
||||
run: go get .
|
||||
- name: Test with the Go CLI
|
||||
env:
|
||||
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
|
||||
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
|
||||
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
|
||||
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
|
||||
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
|
||||
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
|
||||
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
|
||||
INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
|
||||
|
||||
run: go test -v -count=1 ./test
|
18
.gitignore
vendored
@ -1,12 +1,10 @@
|
||||
# backend
|
||||
node_modules
|
||||
.env
|
||||
.env.test
|
||||
.env.dev
|
||||
.env.gamma
|
||||
.env.prod
|
||||
.env.infisical
|
||||
.env.migration
|
||||
|
||||
*~
|
||||
*.swp
|
||||
*.swo
|
||||
@ -34,7 +32,7 @@ reports
|
||||
junit.xml
|
||||
|
||||
# next.js
|
||||
.next/
|
||||
/.next/
|
||||
/out/
|
||||
|
||||
# production
|
||||
@ -58,15 +56,3 @@ yarn-error.log*
|
||||
|
||||
# Infisical init
|
||||
.infisical.json
|
||||
|
||||
.infisicalignore
|
||||
|
||||
# Editor specific
|
||||
.vscode/*
|
||||
|
||||
frontend-build
|
||||
|
||||
*.tgz
|
||||
cli/infisical-merge
|
||||
cli/test/infisical-merge
|
||||
/backend/binary
|
||||
|
@ -18,9 +18,7 @@ monorepo:
|
||||
builds:
|
||||
- id: darwin-build
|
||||
binary: infisical
|
||||
ldflags:
|
||||
- -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
|
||||
- -X github.com/Infisical/infisical-merge/packages/telemetry.POSTHOG_API_KEY_FOR_CLI={{ .Env.POSTHOG_API_KEY_FOR_CLI }}
|
||||
ldflags: -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
|
||||
flags:
|
||||
- -trimpath
|
||||
env:
|
||||
@ -38,9 +36,7 @@ builds:
|
||||
env:
|
||||
- CGO_ENABLED=0
|
||||
binary: infisical
|
||||
ldflags:
|
||||
- -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
|
||||
- -X github.com/Infisical/infisical-merge/packages/telemetry.POSTHOG_API_KEY_FOR_CLI={{ .Env.POSTHOG_API_KEY_FOR_CLI }}
|
||||
ldflags: -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
|
||||
flags:
|
||||
- -trimpath
|
||||
goos:
|
||||
@ -108,22 +104,6 @@ brews:
|
||||
zsh_completion.install "completions/infisical.zsh" => "_infisical"
|
||||
fish_completion.install "completions/infisical.fish"
|
||||
man1.install "manpages/infisical.1.gz"
|
||||
- name: "infisical@{{.Version}}"
|
||||
tap:
|
||||
owner: Infisical
|
||||
name: homebrew-get-cli
|
||||
commit_author:
|
||||
name: "Infisical"
|
||||
email: ai@infisical.com
|
||||
folder: Formula
|
||||
homepage: "https://infisical.com"
|
||||
description: "The official Infisical CLI"
|
||||
install: |-
|
||||
bin.install "infisical"
|
||||
bash_completion.install "completions/infisical.bash" => "infisical"
|
||||
zsh_completion.install "completions/infisical.zsh" => "_infisical"
|
||||
fish_completion.install "completions/infisical.fish"
|
||||
man1.install "manpages/infisical.1.gz"
|
||||
|
||||
nfpms:
|
||||
- id: infisical
|
||||
@ -181,43 +161,17 @@ aurs:
|
||||
mkdir -p "${pkgdir}/usr/share/zsh/site-functions/"
|
||||
mkdir -p "${pkgdir}/usr/share/fish/vendor_completions.d/"
|
||||
install -Dm644 "./completions/infisical.bash" "${pkgdir}/usr/share/bash-completion/completions/infisical"
|
||||
install -Dm644 "./completions/infisical.zsh" "${pkgdir}/usr/share/zsh/site-functions/_infisical"
|
||||
install -Dm644 "./completions/infisical.zsh" "${pkgdir}/usr/share/zsh/site-functions/infisical"
|
||||
install -Dm644 "./completions/infisical.fish" "${pkgdir}/usr/share/fish/vendor_completions.d/infisical.fish"
|
||||
# man pages
|
||||
install -Dm644 "./manpages/infisical.1.gz" "${pkgdir}/usr/share/man/man1/infisical.1.gz"
|
||||
|
||||
dockers:
|
||||
- dockerfile: docker/alpine
|
||||
goos: linux
|
||||
goarch: amd64
|
||||
use: buildx
|
||||
ids:
|
||||
- all-other-builds
|
||||
image_templates:
|
||||
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
|
||||
- "infisical/cli:latest-amd64"
|
||||
build_flag_templates:
|
||||
- "--pull"
|
||||
- "--platform=linux/amd64"
|
||||
- dockerfile: docker/alpine
|
||||
goos: linux
|
||||
goarch: amd64
|
||||
use: buildx
|
||||
ids:
|
||||
- all-other-builds
|
||||
image_templates:
|
||||
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
|
||||
- "infisical/cli:latest-arm64"
|
||||
build_flag_templates:
|
||||
- "--pull"
|
||||
- "--platform=linux/arm64"
|
||||
|
||||
docker_manifests:
|
||||
- name_template: "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}"
|
||||
image_templates:
|
||||
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
|
||||
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
|
||||
- name_template: "infisical/cli:latest"
|
||||
image_templates:
|
||||
- "infisical/cli:latest-amd64"
|
||||
- "infisical/cli:latest-arm64"
|
||||
# dockers:
|
||||
# - dockerfile: cli/docker/Dockerfile
|
||||
# goos: linux
|
||||
# goarch: amd64
|
||||
# ids:
|
||||
# - infisical
|
||||
# image_templates:
|
||||
# - "infisical/cli:{{ .Version }}"
|
||||
# - "infisical/cli:latest"
|
||||
|
@ -1,6 +1,5 @@
|
||||
|
||||
#!/usr/bin/env sh
|
||||
. "$(dirname -- "$0")/_/husky.sh"
|
||||
|
||||
npx lint-staged
|
||||
|
||||
infisical scan git-changes --staged -v
|
||||
|
@ -1,8 +0,0 @@
|
||||
.github/resources/docker-compose.be-test.yml:generic-api-key:16
|
||||
frontend/src/views/Project/MembersPage/components/IdentityTab/components/IdentityRoleForm/IdentityRbacSection.tsx:generic-api-key:206
|
||||
frontend/src/views/Project/MembersPage/components/IdentityTab/components/IdentityRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:304
|
||||
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/MemberRbacSection.tsx:generic-api-key:206
|
||||
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:292
|
||||
docs/self-hosting/configuration/envars.mdx:generic-api-key:106
|
||||
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
|
||||
docs/mint.json:generic-api-key:651
|
@ -2,6 +2,6 @@
|
||||
|
||||
Thanks for taking the time to contribute! 😃 🚀
|
||||
|
||||
Please refer to our [Contributing Guide](https://infisical.com/docs/contributing/getting-started/overview) for instructions on how to contribute.
|
||||
Please refer to our [Contributing Guide](https://infisical.com/docs/contributing/overview) for instructions on how to contribute.
|
||||
|
||||
We also have some 🔥amazing🔥 merch for our contributors. Please reach out to tony@infisical.com for more info 👀
|
||||
|
@ -1,14 +1,7 @@
|
||||
ARG POSTHOG_HOST=https://app.posthog.com
|
||||
ARG POSTHOG_API_KEY=posthog-api-key
|
||||
ARG INTERCOM_ID=intercom-id
|
||||
ARG CAPTCHA_SITE_KEY=captcha-site-key
|
||||
|
||||
FROM node:20-alpine AS base
|
||||
|
||||
FROM base AS frontend-dependencies
|
||||
|
||||
# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
|
||||
RUN apk add --no-cache libc6-compat
|
||||
FROM node:16-alpine AS frontend-dependencies
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
@ -18,7 +11,7 @@ COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./
|
||||
RUN npm ci --only-production --ignore-scripts
|
||||
|
||||
# Rebuild the source code only when needed
|
||||
FROM base AS frontend-builder
|
||||
FROM node:16-alpine AS frontend-builder
|
||||
WORKDIR /app
|
||||
|
||||
# Copy dependencies
|
||||
@ -32,43 +25,38 @@ ARG POSTHOG_HOST
|
||||
ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
|
||||
ARG POSTHOG_API_KEY
|
||||
ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
|
||||
ARG INTERCOM_ID
|
||||
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
|
||||
ARG INFISICAL_PLATFORM_VERSION
|
||||
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
|
||||
ARG CAPTCHA_SITE_KEY
|
||||
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
|
||||
|
||||
# Build
|
||||
RUN npm run build
|
||||
|
||||
# Production image
|
||||
FROM base AS frontend-runner
|
||||
FROM node:16-alpine AS frontend-runner
|
||||
WORKDIR /app
|
||||
|
||||
RUN addgroup --system --gid 1001 nodejs
|
||||
RUN adduser --system --uid 1001 non-root-user
|
||||
RUN adduser --system --uid 1001 nextjs
|
||||
|
||||
RUN mkdir -p /app/.next/cache/images && chown non-root-user:nodejs /app/.next/cache/images
|
||||
RUN mkdir -p /app/.next/cache/images && chown nextjs:nodejs /app/.next/cache/images
|
||||
VOLUME /app/.next/cache/images
|
||||
|
||||
COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
|
||||
ARG POSTHOG_API_KEY
|
||||
ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
|
||||
BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY
|
||||
|
||||
COPY --chown=nextjs:nodejs --chmod=555 frontend/scripts ./scripts
|
||||
COPY --from=frontend-builder /app/public ./public
|
||||
RUN chown non-root-user:nodejs ./public/data
|
||||
RUN chown nextjs:nodejs ./public/data
|
||||
COPY --from=frontend-builder --chown=nextjs:nodejs /app/.next/standalone ./
|
||||
COPY --from=frontend-builder --chown=nextjs:nodejs /app/.next/static ./.next/static
|
||||
|
||||
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
|
||||
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static
|
||||
|
||||
USER non-root-user
|
||||
USER nextjs
|
||||
|
||||
ENV NEXT_TELEMETRY_DISABLED 1
|
||||
|
||||
##
|
||||
## BACKEND
|
||||
##
|
||||
FROM base AS backend-build
|
||||
RUN addgroup --system --gid 1001 nodejs \
|
||||
&& adduser --system --uid 1001 non-root-user
|
||||
FROM node:16-alpine AS backend-build
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
@ -76,12 +64,10 @@ COPY backend/package*.json ./
|
||||
RUN npm ci --only-production
|
||||
|
||||
COPY /backend .
|
||||
COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
|
||||
RUN npm i -D tsconfig-paths
|
||||
RUN npm run build
|
||||
|
||||
# Production stage
|
||||
FROM base AS backend-runner
|
||||
FROM node:16-alpine AS backend-runner
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
@ -90,53 +76,27 @@ RUN npm ci --only-production
|
||||
|
||||
COPY --from=backend-build /app .
|
||||
|
||||
RUN mkdir frontend-build
|
||||
|
||||
# Production stage
|
||||
FROM base AS production
|
||||
RUN apk add --upgrade --no-cache ca-certificates
|
||||
RUN addgroup --system --gid 1001 nodejs \
|
||||
&& adduser --system --uid 1001 non-root-user
|
||||
|
||||
# Give non-root-user permission to update SSL certs
|
||||
RUN chown -R non-root-user /etc/ssl/certs
|
||||
RUN chown non-root-user /etc/ssl/certs/ca-certificates.crt
|
||||
RUN chmod -R u+rwx /etc/ssl/certs
|
||||
RUN chmod u+rw /etc/ssl/certs/ca-certificates.crt
|
||||
RUN chown non-root-user /usr/sbin/update-ca-certificates
|
||||
RUN chmod u+rx /usr/sbin/update-ca-certificates
|
||||
|
||||
## set pre baked keys
|
||||
ARG POSTHOG_API_KEY
|
||||
ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
|
||||
BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY
|
||||
ARG INTERCOM_ID=intercom-id
|
||||
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
|
||||
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
|
||||
ARG CAPTCHA_SITE_KEY
|
||||
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
|
||||
BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
|
||||
FROM node:14-alpine AS production
|
||||
|
||||
WORKDIR /
|
||||
|
||||
# Install PM2
|
||||
RUN npm install -g pm2
|
||||
# Copy ecosystem.config.js
|
||||
COPY ecosystem.config.js .
|
||||
|
||||
RUN apk add --no-cache nginx
|
||||
|
||||
COPY nginx/default-stand-alone-docker.conf /etc/nginx/nginx.conf
|
||||
|
||||
COPY --from=backend-runner /app /backend
|
||||
|
||||
COPY --from=frontend-runner /app ./backend/frontend-build
|
||||
COPY --from=frontend-runner /app/ /app/
|
||||
|
||||
|
||||
ENV PORT 8080
|
||||
ENV HOST=0.0.0.0
|
||||
EXPOSE 80
|
||||
ENV HTTPS_ENABLED false
|
||||
ENV NODE_ENV production
|
||||
ENV STANDALONE_BUILD true
|
||||
ENV STANDALONE_MODE true
|
||||
WORKDIR /backend
|
||||
|
||||
ENV TELEMETRY_ENABLED true
|
||||
CMD ["pm2-runtime", "start", "ecosystem.config.js"]
|
||||
|
||||
EXPOSE 8080
|
||||
EXPOSE 443
|
||||
|
||||
USER non-root-user
|
||||
|
||||
CMD ["./standalone-entrypoint.sh"]
|
||||
|
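The `BAKED_NEXT_PUBLIC_*` pairs in the production stage above hint at a pattern where a value is compiled into the Next.js bundle at build time and swapped out at container start if a different runtime value is supplied. The repository's `frontend/scripts` are not shown in this diff, so the sketch below is purely hypothetical: the function name, file paths, and substitution strategy are assumptions, not the project's actual entrypoint logic.

```typescript
// Hypothetical runtime substitution of a baked NEXT_PUBLIC_* value (not the project's actual script).
import { promises as fs } from "node:fs";
import path from "node:path";

async function replaceBakedValue(dir: string, bakedValue: string, runtimeValue: string): Promise<void> {
  if (!runtimeValue || runtimeValue === bakedValue) return; // nothing to replace
  const entries = await fs.readdir(dir, { withFileTypes: true });
  for (const entry of entries) {
    const fullPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      await replaceBakedValue(fullPath, bakedValue, runtimeValue);
    } else if (entry.name.endsWith(".js")) {
      const contents = await fs.readFile(fullPath, "utf8");
      if (contents.includes(bakedValue)) {
        await fs.writeFile(fullPath, contents.split(bakedValue).join(runtimeValue));
      }
    }
  }
}

// Example call (paths and variable names are assumptions):
// await replaceBakedValue("/app/.next", process.env.BAKED_NEXT_PUBLIC_POSTHOG_API_KEY!, process.env.NEXT_PUBLIC_POSTHOG_API_KEY!);
```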
10
Makefile
@ -5,13 +5,13 @@ push:
|
||||
docker-compose -f docker-compose.yml push
|
||||
|
||||
up-dev:
|
||||
docker compose -f docker-compose.dev.yml up --build
|
||||
docker-compose -f docker-compose.dev.yml up --build
|
||||
|
||||
up-dev-ldap:
|
||||
docker compose -f docker-compose.dev.yml --profile ldap up --build
|
||||
i-dev:
|
||||
infisical run -- docker-compose -f docker-compose.dev.yml up --build
|
||||
|
||||
up-prod:
|
||||
docker-compose -f docker-compose.prod.yml up --build
|
||||
docker-compose -f docker-compose.yml up --build
|
||||
|
||||
down:
|
||||
docker compose -f docker-compose.dev.yml down
|
||||
docker-compose down
|
||||
|
@ -1,3 +1,2 @@
|
||||
vitest-environment-infisical.ts
|
||||
vitest.config.ts
|
||||
vitest.e2e.config.ts
|
||||
node_modules
|
||||
built
|
12
backend/.eslintrc
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"parser": "@typescript-eslint/parser",
|
||||
"plugins": ["@typescript-eslint"],
|
||||
"extends": [
|
||||
"eslint:recommended",
|
||||
"plugin:@typescript-eslint/eslint-recommended",
|
||||
"plugin:@typescript-eslint/recommended"
|
||||
],
|
||||
"rules": {
|
||||
"no-console": 2
|
||||
}
|
||||
}
|
@ -1,74 +0,0 @@
|
||||
/* eslint-env node */
|
||||
module.exports = {
|
||||
env: {
|
||||
es6: true,
|
||||
node: true
|
||||
},
|
||||
extends: [
|
||||
"eslint:recommended",
|
||||
"plugin:@typescript-eslint/recommended",
|
||||
"plugin:@typescript-eslint/recommended-type-checked",
|
||||
"airbnb-base",
|
||||
"airbnb-typescript/base",
|
||||
"plugin:prettier/recommended",
|
||||
"prettier"
|
||||
],
|
||||
plugins: ["@typescript-eslint", "simple-import-sort", "import"],
|
||||
parser: "@typescript-eslint/parser",
|
||||
parserOptions: {
|
||||
project: true,
|
||||
sourceType: "module",
|
||||
tsconfigRootDir: __dirname
|
||||
},
|
||||
root: true,
|
||||
overrides: [
|
||||
{
|
||||
files: ["./e2e-test/**/*", "./src/db/migrations/**/*"],
|
||||
rules: {
|
||||
"@typescript-eslint/no-unsafe-member-access": "off",
|
||||
"@typescript-eslint/no-unsafe-assignment": "off",
|
||||
"@typescript-eslint/no-unsafe-argument": "off",
|
||||
"@typescript-eslint/no-unsafe-return": "off",
|
||||
"@typescript-eslint/no-unsafe-call": "off"
|
||||
}
|
||||
}
|
||||
],
|
||||
|
||||
rules: {
|
||||
"@typescript-eslint/no-empty-function": "off",
|
||||
"@typescript-eslint/no-unsafe-enum-comparison": "off",
|
||||
"no-void": "off",
|
||||
"consistent-return": "off", // my style
|
||||
"import/order": "off", // for simple-import-order
|
||||
"import/prefer-default-export": "off", // why
|
||||
"no-restricted-syntax": "off",
|
||||
// importing rules
|
||||
"simple-import-sort/exports": "error",
|
||||
"import/first": "error",
|
||||
"import/newline-after-import": "error",
|
||||
"import/no-duplicates": "error",
|
||||
"simple-import-sort/imports": [
|
||||
"warn",
|
||||
{
|
||||
groups: [
|
||||
// Side effect imports.
|
||||
["^\\u0000"],
|
||||
// Node.js builtins prefixed with `node:`.
|
||||
["^node:"],
|
||||
// Packages.
|
||||
// Things that start with a letter (or digit or underscore), or `@` followed by a letter.
|
||||
["^@?\\w"],
|
||||
["^@app"],
|
||||
["@lib"],
|
||||
["@server"],
|
||||
// Absolute imports and other imports such as Vue-style `@/foo`.
|
||||
// Anything not matched in another group.
|
||||
["^"],
|
||||
// Relative imports.
|
||||
// Anything that starts with a dot.
|
||||
["^\\."]
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
};
|
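The removed `.eslintrc.js` above configured `simple-import-sort` with explicit groups: side-effect imports first, then `node:` builtins, external packages, the `@app`, `@lib`, and `@server` aliases, any remaining absolute imports, and finally relative imports. A file that satisfies that ordering would look roughly like this; every module name other than the three aliases is illustrative, not a real project path.

```typescript
// Example file shape only: module names besides the aliases are illustrative.
import "dotenv/config"; // 1. side-effect imports

import { randomUUID } from "node:crypto"; // 2. node: builtins

import { z } from "zod"; // 3. external packages

import { logger } from "@app/lib/logger"; // 4. @app alias
import { someHelper } from "@lib/helpers"; // 5. @lib alias
import { registerRoutes } from "@server/routes"; // 6. @server alias

import { localUtil } from "./local-util"; // 7. relative imports

// Trivial usage so none of the example imports are unused.
export const example = () => localUtil(logger, someHelper, registerRoutes, z, randomUUID());
```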
1
backend/.gitignore
vendored
@ -1 +0,0 @@
|
||||
dist
|
@ -1,7 +0,0 @@
|
||||
{
|
||||
"singleQuote": false,
|
||||
"printWidth": 120,
|
||||
"trailingComma": "none",
|
||||
"tabWidth": 2,
|
||||
"semi": true
|
||||
}
|
@ -1,5 +1,5 @@
|
||||
# Build stage
|
||||
FROM node:20-alpine AS build
|
||||
FROM node:16-alpine AS build
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
@ -10,26 +10,18 @@ COPY . .
|
||||
RUN npm run build
|
||||
|
||||
# Production stage
|
||||
FROM node:20-alpine
|
||||
FROM node:16-alpine
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
ENV npm_config_cache /home/node/.npm
|
||||
|
||||
COPY package*.json ./
|
||||
RUN npm ci --only-production && npm cache clean --force
|
||||
RUN npm ci --only-production
|
||||
|
||||
COPY --from=build /app .
|
||||
|
||||
RUN apk add --no-cache bash curl && curl -1sLf \
|
||||
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
|
||||
&& apk add infisical=0.8.1 && apk add --no-cache git
|
||||
|
||||
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
|
||||
CMD node healthcheck.js
|
||||
|
||||
ENV HOST=0.0.0.0
|
||||
|
||||
EXPOSE 4000
|
||||
|
||||
CMD ["npm", "start"]
|
||||
CMD ["npm", "run", "start"]
|
@ -1,18 +0,0 @@
|
||||
FROM node:20-alpine
|
||||
|
||||
RUN apk add --no-cache bash curl && curl -1sLf \
|
||||
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
|
||||
&& apk add infisical=0.8.1 && apk add --no-cache git
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY package.json package.json
|
||||
COPY package-lock.json package-lock.json
|
||||
|
||||
RUN npm install
|
||||
|
||||
COPY . .
|
||||
|
||||
ENV HOST=0.0.0.0
|
||||
|
||||
CMD ["npm", "run", "dev:docker"]
|
@ -1,4 +0,0 @@
|
||||
{
|
||||
"presets": ["@babel/preset-env", "@babel/preset-react"],
|
||||
"plugins": ["@babel/plugin-syntax-import-attributes", "babel-plugin-transform-import-meta"]
|
||||
}
|
@ -1,37 +0,0 @@
|
||||
import { TKeyStoreFactory } from "@app/keystore/keystore";
|
||||
import { Lock } from "@app/lib/red-lock";
|
||||
|
||||
export const mockKeyStore = (): TKeyStoreFactory => {
|
||||
const store: Record<string, string | number | Buffer> = {};
|
||||
|
||||
return {
|
||||
setItem: async (key, value) => {
|
||||
store[key] = value;
|
||||
return "OK";
|
||||
},
|
||||
setItemWithExpiry: async (key, value) => {
|
||||
store[key] = value;
|
||||
return "OK";
|
||||
},
|
||||
deleteItem: async (key) => {
|
||||
delete store[key];
|
||||
return 1;
|
||||
},
|
||||
getItem: async (key) => {
|
||||
const value = store[key];
|
||||
if (typeof value === "string") {
|
||||
return value;
|
||||
}
|
||||
return null;
|
||||
},
|
||||
incrementBy: async () => {
|
||||
return 1;
|
||||
},
|
||||
acquireLock: () => {
|
||||
return Promise.resolve({
|
||||
release: () => {}
|
||||
}) as Promise<Lock>;
|
||||
},
|
||||
waitTillReady: async () => {}
|
||||
};
|
||||
};
|
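The in-memory `mockKeyStore` above was one of the removed e2e-test mocks; it just stores values in a plain object. A short sketch of how such a mock could be exercised directly in a test (vitest-style globals, as used elsewhere in these tests; the import path of the mock is not shown in the diff, so it is assumed):

```typescript
// import { mockKeyStore } from "./mocks/keystore"; // path is an assumption
const keyStore = mockKeyStore();

test("mock keystore stores and deletes items", async () => {
  await keyStore.setItem("session:123", "value");
  expect(await keyStore.getItem("session:123")).toBe("value");

  await keyStore.deleteItem("session:123");
  expect(await keyStore.getItem("session:123")).toBeNull();
});
```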
@ -1,26 +0,0 @@
|
||||
import { TQueueServiceFactory } from "@app/queue";
|
||||
|
||||
export const mockQueue = (): TQueueServiceFactory => {
|
||||
const queues: Record<string, unknown> = {};
|
||||
const workers: Record<string, unknown> = {};
|
||||
const job: Record<string, unknown> = {};
|
||||
const events: Record<string, unknown> = {};
|
||||
|
||||
return {
|
||||
queue: async (name, jobData) => {
|
||||
job[name] = jobData;
|
||||
},
|
||||
shutdown: async () => undefined,
|
||||
stopRepeatableJob: async () => true,
|
||||
start: (name, jobFn) => {
|
||||
queues[name] = jobFn;
|
||||
workers[name] = jobFn;
|
||||
},
|
||||
listen: (name, event) => {
|
||||
events[name] = event;
|
||||
},
|
||||
clearQueue: async () => {},
|
||||
stopJobById: async () => {},
|
||||
stopRepeatableJobByJobId: async () => true
|
||||
};
|
||||
};
|
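Similarly, the `mockQueue` factory above records handlers and queued payloads in plain objects, which lets queue-dependent services run in tests without Redis. A minimal usage sketch; the job name and payload are made up, and the import path is again an assumption:

```typescript
// import { mockQueue } from "./mocks/queue"; // path is an assumption
const queueService = mockQueue();

test("mock queue captures queued jobs", async () => {
  queueService.start("secret-rotation", async () => undefined); // register a handler; nothing runs in the background
  await queueService.queue("secret-rotation", { secretId: "abc" }); // the payload is simply stored in the mock
});
```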
@ -1,10 +0,0 @@
|
||||
import { TSmtpSendMail, TSmtpService } from "@app/services/smtp/smtp-service";
|
||||
|
||||
export const mockSmtpServer = (): TSmtpService => {
|
||||
const storage: TSmtpSendMail[] = [];
|
||||
return {
|
||||
sendMail: async (data) => {
|
||||
storage.push(data);
|
||||
}
|
||||
};
|
||||
};
|
@ -1,71 +0,0 @@
|
||||
import { OrgMembershipRole } from "@app/db/schemas";
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
export const createIdentity = async (name: string, role: string) => {
|
||||
const createIdentityRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: "/api/v1/identities",
|
||||
body: {
|
||||
name,
|
||||
role,
|
||||
organizationId: seedData1.organization.id
|
||||
},
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
expect(createIdentityRes.statusCode).toBe(200);
|
||||
return createIdentityRes.json().identity;
|
||||
};
|
||||
|
||||
export const deleteIdentity = async (id: string) => {
|
||||
const deleteIdentityRes = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v1/identities/${id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
expect(deleteIdentityRes.statusCode).toBe(200);
|
||||
return deleteIdentityRes.json().identity;
|
||||
};
|
||||
|
||||
describe("Identity v1", async () => {
|
||||
test("Create identity", async () => {
|
||||
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
|
||||
expect(newIdentity.name).toBe("mac1");
|
||||
expect(newIdentity.authMethod).toBeNull();
|
||||
|
||||
await deleteIdentity(newIdentity.id);
|
||||
});
|
||||
|
||||
test("Update identity", async () => {
|
||||
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
|
||||
expect(newIdentity.name).toBe("mac1");
|
||||
expect(newIdentity.authMethod).toBeNull();
|
||||
|
||||
const updatedIdentity = await testServer.inject({
|
||||
method: "PATCH",
|
||||
url: `/api/v1/identities/${newIdentity.id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
name: "updated-mac-1",
|
||||
role: OrgMembershipRole.Member
|
||||
}
|
||||
});
|
||||
|
||||
expect(updatedIdentity.statusCode).toBe(200);
|
||||
expect(updatedIdentity.json().identity.name).toBe("updated-mac-1");
|
||||
|
||||
await deleteIdentity(newIdentity.id);
|
||||
});
|
||||
|
||||
test("Delete Identity", async () => {
|
||||
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
|
||||
|
||||
const deletedIdentity = await deleteIdentity(newIdentity.id);
|
||||
expect(deletedIdentity.name).toBe("mac1");
|
||||
});
|
||||
});
|
@ -1,46 +0,0 @@
|
||||
import jsrp from "jsrp";
|
||||
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
describe("Login V1 Router", async () => {
|
||||
// eslint-disable-next-line
|
||||
const client = new jsrp.client();
|
||||
await new Promise((resolve) => {
|
||||
client.init({ username: seedData1.email, password: seedData1.password }, () => resolve(null));
|
||||
});
|
||||
let clientProof: string;
|
||||
|
||||
test("Login first phase", async () => {
|
||||
const res = await testServer.inject({
|
||||
method: "POST",
|
||||
url: "/api/v3/auth/login1",
|
||||
body: {
|
||||
email: "test@localhost.local",
|
||||
clientPublicKey: client.getPublicKey()
|
||||
}
|
||||
});
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("serverPublicKey");
|
||||
expect(payload).toHaveProperty("salt");
|
||||
client.setSalt(payload.salt);
|
||||
client.setServerPublicKey(payload.serverPublicKey);
|
||||
clientProof = client.getProof(); // called M1
|
||||
});
|
||||
|
||||
test("Login second phase", async () => {
|
||||
const res = await testServer.inject({
|
||||
method: "POST",
|
||||
url: "/api/v3/auth/login2",
|
||||
body: {
|
||||
email: seedData1.email,
|
||||
clientProof
|
||||
}
|
||||
});
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("mfaEnabled");
|
||||
expect(payload).toHaveProperty("token");
|
||||
expect(payload.mfaEnabled).toBeFalsy();
|
||||
});
|
||||
});
|
@ -1,19 +0,0 @@
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
describe("Org V1 Router", async () => {
|
||||
test("GET Org list", async () => {
|
||||
const res = await testServer.inject({
|
||||
method: "GET",
|
||||
url: "/api/v1/organization",
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("organizations");
|
||||
expect(payload).toEqual({
|
||||
organizations: [expect.objectContaining({ name: seedData1.organization.name })]
|
||||
});
|
||||
});
|
||||
});
|
@ -1,132 +0,0 @@
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
import { DEFAULT_PROJECT_ENVS } from "@app/db/seeds/3-project";
|
||||
|
||||
const createProjectEnvironment = async (name: string, slug: string) => {
|
||||
const res = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v1/workspace/${seedData1.project.id}/environments`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
name,
|
||||
slug
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("environment");
|
||||
return payload.environment;
|
||||
};
|
||||
|
||||
const deleteProjectEnvironment = async (envId: string) => {
|
||||
const res = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v1/workspace/${seedData1.project.id}/environments/${envId}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("environment");
|
||||
return payload.environment;
|
||||
};
|
||||
|
||||
describe("Project Environment Router", async () => {
|
||||
test("Get default environments", async () => {
|
||||
const res = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/workspace/${seedData1.project.id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("workspace");
|
||||
// check for default environments
|
||||
expect(payload).toEqual({
|
||||
workspace: expect.objectContaining({
|
||||
name: seedData1.project.name,
|
||||
id: seedData1.project.id,
|
||||
slug: seedData1.project.slug,
|
||||
environments: expect.arrayContaining([
|
||||
expect.objectContaining(DEFAULT_PROJECT_ENVS[0]),
|
||||
expect.objectContaining(DEFAULT_PROJECT_ENVS[1]),
|
||||
expect.objectContaining(DEFAULT_PROJECT_ENVS[2])
|
||||
])
|
||||
})
|
||||
});
|
||||
// ensure only the three default environments exist
|
||||
expect(payload.workspace.environments.length).toBe(3);
|
||||
});
|
||||
|
||||
const mockProjectEnv = { name: "temp", slug: "temp" }; // id will be filled in create op
|
||||
test("Create environment", async () => {
|
||||
const newEnvironment = await createProjectEnvironment(mockProjectEnv.name, mockProjectEnv.slug);
|
||||
expect(newEnvironment).toEqual(
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
name: mockProjectEnv.name,
|
||||
slug: mockProjectEnv.slug,
|
||||
projectId: seedData1.project.id,
|
||||
position: DEFAULT_PROJECT_ENVS.length + 1,
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String)
|
||||
})
|
||||
);
|
||||
await deleteProjectEnvironment(newEnvironment.id);
|
||||
});
|
||||
|
||||
test("Update environment", async () => {
|
||||
const newEnvironment = await createProjectEnvironment(mockProjectEnv.name, mockProjectEnv.slug);
|
||||
const updatedName = { name: "temp#2", slug: "temp2" };
|
||||
const res = await testServer.inject({
|
||||
method: "PATCH",
|
||||
url: `/api/v1/workspace/${seedData1.project.id}/environments/${newEnvironment.id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
name: updatedName.name,
|
||||
slug: updatedName.slug,
|
||||
position: 1
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("environment");
|
||||
expect(payload.environment).toEqual(
|
||||
expect.objectContaining({
|
||||
id: newEnvironment.id,
|
||||
name: updatedName.name,
|
||||
slug: updatedName.slug,
|
||||
projectId: seedData1.project.id,
|
||||
position: 1,
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String)
|
||||
})
|
||||
);
|
||||
await deleteProjectEnvironment(newEnvironment.id);
|
||||
});
|
||||
|
||||
test("Delete environment", async () => {
|
||||
const newEnvironment = await createProjectEnvironment(mockProjectEnv.name, mockProjectEnv.slug);
|
||||
const deletedProjectEnvironment = await deleteProjectEnvironment(newEnvironment.id);
|
||||
expect(deletedProjectEnvironment).toEqual(
|
||||
expect.objectContaining({
|
||||
id: deletedProjectEnvironment.id,
|
||||
name: mockProjectEnv.name,
|
||||
slug: mockProjectEnv.slug,
|
||||
position: 4,
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String)
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
@ -1,165 +0,0 @@
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
const createFolder = async (dto: { path: string; name: string }) => {
|
||||
const res = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v1/folders`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
name: dto.name,
|
||||
path: dto.path
|
||||
}
|
||||
});
|
||||
expect(res.statusCode).toBe(200);
|
||||
return res.json().folder;
|
||||
};
|
||||
|
||||
const deleteFolder = async (dto: { path: string; id: string }) => {
|
||||
const res = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v1/folders/${dto.id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: dto.path
|
||||
}
|
||||
});
|
||||
expect(res.statusCode).toBe(200);
|
||||
return res.json().folder;
|
||||
};
|
||||
|
||||
describe("Secret Folder Router", async () => {
|
||||
test.each([
|
||||
{ name: "folder1", path: "/" }, // one in root
|
||||
{ name: "folder1", path: "/level1/level2" }, // then create a deep one creating intermediate ones
|
||||
{ name: "folder2", path: "/" },
|
||||
{ name: "folder1", path: "/level1/level2" } // this should not create folder return same thing
|
||||
])("Create folder $name in $path", async ({ name, path }) => {
|
||||
const createdFolder = await createFolder({ path, name });
|
||||
// verify the created folder's shape
|
||||
expect(createdFolder).toEqual(
|
||||
expect.objectContaining({
|
||||
name,
|
||||
id: expect.any(String)
|
||||
})
|
||||
);
|
||||
await deleteFolder({ path, id: createdFolder.id });
|
||||
});
|
||||
|
||||
test.each([
|
||||
{
|
||||
path: "/",
|
||||
expected: {
|
||||
folders: [{ name: "folder1" }, { name: "level1" }, { name: "folder2" }],
|
||||
length: 3
|
||||
}
|
||||
},
|
||||
{ path: "/level1/level2", expected: { folders: [{ name: "folder1" }], length: 1 } }
|
||||
])("Get folders $path", async ({ path, expected }) => {
|
||||
const newFolders = await Promise.all(expected.folders.map(({ name }) => createFolder({ name, path })));
|
||||
|
||||
const res = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/folders`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("folders");
|
||||
expect(payload.folders.length >= expected.folders.length).toBeTruthy();
|
||||
expect(payload).toEqual({
|
||||
folders: expect.arrayContaining(expected.folders.map((el) => expect.objectContaining(el)))
|
||||
});
|
||||
|
||||
await Promise.all(newFolders.map(({ id }) => deleteFolder({ path, id })));
|
||||
});
|
||||
|
||||
test("Update a deep folder", async () => {
|
||||
const newFolder = await createFolder({ name: "folder-updated", path: "/level1/level2" });
|
||||
expect(newFolder).toEqual(
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
name: "folder-updated"
|
||||
})
|
||||
);
|
||||
|
||||
const resUpdatedFolders = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/folders`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/level1/level2"
|
||||
}
|
||||
});
|
||||
|
||||
expect(resUpdatedFolders.statusCode).toBe(200);
|
||||
const updatedFolderList = JSON.parse(resUpdatedFolders.payload);
|
||||
expect(updatedFolderList).toHaveProperty("folders");
|
||||
expect(updatedFolderList.folders[0].name).toEqual("folder-updated");
|
||||
|
||||
await deleteFolder({ path: "/level1/level2", id: newFolder.id });
|
||||
});
|
||||
|
||||
test("Delete a deep folder", async () => {
|
||||
const newFolder = await createFolder({ name: "folder-updated", path: "/level1/level2" });
|
||||
const res = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v1/folders/${newFolder.id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/level1/level2"
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("folder");
|
||||
expect(payload.folder).toEqual(
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
name: "folder-updated"
|
||||
})
|
||||
);
|
||||
|
||||
const resUpdatedFolders = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/folders`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/level1/level2"
|
||||
}
|
||||
});
|
||||
|
||||
expect(resUpdatedFolders.statusCode).toBe(200);
|
||||
const updatedFolderList = JSON.parse(resUpdatedFolders.payload);
|
||||
expect(updatedFolderList).toHaveProperty("folders");
|
||||
expect(updatedFolderList.folders.length).toEqual(0);
|
||||
});
|
||||
});
|
@ -1,206 +0,0 @@
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
const createSecretImport = async (importPath: string, importEnv: string) => {
|
||||
const res = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v1/secret-imports`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/",
|
||||
import: {
|
||||
environment: importEnv,
|
||||
path: importPath
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("secretImport");
|
||||
return payload.secretImport;
|
||||
};
|
||||
|
||||
const deleteSecretImport = async (id: string) => {
|
||||
const res = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v1/secret-imports/${id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/"
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("secretImport");
|
||||
return payload.secretImport;
|
||||
};
|
||||
|
||||
describe("Secret Import Router", async () => {
|
||||
test.each([
|
||||
{ importEnv: "prod", importPath: "/" }, // one in root
|
||||
{ importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones
|
||||
])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
|
||||
// verify the created secret import
|
||||
const payload = await createSecretImport(importPath, importEnv);
|
||||
expect(payload).toEqual(
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
importPath: expect.any(String),
|
||||
importEnv: expect.objectContaining({
|
||||
name: expect.any(String),
|
||||
slug: expect.any(String),
|
||||
id: expect.any(String)
|
||||
})
|
||||
})
|
||||
);
|
||||
await deleteSecretImport(payload.id);
|
||||
});
|
||||
|
||||
test("Get secret imports", async () => {
|
||||
const createdImport1 = await createSecretImport("/", "prod");
|
||||
const createdImport2 = await createSecretImport("/", "staging");
|
||||
const res = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/secret-imports`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/"
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("secretImports");
|
||||
expect(payload.secretImports.length).toBe(2);
|
||||
expect(payload.secretImports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
importPath: expect.any(String),
|
||||
importEnv: expect.objectContaining({
|
||||
name: expect.any(String),
|
||||
slug: expect.any(String),
|
||||
id: expect.any(String)
|
||||
})
|
||||
})
|
||||
])
|
||||
);
|
||||
await deleteSecretImport(createdImport1.id);
|
||||
await deleteSecretImport(createdImport2.id);
|
||||
});
|
||||
|
||||
test("Update secret import position", async () => {
|
||||
const prodImportDetails = { path: "/", envSlug: "prod" };
|
||||
const stagingImportDetails = { path: "/", envSlug: "staging" };
|
||||
|
||||
const createdImport1 = await createSecretImport(prodImportDetails.path, prodImportDetails.envSlug);
|
||||
const createdImport2 = await createSecretImport(stagingImportDetails.path, stagingImportDetails.envSlug);
|
||||
|
||||
const updateImportRes = await testServer.inject({
|
||||
method: "PATCH",
|
||||
url: `/api/v1/secret-imports/${createdImport1.id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/",
|
||||
import: {
|
||||
position: 2
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
expect(updateImportRes.statusCode).toBe(200);
|
||||
const payload = JSON.parse(updateImportRes.payload);
|
||||
expect(payload).toHaveProperty("secretImport");
|
||||
// verify the updated import position
|
||||
expect(payload.secretImport).toEqual(
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
importPath: expect.any(String),
|
||||
position: 2,
|
||||
importEnv: expect.objectContaining({
|
||||
name: expect.any(String),
|
||||
slug: expect.stringMatching(prodImportDetails.envSlug),
|
||||
id: expect.any(String)
|
||||
})
|
||||
})
|
||||
);
|
||||
|
||||
const secretImportsListRes = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/secret-imports`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/"
|
||||
}
|
||||
});
|
||||
|
||||
expect(secretImportsListRes.statusCode).toBe(200);
|
||||
const secretImportList = JSON.parse(secretImportsListRes.payload);
|
||||
expect(secretImportList).toHaveProperty("secretImports");
|
||||
expect(secretImportList.secretImports[1].id).toEqual(createdImport1.id);
|
||||
expect(secretImportList.secretImports[0].id).toEqual(createdImport2.id);
|
||||
|
||||
await deleteSecretImport(createdImport1.id);
|
||||
await deleteSecretImport(createdImport2.id);
|
||||
});
|
||||
|
||||
test("Delete secret import position", async () => {
|
||||
const createdImport1 = await createSecretImport("/", "prod");
|
||||
const createdImport2 = await createSecretImport("/", "staging");
|
||||
const deletedImport = await deleteSecretImport(createdImport1.id);
|
||||
// verify the shape of the deleted import
|
||||
expect(deletedImport).toEqual(
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
importPath: expect.any(String),
|
||||
importEnv: expect.objectContaining({
|
||||
name: expect.any(String),
|
||||
slug: expect.any(String),
|
||||
id: expect.any(String)
|
||||
})
|
||||
})
|
||||
);
|
||||
|
||||
const secretImportsListRes = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/secret-imports`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/"
|
||||
}
|
||||
});
|
||||
|
||||
expect(secretImportsListRes.statusCode).toBe(200);
|
||||
const secretImportList = JSON.parse(secretImportsListRes.payload);
|
||||
expect(secretImportList).toHaveProperty("secretImports");
|
||||
expect(secretImportList.secretImports.length).toEqual(1);
|
||||
expect(secretImportList.secretImports[0].position).toEqual(1);
|
||||
|
||||
await deleteSecretImport(createdImport2.id);
|
||||
});
|
||||
});
|
@ -1,9 +0,0 @@
|
||||
describe("Status V1 Router", async () => {
|
||||
test("Simple check", async () => {
|
||||
const res = await testServer.inject({
|
||||
method: "GET",
|
||||
url: "/api/status"
|
||||
});
|
||||
expect(res.statusCode).toBe(200);
|
||||
});
|
||||
});
|
@ -1,579 +0,0 @@
|
||||
import crypto from "node:crypto";
|
||||
|
||||
import { SecretType, TSecrets } from "@app/db/schemas";
|
||||
import { decryptSecret, encryptSecret, getUserPrivateKey, seedData1 } from "@app/db/seed-data";
|
||||
import { decryptAsymmetric, decryptSymmetric128BitHexKeyUTF8, encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
|
||||
|
||||
const createServiceToken = async (
|
||||
scopes: { environment: string; secretPath: string }[],
|
||||
permissions: ("read" | "write")[]
|
||||
) => {
|
||||
const projectKeyRes = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v2/workspace/${seedData1.project.id}/encrypted-key`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
const projectKeyEnc = JSON.parse(projectKeyRes.payload);
|
||||
|
||||
const userInfoRes = await testServer.inject({
|
||||
method: "GET",
|
||||
url: "/api/v2/users/me",
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
const { user: userInfo } = JSON.parse(userInfoRes.payload);
|
||||
const privateKey = await getUserPrivateKey(seedData1.password, userInfo);
|
||||
const projectKey = decryptAsymmetric({
|
||||
ciphertext: projectKeyEnc.encryptedKey,
|
||||
nonce: projectKeyEnc.nonce,
|
||||
publicKey: projectKeyEnc.sender.publicKey,
|
||||
privateKey
|
||||
});
|
||||
|
||||
const randomBytes = crypto.randomBytes(16).toString("hex");
|
||||
const { ciphertext, iv, tag } = encryptSymmetric128BitHexKeyUTF8(projectKey, randomBytes);
|
||||
const serviceTokenRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: "/api/v2/service-token",
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
name: "test-token",
|
||||
workspaceId: seedData1.project.id,
|
||||
scopes,
|
||||
encryptedKey: ciphertext,
|
||||
iv,
|
||||
tag,
|
||||
permissions,
|
||||
expiresIn: null
|
||||
}
|
||||
});
|
||||
expect(serviceTokenRes.statusCode).toBe(200);
|
||||
const serviceTokenInfo = serviceTokenRes.json();
|
||||
expect(serviceTokenInfo).toHaveProperty("serviceToken");
|
||||
expect(serviceTokenInfo).toHaveProperty("serviceTokenData");
|
||||
return `${serviceTokenInfo.serviceToken}.${randomBytes}`;
|
||||
};
|
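The helper above returns the service token in the combined form `<serviceToken>.<randomBytes>`: the random hex key that symmetrically encrypts the project key is appended to the API token itself, and the `beforeAll` block later splits the combined string on `.` and uses the fourth segment to decrypt the project key returned by `GET /api/v2/service-token`. A compact sketch of just that split-and-recover relationship (the token value is a placeholder, and the API token shape is illustrative):

```typescript
// Sketch of the token layout used above (placeholder values, not a real token).
const randomKeyHex = "aabbccddeeff00112233445566778899"; // the 16 random bytes, hex-encoded
const apiServiceToken = "st.tokenId.tokenSecret"; // illustrative shape of the token returned by the API

const combined = `${apiServiceToken}.${randomKeyHex}`;

// The consumer splits the combined token and recovers the symmetric key from the last segment:
const parts = combined.split(".");
const symmetricKey = parts[3]; // === randomKeyHex
// projectKey = decryptSymmetric128BitHexKeyUTF8({ key: symmetricKey, ...encryptedProjectKeyFromApi })
console.log(symmetricKey === randomKeyHex); // true
```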
||||
|
||||
const deleteServiceToken = async () => {
|
||||
const serviceTokenListRes = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/workspace/${seedData1.project.id}/service-token-data`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
expect(serviceTokenListRes.statusCode).toBe(200);
|
||||
const serviceTokens = JSON.parse(serviceTokenListRes.payload).serviceTokenData as { name: string; id: string }[];
|
||||
expect(serviceTokens.length).toBeGreaterThan(0);
|
||||
const serviceTokenInfo = serviceTokens.find(({ name }) => name === "test-token");
|
||||
expect(serviceTokenInfo).toBeDefined();
|
||||
|
||||
const deleteTokenRes = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v2/service-token/${serviceTokenInfo?.id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
expect(deleteTokenRes.statusCode).toBe(200);
|
||||
};
|
||||
|
||||
const createSecret = async (dto: {
|
||||
projectKey: string;
|
||||
path: string;
|
||||
key: string;
|
||||
value: string;
|
||||
comment: string;
|
||||
type?: SecretType;
|
||||
token: string;
|
||||
}) => {
|
||||
const createSecretReqBody = {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
type: dto.type || SecretType.Shared,
|
||||
secretPath: dto.path,
|
||||
...encryptSecret(dto.projectKey, dto.key, dto.value, dto.comment)
|
||||
};
|
||||
const createSecRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v3/secrets/${dto.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.token}`
|
||||
},
|
||||
body: createSecretReqBody
|
||||
});
|
||||
expect(createSecRes.statusCode).toBe(200);
|
||||
const createdSecretPayload = JSON.parse(createSecRes.payload);
|
||||
expect(createdSecretPayload).toHaveProperty("secret");
|
||||
return createdSecretPayload.secret;
|
||||
};
|
||||
|
||||
const deleteSecret = async (dto: { path: string; key: string; token: string }) => {
|
||||
const deleteSecRes = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v3/secrets/${dto.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.token}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: dto.path
|
||||
}
|
||||
});
|
||||
expect(deleteSecRes.statusCode).toBe(200);
|
||||
const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
|
||||
expect(updatedSecretPayload).toHaveProperty("secret");
|
||||
return updatedSecretPayload.secret;
|
||||
};
|
||||
|
||||
describe("Service token secret ops", async () => {
|
||||
let serviceToken = "";
|
||||
let projectKey = "";
|
||||
let folderId = "";
|
||||
beforeAll(async () => {
|
||||
serviceToken = await createServiceToken(
|
||||
[{ secretPath: "/**", environment: seedData1.environment.slug }],
|
||||
["read", "write"]
|
||||
);
|
||||
|
||||
// this ensures CLI service token decryption works as expected
|
||||
const serviceTokenInfoRes = await testServer.inject({
|
||||
method: "GET",
|
||||
url: "/api/v2/service-token",
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
}
|
||||
});
|
||||
expect(serviceTokenInfoRes.statusCode).toBe(200);
|
||||
const serviceTokenInfo = serviceTokenInfoRes.json();
|
||||
const serviceTokenParts = serviceToken.split(".");
|
||||
projectKey = decryptSymmetric128BitHexKeyUTF8({
|
||||
key: serviceTokenParts[3],
|
||||
tag: serviceTokenInfo.tag,
|
||||
ciphertext: serviceTokenInfo.encryptedKey,
|
||||
iv: serviceTokenInfo.iv
|
||||
});
|
||||
|
||||
// create a deep folder
|
||||
const folderCreate = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v1/folders`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
name: "folder",
|
||||
path: "/nested1/nested2"
|
||||
}
|
||||
});
|
||||
expect(folderCreate.statusCode).toBe(200);
|
||||
folderId = folderCreate.json().folder.id;
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
await deleteServiceToken();
|
||||
|
||||
// create a deep folder
|
||||
const deleteFolder = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v1/folders/${folderId}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/nested1/nested2"
|
||||
}
|
||||
});
|
||||
expect(deleteFolder.statusCode).toBe(200);
|
||||
});
|
||||
|
||||
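// one secret at the project root and one inside the nested folder created in beforeAll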
const testSecrets = [
|
||||
{
|
||||
path: "/",
|
||||
secret: {
|
||||
key: "ST-SEC",
|
||||
value: "something-secret",
|
||||
comment: "some comment"
|
||||
}
|
||||
},
|
||||
{
|
||||
path: "/nested1/nested2/folder",
|
||||
secret: {
|
||||
key: "NESTED-ST-SEC",
|
||||
value: "something-secret",
|
||||
comment: "some comment"
|
||||
}
|
||||
}
|
||||
];
|
||||
|
||||
const getSecrets = async (environment: string, secretPath = "/") => {
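// list secrets at secretPath with the service token and decrypt each one with the project key so tests can assert on plaintext values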
|
||||
const res = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v3/secrets`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
query: {
|
||||
secretPath,
|
||||
environment,
|
||||
workspaceId: seedData1.project.id
|
||||
}
|
||||
});
|
||||
const secrets: TSecrets[] = JSON.parse(res.payload).secrets || [];
|
||||
return secrets.map((el) => ({ ...decryptSecret(projectKey, el), type: el.type }));
|
||||
};
|
||||
|
||||
test.each(testSecrets)("Create secret in path $path", async ({ secret, path }) => {
|
||||
const createdSecret = await createSecret({ projectKey, path, ...secret, token: serviceToken });
|
||||
const decryptedSecret = decryptSecret(projectKey, createdSecret);
|
||||
expect(decryptedSecret.key).toEqual(secret.key);
|
||||
expect(decryptedSecret.value).toEqual(secret.value);
|
||||
expect(decryptedSecret.comment).toEqual(secret.comment);
|
||||
expect(decryptedSecret.version).toEqual(1);
|
||||
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: secret.key,
|
||||
value: secret.value,
|
||||
type: SecretType.Shared
|
||||
})
|
||||
])
|
||||
);
|
||||
await deleteSecret({ path, key: secret.key, token: serviceToken });
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Get secret by name in path $path", async ({ secret, path }) => {
|
||||
await createSecret({ projectKey, path, ...secret, token: serviceToken });
|
||||
|
||||
const getSecByNameRes = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v3/secrets/${secret.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
query: {
|
||||
secretPath: path,
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug
|
||||
}
|
||||
});
|
||||
expect(getSecByNameRes.statusCode).toBe(200);
|
||||
const getSecretByNamePayload = JSON.parse(getSecByNameRes.payload);
|
||||
expect(getSecretByNamePayload).toHaveProperty("secret");
|
||||
const decryptedSecret = decryptSecret(projectKey, getSecretByNamePayload.secret);
|
||||
expect(decryptedSecret.key).toEqual(secret.key);
|
||||
expect(decryptedSecret.value).toEqual(secret.value);
|
||||
expect(decryptedSecret.comment).toEqual(secret.comment);
|
||||
|
||||
await deleteSecret({ path, key: secret.key, token: serviceToken });
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Update secret in path $path", async ({ path, secret }) => {
|
||||
await createSecret({ projectKey, path, ...secret, token: serviceToken });
|
||||
const updateSecretReqBody = {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
type: SecretType.Shared,
|
||||
secretPath: path,
|
||||
...encryptSecret(projectKey, secret.key, "new-value", secret.comment)
|
||||
};
|
||||
const updateSecRes = await testServer.inject({
|
||||
method: "PATCH",
|
||||
url: `/api/v3/secrets/${secret.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
body: updateSecretReqBody
|
||||
});
|
||||
expect(updateSecRes.statusCode).toBe(200);
|
||||
const updatedSecretPayload = JSON.parse(updateSecRes.payload);
|
||||
expect(updatedSecretPayload).toHaveProperty("secret");
|
||||
const decryptedSecret = decryptSecret(projectKey, updatedSecretPayload.secret);
|
||||
expect(decryptedSecret.key).toEqual(secret.key);
|
||||
expect(decryptedSecret.value).toEqual("new-value");
|
||||
expect(decryptedSecret.comment).toEqual(secret.comment);
|
||||
|
||||
// listing secrets should return the updated value
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: secret.key,
|
||||
value: "new-value",
|
||||
type: SecretType.Shared
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecret({ path, key: secret.key, token: serviceToken });
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Delete secret in path $path", async ({ secret, path }) => {
|
||||
await createSecret({ projectKey, path, ...secret, token: serviceToken });
|
||||
const deletedSecret = await deleteSecret({ path, key: secret.key, token: serviceToken });
|
||||
const decryptedSecret = decryptSecret(projectKey, deletedSecret);
|
||||
expect(decryptedSecret.key).toEqual(secret.key);
|
||||
|
||||
// deleting a shared secret should also delete the personal ones
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.not.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: secret.key,
|
||||
type: SecretType.Shared
|
||||
})
|
||||
])
|
||||
);
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Bulk create secrets in path $path", async ({ secret, path }) => {
|
||||
const createSharedSecRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v3/secrets/batch`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: path,
|
||||
secrets: Array.from(Array(5)).map((_e, i) => ({
|
||||
secretName: `BULK-${secret.key}-${i + 1}`,
|
||||
...encryptSecret(projectKey, `BULK-${secret.key}-${i + 1}`, secret.value, secret.comment)
|
||||
}))
|
||||
}
|
||||
});
|
||||
expect(createSharedSecRes.statusCode).toBe(200);
|
||||
const createSharedSecPayload = JSON.parse(createSharedSecRes.payload);
|
||||
expect(createSharedSecPayload).toHaveProperty("secrets");
|
||||
|
||||
// bulk created ones should exist
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
expect.objectContaining({
|
||||
key: `BULK-${secret.key}-${i + 1}`,
|
||||
type: SecretType.Shared
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
await Promise.all(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
deleteSecret({ path, token: serviceToken, key: `BULK-${secret.key}-${i + 1}` })
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Bulk create fail on existing secret in path $path", async ({ secret, path }) => {
|
||||
await createSecret({ projectKey, ...secret, key: `BULK-${secret.key}-1`, path, token: serviceToken });
|
||||
|
||||
const createSharedSecRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v3/secrets/batch`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: path,
|
||||
secrets: Array.from(Array(5)).map((_e, i) => ({
|
||||
secretName: `BULK-${secret.key}-${i + 1}`,
|
||||
...encryptSecret(projectKey, `BULK-${secret.key}-${i + 1}`, secret.value, secret.comment)
|
||||
}))
|
||||
}
|
||||
});
|
||||
expect(createSharedSecRes.statusCode).toBe(400);
|
||||
|
||||
await deleteSecret({ path, key: `BULK-${secret.key}-1`, token: serviceToken });
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Bulk update secrets in path $path", async ({ secret, path }) => {
|
||||
await Promise.all(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
createSecret({ projectKey, token: serviceToken, ...secret, key: `BULK-${secret.key}-${i + 1}`, path })
|
||||
)
|
||||
);
|
||||
|
||||
const updateSharedSecRes = await testServer.inject({
|
||||
method: "PATCH",
|
||||
url: `/api/v3/secrets/batch`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: path,
|
||||
secrets: Array.from(Array(5)).map((_e, i) => ({
|
||||
secretName: `BULK-${secret.key}-${i + 1}`,
|
||||
...encryptSecret(projectKey, `BULK-${secret.key}-${i + 1}`, "update-value", secret.comment)
|
||||
}))
|
||||
}
|
||||
});
|
||||
expect(updateSharedSecRes.statusCode).toBe(200);
|
||||
const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
|
||||
expect(updateSharedSecPayload).toHaveProperty("secrets");
|
||||
|
||||
// bulk updated ones should exist with the new value
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
expect.objectContaining({
|
||||
key: `BULK-${secret.key}-${i + 1}`,
|
||||
value: "update-value",
|
||||
type: SecretType.Shared
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
await Promise.all(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}`, token: serviceToken })
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Bulk delete secrets in path $path", async ({ secret, path }) => {
|
||||
await Promise.all(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
createSecret({ projectKey, token: serviceToken, ...secret, key: `BULK-${secret.key}-${i + 1}`, path })
|
||||
)
|
||||
);
|
||||
|
||||
const deletedSharedSecRes = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v3/secrets/batch`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: path,
|
||||
secrets: Array.from(Array(5)).map((_e, i) => ({
|
||||
secretName: `BULK-${secret.key}-${i + 1}`
|
||||
}))
|
||||
}
|
||||
});
|
||||
|
||||
expect(deletedSharedSecRes.statusCode).toBe(200);
|
||||
const deletedSecretPayload = JSON.parse(deletedSharedSecRes.payload);
|
||||
expect(deletedSecretPayload).toHaveProperty("secrets");
|
||||
|
||||
// bulk deleted ones should no longer exist
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.not.arrayContaining(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
expect.objectContaining({
|
||||
key: `BULK-${secret.key}-${i + 1}`,
|
||||
type: SecretType.Shared
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Service token fail cases", async () => {
|
||||
test("Unauthorized secret path access", async () => {
|
||||
const serviceToken = await createServiceToken(
|
||||
[{ secretPath: "/", environment: seedData1.environment.slug }],
|
||||
["read", "write"]
|
||||
);
|
||||
const fetchSecrets = await testServer.inject({
|
||||
method: "GET",
|
||||
url: "/api/v3/secrets",
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: "/nested/deep"
|
||||
},
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
}
|
||||
});
|
||||
expect(fetchSecrets.statusCode).toBe(401);
|
||||
expect(fetchSecrets.json().error).toBe("PermissionDenied");
|
||||
await deleteServiceToken();
|
||||
});
|
||||
|
||||
test("Unauthorized secret environment access", async () => {
|
||||
const serviceToken = await createServiceToken(
|
||||
[{ secretPath: "/", environment: seedData1.environment.slug }],
|
||||
["read", "write"]
|
||||
);
|
||||
const fetchSecrets = await testServer.inject({
|
||||
method: "GET",
|
||||
url: "/api/v3/secrets",
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: "prod",
|
||||
secretPath: "/"
|
||||
},
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
}
|
||||
});
|
||||
expect(fetchSecrets.statusCode).toBe(401);
|
||||
expect(fetchSecrets.json().error).toBe("PermissionDenied");
|
||||
await deleteServiceToken();
|
||||
});
|
||||
|
||||
test("Unauthorized write operation", async () => {
|
||||
const serviceToken = await createServiceToken(
|
||||
[{ secretPath: "/", environment: seedData1.environment.slug }],
|
||||
["read"]
|
||||
);
|
||||
const writeSecrets = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v3/secrets/NEW`,
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
type: SecretType.Shared,
|
||||
secretPath: "/",
|
||||
// the project key doesn't matter here because the request fails earlier due to read-only access
|
||||
...encryptSecret(crypto.randomBytes(16).toString("hex"), "NEW", "value", "")
|
||||
},
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
}
|
||||
});
|
||||
expect(writeSecrets.statusCode).toBe(401);
|
||||
expect(writeSecrets.json().error).toBe("PermissionDenied");
|
||||
|
||||
// but read access should still work fine
|
||||
const fetchSecrets = await testServer.inject({
|
||||
method: "GET",
|
||||
url: "/api/v3/secrets",
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: "/"
|
||||
},
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
}
|
||||
});
|
||||
expect(fetchSecrets.statusCode).toBe(200);
|
||||
await deleteServiceToken();
|
||||
});
|
||||
});
|
File diff suppressed because it is too large
@ -1,87 +0,0 @@
|
||||
// eslint-disable-next-line
|
||||
import "ts-node/register";
|
||||
|
||||
import dotenv from "dotenv";
|
||||
import jwt from "jsonwebtoken";
|
||||
import path from "path";
|
||||
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
import { initEnvConfig } from "@app/lib/config/env";
|
||||
import { initLogger } from "@app/lib/logger";
|
||||
import { main } from "@app/server/app";
|
||||
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
|
||||
|
||||
import { mockQueue } from "./mocks/queue";
|
||||
import { mockSmtpServer } from "./mocks/smtp";
|
||||
import { mockKeyStore } from "./mocks/keystore";
|
||||
import { initDbConnection } from "@app/db";
|
||||
|
||||
dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
|
||||
export default {
|
||||
name: "knex-env",
|
||||
transformMode: "ssr",
|
||||
async setup() {
|
||||
const logger = await initLogger();
|
||||
const cfg = initEnvConfig(logger);
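// connect to the test database from .env.test; the migrations and seeds below run against it before the server boots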
|
||||
const db = initDbConnection({
|
||||
dbConnectionUri: cfg.DB_CONNECTION_URI,
|
||||
dbRootCert: cfg.DB_ROOT_CERT
|
||||
});
|
||||
|
||||
try {
|
||||
await db.migrate.latest({
|
||||
directory: path.join(__dirname, "../src/db/migrations"),
|
||||
extension: "ts",
|
||||
tableName: "infisical_migrations"
|
||||
});
|
||||
await db.seed.run({
|
||||
directory: path.join(__dirname, "../src/db/seeds"),
|
||||
extension: "ts"
|
||||
});
|
||||
const smtp = mockSmtpServer();
|
||||
const queue = mockQueue();
|
||||
const keyStore = mockKeyStore();
|
||||
const server = await main({ db, smtp, logger, queue, keyStore });
|
||||
// @ts-expect-error type
|
||||
globalThis.testServer = server;
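// sign a JWT access token for the seeded user so tests can call authenticated endpoints directly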
|
||||
// @ts-expect-error type
|
||||
globalThis.jwtAuthToken = jwt.sign(
|
||||
{
|
||||
authTokenType: AuthTokenType.ACCESS_TOKEN,
|
||||
userId: seedData1.id,
|
||||
tokenVersionId: seedData1.token.id,
|
||||
authMethod: AuthMethod.EMAIL,
|
||||
organizationId: seedData1.organization.id,
|
||||
accessVersion: 1
|
||||
},
|
||||
cfg.AUTH_SECRET,
|
||||
{ expiresIn: cfg.JWT_AUTH_LIFETIME }
|
||||
);
|
||||
} catch (error) {
|
||||
console.log("[TEST] Error setting up environment", error);
|
||||
await db.destroy();
|
||||
throw error;
|
||||
}
|
||||
// custom setup
|
||||
return {
|
||||
async teardown() {
|
||||
// @ts-expect-error type
|
||||
await globalThis.testServer.close();
|
||||
// @ts-expect-error type
|
||||
delete globalThis.testServer;
|
||||
// @ts-expect-error type
|
||||
delete globalThis.jwtAuthToken;
|
||||
// called after all tests with this env have been run
|
||||
await db.migrate.rollback(
|
||||
{
|
||||
directory: path.join(__dirname, "../src/db/migrations"),
|
||||
extension: "ts",
|
||||
tableName: "infisical_migrations"
|
||||
},
|
||||
true
|
||||
);
|
||||
await db.destroy();
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
52
backend/environment.d.ts
vendored
Normal file
@ -0,0 +1,52 @@
|
||||
export {};
|
||||
|
||||
declare global {
|
||||
namespace NodeJS {
|
||||
interface ProcessEnv {
|
||||
PORT: string;
|
||||
ENCRYPTION_KEY: string;
|
||||
SALT_ROUNDS: string;
|
||||
JWT_AUTH_LIFETIME: string;
|
||||
JWT_AUTH_SECRET: string;
|
||||
JWT_REFRESH_LIFETIME: string;
|
||||
JWT_REFRESH_SECRET: string;
|
||||
JWT_SERVICE_SECRET: string;
|
||||
JWT_SIGNUP_LIFETIME: string;
|
||||
JWT_SIGNUP_SECRET: string;
|
||||
MONGO_URL: string;
|
||||
NODE_ENV: 'development' | 'staging' | 'testing' | 'production';
|
||||
VERBOSE_ERROR_OUTPUT: string;
|
||||
LOKI_HOST: string;
|
||||
CLIENT_ID_HEROKU: string;
|
||||
CLIENT_ID_VERCEL: string;
|
||||
CLIENT_ID_NETLIFY: string;
|
||||
CLIENT_ID_GITHUB: string;
|
||||
CLIENT_ID_GITLAB: string;
|
||||
CLIENT_SECRET_HEROKU: string;
|
||||
CLIENT_SECRET_VERCEL: string;
|
||||
CLIENT_SECRET_NETLIFY: string;
|
||||
CLIENT_SECRET_GITHUB: string;
|
||||
CLIENT_SECRET_GITLAB: string;
|
||||
CLIENT_SLUG_VERCEL: string;
|
||||
POSTHOG_HOST: string;
|
||||
POSTHOG_PROJECT_API_KEY: string;
|
||||
SENTRY_DSN: string;
|
||||
SITE_URL: string;
|
||||
SMTP_HOST: string;
|
||||
SMTP_SECURE: string;
|
||||
SMTP_PORT: string;
|
||||
SMTP_USERNAME: string;
|
||||
SMTP_PASSWORD: string;
|
||||
SMTP_FROM_ADDRESS: string;
|
||||
SMTP_FROM_NAME: string;
|
||||
STRIPE_PRODUCT_STARTER: string;
|
||||
STRIPE_PRODUCT_TEAM: string;
|
||||
STRIPE_PRODUCT_PRO: string;
|
||||
STRIPE_PUBLISHABLE_KEY: string;
|
||||
STRIPE_SECRET_KEY: string;
|
||||
STRIPE_WEBHOOK_SECRET: string;
|
||||
TELEMETRY_ENABLED: string;
|
||||
LICENSE_KEY: string;
|
||||
}
|
||||
}
|
||||
}
|
24
backend/healthcheck.js
Normal file
@ -0,0 +1,24 @@
const http = require('http');
const PORT = process.env.PORT || 4000;
const options = {
  host: 'localhost',
  port: PORT,
  timeout: 2000,
  path: '/healthcheck'
};

const healthCheck = http.request(options, (res) => {
  console.log(`HEALTHCHECK STATUS: ${res.statusCode}`);
  if (res.statusCode == 200) {
    process.exit(0);
  } else {
    process.exit(1);
  }
});

healthCheck.on('error', function (err) {
  console.error(`HEALTH CHECK ERROR: ${err}`);
  process.exit(1);
});

healthCheck.end();
BIN
backend/img/dashboard.png
Normal file
Binary file not shown.
Size: 493 KiB
9
backend/jest.config.ts
Normal file
@ -0,0 +1,9 @@
export default {
  preset: 'ts-jest',
  testEnvironment: 'node',
  collectCoverageFrom: ['src/*.{js,ts}', '!**/node_modules/**'],
  modulePaths: ['<rootDir>/src'],
  testMatch: ['<rootDir>/tests/**/*.test.ts'],
  setupFiles: ['<rootDir>/test-resources/env-vars.js'],
  setupFilesAfterEnv: ['<rootDir>/tests/setupTests.ts']
};
@ -1,6 +1,6 @@
|
||||
{
|
||||
"watch": ["src"],
|
||||
"ext": ".ts,.js",
|
||||
"ignore": [],
|
||||
"exec": "tsx ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine"
|
||||
}
|
||||
"watch": ["src"],
|
||||
"ext": ".ts,.js",
|
||||
"ignore": [],
|
||||
"exec": "ts-node ./src/index.ts"
|
||||
}
|
34784
backend/package-lock.json
generated
File diff suppressed because it is too large
@ -1,182 +1,114 @@
|
||||
{
|
||||
"name": "backend",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "./dist/main.mjs",
|
||||
"bin": "dist/main.js",
|
||||
"pkg": {
|
||||
"scripts": [
|
||||
"dist/**/*.js",
|
||||
"../frontend/node_modules/next/**/*.js",
|
||||
"../frontend/.next/*/**/*.js",
|
||||
"../frontend/node_modules/next/dist/server/**/*.js",
|
||||
"../frontend/node_modules/@fortawesome/fontawesome-svg-core/**/*.js"
|
||||
],
|
||||
"assets": [
|
||||
"dist/**",
|
||||
"!dist/**/*.js",
|
||||
"node_modules/**",
|
||||
"../frontend/node_modules/**",
|
||||
"../frontend/.next/**",
|
||||
"!../frontend/node_modules/next/dist/server/**/*.js",
|
||||
"../frontend/node_modules/@fortawesome/fontawesome-svg-core/**/*",
|
||||
"../frontend/public/**"
|
||||
],
|
||||
"outputPath": "binary"
|
||||
},
|
||||
"scripts": {
|
||||
"binary:build": "npm run binary:clean && npm run build:frontend && npm run build && npm run binary:babel-frontend && npm run binary:babel-backend && npm run binary:rename-imports",
|
||||
"binary:package": "pkg --no-bytecode --public-packages \"*\" --public --target host .",
|
||||
"binary:babel-backend": " babel ./dist -d ./dist",
|
||||
"binary:babel-frontend": "babel --copy-files ../frontend/.next/server -d ../frontend/.next/server",
|
||||
"binary:clean": "rm -rf ./dist && rm -rf ./binary",
|
||||
"binary:rename-imports": "ts-node ./scripts/rename-mjs.ts",
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"dev": "tsx watch --clear-screen=false ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine",
|
||||
"dev:docker": "nodemon",
|
||||
"build": "tsup",
|
||||
"build:frontend": "npm run build --prefix ../frontend",
|
||||
"start": "node dist/main.mjs",
|
||||
"type:check": "tsc --noEmit",
|
||||
"lint:fix": "eslint --fix --ext js,ts ./src",
|
||||
"lint": "eslint 'src/**/*.ts'",
|
||||
"test:e2e": "vitest run -c vitest.e2e.config.ts",
|
||||
"test:e2e-watch": "vitest -c vitest.e2e.config.ts",
|
||||
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
|
||||
"generate:component": "tsx ./scripts/create-backend-file.ts",
|
||||
"generate:schema": "tsx ./scripts/generate-schema-types.ts",
|
||||
"migration:new": "tsx ./scripts/create-migration.ts",
|
||||
"migration:up": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
|
||||
"migration:down": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
|
||||
"migration:list": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
|
||||
"migration:latest": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
|
||||
"migration:rollback": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
|
||||
"seed:new": "tsx ./scripts/create-seed-file.ts",
|
||||
"seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
|
||||
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.18.10",
|
||||
"@babel/core": "^7.18.10",
|
||||
"@babel/plugin-syntax-import-attributes": "^7.24.7",
|
||||
"@babel/preset-env": "^7.18.10",
|
||||
"@babel/preset-react": "^7.24.7",
|
||||
"@types/bcrypt": "^5.0.2",
|
||||
"@types/jmespath": "^0.15.2",
|
||||
"@types/jsonwebtoken": "^9.0.5",
|
||||
"@types/jsrp": "^0.2.6",
|
||||
"@types/libsodium-wrappers": "^0.7.13",
|
||||
"@types/lodash.isequal": "^4.5.8",
|
||||
"@types/node": "^20.9.5",
|
||||
"@types/nodemailer": "^6.4.14",
|
||||
"@types/passport-github": "^1.1.12",
|
||||
"@types/passport-google-oauth20": "^2.0.14",
|
||||
"@types/pg": "^8.10.9",
|
||||
"@types/picomatch": "^2.3.3",
|
||||
"@types/prompt-sync": "^4.2.3",
|
||||
"@types/resolve": "^1.20.6",
|
||||
"@types/uuid": "^9.0.7",
|
||||
"@typescript-eslint/eslint-plugin": "^6.20.0",
|
||||
"@typescript-eslint/parser": "^6.20.0",
|
||||
"@yao-pkg/pkg": "^5.12.0",
|
||||
"babel-plugin-transform-import-meta": "^2.2.1",
|
||||
"eslint": "^8.56.0",
|
||||
"eslint-config-airbnb-base": "^15.0.0",
|
||||
"eslint-config-airbnb-typescript": "^17.1.0",
|
||||
"eslint-config-prettier": "^9.1.0",
|
||||
"eslint-import-resolver-typescript": "^3.6.1",
|
||||
"eslint-plugin-import": "^2.29.1",
|
||||
"eslint-plugin-prettier": "^5.1.3",
|
||||
"eslint-plugin-simple-import-sort": "^10.0.0",
|
||||
"nodemon": "^3.0.2",
|
||||
"pino-pretty": "^10.2.3",
|
||||
"prompt-sync": "^4.2.0",
|
||||
"rimraf": "^5.0.5",
|
||||
"ts-node": "^10.9.2",
|
||||
"tsc-alias": "^1.8.8",
|
||||
"tsconfig-paths": "^4.2.0",
|
||||
"tsup": "^8.0.1",
|
||||
"tsx": "^4.4.0",
|
||||
"typescript": "^5.3.2",
|
||||
"vite-tsconfig-paths": "^4.2.2",
|
||||
"vitest": "^1.2.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"@aws-sdk/client-iam": "^3.525.0",
|
||||
"@aws-sdk/client-kms": "^3.609.0",
|
||||
"@aws-sdk/client-secrets-manager": "^3.504.0",
|
||||
"@aws-sdk/client-sts": "^3.600.0",
|
||||
"@casl/ability": "^6.5.0",
|
||||
"@fastify/cookie": "^9.3.1",
|
||||
"@fastify/cors": "^8.5.0",
|
||||
"@fastify/etag": "^5.1.0",
|
||||
"@fastify/formbody": "^7.4.0",
|
||||
"@fastify/helmet": "^11.1.1",
|
||||
"@fastify/passport": "^2.4.0",
|
||||
"@fastify/rate-limit": "^9.0.0",
|
||||
"@fastify/session": "^10.7.0",
|
||||
"@fastify/swagger": "^8.14.0",
|
||||
"@fastify/swagger-ui": "^2.1.0",
|
||||
"@node-saml/passport-saml": "^4.0.4",
|
||||
"@octokit/rest": "^20.0.2",
|
||||
"@octokit/webhooks-types": "^7.3.1",
|
||||
"@peculiar/asn1-schema": "^2.3.8",
|
||||
"@peculiar/x509": "^1.10.0",
|
||||
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
|
||||
"@sindresorhus/slugify": "1.1.0",
|
||||
"@team-plain/typescript-sdk": "^4.6.1",
|
||||
"@ucast/mongo2js": "^1.3.4",
|
||||
"ajv": "^8.12.0",
|
||||
"argon2": "^0.31.2",
|
||||
"aws-sdk": "^2.1553.0",
|
||||
"axios": "^1.6.7",
|
||||
"axios-retry": "^4.0.0",
|
||||
"bcrypt": "^5.1.1",
|
||||
"bullmq": "^5.4.2",
|
||||
"cassandra-driver": "^4.7.2",
|
||||
"connect-redis": "^7.1.1",
|
||||
"cron": "^3.1.7",
|
||||
"dotenv": "^16.4.1",
|
||||
"fastify": "^4.26.0",
|
||||
"fastify-plugin": "^4.5.1",
|
||||
"google-auth-library": "^9.9.0",
|
||||
"googleapis": "^137.1.0",
|
||||
"handlebars": "^4.7.8",
|
||||
"ioredis": "^5.3.2",
|
||||
"jmespath": "^0.16.0",
|
||||
"jsonwebtoken": "^9.0.2",
|
||||
"@aws-sdk/client-secrets-manager": "^3.309.0",
|
||||
"@godaddy/terminus": "^4.11.2",
|
||||
"@octokit/rest": "^19.0.5",
|
||||
"@sentry/node": "^7.41.0",
|
||||
"@sentry/tracing": "^7.47.0",
|
||||
"@types/crypto-js": "^4.1.1",
|
||||
"@types/libsodium-wrappers": "^0.7.10",
|
||||
"argon2": "^0.30.3",
|
||||
"await-to-js": "^3.0.0",
|
||||
"aws-sdk": "^2.1338.0",
|
||||
"axios": "^1.3.5",
|
||||
"axios-retry": "^3.4.0",
|
||||
"bcrypt": "^5.1.0",
|
||||
"bigint-conversion": "^2.4.0",
|
||||
"builder-pattern": "^2.2.0",
|
||||
"cookie-parser": "^1.4.6",
|
||||
"cors": "^2.8.5",
|
||||
"crypto-js": "^4.1.1",
|
||||
"dotenv": "^16.0.1",
|
||||
"express": "^4.18.1",
|
||||
"express-rate-limit": "^6.7.0",
|
||||
"express-validator": "^6.14.2",
|
||||
"handlebars": "^4.7.7",
|
||||
"helmet": "^5.1.1",
|
||||
"infisical-node": "^1.1.3",
|
||||
"js-yaml": "^4.1.0",
|
||||
"jsonwebtoken": "^9.0.0",
|
||||
"jsrp": "^0.2.4",
|
||||
"jwks-rsa": "^3.1.0",
|
||||
"knex": "^3.0.1",
|
||||
"ldapjs": "^3.0.7",
|
||||
"libsodium-wrappers": "^0.7.13",
|
||||
"lodash.isequal": "^4.5.0",
|
||||
"ms": "^2.1.3",
|
||||
"mysql2": "^3.9.8",
|
||||
"nanoid": "^3.3.4",
|
||||
"nodemailer": "^6.9.9",
|
||||
"openid-client": "^5.6.5",
|
||||
"ora": "^7.0.1",
|
||||
"oracledb": "^6.4.0",
|
||||
"passport-github": "^1.1.0",
|
||||
"passport-gitlab2": "^5.0.0",
|
||||
"passport-google-oauth20": "^2.0.0",
|
||||
"passport-ldapauth": "^3.0.1",
|
||||
"pg": "^8.11.3",
|
||||
"pg-query-stream": "^4.5.3",
|
||||
"picomatch": "^3.0.1",
|
||||
"pino": "^8.16.2",
|
||||
"posthog-node": "^3.6.2",
|
||||
"probot": "^13.0.0",
|
||||
"smee-client": "^2.0.0",
|
||||
"tedious": "^18.2.1",
|
||||
"libsodium-wrappers": "^0.7.10",
|
||||
"lodash": "^4.17.21",
|
||||
"mongoose": "^6.10.5",
|
||||
"nodemailer": "^6.8.0",
|
||||
"posthog-node": "^2.6.0",
|
||||
"query-string": "^7.1.3",
|
||||
"request-ip": "^3.3.0",
|
||||
"rimraf": "^3.0.2",
|
||||
"stripe": "^10.7.0",
|
||||
"swagger-autogen": "^2.22.0",
|
||||
"swagger-ui-express": "^4.6.2",
|
||||
"tweetnacl": "^1.0.3",
|
||||
"tweetnacl-util": "^0.15.1",
|
||||
"uuid": "^9.0.1",
|
||||
"zod": "^3.22.4",
|
||||
"zod-to-json-schema": "^3.22.4"
|
||||
"typescript": "^4.9.3",
|
||||
"utility-types": "^3.10.0",
|
||||
"winston": "^3.8.2",
|
||||
"winston-loki": "^6.0.6"
|
||||
},
|
||||
"name": "infisical-api",
|
||||
"version": "1.0.0",
|
||||
"main": "src/index.js",
|
||||
"scripts": {
|
||||
"start": "node build/index.js",
|
||||
"dev": "nodemon",
|
||||
"swagger-autogen": "node ./swagger/index.ts",
|
||||
"build": "rimraf ./build && tsc && cp -R ./src/templates ./build",
|
||||
"lint": "eslint . --ext .ts",
|
||||
"lint-and-fix": "eslint . --ext .ts --fix",
|
||||
"lint-staged": "lint-staged",
|
||||
"pretest": "docker compose -f test-resources/docker-compose.test.yml up -d",
|
||||
"test": "cross-env NODE_ENV=test jest --verbose --testTimeout=10000 --detectOpenHandles; npm run posttest",
|
||||
"test:ci": "npm test -- --watchAll=false --ci --reporters=default --reporters=jest-junit --reporters=github-actions --coverage --testLocationInResults --json --outputFile=coverage/report.json",
|
||||
"posttest": "docker compose -f test-resources/docker-compose.test.yml down"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/Infisical/infisical-api.git"
|
||||
},
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"bugs": {
|
||||
"url": "https://github.com/Infisical/infisical-api/issues"
|
||||
},
|
||||
"homepage": "https://github.com/Infisical/infisical-api#readme",
|
||||
"description": "",
|
||||
"devDependencies": {
|
||||
"@jest/globals": "^29.3.1",
|
||||
"@posthog/plugin-scaffold": "^1.3.4",
|
||||
"@types/bcrypt": "^5.0.0",
|
||||
"@types/bcryptjs": "^2.4.2",
|
||||
"@types/cookie-parser": "^1.4.3",
|
||||
"@types/cors": "^2.8.12",
|
||||
"@types/express": "^4.17.14",
|
||||
"@types/jest": "^29.5.0",
|
||||
"@types/jsonwebtoken": "^8.5.9",
|
||||
"@types/lodash": "^4.14.191",
|
||||
"@types/node": "^18.11.3",
|
||||
"@types/nodemailer": "^6.4.6",
|
||||
"@types/supertest": "^2.0.12",
|
||||
"@types/swagger-jsdoc": "^6.0.1",
|
||||
"@types/swagger-ui-express": "^4.1.3",
|
||||
"@typescript-eslint/eslint-plugin": "^5.54.0",
|
||||
"@typescript-eslint/parser": "^5.40.1",
|
||||
"cross-env": "^7.0.3",
|
||||
"eslint": "^8.26.0",
|
||||
"install": "^0.13.0",
|
||||
"jest": "^29.3.1",
|
||||
"jest-junit": "^15.0.0",
|
||||
"nodemon": "^2.0.19",
|
||||
"npm": "^8.19.3",
|
||||
"supertest": "^6.3.3",
|
||||
"ts-jest": "^29.0.3",
|
||||
"ts-node": "^10.9.1"
|
||||
},
|
||||
"jest-junit": {
|
||||
"outputDirectory": "reports",
|
||||
"outputName": "jest-junit.xml",
|
||||
"ancestorSeparator": " › ",
|
||||
"uniqueOutputName": "false",
|
||||
"suiteNameTemplate": "{filepath}",
|
||||
"classNameTemplate": "{classname}",
|
||||
"titleTemplate": "{title}"
|
||||
}
|
||||
}
|
||||
|
@ -1,127 +0,0 @@
|
||||
/* eslint-disable */
|
||||
import { mkdirSync, writeFileSync } from "fs";
|
||||
import path from "path";
|
||||
import promptSync from "prompt-sync";
|
||||
|
||||
const prompt = promptSync({
|
||||
sigint: true
|
||||
});
|
||||
|
||||
console.log(`
|
||||
Component List
|
||||
--------------
|
||||
1. Service component
|
||||
2. DAL component
|
||||
3. Router component
|
||||
`);
|
||||
const componentType = parseInt(prompt("Select a component: "), 10);
|
||||
|
||||
if (componentType === 1) {
|
||||
const componentName = prompt("Enter service name: ");
|
||||
const dir = path.join(__dirname, `../src/services/${componentName}`);
|
||||
const pascalCase = componentName
|
||||
.split("-")
|
||||
.map((el) => `${el[0].toUpperCase()}${el.slice(1)}`)
|
||||
.join("");
|
||||
const camelCase = componentName
|
||||
.split("-")
|
||||
.map((el, index) => (index === 0 ? el : `${el[0].toUpperCase()}${el.slice(1)}`))
|
||||
.join("");
|
||||
const dalTypeName = `T${pascalCase}DALFactory`;
|
||||
const dalName = `${camelCase}DALFactory`;
|
||||
const serviceTypeName = `T${pascalCase}ServiceFactory`;
|
||||
const serviceName = `${camelCase}ServiceFactory`;
|
||||
|
||||
mkdirSync(dir);
|
||||
|
||||
writeFileSync(
|
||||
path.join(dir, `${componentName}-dal.ts`),
|
||||
`import { TDbClient } from "@app/db";
|
||||
import { TableName } from "@app/db/schemas";
|
||||
|
||||
export type ${dalTypeName} = ReturnType<typeof ${dalName}>;
|
||||
|
||||
export const ${dalName} = (db: TDbClient) => {
|
||||
|
||||
return { };
|
||||
};
|
||||
`
|
||||
);
|
||||
|
||||
writeFileSync(
|
||||
path.join(dir, `${componentName}-service.ts`),
|
||||
`import { ${dalTypeName} } from "./${componentName}-dal";
|
||||
|
||||
type ${serviceTypeName}Dep = {
|
||||
${camelCase}DAL: ${dalTypeName};
|
||||
};
|
||||
|
||||
export type ${serviceTypeName} = ReturnType<typeof ${serviceName}>;
|
||||
|
||||
export const ${serviceName} = ({ ${camelCase}DAL }: ${serviceTypeName}Dep) => {
|
||||
return {};
|
||||
};
|
||||
`
|
||||
);
|
||||
writeFileSync(path.join(dir, `${componentName}-types.ts`), "");
|
||||
} else if (componentType === 2) {
|
||||
const componentName = prompt("Enter service name: ");
|
||||
const componentPath = prompt("Path wrt service folder: ");
|
||||
const pascalCase = componentName
|
||||
.split("-")
|
||||
.map((el) => `${el[0].toUpperCase()}${el.slice(1)}`)
|
||||
.join("");
|
||||
const camelCase = componentName
|
||||
.split("-")
|
||||
.map((el, index) => (index === 0 ? el : `${el[0].toUpperCase()}${el.slice(1)}`))
|
||||
.join("");
|
||||
const dalTypeName = `T${pascalCase}DALFactory`;
|
||||
const dalName = `${camelCase}DALFactory`;
|
||||
|
||||
writeFileSync(
|
||||
path.join(__dirname, "../src/services", componentPath, `${componentName}-dal.ts`),
|
||||
`import { TDbClient } from "@app/db";
|
||||
import { TableName } from "@app/db/schemas";
|
||||
|
||||
export type ${dalTypeName} = ReturnType<typeof ${dalName}>;
|
||||
|
||||
export const ${dalName} = (db: TDbClient) => {
|
||||
|
||||
return { };
|
||||
};
|
||||
`
|
||||
);
|
||||
} else if (componentType === 3) {
|
||||
const name = prompt("Enter router name: ");
|
||||
const version = prompt("Version number: ");
|
||||
const pascalCase = name
|
||||
.split("-")
|
||||
.map((el) => `${el[0].toUpperCase()}${el.slice(1)}`)
|
||||
.join("");
|
||||
writeFileSync(
|
||||
path.join(__dirname, `../src/server/routes/v${Number(version)}/${name}-router.ts`),
|
||||
`import { z } from "zod";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { readLimit } from "@app/server/config/rateLimiter";
|
||||
|
||||
export const register${pascalCase}Router = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({}),
|
||||
response: {
|
||||
200: z.object({})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {}
|
||||
});
|
||||
};
|
||||
`
|
||||
);
|
||||
}
|
@ -1,17 +0,0 @@
|
||||
/* eslint-disable */
|
||||
import { execSync } from "child_process";
|
||||
import path from "path";
|
||||
import promptSync from "prompt-sync";
|
||||
import slugify from "@sindresorhus/slugify"
|
||||
|
||||
const prompt = promptSync({ sigint: true });
|
||||
|
||||
const migrationName = prompt("Enter name for migration: ");
|
||||
|
||||
// Remove spaces from migration name and replace with hyphens
|
||||
const formattedMigrationName = slugify(migrationName);
|
||||
|
||||
execSync(
|
||||
`npx knex migrate:make --knexfile ${path.join(__dirname, "../src/db/knexfile.ts")} -x ts ${formattedMigrationName}`,
|
||||
{ stdio: "inherit" }
|
||||
);
|
@ -1,16 +0,0 @@
|
||||
/* eslint-disable */
|
||||
import { execSync } from "child_process";
|
||||
import { readdirSync } from "fs";
|
||||
import path from "path";
|
||||
import promptSync from "prompt-sync";
|
||||
|
||||
const prompt = promptSync({ sigint: true });
|
||||
|
||||
const migrationName = prompt("Enter name for seedfile: ");
|
||||
const fileCounter = readdirSync(path.join(__dirname, "../src/db/seeds")).length || 1;
|
||||
execSync(
|
||||
`npx knex seed:make --knexfile ${path.join(__dirname, "../src/db/knexfile.ts")} -x ts ${
|
||||
fileCounter + 1
|
||||
}-${migrationName}`,
|
||||
{ stdio: "inherit" }
|
||||
);
|
@ -1,159 +0,0 @@
|
||||
/* eslint-disable */
|
||||
import dotenv from "dotenv";
|
||||
import path from "path";
|
||||
import knex from "knex";
|
||||
import { writeFileSync } from "fs";
|
||||
|
||||
dotenv.config({
|
||||
path: path.join(__dirname, "../../.env.migration")
|
||||
});
|
||||
|
||||
const db = knex({
|
||||
client: "pg",
|
||||
connection: process.env.DB_CONNECTION_URI
|
||||
});
|
||||
|
||||
const getZodPrimitiveType = (type: string) => {
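// map a Postgres column type reported by knex columnInfo() to the matching zod validator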
|
||||
switch (type) {
|
||||
case "uuid":
|
||||
return "z.string().uuid()";
|
||||
case "character varying":
|
||||
return "z.string()";
|
||||
case "ARRAY":
|
||||
return "z.string().array()";
|
||||
case "boolean":
|
||||
return "z.boolean()";
|
||||
case "jsonb":
|
||||
return "z.unknown()";
|
||||
case "json":
|
||||
return "z.unknown()";
|
||||
case "timestamp with time zone":
|
||||
return "z.date()";
|
||||
case "integer":
|
||||
return "z.number()";
|
||||
case "bigint":
|
||||
return "z.coerce.number()";
|
||||
case "text":
|
||||
return "z.string()";
|
||||
case "bytea":
|
||||
return "zodBuffer";
|
||||
default:
|
||||
throw new Error(`Invalid type: ${type}`);
|
||||
}
|
||||
};
|
||||
|
||||
const getZodDefaultValue = (type: unknown, value: string | number | boolean | Object) => {
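// derive the zod default to append for a column's database default value, skipping server-generated defaults such as nextval() and CURRENT_TIMESTAMP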
|
||||
if (!value || value === "null") return;
|
||||
switch (type) {
|
||||
case "uuid":
|
||||
return `.default("00000000-0000-0000-0000-000000000000")`;
|
||||
case "character varying": {
|
||||
if (value === "gen_random_uuid()") return;
|
||||
if (typeof value === "string" && value.includes("::")) {
|
||||
return `.default(${value.split("::")[0]})`;
|
||||
}
|
||||
return `.default(${value})`;
|
||||
}
|
||||
case "ARRAY":
|
||||
return `.default(${value})`;
|
||||
case "boolean":
|
||||
return `.default(${value})`;
|
||||
case "jsonb":
|
||||
return "z.string()";
|
||||
case "json":
|
||||
return "z.string()";
|
||||
case "timestamp with time zone": {
|
||||
if (value === "CURRENT_TIMESTAMP") return;
|
||||
return "z.string().datetime()";
|
||||
}
|
||||
case "integer": {
|
||||
if ((value as string).includes("nextval")) return;
|
||||
return `.default(${value})`;
|
||||
}
|
||||
case "bigint": {
|
||||
if ((value as string).includes("nextval")) return;
|
||||
return `.default(${parseInt((value as string).split("::")[0].slice(1, -1), 10)})`;
|
||||
}
|
||||
case "text":
|
||||
if (typeof value === "string" && value.includes("::")) {
|
||||
return `.default(${value.split("::")[0]})`;
|
||||
}
|
||||
return `.default(${value})`;
|
||||
default:
|
||||
throw new Error(`Invalid type: ${type}`);
|
||||
}
|
||||
};
|
||||
|
||||
const main = async () => {
|
||||
const tables = (
|
||||
await db("information_schema.tables")
|
||||
.whereRaw("table_schema = current_schema()")
|
||||
.select<{ tableName: string }[]>("table_name as tableName")
|
||||
.orderBy("table_name")
|
||||
).filter((el) => !el.tableName.includes("_migrations"));
|
||||
|
||||
for (let i = 0; i < tables.length; i += 1) {
|
||||
const { tableName } = tables[i];
|
||||
const columns = await db(tableName).columnInfo();
|
||||
const columnNames = Object.keys(columns);
|
||||
|
||||
let schema = "";
|
||||
const zodImportSet = new Set<string>();
|
||||
for (let colNum = 0; colNum < columnNames.length; colNum++) {
|
||||
const columnName = columnNames[colNum];
|
||||
const colInfo = columns[columnName];
|
||||
let ztype = getZodPrimitiveType(colInfo.type);
|
||||
if (["zodBuffer"].includes(ztype)) {
|
||||
zodImportSet.add(ztype);
|
||||
}
|
||||
|
||||
// don't put optional on id
|
||||
if (colInfo.defaultValue && columnName !== "id") {
|
||||
const { defaultValue } = colInfo;
|
||||
const zSchema = getZodDefaultValue(colInfo.type, defaultValue);
|
||||
if (zSchema) {
|
||||
ztype = ztype.concat(zSchema);
|
||||
}
|
||||
}
|
||||
if (colInfo.nullable) {
|
||||
ztype = ztype.concat(".nullable().optional()");
|
||||
}
|
||||
schema = schema.concat(
|
||||
`${!schema ? "\n" : ""} ${columnName}: ${ztype}${colNum === columnNames.length - 1 ? "" : ","}\n`
|
||||
);
|
||||
}
|
||||
|
||||
const dashcase = tableName.split("_").join("-");
|
||||
const pascalCase = tableName
|
||||
.split("_")
|
||||
.reduce((prev, curr) => prev + `${curr.at(0)?.toUpperCase()}${curr.slice(1).toLowerCase()}`, "");
|
||||
|
||||
const zodImports = Array.from(zodImportSet);
|
||||
|
||||
// the insert and update types use the zod input type so that default values are applied
|
||||
writeFileSync(
|
||||
path.join(__dirname, "../src/db/schemas", `${dashcase}.ts`),
|
||||
`// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update, just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
${zodImports.length ? `import { ${zodImports.join(",")} } from \"@app/lib/zod\";` : ""}
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const ${pascalCase}Schema = z.object({${schema}});
|
||||
|
||||
export type T${pascalCase} = z.infer<typeof ${pascalCase}Schema>;
|
||||
export type T${pascalCase}Insert = Omit<z.input<typeof ${pascalCase}Schema>, TImmutableDBKeys>;
|
||||
export type T${pascalCase}Update = Partial<Omit<z.input<typeof ${pascalCase}Schema>, TImmutableDBKeys>>;
|
||||
`
|
||||
);
|
||||
}
|
||||
|
||||
process.exit(0);
|
||||
};
|
||||
|
||||
main();
|
@ -1,27 +0,0 @@
|
||||
/* eslint-disable @typescript-eslint/no-shadow */
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
|
||||
function replaceMjsOccurrences(directory: string) {
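// recursively rewrite ".mjs" references to ".js" in every file under the given directory (run as the binary:rename-imports step of the pkg build)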
|
||||
fs.readdir(directory, (err, files) => {
|
||||
if (err) throw err;
|
||||
files.forEach((file) => {
|
||||
const filePath = path.join(directory, file);
|
||||
if (fs.statSync(filePath).isDirectory()) {
|
||||
replaceMjsOccurrences(filePath);
|
||||
} else {
|
||||
fs.readFile(filePath, "utf8", (err, data) => {
|
||||
if (err) throw err;
|
||||
const result = data.replace(/\.mjs/g, ".js");
|
||||
fs.writeFile(filePath, result, "utf8", (err) => {
|
||||
if (err) throw err;
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(`Updated: ${filePath}`);
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
replaceMjsOccurrences("dist");
|
5444
backend/spec.json
Normal file
File diff suppressed because it is too large
18
backend/src/@types/fastify-zod.d.ts
vendored
@ -1,18 +0,0 @@
|
||||
import { FastifyInstance, RawReplyDefaultExpression, RawRequestDefaultExpression, RawServerDefault } from "fastify";
|
||||
import { Logger } from "pino";
|
||||
|
||||
import { ZodTypeProvider } from "@app/server/plugins/fastify-zod";
|
||||
|
||||
declare global {
|
||||
type FastifyZodProvider = FastifyInstance<
|
||||
RawServerDefault,
|
||||
RawRequestDefaultExpression<RawServerDefault>,
|
||||
RawReplyDefaultExpression<RawServerDefault>,
|
||||
Readonly<Logger>,
|
||||
ZodTypeProvider
|
||||
>;
|
||||
|
||||
// used only for testing
|
||||
const testServer: FastifyZodProvider;
|
||||
const jwtAuthToken: string;
|
||||
}
|
175
backend/src/@types/fastify.d.ts
vendored
@ -1,175 +0,0 @@
|
||||
import "fastify";
|
||||
|
||||
import { TUsers } from "@app/db/schemas";
|
||||
import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
|
||||
import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-service";
|
||||
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
|
||||
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
|
||||
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
|
||||
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
|
||||
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
|
||||
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
|
||||
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
|
||||
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
|
||||
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
|
||||
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
|
||||
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
|
||||
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
|
||||
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
|
||||
import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service";
|
||||
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
|
||||
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
|
||||
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
|
||||
import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
|
||||
import { TAuthMode } from "@app/server/plugins/auth/inject-identity";
|
||||
import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
|
||||
import { TAuthLoginFactory } from "@app/services/auth/auth-login-service";
|
||||
import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
|
||||
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
|
||||
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
|
||||
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
|
||||
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
|
||||
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
|
||||
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
|
||||
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
|
||||
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
|
||||
import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
|
||||
import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
|
||||
import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
|
||||
import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
|
||||
import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
|
||||
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
|
||||
import { TIdentityTokenAuthServiceFactory } from "@app/services/identity-token-auth/identity-token-auth-service";
|
||||
import { TIdentityUaServiceFactory } from "@app/services/identity-ua/identity-ua-service";
|
||||
import { TIntegrationServiceFactory } from "@app/services/integration/integration-service";
|
||||
import { TIntegrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service";
|
||||
import { TOrgRoleServiceFactory } from "@app/services/org/org-role-service";
|
||||
import { TOrgServiceFactory } from "@app/services/org/org-service";
|
||||
import { TProjectServiceFactory } from "@app/services/project/project-service";
|
||||
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
|
||||
import { TProjectEnvServiceFactory } from "@app/services/project-env/project-env-service";
|
||||
import { TProjectKeyServiceFactory } from "@app/services/project-key/project-key-service";
|
||||
import { TProjectMembershipServiceFactory } from "@app/services/project-membership/project-membership-service";
|
||||
import { TProjectRoleServiceFactory } from "@app/services/project-role/project-role-service";
|
||||
import { TSecretServiceFactory } from "@app/services/secret/secret-service";
|
||||
import { TSecretBlindIndexServiceFactory } from "@app/services/secret-blind-index/secret-blind-index-service";
|
||||
import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-folder-service";
|
||||
import { TSecretImportServiceFactory } from "@app/services/secret-import/secret-import-service";
|
||||
import { TSecretReplicationServiceFactory } from "@app/services/secret-replication/secret-replication-service";
|
||||
import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
|
||||
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
|
||||
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
|
||||
import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
|
||||
import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
|
||||
import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||
import { TUserServiceFactory } from "@app/services/user/user-service";
|
||||
import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
|
||||
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
|
||||
|
||||
declare module "fastify" {
|
||||
interface FastifyRequest {
|
||||
realIp: string;
|
||||
// used for mfa session authentication
|
||||
mfa: {
|
||||
userId: string;
|
||||
orgId?: string;
|
||||
user: TUsers;
|
||||
};
|
||||
// identity injection. depending on which kind of token is used, the auth information is filled in here
|
||||
auth: TAuthMode;
|
||||
permission: {
|
||||
authMethod: ActorAuthMethod;
|
||||
type: ActorType;
|
||||
id: string;
|
||||
orgId: string;
|
||||
};
|
||||
// passport data
|
||||
passportUser: {
|
||||
isUserCompleted: string;
|
||||
providerAuthToken: string;
|
||||
};
|
||||
auditLogInfo: Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
|
||||
ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
|
||||
ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>>;
|
||||
}
|
||||
|
||||
interface FastifyInstance {
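// module augmentation: exposes every registered service factory on the Fastify instance via server.services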
|
||||
services: {
|
||||
login: TAuthLoginFactory;
|
||||
password: TAuthPasswordFactory;
|
||||
signup: TAuthSignupFactory;
|
||||
authToken: TAuthTokenServiceFactory;
|
||||
permission: TPermissionServiceFactory;
|
||||
org: TOrgServiceFactory;
|
||||
orgRole: TOrgRoleServiceFactory;
|
||||
oidc: TOidcConfigServiceFactory;
|
||||
superAdmin: TSuperAdminServiceFactory;
|
||||
user: TUserServiceFactory;
|
||||
group: TGroupServiceFactory;
|
||||
groupProject: TGroupProjectServiceFactory;
|
||||
apiKey: TApiKeyServiceFactory;
|
||||
project: TProjectServiceFactory;
|
||||
projectMembership: TProjectMembershipServiceFactory;
|
||||
projectEnv: TProjectEnvServiceFactory;
|
||||
projectKey: TProjectKeyServiceFactory;
|
||||
projectRole: TProjectRoleServiceFactory;
|
||||
secret: TSecretServiceFactory;
|
||||
secretReplication: TSecretReplicationServiceFactory;
|
||||
secretTag: TSecretTagServiceFactory;
|
||||
secretImport: TSecretImportServiceFactory;
|
||||
projectBot: TProjectBotServiceFactory;
|
||||
folder: TSecretFolderServiceFactory;
|
||||
integration: TIntegrationServiceFactory;
|
||||
integrationAuth: TIntegrationAuthServiceFactory;
|
||||
webhook: TWebhookServiceFactory;
|
||||
serviceToken: TServiceTokenServiceFactory;
|
||||
identity: TIdentityServiceFactory;
|
||||
identityAccessToken: TIdentityAccessTokenServiceFactory;
|
||||
identityProject: TIdentityProjectServiceFactory;
|
||||
identityTokenAuth: TIdentityTokenAuthServiceFactory;
|
||||
identityUa: TIdentityUaServiceFactory;
|
||||
identityKubernetesAuth: TIdentityKubernetesAuthServiceFactory;
|
||||
identityGcpAuth: TIdentityGcpAuthServiceFactory;
|
||||
identityAwsAuth: TIdentityAwsAuthServiceFactory;
|
||||
identityAzureAuth: TIdentityAzureAuthServiceFactory;
|
||||
identityOidcAuth: TIdentityOidcAuthServiceFactory;
|
||||
accessApprovalPolicy: TAccessApprovalPolicyServiceFactory;
|
||||
accessApprovalRequest: TAccessApprovalRequestServiceFactory;
|
||||
secretApprovalPolicy: TSecretApprovalPolicyServiceFactory;
|
||||
secretApprovalRequest: TSecretApprovalRequestServiceFactory;
|
||||
secretRotation: TSecretRotationServiceFactory;
|
||||
snapshot: TSecretSnapshotServiceFactory;
|
||||
saml: TSamlConfigServiceFactory;
|
||||
scim: TScimServiceFactory;
|
||||
ldap: TLdapConfigServiceFactory;
|
||||
auditLog: TAuditLogServiceFactory;
|
||||
auditLogStream: TAuditLogStreamServiceFactory;
|
||||
certificate: TCertificateServiceFactory;
|
||||
certificateAuthority: TCertificateAuthorityServiceFactory;
|
||||
certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
|
||||
secretScanning: TSecretScanningServiceFactory;
|
||||
license: TLicenseServiceFactory;
|
||||
trustedIp: TTrustedIpServiceFactory;
|
||||
secretBlindIndex: TSecretBlindIndexServiceFactory;
|
||||
telemetry: TTelemetryServiceFactory;
|
||||
dynamicSecret: TDynamicSecretServiceFactory;
|
||||
dynamicSecretLease: TDynamicSecretLeaseServiceFactory;
|
||||
projectUserAdditionalPrivilege: TProjectUserAdditionalPrivilegeServiceFactory;
|
||||
identityProjectAdditionalPrivilege: TIdentityProjectAdditionalPrivilegeServiceFactory;
|
||||
secretSharing: TSecretSharingServiceFactory;
|
||||
rateLimit: TRateLimitServiceFactory;
|
||||
userEngagement: TUserEngagementServiceFactory;
|
||||
externalKms: TExternalKmsServiceFactory;
|
||||
};
|
||||
// this is exclusively for middlewares in which we need to inject data
|
||||
// everywhere else, access it through the service layer
|
||||
store: {
|
||||
user: Pick<TUserDALFactory, "findById">;
|
||||
};
|
||||
}
|
||||
}
|
674
backend/src/@types/knex.d.ts
vendored
@ -1,674 +0,0 @@
|
||||
import { Knex as KnexOriginal } from "knex";
|
||||
|
||||
import {
|
||||
TableName,
|
||||
TAccessApprovalPolicies,
|
||||
TAccessApprovalPoliciesApprovers,
|
||||
TAccessApprovalPoliciesApproversInsert,
|
||||
TAccessApprovalPoliciesApproversUpdate,
|
||||
TAccessApprovalPoliciesInsert,
|
||||
TAccessApprovalPoliciesUpdate,
|
||||
TAccessApprovalRequests,
|
||||
TAccessApprovalRequestsInsert,
|
||||
TAccessApprovalRequestsReviewers,
|
||||
TAccessApprovalRequestsReviewersInsert,
|
||||
TAccessApprovalRequestsReviewersUpdate,
|
||||
TAccessApprovalRequestsUpdate,
|
||||
TApiKeys,
|
||||
TApiKeysInsert,
|
||||
TApiKeysUpdate,
|
||||
TAuditLogs,
|
||||
TAuditLogsInsert,
|
||||
TAuditLogStreams,
|
||||
TAuditLogStreamsInsert,
|
||||
TAuditLogStreamsUpdate,
|
||||
TAuditLogsUpdate,
|
||||
TAuthTokens,
|
||||
TAuthTokenSessions,
|
||||
TAuthTokenSessionsInsert,
|
||||
TAuthTokenSessionsUpdate,
|
||||
TAuthTokensInsert,
|
||||
TAuthTokensUpdate,
|
||||
TBackupPrivateKey,
|
||||
TBackupPrivateKeyInsert,
|
||||
TBackupPrivateKeyUpdate,
|
||||
TCertificateAuthorities,
|
||||
TCertificateAuthoritiesInsert,
|
||||
TCertificateAuthoritiesUpdate,
|
||||
TCertificateAuthorityCerts,
|
||||
TCertificateAuthorityCertsInsert,
|
||||
TCertificateAuthorityCertsUpdate,
|
||||
TCertificateAuthorityCrl,
|
||||
TCertificateAuthorityCrlInsert,
|
||||
TCertificateAuthorityCrlUpdate,
|
||||
TCertificateAuthoritySecret,
|
||||
TCertificateAuthoritySecretInsert,
|
||||
TCertificateAuthoritySecretUpdate,
|
||||
TCertificateBodies,
|
||||
TCertificateBodiesInsert,
|
||||
TCertificateBodiesUpdate,
|
||||
TCertificates,
|
||||
TCertificateSecrets,
|
||||
TCertificateSecretsInsert,
|
||||
TCertificateSecretsUpdate,
|
||||
TCertificatesInsert,
|
||||
TCertificatesUpdate,
|
||||
TDynamicSecretLeases,
|
||||
TDynamicSecretLeasesInsert,
|
||||
TDynamicSecretLeasesUpdate,
|
||||
TDynamicSecrets,
|
||||
TDynamicSecretsInsert,
|
||||
TDynamicSecretsUpdate,
|
||||
TExternalKms,
|
||||
TExternalKmsInsert,
|
||||
TExternalKmsUpdate,
|
||||
TGitAppInstallSessions,
|
||||
TGitAppInstallSessionsInsert,
|
||||
TGitAppInstallSessionsUpdate,
|
||||
TGitAppOrg,
|
||||
TGitAppOrgInsert,
|
||||
TGitAppOrgUpdate,
|
||||
TGroupProjectMembershipRoles,
|
||||
TGroupProjectMembershipRolesInsert,
|
||||
TGroupProjectMembershipRolesUpdate,
|
||||
TGroupProjectMemberships,
|
||||
TGroupProjectMembershipsInsert,
|
||||
TGroupProjectMembershipsUpdate,
|
||||
TGroups,
|
||||
TGroupsInsert,
|
||||
TGroupsUpdate,
|
||||
TIdentities,
|
||||
TIdentitiesInsert,
|
||||
TIdentitiesUpdate,
|
||||
TIdentityAccessTokens,
|
||||
TIdentityAccessTokensInsert,
|
||||
TIdentityAccessTokensUpdate,
|
||||
TIdentityAwsAuths,
|
||||
TIdentityAwsAuthsInsert,
|
||||
TIdentityAwsAuthsUpdate,
|
||||
TIdentityAzureAuths,
|
||||
TIdentityAzureAuthsInsert,
|
||||
TIdentityAzureAuthsUpdate,
|
||||
TIdentityGcpAuths,
|
||||
TIdentityGcpAuthsInsert,
|
||||
TIdentityGcpAuthsUpdate,
|
||||
TIdentityKubernetesAuths,
|
||||
TIdentityKubernetesAuthsInsert,
|
||||
TIdentityKubernetesAuthsUpdate,
|
||||
TIdentityOidcAuths,
|
||||
TIdentityOidcAuthsInsert,
|
||||
TIdentityOidcAuthsUpdate,
|
||||
TIdentityOrgMemberships,
|
||||
TIdentityOrgMembershipsInsert,
|
||||
TIdentityOrgMembershipsUpdate,
|
||||
TIdentityProjectAdditionalPrivilege,
|
||||
TIdentityProjectAdditionalPrivilegeInsert,
|
||||
TIdentityProjectAdditionalPrivilegeUpdate,
|
||||
TIdentityProjectMembershipRole,
|
||||
TIdentityProjectMembershipRoleInsert,
|
||||
TIdentityProjectMembershipRoleUpdate,
|
||||
TIdentityProjectMemberships,
|
||||
TIdentityProjectMembershipsInsert,
|
||||
TIdentityProjectMembershipsUpdate,
|
||||
TIdentityTokenAuths,
|
||||
TIdentityTokenAuthsInsert,
|
||||
TIdentityTokenAuthsUpdate,
|
||||
TIdentityUaClientSecrets,
|
||||
TIdentityUaClientSecretsInsert,
|
||||
TIdentityUaClientSecretsUpdate,
|
||||
TIdentityUniversalAuths,
|
||||
TIdentityUniversalAuthsInsert,
|
||||
TIdentityUniversalAuthsUpdate,
|
||||
TIncidentContacts,
|
||||
TIncidentContactsInsert,
|
||||
TIncidentContactsUpdate,
|
||||
TIntegrationAuths,
|
||||
TIntegrationAuthsInsert,
|
||||
TIntegrationAuthsUpdate,
|
||||
TIntegrations,
|
||||
TIntegrationsInsert,
|
||||
TIntegrationsUpdate,
|
||||
TInternalKms,
|
||||
TInternalKmsInsert,
|
||||
TInternalKmsUpdate,
|
||||
TKmsKeys,
|
||||
TKmsKeysInsert,
|
||||
TKmsKeysUpdate,
|
||||
TKmsKeyVersions,
|
||||
TKmsKeyVersionsInsert,
|
||||
TKmsKeyVersionsUpdate,
|
||||
TKmsRootConfig,
|
||||
TKmsRootConfigInsert,
|
||||
TKmsRootConfigUpdate,
|
||||
TLdapConfigs,
|
||||
TLdapConfigsInsert,
|
||||
TLdapConfigsUpdate,
|
||||
TLdapGroupMaps,
|
||||
TLdapGroupMapsInsert,
|
||||
TLdapGroupMapsUpdate,
|
||||
TOidcConfigs,
|
||||
TOidcConfigsInsert,
|
||||
TOidcConfigsUpdate,
|
||||
TOrganizations,
|
||||
TOrganizationsInsert,
|
||||
TOrganizationsUpdate,
|
||||
TOrgBots,
|
||||
TOrgBotsInsert,
|
||||
TOrgBotsUpdate,
|
||||
TOrgMemberships,
|
||||
TOrgMembershipsInsert,
|
||||
TOrgMembershipsUpdate,
|
||||
TOrgRoles,
|
||||
TOrgRolesInsert,
|
||||
TOrgRolesUpdate,
|
||||
TProjectBots,
|
||||
TProjectBotsInsert,
|
||||
TProjectBotsUpdate,
|
||||
TProjectEnvironments,
|
||||
TProjectEnvironmentsInsert,
|
||||
TProjectEnvironmentsUpdate,
|
||||
TProjectKeys,
|
||||
TProjectKeysInsert,
|
||||
TProjectKeysUpdate,
|
||||
TProjectMemberships,
|
||||
TProjectMembershipsInsert,
|
||||
TProjectMembershipsUpdate,
|
||||
TProjectRoles,
|
||||
TProjectRolesInsert,
|
||||
TProjectRolesUpdate,
|
||||
TProjects,
|
||||
TProjectsInsert,
|
||||
TProjectsUpdate,
|
||||
TProjectUserAdditionalPrivilege,
|
||||
TProjectUserAdditionalPrivilegeInsert,
|
||||
TProjectUserAdditionalPrivilegeUpdate,
|
||||
TProjectUserMembershipRoles,
|
||||
TProjectUserMembershipRolesInsert,
|
||||
TProjectUserMembershipRolesUpdate,
|
||||
TRateLimit,
|
||||
TRateLimitInsert,
|
||||
TRateLimitUpdate,
|
||||
TSamlConfigs,
|
||||
TSamlConfigsInsert,
|
||||
TSamlConfigsUpdate,
|
||||
TScimTokens,
|
||||
TScimTokensInsert,
|
||||
TScimTokensUpdate,
|
||||
TSecretApprovalPolicies,
|
||||
TSecretApprovalPoliciesApprovers,
|
||||
TSecretApprovalPoliciesApproversInsert,
|
||||
TSecretApprovalPoliciesApproversUpdate,
|
||||
TSecretApprovalPoliciesInsert,
|
||||
TSecretApprovalPoliciesUpdate,
|
||||
TSecretApprovalRequests,
|
||||
TSecretApprovalRequestSecretTags,
|
||||
TSecretApprovalRequestSecretTagsInsert,
|
||||
TSecretApprovalRequestSecretTagsUpdate,
|
||||
TSecretApprovalRequestsInsert,
|
||||
TSecretApprovalRequestsReviewers,
|
||||
TSecretApprovalRequestsReviewersInsert,
|
||||
TSecretApprovalRequestsReviewersUpdate,
|
||||
TSecretApprovalRequestsSecrets,
|
||||
TSecretApprovalRequestsSecretsInsert,
|
||||
TSecretApprovalRequestsSecretsUpdate,
|
||||
TSecretApprovalRequestsUpdate,
|
||||
TSecretBlindIndexes,
|
||||
TSecretBlindIndexesInsert,
|
||||
TSecretBlindIndexesUpdate,
|
||||
TSecretFolders,
|
||||
TSecretFoldersInsert,
|
||||
TSecretFoldersUpdate,
|
||||
TSecretFolderVersions,
|
||||
TSecretFolderVersionsInsert,
|
||||
TSecretFolderVersionsUpdate,
|
||||
TSecretImports,
|
||||
TSecretImportsInsert,
|
||||
TSecretImportsUpdate,
|
||||
TSecretReferences,
|
||||
TSecretReferencesInsert,
|
||||
TSecretReferencesUpdate,
|
||||
TSecretRotationOutputs,
|
||||
TSecretRotationOutputsInsert,
|
||||
TSecretRotationOutputsUpdate,
|
||||
TSecretRotations,
|
||||
TSecretRotationsInsert,
|
||||
TSecretRotationsUpdate,
|
||||
TSecrets,
|
||||
TSecretScanningGitRisks,
|
||||
TSecretScanningGitRisksInsert,
|
||||
TSecretScanningGitRisksUpdate,
|
||||
TSecretSharing,
|
||||
TSecretSharingInsert,
|
||||
TSecretSharingUpdate,
|
||||
TSecretsInsert,
|
||||
TSecretSnapshotFolders,
|
||||
TSecretSnapshotFoldersInsert,
|
||||
TSecretSnapshotFoldersUpdate,
|
||||
TSecretSnapshots,
|
||||
TSecretSnapshotSecrets,
|
||||
TSecretSnapshotSecretsInsert,
|
||||
TSecretSnapshotSecretsUpdate,
|
||||
TSecretSnapshotsInsert,
|
||||
TSecretSnapshotsUpdate,
|
||||
TSecretsUpdate,
|
||||
TSecretTagJunction,
|
||||
TSecretTagJunctionInsert,
|
||||
TSecretTagJunctionUpdate,
|
||||
TSecretTags,
|
||||
TSecretTagsInsert,
|
||||
TSecretTagsUpdate,
|
||||
TSecretVersions,
|
||||
TSecretVersionsInsert,
|
||||
TSecretVersionsUpdate,
|
||||
TSecretVersionTagJunction,
|
||||
TSecretVersionTagJunctionInsert,
|
||||
TSecretVersionTagJunctionUpdate,
|
||||
TServiceTokens,
|
||||
TServiceTokensInsert,
|
||||
TServiceTokensUpdate,
|
||||
TSuperAdmin,
|
||||
TSuperAdminInsert,
|
||||
TSuperAdminUpdate,
|
||||
TTrustedIps,
|
||||
TTrustedIpsInsert,
|
||||
TTrustedIpsUpdate,
|
||||
TUserActions,
|
||||
TUserActionsInsert,
|
||||
TUserActionsUpdate,
|
||||
TUserAliases,
|
||||
TUserAliasesInsert,
|
||||
TUserAliasesUpdate,
|
||||
TUserEncryptionKeys,
|
||||
TUserEncryptionKeysInsert,
|
||||
TUserEncryptionKeysUpdate,
|
||||
TUserGroupMembership,
|
||||
TUserGroupMembershipInsert,
|
||||
TUserGroupMembershipUpdate,
|
||||
TUsers,
|
||||
TUsersInsert,
|
||||
TUsersUpdate,
|
||||
TWebhooks,
|
||||
TWebhooksInsert,
|
||||
TWebhooksUpdate
|
||||
} from "@app/db/schemas";
|
||||
|
||||
declare module "knex" {
|
||||
namespace Knex {
|
||||
interface QueryInterface {
|
||||
primaryNode(): KnexOriginal;
|
||||
replicaNode(): KnexOriginal;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
declare module "knex/types/tables" {
|
||||
interface Tables {
|
||||
[TableName.Users]: KnexOriginal.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
|
||||
[TableName.Groups]: KnexOriginal.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
|
||||
[TableName.CertificateAuthority]: KnexOriginal.CompositeTableType<
|
||||
TCertificateAuthorities,
|
||||
TCertificateAuthoritiesInsert,
|
||||
TCertificateAuthoritiesUpdate
|
||||
>;
|
||||
[TableName.CertificateAuthorityCert]: KnexOriginal.CompositeTableType<
|
||||
TCertificateAuthorityCerts,
|
||||
TCertificateAuthorityCertsInsert,
|
||||
TCertificateAuthorityCertsUpdate
|
||||
>;
|
||||
[TableName.CertificateAuthoritySecret]: KnexOriginal.CompositeTableType<
|
||||
TCertificateAuthoritySecret,
|
||||
TCertificateAuthoritySecretInsert,
|
||||
TCertificateAuthoritySecretUpdate
|
||||
>;
|
||||
[TableName.CertificateAuthorityCrl]: KnexOriginal.CompositeTableType<
|
||||
TCertificateAuthorityCrl,
|
||||
TCertificateAuthorityCrlInsert,
|
||||
TCertificateAuthorityCrlUpdate
|
||||
>;
|
||||
[TableName.Certificate]: KnexOriginal.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
|
||||
[TableName.CertificateBody]: KnexOriginal.CompositeTableType<
|
||||
TCertificateBodies,
|
||||
TCertificateBodiesInsert,
|
||||
TCertificateBodiesUpdate
|
||||
>;
|
||||
[TableName.CertificateSecret]: KnexOriginal.CompositeTableType<
|
||||
TCertificateSecrets,
|
||||
TCertificateSecretsInsert,
|
||||
TCertificateSecretsUpdate
|
||||
>;
|
||||
[TableName.UserGroupMembership]: KnexOriginal.CompositeTableType<
|
||||
TUserGroupMembership,
|
||||
TUserGroupMembershipInsert,
|
||||
TUserGroupMembershipUpdate
|
||||
>;
|
||||
[TableName.GroupProjectMembership]: KnexOriginal.CompositeTableType<
|
||||
TGroupProjectMemberships,
|
||||
TGroupProjectMembershipsInsert,
|
||||
TGroupProjectMembershipsUpdate
|
||||
>;
|
||||
[TableName.GroupProjectMembershipRole]: KnexOriginal.CompositeTableType<
|
||||
TGroupProjectMembershipRoles,
|
||||
TGroupProjectMembershipRolesInsert,
|
||||
TGroupProjectMembershipRolesUpdate
|
||||
>;
|
||||
[TableName.UserAliases]: KnexOriginal.CompositeTableType<TUserAliases, TUserAliasesInsert, TUserAliasesUpdate>;
|
||||
[TableName.UserEncryptionKey]: KnexOriginal.CompositeTableType<
|
||||
TUserEncryptionKeys,
|
||||
TUserEncryptionKeysInsert,
|
||||
TUserEncryptionKeysUpdate
|
||||
>;
|
||||
[TableName.AuthTokens]: KnexOriginal.CompositeTableType<TAuthTokens, TAuthTokensInsert, TAuthTokensUpdate>;
|
||||
[TableName.AuthTokenSession]: KnexOriginal.CompositeTableType<
|
||||
TAuthTokenSessions,
|
||||
TAuthTokenSessionsInsert,
|
||||
TAuthTokenSessionsUpdate
|
||||
>;
|
||||
[TableName.BackupPrivateKey]: KnexOriginal.CompositeTableType<
|
||||
TBackupPrivateKey,
|
||||
TBackupPrivateKeyInsert,
|
||||
TBackupPrivateKeyUpdate
|
||||
>;
|
||||
[TableName.Organization]: KnexOriginal.CompositeTableType<
|
||||
TOrganizations,
|
||||
TOrganizationsInsert,
|
||||
TOrganizationsUpdate
|
||||
>;
|
||||
[TableName.OrgMembership]: KnexOriginal.CompositeTableType<
|
||||
TOrgMemberships,
|
||||
TOrgMembershipsInsert,
|
||||
TOrgMembershipsUpdate
|
||||
>;
|
||||
[TableName.OrgRoles]: KnexOriginal.CompositeTableType<TOrgRoles, TOrgRolesInsert, TOrgRolesUpdate>;
|
||||
[TableName.IncidentContact]: KnexOriginal.CompositeTableType<
|
||||
TIncidentContacts,
|
||||
TIncidentContactsInsert,
|
||||
TIncidentContactsUpdate
|
||||
>;
|
||||
[TableName.UserAction]: KnexOriginal.CompositeTableType<TUserActions, TUserActionsInsert, TUserActionsUpdate>;
|
||||
[TableName.SuperAdmin]: KnexOriginal.CompositeTableType<TSuperAdmin, TSuperAdminInsert, TSuperAdminUpdate>;
|
||||
[TableName.ApiKey]: KnexOriginal.CompositeTableType<TApiKeys, TApiKeysInsert, TApiKeysUpdate>;
|
||||
[TableName.Project]: KnexOriginal.CompositeTableType<TProjects, TProjectsInsert, TProjectsUpdate>;
|
||||
[TableName.ProjectMembership]: KnexOriginal.CompositeTableType<
|
||||
TProjectMemberships,
|
||||
TProjectMembershipsInsert,
|
||||
TProjectMembershipsUpdate
|
||||
>;
|
||||
[TableName.Environment]: KnexOriginal.CompositeTableType<
|
||||
TProjectEnvironments,
|
||||
TProjectEnvironmentsInsert,
|
||||
TProjectEnvironmentsUpdate
|
||||
>;
|
||||
[TableName.ProjectBot]: KnexOriginal.CompositeTableType<TProjectBots, TProjectBotsInsert, TProjectBotsUpdate>;
|
||||
[TableName.ProjectUserMembershipRole]: KnexOriginal.CompositeTableType<
|
||||
TProjectUserMembershipRoles,
|
||||
TProjectUserMembershipRolesInsert,
|
||||
TProjectUserMembershipRolesUpdate
|
||||
>;
|
||||
[TableName.ProjectRoles]: KnexOriginal.CompositeTableType<TProjectRoles, TProjectRolesInsert, TProjectRolesUpdate>;
|
||||
[TableName.ProjectUserAdditionalPrivilege]: KnexOriginal.CompositeTableType<
|
||||
TProjectUserAdditionalPrivilege,
|
||||
TProjectUserAdditionalPrivilegeInsert,
|
||||
TProjectUserAdditionalPrivilegeUpdate
|
||||
>;
|
||||
[TableName.ProjectKeys]: KnexOriginal.CompositeTableType<TProjectKeys, TProjectKeysInsert, TProjectKeysUpdate>;
|
||||
[TableName.Secret]: KnexOriginal.CompositeTableType<TSecrets, TSecretsInsert, TSecretsUpdate>;
|
||||
[TableName.SecretReference]: KnexOriginal.CompositeTableType<
|
||||
TSecretReferences,
|
||||
TSecretReferencesInsert,
|
||||
TSecretReferencesUpdate
|
||||
>;
|
||||
[TableName.SecretBlindIndex]: KnexOriginal.CompositeTableType<
|
||||
TSecretBlindIndexes,
|
||||
TSecretBlindIndexesInsert,
|
||||
TSecretBlindIndexesUpdate
|
||||
>;
|
||||
[TableName.SecretVersion]: KnexOriginal.CompositeTableType<
|
||||
TSecretVersions,
|
||||
TSecretVersionsInsert,
|
||||
TSecretVersionsUpdate
|
||||
>;
|
||||
[TableName.SecretFolder]: KnexOriginal.CompositeTableType<
|
||||
TSecretFolders,
|
||||
TSecretFoldersInsert,
|
||||
TSecretFoldersUpdate
|
||||
>;
|
||||
[TableName.SecretFolderVersion]: KnexOriginal.CompositeTableType<
|
||||
TSecretFolderVersions,
|
||||
TSecretFolderVersionsInsert,
|
||||
TSecretFolderVersionsUpdate
|
||||
>;
|
||||
[TableName.SecretSharing]: KnexOriginal.CompositeTableType<
|
||||
TSecretSharing,
|
||||
TSecretSharingInsert,
|
||||
TSecretSharingUpdate
|
||||
>;
|
||||
[TableName.RateLimit]: KnexOriginal.CompositeTableType<TRateLimit, TRateLimitInsert, TRateLimitUpdate>;
|
||||
[TableName.SecretTag]: KnexOriginal.CompositeTableType<TSecretTags, TSecretTagsInsert, TSecretTagsUpdate>;
|
||||
[TableName.SecretImport]: KnexOriginal.CompositeTableType<
|
||||
TSecretImports,
|
||||
TSecretImportsInsert,
|
||||
TSecretImportsUpdate
|
||||
>;
|
||||
[TableName.Integration]: KnexOriginal.CompositeTableType<TIntegrations, TIntegrationsInsert, TIntegrationsUpdate>;
|
||||
[TableName.Webhook]: KnexOriginal.CompositeTableType<TWebhooks, TWebhooksInsert, TWebhooksUpdate>;
|
||||
[TableName.ServiceToken]: KnexOriginal.CompositeTableType<
|
||||
TServiceTokens,
|
||||
TServiceTokensInsert,
|
||||
TServiceTokensUpdate
|
||||
>;
|
||||
[TableName.IntegrationAuth]: KnexOriginal.CompositeTableType<
|
||||
TIntegrationAuths,
|
||||
TIntegrationAuthsInsert,
|
||||
TIntegrationAuthsUpdate
|
||||
>;
|
||||
[TableName.Identity]: KnexOriginal.CompositeTableType<TIdentities, TIdentitiesInsert, TIdentitiesUpdate>;
|
||||
[TableName.IdentityTokenAuth]: KnexOriginal.CompositeTableType<
|
||||
TIdentityTokenAuths,
|
||||
TIdentityTokenAuthsInsert,
|
||||
TIdentityTokenAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityUniversalAuth]: KnexOriginal.CompositeTableType<
|
||||
TIdentityUniversalAuths,
|
||||
TIdentityUniversalAuthsInsert,
|
||||
TIdentityUniversalAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityKubernetesAuth]: KnexOriginal.CompositeTableType<
|
||||
TIdentityKubernetesAuths,
|
||||
TIdentityKubernetesAuthsInsert,
|
||||
TIdentityKubernetesAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityGcpAuth]: KnexOriginal.CompositeTableType<
|
||||
TIdentityGcpAuths,
|
||||
TIdentityGcpAuthsInsert,
|
||||
TIdentityGcpAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityAwsAuth]: KnexOriginal.CompositeTableType<
|
||||
TIdentityAwsAuths,
|
||||
TIdentityAwsAuthsInsert,
|
||||
TIdentityAwsAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityAzureAuth]: KnexOriginal.CompositeTableType<
|
||||
TIdentityAzureAuths,
|
||||
TIdentityAzureAuthsInsert,
|
||||
TIdentityAzureAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityOidcAuth]: KnexOriginal.CompositeTableType<
|
||||
TIdentityOidcAuths,
|
||||
TIdentityOidcAuthsInsert,
|
||||
TIdentityOidcAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityUaClientSecret]: KnexOriginal.CompositeTableType<
|
||||
TIdentityUaClientSecrets,
|
||||
TIdentityUaClientSecretsInsert,
|
||||
TIdentityUaClientSecretsUpdate
|
||||
>;
|
||||
[TableName.IdentityAccessToken]: KnexOriginal.CompositeTableType<
|
||||
TIdentityAccessTokens,
|
||||
TIdentityAccessTokensInsert,
|
||||
TIdentityAccessTokensUpdate
|
||||
>;
|
||||
[TableName.IdentityOrgMembership]: KnexOriginal.CompositeTableType<
|
||||
TIdentityOrgMemberships,
|
||||
TIdentityOrgMembershipsInsert,
|
||||
TIdentityOrgMembershipsUpdate
|
||||
>;
|
||||
[TableName.IdentityProjectMembership]: KnexOriginal.CompositeTableType<
|
||||
TIdentityProjectMemberships,
|
||||
TIdentityProjectMembershipsInsert,
|
||||
TIdentityProjectMembershipsUpdate
|
||||
>;
|
||||
[TableName.IdentityProjectMembershipRole]: KnexOriginal.CompositeTableType<
|
||||
TIdentityProjectMembershipRole,
|
||||
TIdentityProjectMembershipRoleInsert,
|
||||
TIdentityProjectMembershipRoleUpdate
|
||||
>;
|
||||
[TableName.IdentityProjectAdditionalPrivilege]: KnexOriginal.CompositeTableType<
|
||||
TIdentityProjectAdditionalPrivilege,
|
||||
TIdentityProjectAdditionalPrivilegeInsert,
|
||||
TIdentityProjectAdditionalPrivilegeUpdate
|
||||
>;
|
||||
|
||||
[TableName.AccessApprovalPolicy]: KnexOriginal.CompositeTableType<
|
||||
TAccessApprovalPolicies,
|
||||
TAccessApprovalPoliciesInsert,
|
||||
TAccessApprovalPoliciesUpdate
|
||||
>;
|
||||
|
||||
[TableName.AccessApprovalPolicyApprover]: KnexOriginal.CompositeTableType<
|
||||
TAccessApprovalPoliciesApprovers,
|
||||
TAccessApprovalPoliciesApproversInsert,
|
||||
TAccessApprovalPoliciesApproversUpdate
|
||||
>;
|
||||
|
||||
[TableName.AccessApprovalRequest]: KnexOriginal.CompositeTableType<
|
||||
TAccessApprovalRequests,
|
||||
TAccessApprovalRequestsInsert,
|
||||
TAccessApprovalRequestsUpdate
|
||||
>;
|
||||
|
||||
[TableName.AccessApprovalRequestReviewer]: KnexOriginal.CompositeTableType<
|
||||
TAccessApprovalRequestsReviewers,
|
||||
TAccessApprovalRequestsReviewersInsert,
|
||||
TAccessApprovalRequestsReviewersUpdate
|
||||
>;
|
||||
|
||||
[TableName.ScimToken]: KnexOriginal.CompositeTableType<TScimTokens, TScimTokensInsert, TScimTokensUpdate>;
|
||||
[TableName.SecretApprovalPolicy]: KnexOriginal.CompositeTableType<
|
||||
TSecretApprovalPolicies,
|
||||
TSecretApprovalPoliciesInsert,
|
||||
TSecretApprovalPoliciesUpdate
|
||||
>;
|
||||
[TableName.SecretApprovalPolicyApprover]: KnexOriginal.CompositeTableType<
|
||||
TSecretApprovalPoliciesApprovers,
|
||||
TSecretApprovalPoliciesApproversInsert,
|
||||
TSecretApprovalPoliciesApproversUpdate
|
||||
>;
|
||||
[TableName.SecretApprovalRequest]: KnexOriginal.CompositeTableType<
|
||||
TSecretApprovalRequests,
|
||||
TSecretApprovalRequestsInsert,
|
||||
TSecretApprovalRequestsUpdate
|
||||
>;
|
||||
[TableName.SecretApprovalRequestReviewer]: KnexOriginal.CompositeTableType<
|
||||
TSecretApprovalRequestsReviewers,
|
||||
TSecretApprovalRequestsReviewersInsert,
|
||||
TSecretApprovalRequestsReviewersUpdate
|
||||
>;
|
||||
[TableName.SecretApprovalRequestSecret]: KnexOriginal.CompositeTableType<
|
||||
TSecretApprovalRequestsSecrets,
|
||||
TSecretApprovalRequestsSecretsInsert,
|
||||
TSecretApprovalRequestsSecretsUpdate
|
||||
>;
|
||||
[TableName.SecretApprovalRequestSecretTag]: KnexOriginal.CompositeTableType<
|
||||
TSecretApprovalRequestSecretTags,
|
||||
TSecretApprovalRequestSecretTagsInsert,
|
||||
TSecretApprovalRequestSecretTagsUpdate
|
||||
>;
|
||||
[TableName.SecretRotation]: KnexOriginal.CompositeTableType<
|
||||
TSecretRotations,
|
||||
TSecretRotationsInsert,
|
||||
TSecretRotationsUpdate
|
||||
>;
|
||||
[TableName.SecretRotationOutput]: KnexOriginal.CompositeTableType<
|
||||
TSecretRotationOutputs,
|
||||
TSecretRotationOutputsInsert,
|
||||
TSecretRotationOutputsUpdate
|
||||
>;
|
||||
[TableName.Snapshot]: KnexOriginal.CompositeTableType<
|
||||
TSecretSnapshots,
|
||||
TSecretSnapshotsInsert,
|
||||
TSecretSnapshotsUpdate
|
||||
>;
|
||||
[TableName.SnapshotSecret]: KnexOriginal.CompositeTableType<
|
||||
TSecretSnapshotSecrets,
|
||||
TSecretSnapshotSecretsInsert,
|
||||
TSecretSnapshotSecretsUpdate
|
||||
>;
|
||||
[TableName.SnapshotFolder]: KnexOriginal.CompositeTableType<
|
||||
TSecretSnapshotFolders,
|
||||
TSecretSnapshotFoldersInsert,
|
||||
TSecretSnapshotFoldersUpdate
|
||||
>;
|
||||
[TableName.DynamicSecret]: KnexOriginal.CompositeTableType<
|
||||
TDynamicSecrets,
|
||||
TDynamicSecretsInsert,
|
||||
TDynamicSecretsUpdate
|
||||
>;
|
||||
[TableName.DynamicSecretLease]: KnexOriginal.CompositeTableType<
|
||||
TDynamicSecretLeases,
|
||||
TDynamicSecretLeasesInsert,
|
||||
TDynamicSecretLeasesUpdate
|
||||
>;
|
||||
[TableName.SamlConfig]: KnexOriginal.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
|
||||
[TableName.OidcConfig]: KnexOriginal.CompositeTableType<TOidcConfigs, TOidcConfigsInsert, TOidcConfigsUpdate>;
|
||||
[TableName.LdapConfig]: KnexOriginal.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
|
||||
[TableName.LdapGroupMap]: KnexOriginal.CompositeTableType<
|
||||
TLdapGroupMaps,
|
||||
TLdapGroupMapsInsert,
|
||||
TLdapGroupMapsUpdate
|
||||
>;
|
||||
[TableName.OrgBot]: KnexOriginal.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
|
||||
[TableName.AuditLog]: KnexOriginal.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
|
||||
[TableName.AuditLogStream]: KnexOriginal.CompositeTableType<
|
||||
TAuditLogStreams,
|
||||
TAuditLogStreamsInsert,
|
||||
TAuditLogStreamsUpdate
|
||||
>;
|
||||
[TableName.GitAppInstallSession]: KnexOriginal.CompositeTableType<
|
||||
TGitAppInstallSessions,
|
||||
TGitAppInstallSessionsInsert,
|
||||
TGitAppInstallSessionsUpdate
|
||||
>;
|
||||
[TableName.GitAppOrg]: KnexOriginal.CompositeTableType<TGitAppOrg, TGitAppOrgInsert, TGitAppOrgUpdate>;
|
||||
[TableName.SecretScanningGitRisk]: KnexOriginal.CompositeTableType<
|
||||
TSecretScanningGitRisks,
|
||||
TSecretScanningGitRisksInsert,
|
||||
TSecretScanningGitRisksUpdate
|
||||
>;
|
||||
[TableName.TrustedIps]: KnexOriginal.CompositeTableType<TTrustedIps, TTrustedIpsInsert, TTrustedIpsUpdate>;
|
||||
// Junction tables
|
||||
[TableName.JnSecretTag]: KnexOriginal.CompositeTableType<
|
||||
TSecretTagJunction,
|
||||
TSecretTagJunctionInsert,
|
||||
TSecretTagJunctionUpdate
|
||||
>;
|
||||
[TableName.SecretVersionTag]: KnexOriginal.CompositeTableType<
|
||||
TSecretVersionTagJunction,
|
||||
TSecretVersionTagJunctionInsert,
|
||||
TSecretVersionTagJunctionUpdate
|
||||
>;
|
||||
// KMS service
|
||||
[TableName.KmsServerRootConfig]: KnexOriginal.CompositeTableType<
|
||||
TKmsRootConfig,
|
||||
TKmsRootConfigInsert,
|
||||
TKmsRootConfigUpdate
|
||||
>;
|
||||
[TableName.InternalKms]: KnexOriginal.CompositeTableType<TInternalKms, TInternalKmsInsert, TInternalKmsUpdate>;
|
||||
[TableName.ExternalKms]: KnexOriginal.CompositeTableType<TExternalKms, TExternalKmsInsert, TExternalKmsUpdate>;
|
||||
[TableName.KmsKey]: KnexOriginal.CompositeTableType<TKmsKeys, TKmsKeysInsert, TKmsKeysUpdate>;
|
||||
[TableName.KmsKeyVersion]: KnexOriginal.CompositeTableType<
|
||||
TKmsKeyVersions,
|
||||
TKmsKeyVersionsInsert,
|
||||
TKmsKeyVersionsUpdate
|
||||
>;
|
||||
}
|
||||
}
|
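The deleted knex.d.ts above wired every table in TableName to its generated row, insert, and update types through the `knex/types/tables` augmentation. A hedged sketch of the kind of type-checked query this enabled is shown below; the connection setup and the email lookup are illustrative assumptions, not code from this diff.

import knex from "knex";
import { TableName } from "@app/db/schemas";

// placeholder connection; DB_CONNECTION_URI mirrors the backend's own env variable
const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });

const findUserByEmail = async (email: string) => {
  // with the Tables augmentation, the row type here is inferred as TUsers | undefined
  const user = await db(TableName.Users).where({ email }).first();
  return user;
};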
1  backend/src/@types/passport-gitlab2.d.ts  vendored
@@ -1 +0,0 @@
declare module "passport-gitlab2";
69  backend/src/config/index.ts  Normal file
@@ -0,0 +1,69 @@
import InfisicalClient from 'infisical-node';

const client = new InfisicalClient({
  token: process.env.INFISICAL_TOKEN!
});

export const getPort = async () => (await client.getSecret('PORT')).secretValue || 4000;
export const getInviteOnlySignup = async () => (await client.getSecret('INVITE_ONLY_SIGNUP')).secretValue == undefined ? false : (await client.getSecret('INVITE_ONLY_SIGNUP')).secretValue;
export const getEncryptionKey = async () => (await client.getSecret('ENCRYPTION_KEY')).secretValue;
export const getSaltRounds = async () => parseInt((await client.getSecret('SALT_ROUNDS')).secretValue) || 10;
export const getJwtAuthLifetime = async () => (await client.getSecret('JWT_AUTH_LIFETIME')).secretValue || '10d';
export const getJwtAuthSecret = async () => (await client.getSecret('JWT_AUTH_SECRET')).secretValue;
export const getJwtMfaLifetime = async () => (await client.getSecret('JWT_MFA_LIFETIME')).secretValue || '5m';
export const getJwtMfaSecret = async () => (await client.getSecret('JWT_MFA_SECRET')).secretValue;
export const getJwtRefreshLifetime = async () => (await client.getSecret('JWT_REFRESH_LIFETIME')).secretValue || '90d';
export const getJwtRefreshSecret = async () => (await client.getSecret('JWT_REFRESH_SECRET')).secretValue;
export const getJwtServiceSecret = async () => (await client.getSecret('JWT_SERVICE_SECRET')).secretValue;
export const getJwtSignupLifetime = async () => (await client.getSecret('JWT_SIGNUP_LIFETIME')).secretValue || '15m';
export const getJwtSignupSecret = async () => (await client.getSecret('JWT_SIGNUP_SECRET')).secretValue;
export const getMongoURL = async () => (await client.getSecret('MONGO_URL')).secretValue;
export const getNodeEnv = async () => (await client.getSecret('NODE_ENV')).secretValue || 'production';
export const getVerboseErrorOutput = async () => (await client.getSecret('VERBOSE_ERROR_OUTPUT')).secretValue === 'true' && true;
export const getLokiHost = async () => (await client.getSecret('LOKI_HOST')).secretValue;
export const getClientIdAzure = async () => (await client.getSecret('CLIENT_ID_AZURE')).secretValue;
export const getClientIdHeroku = async () => (await client.getSecret('CLIENT_ID_HEROKU')).secretValue;
export const getClientIdVercel = async () => (await client.getSecret('CLIENT_ID_VERCEL')).secretValue;
export const getClientIdNetlify = async () => (await client.getSecret('CLIENT_ID_NETLIFY')).secretValue;
export const getClientIdGitHub = async () => (await client.getSecret('CLIENT_ID_GITHUB')).secretValue;
export const getClientIdGitLab = async () => (await client.getSecret('CLIENT_ID_GITLAB')).secretValue;
export const getClientSecretAzure = async () => (await client.getSecret('CLIENT_SECRET_AZURE')).secretValue;
export const getClientSecretHeroku = async () => (await client.getSecret('CLIENT_SECRET_HEROKU')).secretValue;
export const getClientSecretVercel = async () => (await client.getSecret('CLIENT_SECRET_VERCEL')).secretValue;
export const getClientSecretNetlify = async () => (await client.getSecret('CLIENT_SECRET_NETLIFY')).secretValue;
export const getClientSecretGitHub = async () => (await client.getSecret('CLIENT_SECRET_GITHUB')).secretValue;
export const getClientSecretGitLab = async () => (await client.getSecret('CLIENT_SECRET_GITLAB')).secretValue;
export const getClientSlugVercel = async () => (await client.getSecret('CLIENT_SLUG_VERCEL')).secretValue;
export const getPostHogHost = async () => (await client.getSecret('POSTHOG_HOST')).secretValue || 'https://app.posthog.com';
export const getPostHogProjectApiKey = async () => (await client.getSecret('POSTHOG_PROJECT_API_KEY')).secretValue || 'phc_nSin8j5q2zdhpFDI1ETmFNUIuTG4DwKVyIigrY10XiE';
export const getSentryDSN = async () => (await client.getSecret('SENTRY_DSN')).secretValue;
export const getSiteURL = async () => (await client.getSecret('SITE_URL')).secretValue;
export const getSmtpHost = async () => (await client.getSecret('SMTP_HOST')).secretValue;
export const getSmtpSecure = async () => (await client.getSecret('SMTP_SECURE')).secretValue === 'true' || false;
export const getSmtpPort = async () => parseInt((await client.getSecret('SMTP_PORT')).secretValue) || 587;
export const getSmtpUsername = async () => (await client.getSecret('SMTP_USERNAME')).secretValue;
export const getSmtpPassword = async () => (await client.getSecret('SMTP_PASSWORD')).secretValue;
export const getSmtpFromAddress = async () => (await client.getSecret('SMTP_FROM_ADDRESS')).secretValue;
export const getSmtpFromName = async () => (await client.getSecret('SMTP_FROM_NAME')).secretValue || 'Infisical';
export const getStripeProductStarter = async () => (await client.getSecret('STRIPE_PRODUCT_STARTER')).secretValue;
export const getStripeProductPro = async () => (await client.getSecret('STRIPE_PRODUCT_PRO')).secretValue;
export const getStripeProductTeam = async () => (await client.getSecret('STRIPE_PRODUCT_TEAM')).secretValue;
export const getStripePublishableKey = async () => (await client.getSecret('STRIPE_PUBLISHABLE_KEY')).secretValue;
export const getStripeSecretKey = async () => (await client.getSecret('STRIPE_SECRET_KEY')).secretValue;
export const getStripeWebhookSecret = async () => (await client.getSecret('STRIPE_WEBHOOK_SECRET')).secretValue;
export const getTelemetryEnabled = async () => (await client.getSecret('TELEMETRY_ENABLED')).secretValue !== 'false' && true;
export const getLoopsApiKey = async () => (await client.getSecret('LOOPS_API_KEY')).secretValue;
export const getSmtpConfigured = async () => (await client.getSecret('SMTP_HOST')).secretValue == '' || (await client.getSecret('SMTP_HOST')).secretValue == undefined ? false : true
export const getHttpsEnabled = async () => {
  if ((await getNodeEnv()) != "production") {
    // no https for anything other than prod
    return false
  }

  if ((await client.getSecret('HTTPS_ENABLED')).secretValue == undefined || (await client.getSecret('HTTPS_ENABLED')).secretValue == "") {
    // default when no value present
    return true
  }

  return (await client.getSecret('HTTPS_ENABLED')).secretValue === 'true' && true
}
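Each getter above resolves a secret through the Infisical client on every call and applies a fallback where one makes sense. A hedged sketch of how a caller might consume them at startup follows; the Express wiring and the startServer helper are assumptions, not code from this diff.

import express from 'express';
import { getPort, getMongoURL, getHttpsEnabled } from './config';

const startServer = async () => {
  const app = express();
  const port = await getPort();          // falls back to 4000 when PORT is unset
  const mongoUrl = await getMongoURL();  // required secret, no fallback value
  const useHttps = await getHttpsEnabled();
  // ...connect to MongoDB with mongoUrl (and configure TLS when useHttps) before serving traffic
  app.listen(port, () => console.log(`API listening on port ${port}`));
};

startServer();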
16  backend/src/config/request.ts  Normal file
@@ -0,0 +1,16 @@
import axios from 'axios';
import axiosRetry from 'axios-retry';

const axiosInstance = axios.create();

// add retry functionality to the axios instance
axiosRetry(axiosInstance, {
  retries: 3,
  retryDelay: axiosRetry.exponentialDelay, // exponential back-off delay between retries
  retryCondition: (error) => {
    // only retry if the error is a network error or a 5xx server error
    return axiosRetry.isNetworkError(error) || axiosRetry.isRetryableError(error);
  },
});

export default axiosInstance;
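Callers import this shared instance instead of the bare axios default so that every outbound request inherits the retry policy. A small usage sketch with a placeholder URL:

import request from './config/request';

// a network error or retryable (5xx) response is retried up to 3 times
// with exponential back-off before the promise finally rejects
const fetchIntegrationApps = async (accessToken: string) => {
  const { data } = await request.get('https://api.example.com/v1/apps', {
    headers: { Authorization: `Bearer ${accessToken}` }
  });
  return data;
};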
269  backend/src/controllers/v1/authController.ts  Normal file
@@ -0,0 +1,269 @@
import * as Sentry from '@sentry/node';
import { Request, Response } from 'express';
import jwt from 'jsonwebtoken';
import * as bigintConversion from 'bigint-conversion';
// eslint-disable-next-line @typescript-eslint/no-var-requires
const jsrp = require('jsrp');
import { User, LoginSRPDetail } from '../../models';
import { createToken, issueAuthTokens, clearTokens } from '../../helpers/auth';
import { checkUserDevice } from '../../helpers/user';
import {
  ACTION_LOGIN,
  ACTION_LOGOUT
} from '../../variables';
import { BadRequestError } from '../../utils/errors';
import { EELogService } from '../../ee/services';
import { getChannelFromUserAgent } from '../../utils/posthog'; // TODO: move this
import {
  getJwtRefreshSecret,
  getJwtAuthLifetime,
  getJwtAuthSecret,
  getHttpsEnabled
} from '../../config';

declare module 'jsonwebtoken' {
  export interface UserIDJwtPayload extends jwt.JwtPayload {
    userId: string;
  }
}

/**
 * Log in user step 1: Return [salt] and [serverPublicKey] as part of step 1 of SRP protocol
 * @param req
 * @param res
 * @returns
 */
export const login1 = async (req: Request, res: Response) => {
  try {
    const {
      email,
      clientPublicKey
    }: { email: string; clientPublicKey: string } = req.body;

    const user = await User.findOne({
      email
    }).select('+salt +verifier');

    if (!user) throw new Error('Failed to find user');

    const server = new jsrp.server();
    server.init(
      {
        salt: user.salt,
        verifier: user.verifier
      },
      async () => {
        // generate server-side public key
        const serverPublicKey = server.getPublicKey();

        await LoginSRPDetail.findOneAndReplace({ email: email }, {
          email: email,
          clientPublicKey: clientPublicKey,
          serverBInt: bigintConversion.bigintToBuf(server.bInt),
        }, { upsert: true, returnNewDocument: false })

        return res.status(200).send({
          serverPublicKey,
          salt: user.salt
        });
      }
    );
  } catch (err) {
    Sentry.setUser(null);
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to start authentication process'
    });
  }
};

/**
 * Log in user step 2: complete step 2 of SRP protocol and return token and their (encrypted)
 * private key
 * @param req
 * @param res
 * @returns
 */
export const login2 = async (req: Request, res: Response) => {
  try {
    const { email, clientProof } = req.body;
    const user = await User.findOne({
      email
    }).select('+salt +verifier +publicKey +encryptedPrivateKey +iv +tag');

    if (!user) throw new Error('Failed to find user');

    const loginSRPDetailFromDB = await LoginSRPDetail.findOneAndDelete({ email: email })

    if (!loginSRPDetailFromDB) {
      return BadRequestError(Error("It looks like some details from the first login step are missing. Please try logging in again"))
    }

    const server = new jsrp.server();
    server.init(
      {
        salt: user.salt,
        verifier: user.verifier,
        b: loginSRPDetailFromDB.serverBInt
      },
      async () => {
        server.setClientPublicKey(loginSRPDetailFromDB.clientPublicKey);

        // compare server and client shared keys
        if (server.checkClientProof(clientProof)) {
          // issue tokens

          await checkUserDevice({
            user,
            ip: req.ip,
            userAgent: req.headers['user-agent'] ?? ''
          });

          const tokens = await issueAuthTokens({ userId: user._id.toString() });

          // store (refresh) token in httpOnly cookie
          res.cookie('jid', tokens.refreshToken, {
            httpOnly: true,
            path: '/',
            sameSite: 'strict',
            secure: await getHttpsEnabled()
          });

          const loginAction = await EELogService.createAction({
            name: ACTION_LOGIN,
            userId: user._id
          });

          loginAction && await EELogService.createLog({
            userId: user._id,
            actions: [loginAction],
            channel: getChannelFromUserAgent(req.headers['user-agent']),
            ipAddress: req.ip
          });

          // return (access) token in response
          return res.status(200).send({
            token: tokens.token,
            publicKey: user.publicKey,
            encryptedPrivateKey: user.encryptedPrivateKey,
            iv: user.iv,
            tag: user.tag
          });
        }

        return res.status(400).send({
          message: 'Failed to authenticate. Try again?'
        });
      }
    );
  } catch (err) {
    Sentry.setUser(null);
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to authenticate. Try again?'
    });
  }
};

/**
 * Log out user
 * @param req
 * @param res
 * @returns
 */
export const logout = async (req: Request, res: Response) => {
  try {
    await clearTokens({
      userId: req.user._id.toString()
    });

    // clear httpOnly cookie
    res.cookie('jid', '', {
      httpOnly: true,
      path: '/',
      sameSite: 'strict',
      secure: (await getHttpsEnabled()) as boolean
    });

    const logoutAction = await EELogService.createAction({
      name: ACTION_LOGOUT,
      userId: req.user._id
    });

    logoutAction && await EELogService.createLog({
      userId: req.user._id,
      actions: [logoutAction],
      channel: getChannelFromUserAgent(req.headers['user-agent']),
      ipAddress: req.ip
    });

  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to logout'
    });
  }

  return res.status(200).send({
    message: 'Successfully logged out.'
  });
};

/**
 * Return whether the user is authenticated
 * @param req
 * @param res
 * @returns
 */
export const checkAuth = async (req: Request, res: Response) => {
  return res.status(200).send({
    message: 'Authenticated'
  });
}

/**
 * Return new token by redeeming refresh token
 * @param req
 * @param res
 * @returns
 */
export const getNewToken = async (req: Request, res: Response) => {
  try {
    const refreshToken = req.cookies.jid;

    if (!refreshToken) {
      throw new Error('Failed to find token in request cookies');
    }

    const decodedToken = <jwt.UserIDJwtPayload>(
      jwt.verify(refreshToken, await getJwtRefreshSecret())
    );

    const user = await User.findOne({
      _id: decodedToken.userId
    }).select('+publicKey');

    if (!user) throw new Error('Failed to authenticate unfound user');
    if (!user?.publicKey)
      throw new Error('Failed to authenticate not fully set up account');

    const token = createToken({
      payload: {
        userId: decodedToken.userId
      },
      expiresIn: await getJwtAuthLifetime(),
      secret: await getJwtAuthSecret()
    });

    return res.status(200).send({
      token
    });
  } catch (err) {
    Sentry.setUser(null);
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Invalid request'
    });
  }
};
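The two login handlers above implement the server side of SRP: step 1 hands back the salt and the server's ephemeral public key, step 2 checks the client's proof and only then issues tokens. A hedged client-side sketch of that flow follows; the route paths and the computeClientProof parameter are illustrative assumptions (a real client derives the proof with an SRP library such as jsrp).

import axios from 'axios';

// `computeClientProof` stands in for the SRP math a real client performs
export const srpLogin = async (
  email: string,
  clientPublicKey: string,
  computeClientProof: (salt: string, serverPublicKey: string) => string
) => {
  // step 1: send the client's ephemeral public key, receive the salt and server public key
  const { data: step1 } = await axios.post('/api/v1/auth/login1', { email, clientPublicKey });

  // step 2: prove knowledge of the password without ever sending it
  const clientProof = computeClientProof(step1.salt, step1.serverPublicKey);
  const { data: step2 } = await axios.post('/api/v1/auth/login2', { email, clientProof });

  // step2 carries the access token plus the user's encrypted private key material;
  // the refresh token arrives separately as the httpOnly `jid` cookie
  return step2;
};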
108  backend/src/controllers/v1/botController.ts  Normal file
@@ -0,0 +1,108 @@
import { Request, Response } from 'express';
import { Types } from 'mongoose';
import * as Sentry from '@sentry/node';
import { Bot, BotKey } from '../../models';
import { createBot } from '../../helpers/bot';

interface BotKey {
  encryptedKey: string;
  nonce: string;
}

/**
 * Return bot for workspace with id [workspaceId]. If a workspace bot doesn't exist,
 * then create and return a new bot.
 * @param req
 * @param res
 * @returns
 */
export const getBotByWorkspaceId = async (req: Request, res: Response) => {
  let bot;
  try {
    const { workspaceId } = req.params;

    bot = await Bot.findOne({
      workspace: workspaceId
    });

    if (!bot) {
      // case: bot doesn't exist for workspace with id [workspaceId]
      // -> create a new bot and return it
      bot = await createBot({
        name: 'Infisical Bot',
        workspaceId: new Types.ObjectId(workspaceId)
      });
    }
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to get bot for workspace'
    });
  }

  return res.status(200).send({
    bot
  });
};

/**
 * Return bot with id [req.bot._id] with active state set to [isActive].
 * @param req
 * @param res
 * @returns
 */
export const setBotActiveState = async (req: Request, res: Response) => {
  let bot;
  try {
    const { isActive, botKey }: { isActive: boolean, botKey: BotKey } = req.body;

    if (isActive) {
      // bot state set to active -> share workspace key with bot
      if (!botKey?.encryptedKey || !botKey?.nonce) {
        return res.status(400).send({
          message: 'Failed to set bot state to active - missing bot key'
        });
      }

      await BotKey.findOneAndUpdate({
        workspace: req.bot.workspace
      }, {
        encryptedKey: botKey.encryptedKey,
        nonce: botKey.nonce,
        sender: req.user._id,
        bot: req.bot._id,
        workspace: req.bot.workspace
      }, {
        upsert: true,
        new: true
      });
    } else {
      // case: bot state set to inactive -> delete bot's workspace key
      await BotKey.deleteOne({
        bot: req.bot._id
      });
    }

    bot = await Bot.findOneAndUpdate({
      _id: req.bot._id
    }, {
      isActive
    }, {
      new: true
    });

    if (!bot) throw new Error('Failed to update bot active state');

  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to update bot active state'
    });
  }

  return res.status(200).send({
    bot
  });
};
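Activating the bot requires the client to hand over the workspace key encrypted for the bot, which is why botKey is mandatory whenever isActive is true. A hedged sketch of the corresponding client calls follows; the route path is an assumption, not taken from this diff.

import axios from 'axios';

// route path is assumed; the controller above is mounted by the project's v1 router
export const activateBot = (botId: string, encryptedKey: string, nonce: string) =>
  axios.patch(`/api/v1/bot/${botId}/active`, {
    isActive: true,
    botKey: { encryptedKey, nonce } // required whenever isActive is true
  });

export const deactivateBot = (botId: string) =>
  axios.patch(`/api/v1/bot/${botId}/active`, { isActive: false });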
35  backend/src/controllers/v1/index.ts  Normal file
@@ -0,0 +1,35 @@
import * as authController from './authController';
import * as botController from './botController';
import * as integrationAuthController from './integrationAuthController';
import * as integrationController from './integrationController';
import * as keyController from './keyController';
import * as membershipController from './membershipController';
import * as membershipOrgController from './membershipOrgController';
import * as organizationController from './organizationController';
import * as passwordController from './passwordController';
import * as secretController from './secretController';
import * as serviceTokenController from './serviceTokenController';
import * as signupController from './signupController';
import * as stripeController from './stripeController';
import * as userActionController from './userActionController';
import * as userController from './userController';
import * as workspaceController from './workspaceController';

export {
  authController,
  botController,
  integrationAuthController,
  integrationController,
  keyController,
  membershipController,
  membershipOrgController,
  organizationController,
  passwordController,
  secretController,
  serviceTokenController,
  signupController,
  stripeController,
  userActionController,
  userController,
  workspaceController
};
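This barrel module exists so router files can pull whole controller namespaces from one path. A hedged sketch of typical consumption follows; the routes and their paths are illustrative assumptions.

import express from 'express';
import { authController, botController } from '../../controllers/v1';

const router = express.Router();

// paths are illustrative; the real route files also attach validation and auth middleware
router.post('/login1', authController.login1);
router.post('/login2', authController.login2);
router.get('/bot/:workspaceId', botController.getBotByWorkspaceId);

export default router;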
420  backend/src/controllers/v1/integrationAuthController.ts  Normal file
@@ -0,0 +1,420 @@
import { Request, Response } from 'express';
|
||||
import { Types } from 'mongoose';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import {
|
||||
IntegrationAuth,
|
||||
Bot
|
||||
} from '../../models';
|
||||
import { INTEGRATION_SET, getIntegrationOptions as getIntegrationOptionsFunc } from '../../variables';
|
||||
import { IntegrationService } from '../../services';
|
||||
import {
|
||||
getApps,
|
||||
getTeams,
|
||||
revokeAccess
|
||||
} from '../../integrations';
|
||||
import {
|
||||
INTEGRATION_VERCEL_API_URL,
|
||||
INTEGRATION_RAILWAY_API_URL
|
||||
} from '../../variables';
|
||||
import request from '../../config/request';
|
||||
|
||||
/***
|
||||
* Return integration authorization with id [integrationAuthId]
|
||||
*/
|
||||
export const getIntegrationAuth = async (req: Request, res: Response) => {
|
||||
let integrationAuth;
|
||||
try {
|
||||
const { integrationAuthId } = req.params;
|
||||
integrationAuth = await IntegrationAuth.findById(integrationAuthId);
|
||||
|
||||
if (!integrationAuth) return res.status(400).send({
|
||||
message: 'Failed to find integration authorization'
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to get integration authorization'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integrationAuth
|
||||
});
|
||||
}
|
||||
|
||||
export const getIntegrationOptions = async (req: Request, res: Response) => {
|
||||
const INTEGRATION_OPTIONS = await getIntegrationOptionsFunc();
|
||||
|
||||
return res.status(200).send({
|
||||
integrationOptions: INTEGRATION_OPTIONS,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Perform OAuth2 code-token exchange as part of integration [integration] for workspace with id [workspaceId]
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const oAuthExchange = async (
|
||||
req: Request,
|
||||
res: Response
|
||||
) => {
|
||||
try {
|
||||
const { workspaceId, code, integration } = req.body;
|
||||
if (!INTEGRATION_SET.has(integration))
|
||||
throw new Error('Failed to validate integration');
|
||||
|
||||
const environments = req.membership.workspace?.environments || [];
|
||||
if(environments.length === 0){
|
||||
throw new Error("Failed to get environments")
|
||||
}
|
||||
|
||||
const integrationAuth = await IntegrationService.handleOAuthExchange({
|
||||
workspaceId,
|
||||
integration,
|
||||
code,
|
||||
environment: environments[0].slug,
|
||||
});
|
||||
|
||||
return res.status(200).send({
|
||||
integrationAuth
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to get OAuth2 code-token exchange'
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Save integration access token and (optionally) access id as part of integration
|
||||
* [integration] for workspace with id [workspaceId]
|
||||
* @param req
|
||||
* @param res
|
||||
*/
|
||||
export const saveIntegrationAccessToken = async (
|
||||
req: Request,
|
||||
res: Response
|
||||
) => {
|
||||
// TODO: refactor
|
||||
// TODO: check if access token is valid for each integration
|
||||
|
||||
let integrationAuth;
|
||||
try {
|
||||
const {
|
||||
workspaceId,
|
||||
accessId,
|
||||
accessToken,
|
||||
integration
|
||||
}: {
|
||||
workspaceId: string;
|
||||
accessId: string | null;
|
||||
accessToken: string;
|
||||
integration: string;
|
||||
} = req.body;
|
||||
|
||||
const bot = await Bot.findOne({
|
||||
workspace: new Types.ObjectId(workspaceId),
|
||||
isActive: true
|
||||
});
|
||||
|
||||
if (!bot) throw new Error('Bot must be enabled to save integration access token');
|
||||
|
||||
integrationAuth = await IntegrationAuth.findOneAndUpdate({
|
||||
workspace: new Types.ObjectId(workspaceId),
|
||||
integration
|
||||
}, {
|
||||
workspace: new Types.ObjectId(workspaceId),
|
||||
integration
|
||||
}, {
|
||||
new: true,
|
||||
upsert: true
|
||||
});
|
||||
|
||||
// encrypt and save integration access details
|
||||
integrationAuth = await IntegrationService.setIntegrationAuthAccess({
|
||||
integrationAuthId: integrationAuth._id.toString(),
|
||||
accessId,
|
||||
accessToken,
|
||||
accessExpiresAt: undefined
|
||||
});
|
||||
|
||||
if (!integrationAuth) throw new Error('Failed to save integration access token');
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to save access token for integration'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integrationAuth
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return list of applications allowed for integration with integration authorization id [integrationAuthId]
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const getIntegrationAuthApps = async (req: Request, res: Response) => {
|
||||
let apps;
|
||||
try {
|
||||
const teamId = req.query.teamId as string;
|
||||
|
||||
apps = await getApps({
|
||||
integrationAuth: req.integrationAuth,
|
||||
accessToken: req.accessToken,
|
||||
...teamId && { teamId }
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to get integration authorization applications",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
apps
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Return list of teams allowed for integration with integration authorization id [integrationAuthId]
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const getIntegrationAuthTeams = async (req: Request, res: Response) => {
|
||||
const teams = await getTeams({
|
||||
integrationAuth: req.integrationAuth,
|
||||
accessToken: req.accessToken
|
||||
});
|
||||
|
||||
return res.status(200).send({
|
||||
teams
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return list of available Vercel (preview) branches for Vercel project with
|
||||
* id [appId]
|
||||
* @param req
|
||||
* @param res
|
||||
*/
|
||||
export const getIntegrationAuthVercelBranches = async (req: Request, res: Response) => {
|
||||
const { integrationAuthId } = req.params;
|
||||
const appId = req.query.appId as string;
|
||||
|
||||
interface VercelBranch {
|
||||
ref: string;
|
||||
lastCommit: string;
|
||||
isProtected: boolean;
|
||||
}
|
||||
|
||||
const params = new URLSearchParams({
|
||||
projectId: appId,
|
||||
...(req.integrationAuth.teamId ? {
|
||||
teamId: req.integrationAuth.teamId
|
||||
} : {})
|
||||
});
|
||||
|
||||
let branches: string[] = [];
|
||||
|
||||
if (appId && appId !== '') {
|
||||
const { data }: { data: VercelBranch[] } = await request.get(
|
||||
`${INTEGRATION_VERCEL_API_URL}/v1/integrations/git-branches`,
|
||||
{
|
||||
params,
|
||||
headers: {
|
||||
Authorization: `Bearer ${req.accessToken}`,
|
||||
'Accept-Encoding': 'application/json'
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
branches = data.map((b) => b.ref);
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
branches
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return list of Railway environments for Railway project with
|
||||
* id [appId]
|
||||
* @param req
|
||||
* @param res
|
||||
*/
|
||||
export const getIntegrationAuthRailwayEnvironments = async (req: Request, res: Response) => {
|
||||
const { integrationAuthId } = req.params;
|
||||
const appId = req.query.appId as string;
|
||||
|
||||
interface RailwayEnvironment {
|
||||
node: {
|
||||
id: string;
|
||||
name: string;
|
||||
isEphemeral: boolean;
|
||||
}
|
||||
}
|
||||
|
||||
interface Environment {
|
||||
environmentId: string;
|
||||
name: string;
|
||||
}
|
||||
|
||||
let environments: Environment[] = [];
|
||||
|
||||
if (appId && appId !== '') {
|
||||
const query = `
|
||||
query GetEnvironments($projectId: String!, $after: String, $before: String, $first: Int, $isEphemeral: Boolean, $last: Int) {
|
||||
environments(projectId: $projectId, after: $after, before: $before, first: $first, isEphemeral: $isEphemeral, last: $last) {
|
||||
edges {
|
||||
node {
|
||||
id
|
||||
name
|
||||
isEphemeral
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const variables = {
|
||||
projectId: appId
|
||||
}
|
||||
|
||||
const { data: { data: { environments: { edges } } } } = await request.post(INTEGRATION_RAILWAY_API_URL, {
|
||||
query,
|
||||
variables,
|
||||
}, {
|
||||
headers: {
|
||||
'Authorization': `Bearer ${req.accessToken}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
|
||||
environments = edges.map((e: RailwayEnvironment) => {
|
||||
return ({
|
||||
name: e.node.name,
|
||||
environmentId: e.node.id
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
environments
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return list of Railway services for Railway project with id
|
||||
* [appId]
|
||||
* @param req
|
||||
* @param res
|
||||
*/
|
||||
export const getIntegrationAuthRailwayServices = async (req: Request, res: Response) => {
|
||||
const { integrationAuthId } = req.params;
|
||||
const appId = req.query.appId as string;
|
||||
|
||||
interface RailwayService {
|
||||
node: {
|
||||
id: string;
|
||||
name: string;
|
||||
}
|
||||
}
|
||||
|
||||
interface Service {
|
||||
name: string;
|
||||
serviceId: string;
|
||||
}
|
||||
|
||||
let services: Service[] = [];
|
||||
|
||||
const query = `
|
||||
query project($id: String!) {
|
||||
project(id: $id) {
|
||||
createdAt
|
||||
deletedAt
|
||||
id
|
||||
description
|
||||
expiredAt
|
||||
isPublic
|
||||
isTempProject
|
||||
isUpdatable
|
||||
name
|
||||
prDeploys
|
||||
teamId
|
||||
updatedAt
|
||||
upstreamUrl
|
||||
services {
|
||||
edges {
|
||||
node {
|
||||
id
|
||||
name
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
if (appId && appId !== '') {
|
||||
const variables = {
|
||||
id: appId
|
||||
}
|
||||
|
||||
const { data: { data: { project: { services: { edges } } } } } = await request.post(INTEGRATION_RAILWAY_API_URL, {
|
||||
query,
|
||||
variables
|
||||
}, {
|
||||
headers: {
|
||||
'Authorization': `Bearer ${req.accessToken}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
|
||||
services = edges.map((e: RailwayService) => ({
|
||||
name: e.node.name,
|
||||
serviceId: e.node.id
|
||||
}));
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
services
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete integration authorization with id [integrationAuthId]
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const deleteIntegrationAuth = async (req: Request, res: Response) => {
|
||||
let integrationAuth;
|
||||
try {
|
||||
integrationAuth = await revokeAccess({
|
||||
integrationAuth: req.integrationAuth,
|
||||
accessToken: req.accessToken,
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to delete integration authorization",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integrationAuth,
|
||||
});
|
||||
};
|
backend/src/controllers/v1/integrationController.ts (new file, 167 lines)
@@ -0,0 +1,167 @@
import { Request, Response } from 'express';
import { Types } from 'mongoose';
import * as Sentry from '@sentry/node';
import {
  Integration
} from '../../models';
import { EventService } from '../../services';
import { eventPushSecrets } from '../../events';

/**
 * Create/initialize an (empty) integration for integration authorization
 * @param req
 * @param res
 * @returns
 */
export const createIntegration = async (req: Request, res: Response) => {
  let integration;

  try {
    const {
      integrationAuthId,
      app,
      appId,
      isActive,
      sourceEnvironment,
      targetEnvironment,
      targetEnvironmentId,
      targetService,
      targetServiceId,
      owner,
      path,
      region
    } = req.body;

    // TODO: validate [sourceEnvironment] and [targetEnvironment]

    // initialize new integration after saving integration access token
    integration = await new Integration({
      workspace: req.integrationAuth.workspace._id,
      environment: sourceEnvironment,
      isActive,
      app,
      appId,
      targetEnvironment,
      targetEnvironmentId,
      targetService,
      targetServiceId,
      owner,
      path,
      region,
      integration: req.integrationAuth.integration,
      integrationAuth: new Types.ObjectId(integrationAuthId)
    }).save();

    if (integration) {
      // trigger event - push secrets
      EventService.handleEvent({
        event: eventPushSecrets({
          workspaceId: integration.workspace,
          environment: sourceEnvironment
        })
      });
    }

  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to create integration'
    });
  }

  return res.status(200).send({
    integration,
  });
};

/**
 * Change environment or name of integration with id [integrationId]
 * @param req
 * @param res
 * @returns
 */
export const updateIntegration = async (req: Request, res: Response) => {
  let integration;

  // TODO: add integration-specific validation to ensure that each
  // integration has the correct fields populated in [Integration]

  try {
    const {
      environment,
      isActive,
      app,
      appId,
      targetEnvironment,
      owner, // github-specific integration param
    } = req.body;

    integration = await Integration.findOneAndUpdate(
      {
        _id: req.integration._id,
      },
      {
        environment,
        isActive,
        app,
        appId,
        targetEnvironment,
        owner,
      },
      {
        new: true,
      }
    );

    if (integration) {
      // trigger event - push secrets
      EventService.handleEvent({
        event: eventPushSecrets({
          workspaceId: integration.workspace,
          environment
        }),
      });
    }
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: "Failed to update integration",
    });
  }

  return res.status(200).send({
    integration,
  });
};

/**
 * Delete integration with id [integrationId] and deactivate bot if there are
 * no integrations left
 * @param req
 * @param res
 * @returns
 */
export const deleteIntegration = async (req: Request, res: Response) => {
  let integration;
  try {
    const { integrationId } = req.params;

    integration = await Integration.findOneAndDelete({
      _id: integrationId,
    });

    if (!integration) throw new Error("Failed to find integration");
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: "Failed to delete integration",
    });
  }

  return res.status(200).send({
    integration,
  });
};
backend/src/controllers/v1/keyController.ts (new file, 82 lines)
@@ -0,0 +1,82 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import { Key } from '../../models';
import { findMembership } from '../../helpers/membership';

/**
 * Add (encrypted) copy of workspace key for workspace with id [workspaceId] for user with
 * id [key.userId]
 * @param req
 * @param res
 * @returns
 */
export const uploadKey = async (req: Request, res: Response) => {
  try {
    const { workspaceId } = req.params;
    const { key } = req.body;

    // validate membership of receiver
    const receiverMembership = await findMembership({
      user: key.userId,
      workspace: workspaceId
    });

    if (!receiverMembership) {
      throw new Error('Failed receiver membership validation for workspace');
    }

    await new Key({
      encryptedKey: key.encryptedKey,
      nonce: key.nonce,
      sender: req.user._id,
      receiver: key.userId,
      workspace: workspaceId
    }).save();
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to upload key to workspace'
    });
  }

  return res.status(200).send({
    message: 'Successfully uploaded key to workspace'
  });
};

/**
 * Return latest (encrypted) copy of workspace key for user
 * @param req
 * @param res
 * @returns
 */
export const getLatestKey = async (req: Request, res: Response) => {
  let latestKey;
  try {
    const { workspaceId } = req.params;

    // get latest key
    latestKey = await Key.find({
      workspace: workspaceId,
      receiver: req.user._id
    })
      .sort({ createdAt: -1 })
      .limit(1)
      .populate('sender', '+publicKey');
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to get latest key'
    });
  }

  const resObj: any = {};

  if (latestKey.length > 0) {
    resObj['latestKey'] = latestKey[0];
  }

  return res.status(200).send(resObj);
};
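These two handlers implement the workspace-key exchange: a sender stores a copy of the workspace key encrypted under the receiver's public key, and the receiver later fetches the latest copy. A rough client-side sketch follows; the route paths and the Bearer-JWT auth are assumptions for illustration, only the request and response shapes mirror the controller above:

// Minimal client-side sketch of the key exchange (hypothetical routes).
async function shareWorkspaceKey(opts: {
  baseUrl: string;
  jwt: string;
  workspaceId: string;
  receiverUserId: string;
  encryptedKey: string; // workspace key already encrypted under the receiver's public key
  nonce: string;
}) {
  await fetch(`${opts.baseUrl}/api/v1/key/${opts.workspaceId}`, {
    method: 'POST',
    headers: { Authorization: `Bearer ${opts.jwt}`, 'Content-Type': 'application/json' },
    body: JSON.stringify({
      key: { userId: opts.receiverUserId, encryptedKey: opts.encryptedKey, nonce: opts.nonce }
    })
  });
}

async function fetchLatestWorkspaceKey(baseUrl: string, jwt: string, workspaceId: string) {
  const res = await fetch(`${baseUrl}/api/v1/key/${workspaceId}/latest`, {
    headers: { Authorization: `Bearer ${jwt}` }
  });
  const body = await res.json();
  return body.latestKey; // undefined if no key has been shared with this user yet
}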
backend/src/controllers/v1/membershipController.ts (new file, 233 lines)
@@ -0,0 +1,233 @@
import * as Sentry from '@sentry/node';
import { Request, Response } from 'express';
import { Membership, MembershipOrg, User, Key } from '../../models';
import {
  findMembership,
  deleteMembership as deleteMember
} from '../../helpers/membership';
import { sendMail } from '../../helpers/nodemailer';
import { ADMIN, MEMBER, ACCEPTED } from '../../variables';
import { getSiteURL } from '../../config';

/**
 * Check that user is a member of workspace with id [workspaceId]
 * @param req
 * @param res
 * @returns
 */
export const validateMembership = async (req: Request, res: Response) => {
  try {
    const { workspaceId } = req.params;

    // validate membership
    const membership = await findMembership({
      user: req.user._id,
      workspace: workspaceId
    });

    if (!membership) {
      throw new Error('Failed to validate membership');
    }
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed workspace connection check'
    });
  }

  return res.status(200).send({
    message: 'Workspace membership confirmed'
  });
};

/**
 * Delete membership with id [membershipId]
 * @param req
 * @param res
 * @returns
 */
export const deleteMembership = async (req: Request, res: Response) => {
  let deletedMembership;
  try {
    const { membershipId } = req.params;

    // check if membership to delete exists
    const membershipToDelete = await Membership.findOne({
      _id: membershipId
    }).populate('user');

    if (!membershipToDelete) {
      throw new Error(
        "Failed to delete workspace membership that doesn't exist"
      );
    }

    // check if user is a member and admin of the workspace
    // whose membership we wish to delete
    const membership = await Membership.findOne({
      user: req.user._id,
      workspace: membershipToDelete.workspace
    });

    if (!membership) {
      throw new Error('Failed to validate workspace membership');
    }

    if (membership.role !== ADMIN) {
      // user is not an admin member of the workspace
      throw new Error('Insufficient role for deleting workspace membership');
    }

    // delete workspace membership
    deletedMembership = await deleteMember({
      membershipId: membershipToDelete._id.toString()
    });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to delete membership'
    });
  }

  return res.status(200).send({
    deletedMembership
  });
};

/**
 * Change and return workspace membership role
 * @param req
 * @param res
 * @returns
 */
export const changeMembershipRole = async (req: Request, res: Response) => {
  let membershipToChangeRole;
  try {
    const { membershipId } = req.params;
    const { role } = req.body;

    if (![ADMIN, MEMBER].includes(role)) {
      throw new Error('Failed to validate role');
    }

    // validate target membership
    membershipToChangeRole = await findMembership({
      _id: membershipId
    });

    if (!membershipToChangeRole) {
      throw new Error('Failed to find membership to change role');
    }

    // check if user is a member and admin of target membership's
    // workspace
    const membership = await findMembership({
      user: req.user._id,
      workspace: membershipToChangeRole.workspace
    });

    if (!membership) {
      throw new Error('Failed to validate membership');
    }

    if (membership.role !== ADMIN) {
      // user is not an admin member of the workspace
      throw new Error('Insufficient role for changing member roles');
    }

    membershipToChangeRole.role = role;
    await membershipToChangeRole.save();
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to change membership role'
    });
  }

  return res.status(200).send({
    membership: membershipToChangeRole
  });
};

/**
 * Add user with email [email] to workspace with id [workspaceId]
 * @param req
 * @param res
 * @returns
 */
export const inviteUserToWorkspace = async (req: Request, res: Response) => {
  let invitee, latestKey;
  try {
    const { workspaceId } = req.params;
    const { email }: { email: string } = req.body;

    invitee = await User.findOne({
      email
    }).select('+publicKey');

    if (!invitee || !invitee?.publicKey)
      throw new Error('Failed to validate invitee');

    // validate invitee's workspace membership - ensure member isn't
    // already a member of the workspace
    const inviteeMembership = await Membership.findOne({
      user: invitee._id,
      workspace: workspaceId
    });

    if (inviteeMembership)
      throw new Error('Failed to add existing member of workspace');

    // validate invitee's organization membership - ensure that only
    // (accepted) organization members can be added to the workspace
    const membershipOrg = await MembershipOrg.findOne({
      user: invitee._id,
      organization: req.membership.workspace.organization,
      status: ACCEPTED
    });

    if (!membershipOrg)
      throw new Error("Failed to validate invitee's organization membership");

    // get latest key
    latestKey = await Key.findOne({
      workspace: workspaceId,
      receiver: req.user._id
    })
      .sort({ createdAt: -1 })
      .populate('sender', '+publicKey');

    // create new workspace membership
    const m = await new Membership({
      user: invitee._id,
      workspace: workspaceId,
      role: MEMBER
    }).save();

    await sendMail({
      template: 'workspaceInvitation.handlebars',
      subjectLine: 'Infisical workspace invitation',
      recipients: [invitee.email],
      substitutions: {
        inviterFirstName: req.user.firstName,
        inviterEmail: req.user.email,
        workspaceName: req.membership.workspace.name,
        callback_url: (await getSiteURL()) + '/login'
      }
    });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to invite user to workspace'
    });
  }

  return res.status(200).send({
    invitee,
    latestKey
  });
};
backend/src/controllers/v1/membershipOrgController.ts (new file, 284 lines)
@@ -0,0 +1,284 @@
import { Types } from 'mongoose';
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import { MembershipOrg, Organization, User } from '../../models';
import { deleteMembershipOrg as deleteMemberFromOrg } from '../../helpers/membershipOrg';
import { createToken } from '../../helpers/auth';
import { updateSubscriptionOrgQuantity } from '../../helpers/organization';
import { sendMail } from '../../helpers/nodemailer';
import { TokenService } from '../../services';
import { OWNER, ADMIN, MEMBER, ACCEPTED, INVITED, TOKEN_EMAIL_ORG_INVITATION } from '../../variables';
import { getSiteURL, getJwtSignupLifetime, getJwtSignupSecret, getSmtpConfigured } from '../../config';

/**
 * Delete organization membership with id [membershipOrgId] from organization
 * @param req
 * @param res
 * @returns
 */
export const deleteMembershipOrg = async (req: Request, res: Response) => {
  let membershipOrgToDelete;
  try {
    const { membershipOrgId } = req.params;

    // check if organization membership to delete exists
    membershipOrgToDelete = await MembershipOrg.findOne({
      _id: membershipOrgId
    }).populate('user');

    if (!membershipOrgToDelete) {
      throw new Error(
        "Failed to delete organization membership that doesn't exist"
      );
    }

    // check if user is a member and admin of the organization
    // whose membership we wish to delete
    const membershipOrg = await MembershipOrg.findOne({
      user: req.user._id,
      organization: membershipOrgToDelete.organization
    });

    if (!membershipOrg) {
      throw new Error('Failed to validate organization membership');
    }

    if (membershipOrg.role !== OWNER && membershipOrg.role !== ADMIN) {
      // user is not an admin member of the organization
      throw new Error('Insufficient role for deleting organization membership');
    }

    // delete organization membership
    const deletedMembershipOrg = await deleteMemberFromOrg({
      membershipOrgId: membershipOrgToDelete._id.toString()
    });

    await updateSubscriptionOrgQuantity({
      organizationId: membershipOrg.organization.toString()
    });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to delete organization membership'
    });
  }

  return membershipOrgToDelete;
};

/**
 * Change and return organization membership role
 * @param req
 * @param res
 * @returns
 */
export const changeMembershipOrgRole = async (req: Request, res: Response) => {
  // change role for (target) organization membership with id
  // [membershipOrgId]

  let membershipToChangeRole;
  // try {
  // } catch (err) {
  //   Sentry.setUser({ email: req.user.email });
  //   Sentry.captureException(err);
  //   return res.status(400).send({
  //     message: 'Failed to change organization membership role'
  //   });
  // }

  return res.status(200).send({
    membershipOrg: membershipToChangeRole
  });
};

/**
 * Organization invitation step 1: Send email invitation to user with email [email]
 * for organization with id [organizationId] containing magic link
 * @param req
 * @param res
 * @returns
 */
export const inviteUserToOrganization = async (req: Request, res: Response) => {
  let invitee, inviteeMembershipOrg, completeInviteLink;
  try {
    const { organizationId, inviteeEmail } = req.body;
    const host = req.headers.host;
    const siteUrl = `${req.protocol}://${host}`;

    // validate membership
    const membershipOrg = await MembershipOrg.findOne({
      user: req.user._id,
      organization: organizationId
    });

    if (!membershipOrg) {
      throw new Error('Failed to validate organization membership');
    }

    invitee = await User.findOne({
      email: inviteeEmail
    }).select('+publicKey');

    if (invitee) {
      // case: invitee is an existing user

      inviteeMembershipOrg = await MembershipOrg.findOne({
        user: invitee._id,
        organization: organizationId
      });

      if (inviteeMembershipOrg && inviteeMembershipOrg.status === ACCEPTED) {
        throw new Error(
          'Failed to invite an existing member of the organization'
        );
      }

      if (!inviteeMembershipOrg) {
        await new MembershipOrg({
          user: invitee,
          inviteEmail: inviteeEmail,
          organization: organizationId,
          role: MEMBER,
          status: INVITED
        }).save();
      }
    } else {
      // check if invitee has been invited before
      inviteeMembershipOrg = await MembershipOrg.findOne({
        inviteEmail: inviteeEmail,
        organization: organizationId
      });

      if (!inviteeMembershipOrg) {
        // case: invitee has never been invited before

        await new MembershipOrg({
          inviteEmail: inviteeEmail,
          organization: organizationId,
          role: MEMBER,
          status: INVITED
        }).save();
      }
    }

    const organization = await Organization.findOne({ _id: organizationId });

    if (organization) {

      const token = await TokenService.createToken({
        type: TOKEN_EMAIL_ORG_INVITATION,
        email: inviteeEmail,
        organizationId: organization._id
      });

      await sendMail({
        template: 'organizationInvitation.handlebars',
        subjectLine: 'Infisical organization invitation',
        recipients: [inviteeEmail],
        substitutions: {
          inviterFirstName: req.user.firstName,
          inviterEmail: req.user.email,
          organizationName: organization.name,
          email: inviteeEmail,
          organizationId: organization._id.toString(),
          token,
          callback_url: (await getSiteURL()) + '/signupinvite'
        }
      });

      if (!(await getSmtpConfigured())) {
        completeInviteLink = `${siteUrl + '/signupinvite'}?token=${token}&to=${inviteeEmail}&organization_id=${organization._id}`
      }
    }

    await updateSubscriptionOrgQuantity({ organizationId });
  } catch (err) {
    Sentry.setUser(null);
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to send organization invite'
    });
  }

  return res.status(200).send({
    message: `Sent an invite link to ${req.body.inviteeEmail}`,
    completeInviteLink
  });
};

/**
 * Organization invitation step 2: Verify that code [code] was sent to email [email] as part of
 * magic link and issue a temporary signup token for user to complete setting up their account
 * @param req
 * @param res
 * @returns
 */
export const verifyUserToOrganization = async (req: Request, res: Response) => {
  let user, token;
  try {
    const {
      email,
      organizationId,
      code
    } = req.body;

    user = await User.findOne({ email }).select('+publicKey');

    const membershipOrg = await MembershipOrg.findOne({
      inviteEmail: email,
      status: INVITED,
      organization: new Types.ObjectId(organizationId)
    });

    if (!membershipOrg)
      throw new Error('Failed to find any invitations for email');

    await TokenService.validateToken({
      type: TOKEN_EMAIL_ORG_INVITATION,
      email,
      organizationId: membershipOrg.organization,
      token: code
    });

    if (user && user?.publicKey) {
      // case: user has already completed account
      // membership can be approved and redirected to login/dashboard
      membershipOrg.status = ACCEPTED;
      await membershipOrg.save();

      return res.status(200).send({
        message: 'Successfully verified email',
        user,
      });
    }

    if (!user) {
      // initialize user account
      user = await new User({
        email
      }).save();
    }

    // generate temporary signup token
    token = createToken({
      payload: {
        userId: user._id.toString()
      },
      expiresIn: await getJwtSignupLifetime(),
      secret: await getJwtSignupSecret()
    });
  } catch (err) {
    Sentry.setUser(null);
    Sentry.captureException(err);
    return res.status(400).send({
      error: 'Failed email magic link verification for organization invitation'
    });
  }

  return res.status(200).send({
    message: 'Successfully verified email',
    user,
    token
  });
};
backend/src/controllers/v1/organizationController.ts (new file, 427 lines)
@@ -0,0 +1,427 @@
import * as Sentry from '@sentry/node';
import { Request, Response } from 'express';
import Stripe from 'stripe';
import {
  Membership,
  MembershipOrg,
  Organization,
  Workspace,
  IncidentContactOrg
} from '../../models';
import { createOrganization as create } from '../../helpers/organization';
import { addMembershipsOrg } from '../../helpers/membershipOrg';
import { OWNER, ACCEPTED } from '../../variables';
import _ from 'lodash';
import { getStripeSecretKey, getSiteURL } from '../../config';

export const getOrganizations = async (req: Request, res: Response) => {
  let organizations;
  try {
    organizations = (
      await MembershipOrg.find({
        user: req.user._id
      }).populate('organization')
    ).map((m) => m.organization);
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to get organizations'
    });
  }

  return res.status(200).send({
    organizations
  });
};

/**
 * Create new organization named [organizationName]
 * and add user as owner
 * @param req
 * @param res
 * @returns
 */
export const createOrganization = async (req: Request, res: Response) => {
  let organization;
  try {
    const { organizationName } = req.body;

    if (organizationName.length < 1) {
      throw new Error('Organization names must be at least 1-character long');
    }

    // create organization and add user as member
    organization = await create({
      email: req.user.email,
      name: organizationName
    });

    await addMembershipsOrg({
      userIds: [req.user._id.toString()],
      organizationId: organization._id.toString(),
      roles: [OWNER],
      statuses: [ACCEPTED]
    });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to create organization'
    });
  }

  return res.status(200).send({
    organization
  });
};

/**
 * Return organization with id [organizationId]
 * @param req
 * @param res
 * @returns
 */
export const getOrganization = async (req: Request, res: Response) => {
  let organization;
  try {
    organization = req.organization
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to find organization'
    });
  }

  return res.status(200).send({
    organization
  });
};

/**
 * Return organization memberships for organization with id [organizationId]
 * @param req
 * @param res
 * @returns
 */
export const getOrganizationMembers = async (req: Request, res: Response) => {
  let users;
  try {
    const { organizationId } = req.params;

    users = await MembershipOrg.find({
      organization: organizationId
    }).populate('user', '+publicKey');
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to get organization members'
    });
  }

  return res.status(200).send({
    users
  });
};

/**
 * Return workspaces that user is part of in organization with id [organizationId]
 * @param req
 * @param res
 * @returns
 */
export const getOrganizationWorkspaces = async (
  req: Request,
  res: Response
) => {
  let workspaces;
  try {
    const { organizationId } = req.params;

    const workspacesSet = new Set(
      (
        await Workspace.find(
          {
            organization: organizationId
          },
          '_id'
        )
      ).map((w) => w._id.toString())
    );

    workspaces = (
      await Membership.find({
        user: req.user._id
      }).populate('workspace')
    )
      .filter((m) => workspacesSet.has(m.workspace._id.toString()))
      .map((m) => m.workspace);
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to get my workspaces'
    });
  }

  return res.status(200).send({
    workspaces
  });
};

/**
 * Change name of organization with id [organizationId] to [name]
 * @param req
 * @param res
 * @returns
 */
export const changeOrganizationName = async (req: Request, res: Response) => {
  let organization;
  try {
    const { organizationId } = req.params;
    const { name } = req.body;

    organization = await Organization.findOneAndUpdate(
      {
        _id: organizationId
      },
      {
        name
      },
      {
        new: true
      }
    );
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to change organization name'
    });
  }

  return res.status(200).send({
    message: 'Successfully changed organization name',
    organization
  });
};

/**
 * Return incident contacts of organization with id [organizationId]
 * @param req
 * @param res
 * @returns
 */
export const getOrganizationIncidentContacts = async (
  req: Request,
  res: Response
) => {
  let incidentContactsOrg;
  try {
    const { organizationId } = req.params;

    incidentContactsOrg = await IncidentContactOrg.find({
      organization: organizationId
    });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to get organization incident contacts'
    });
  }

  return res.status(200).send({
    incidentContactsOrg
  });
};

/**
 * Add and return new incident contact with email [email] for organization with id [organizationId]
 * @param req
 * @param res
 * @returns
 */
export const addOrganizationIncidentContact = async (
  req: Request,
  res: Response
) => {
  let incidentContactOrg;
  try {
    const { organizationId } = req.params;
    const { email } = req.body;

    incidentContactOrg = await IncidentContactOrg.findOneAndUpdate(
      { email, organization: organizationId },
      { email, organization: organizationId },
      { upsert: true, new: true }
    );
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to add incident contact for organization'
    });
  }

  return res.status(200).send({
    incidentContactOrg
  });
};

/**
 * Delete incident contact with email [email] for organization with id [organizationId]
 * @param req
 * @param res
 * @returns
 */
export const deleteOrganizationIncidentContact = async (
  req: Request,
  res: Response
) => {
  let incidentContactOrg;
  try {
    const { organizationId } = req.params;
    const { email } = req.body;

    incidentContactOrg = await IncidentContactOrg.findOneAndDelete({
      email,
      organization: organizationId
    });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to delete organization incident contact'
    });
  }

  return res.status(200).send({
    message: 'Successfully deleted organization incident contact',
    incidentContactOrg
  });
};

/**
 * Redirect user to (stripe) billing portal or add card page depending on
 * if there is a card on file
 * @param req
 * @param res
 * @returns
 */
export const createOrganizationPortalSession = async (
  req: Request,
  res: Response
) => {
  let session;
  try {
    const stripe = new Stripe(await getStripeSecretKey(), {
      apiVersion: '2022-08-01'
    });

    // check if there is a payment method on file
    const paymentMethods = await stripe.paymentMethods.list({
      customer: req.organization.customerId,
      type: 'card'
    });

    if (paymentMethods.data.length < 1) {
      // case: no payment method on file
      session = await stripe.checkout.sessions.create({
        customer: req.organization.customerId,
        mode: 'setup',
        payment_method_types: ['card'],
        success_url: (await getSiteURL()) + '/dashboard',
        cancel_url: (await getSiteURL()) + '/dashboard'
      });
    } else {
      session = await stripe.billingPortal.sessions.create({
        customer: req.organization.customerId,
        return_url: (await getSiteURL()) + '/dashboard'
      });
    }

    return res.status(200).send({ url: session.url });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to redirect to organization billing portal'
    });
  }
};

/**
 * Return organization subscriptions
 * @param req
 * @param res
 * @returns
 */
export const getOrganizationSubscriptions = async (
  req: Request,
  res: Response
) => {
  let subscriptions;
  try {
    const stripe = new Stripe(await getStripeSecretKey(), {
      apiVersion: '2022-08-01'
    });

    subscriptions = await stripe.subscriptions.list({
      customer: req.organization.customerId
    });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to get organization subscriptions'
    });
  }

  return res.status(200).send({
    subscriptions
  });
};


/**
 * Given a org id, return the projects each member of the org belongs to
 * @param req
 * @param res
 * @returns
 */
export const getOrganizationMembersAndTheirWorkspaces = async (
  req: Request,
  res: Response
) => {
  const { organizationId } = req.params;

  const workspacesSet = (
    await Workspace.find(
      {
        organization: organizationId
      },
      '_id'
    )
  ).map((w) => w._id.toString());

  const memberships = (
    await Membership.find({
      workspace: { $in: workspacesSet }
    }).populate('workspace')
  );
  const userToWorkspaceIds: any = {};

  memberships.forEach(membership => {
    const user = membership.user.toString();
    if (userToWorkspaceIds[user]) {
      userToWorkspaceIds[user].push(membership.workspace);
    } else {
      userToWorkspaceIds[user] = [membership.workspace];
    }
  });

  return res.json(userToWorkspaceIds);
};
backend/src/controllers/v1/passwordController.ts (new file, 388 lines)
@@ -0,0 +1,388 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
// eslint-disable-next-line @typescript-eslint/no-var-requires
const jsrp = require('jsrp');
import * as bigintConversion from 'bigint-conversion';
import { User, BackupPrivateKey, LoginSRPDetail } from '../../models';
import { createToken } from '../../helpers/auth';
import { sendMail } from '../../helpers/nodemailer';
import { TokenService } from '../../services';
import { TOKEN_EMAIL_PASSWORD_RESET } from '../../variables';
import { BadRequestError } from '../../utils/errors';
import { getSiteURL, getJwtSignupLifetime, getJwtSignupSecret } from '../../config';

/**
 * Password reset step 1: Send email verification link to email [email]
 * for account recovery.
 * @param req
 * @param res
 * @returns
 */
export const emailPasswordReset = async (req: Request, res: Response) => {
  let email: string;
  try {
    email = req.body.email;

    const user = await User.findOne({ email }).select('+publicKey');
    if (!user || !user?.publicKey) {
      // case: user has already completed account

      return res.status(403).send({
        error: 'Failed to send email verification for password reset'
      });
    }

    const token = await TokenService.createToken({
      type: TOKEN_EMAIL_PASSWORD_RESET,
      email
    });

    await sendMail({
      template: 'passwordReset.handlebars',
      subjectLine: 'Infisical password reset',
      recipients: [email],
      substitutions: {
        email,
        token,
        callback_url: (await getSiteURL()) + '/password-reset'
      }
    });
  } catch (err) {
    Sentry.setUser(null);
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to send email for account recovery'
    });
  }

  return res.status(200).send({
    message: `Sent an email for account recovery to ${email}`
  });
}

/**
 * Password reset step 2: Verify email verification link sent to email [email]
 * @param req
 * @param res
 * @returns
 */
export const emailPasswordResetVerify = async (req: Request, res: Response) => {
  let user, token;
  try {
    const { email, code } = req.body;

    user = await User.findOne({ email }).select('+publicKey');
    if (!user || !user?.publicKey) {
      // case: user doesn't exist with email [email] or
      // hasn't even completed their account
      return res.status(403).send({
        error: 'Failed email verification for password reset'
      });
    }

    await TokenService.validateToken({
      type: TOKEN_EMAIL_PASSWORD_RESET,
      email,
      token: code
    });

    // generate temporary password-reset token
    token = createToken({
      payload: {
        userId: user._id.toString()
      },
      expiresIn: await getJwtSignupLifetime(),
      secret: await getJwtSignupSecret()
    });
  } catch (err) {
    Sentry.setUser(null);
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed email verification for password reset'
    });
  }

  return res.status(200).send({
    message: 'Successfully verified email',
    user,
    token
  });
}

/**
 * Return [salt] and [serverPublicKey] as part of step 1 of SRP protocol
 * @param req
 * @param res
 * @returns
 */
export const srp1 = async (req: Request, res: Response) => {
  // return salt, serverPublicKey as part of first step of SRP protocol
  try {
    const { clientPublicKey } = req.body;
    const user = await User.findOne({
      email: req.user.email
    }).select('+salt +verifier');

    if (!user) throw new Error('Failed to find user');

    const server = new jsrp.server();
    server.init(
      {
        salt: user.salt,
        verifier: user.verifier
      },
      async () => {
        // generate server-side public key
        const serverPublicKey = server.getPublicKey();

        await LoginSRPDetail.findOneAndReplace({ email: req.user.email }, {
          email: req.user.email,
          clientPublicKey: clientPublicKey,
          serverBInt: bigintConversion.bigintToBuf(server.bInt),
        }, { upsert: true, returnNewDocument: false })

        return res.status(200).send({
          serverPublicKey,
          salt: user.salt
        });
      }
    );
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      error: 'Failed to start change password process'
    });
  }
};

/**
 * Change account SRP authentication information for user
 * Requires verifying [clientProof] as part of step 2 of SRP protocol
 * as initiated in POST /srp1
 * @param req
 * @param res
 * @returns
 */
export const changePassword = async (req: Request, res: Response) => {
  try {
    const {
      clientProof,
      protectedKey,
      protectedKeyIV,
      protectedKeyTag,
      encryptedPrivateKey,
      encryptedPrivateKeyIV,
      encryptedPrivateKeyTag,
      salt,
      verifier
    } = req.body;

    const user = await User.findOne({
      email: req.user.email
    }).select('+salt +verifier');

    if (!user) throw new Error('Failed to find user');

    const loginSRPDetailFromDB = await LoginSRPDetail.findOneAndDelete({ email: req.user.email })

    if (!loginSRPDetailFromDB) {
      return BadRequestError(Error("It looks like some details from the first login are not found. Please try login one again"))
    }

    const server = new jsrp.server();
    server.init(
      {
        salt: user.salt,
        verifier: user.verifier,
        b: loginSRPDetailFromDB.serverBInt
      },
      async () => {
        server.setClientPublicKey(loginSRPDetailFromDB.clientPublicKey);

        // compare server and client shared keys
        if (server.checkClientProof(clientProof)) {
          // change password

          await User.findByIdAndUpdate(
            req.user._id.toString(),
            {
              encryptionVersion: 2,
              protectedKey,
              protectedKeyIV,
              protectedKeyTag,
              encryptedPrivateKey,
              iv: encryptedPrivateKeyIV,
              tag: encryptedPrivateKeyTag,
              salt,
              verifier
            },
            {
              new: true
            }
          );

          return res.status(200).send({
            message: 'Successfully changed password'
          });
        }

        return res.status(400).send({
          error: 'Failed to change password. Try again?'
        });
      }
    );
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      error: 'Failed to change password. Try again?'
    });
  }
};

/**
 * Create or change backup private key for user
 * @param req
 * @param res
 * @returns
 */
export const createBackupPrivateKey = async (req: Request, res: Response) => {
  // create/change backup private key
  // requires verifying [clientProof] as part of second step of SRP protocol
  // as initiated in /srp1

  try {
    const { clientProof, encryptedPrivateKey, iv, tag, salt, verifier } =
      req.body;
    const user = await User.findOne({
      email: req.user.email
    }).select('+salt +verifier');

    if (!user) throw new Error('Failed to find user');

    const loginSRPDetailFromDB = await LoginSRPDetail.findOneAndDelete({ email: req.user.email })

    if (!loginSRPDetailFromDB) {
      return BadRequestError(Error("It looks like some details from the first login are not found. Please try login one again"))
    }

    const server = new jsrp.server();
    server.init(
      {
        salt: user.salt,
        verifier: user.verifier,
        b: loginSRPDetailFromDB.serverBInt
      },
      async () => {
        server.setClientPublicKey(
          loginSRPDetailFromDB.clientPublicKey
        );

        // compare server and client shared keys
        if (server.checkClientProof(clientProof)) {
          // create new or replace backup private key

          const backupPrivateKey = await BackupPrivateKey.findOneAndUpdate(
            { user: req.user._id },
            {
              user: req.user._id,
              encryptedPrivateKey,
              iv,
              tag,
              salt,
              verifier
            },
            { upsert: true, new: true }
          ).select('+user, encryptedPrivateKey');

          // issue tokens
          return res.status(200).send({
            message: 'Successfully updated backup private key',
            backupPrivateKey
          });
        }

        return res.status(400).send({
          message: 'Failed to update backup private key'
        });
      }
    );
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to update backup private key'
    });
  }
};

/**
 * Return backup private key for user
 * @param req
 * @param res
 * @returns
 */
export const getBackupPrivateKey = async (req: Request, res: Response) => {
  let backupPrivateKey;
  try {
    backupPrivateKey = await BackupPrivateKey.findOne({
      user: req.user._id
    }).select('+encryptedPrivateKey +iv +tag');

    if (!backupPrivateKey) throw new Error('Failed to find backup private key');
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to get backup private key'
    });
  }

  return res.status(200).send({
    backupPrivateKey
  });
}

export const resetPassword = async (req: Request, res: Response) => {
  try {
    const {
      protectedKey,
      protectedKeyIV,
      protectedKeyTag,
      encryptedPrivateKey,
      encryptedPrivateKeyIV,
      encryptedPrivateKeyTag,
      salt,
      verifier,
    } = req.body;

    await User.findByIdAndUpdate(
      req.user._id.toString(),
      {
        encryptionVersion: 2,
        protectedKey,
        protectedKeyIV,
        protectedKeyTag,
        encryptedPrivateKey,
        iv: encryptedPrivateKeyIV,
        tag: encryptedPrivateKeyTag,
        salt,
        verifier
      },
      {
        new: true
      }
    );
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to get backup private key'
    });
  }

  return res.status(200).send({
    message: 'Successfully reset password'
  });
}
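The srp1/changePassword pair implements the two round trips of SRP: the client sends its public key, the server replies with the stored salt and a fresh server public key, and the client then submits a proof derived from the password. A rough client-side sketch follows, assuming jsrp's client API (getPublicKey, setSalt, setServerPublicKey, getProof); the HTTP calls between the two steps are elided and the field names mirror the request bodies read above:

// eslint-disable-next-line @typescript-eslint/no-var-requires
const jsrpClientLib = require('jsrp');

// Compute the values the server-side handlers expect. In practice clientPublicKey
// is sent to /srp1 first, and salt/serverPublicKey come back before the proof is built.
function buildSrpProof(
  email: string,
  password: string,
  salt: string,
  serverPublicKey: string
): Promise<{ clientPublicKey: string; clientProof: string }> {
  return new Promise((resolve) => {
    const client = new jsrpClientLib.client();
    client.init({ username: email, password }, () => {
      const clientPublicKey = client.getPublicKey(); // body of the srp1 request
      client.setSalt(salt);                          // salt returned by srp1
      client.setServerPublicKey(serverPublicKey);    // serverPublicKey returned by srp1
      const clientProof = client.getProof();         // clientProof in the change-password payload
      resolve({ clientPublicKey, clientProof });
    });
  });
}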
backend/src/controllers/v1/secretController.ts (new file, 240 lines)
@@ -0,0 +1,240 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import { Types } from 'mongoose';
import { Key, Secret } from '../../models';
import {
  v1PushSecrets as push,
  pullSecrets as pull,
  reformatPullSecrets
} from '../../helpers/secret';
import { pushKeys } from '../../helpers/key';
import { eventPushSecrets } from '../../events';
import { EventService } from '../../services';
import { TelemetryService } from '../../services';

interface PushSecret {
  ciphertextKey: string;
  ivKey: string;
  tagKey: string;
  hashKey: string;
  ciphertextValue: string;
  ivValue: string;
  tagValue: string;
  hashValue: string;
  ciphertextComment: string;
  ivComment: string;
  tagComment: string;
  hashComment: string;
  type: 'shared' | 'personal';
}

/**
 * Upload (encrypted) secrets to workspace with id [workspaceId]
 * for environment [environment]
 * @param req
 * @param res
 * @returns
 */
export const pushSecrets = async (req: Request, res: Response) => {
  // upload (encrypted) secrets to workspace with id [workspaceId]

  try {
    const postHogClient = await TelemetryService.getPostHogClient();
    let { secrets }: { secrets: PushSecret[] } = req.body;
    const { keys, environment, channel } = req.body;
    const { workspaceId } = req.params;

    // validate environment
    const workspaceEnvs = req.membership.workspace.environments;
    if (!workspaceEnvs.find(({ slug }: { slug: string }) => slug === environment)) {
      throw new Error('Failed to validate environment');
    }

    // sanitize secrets
    secrets = secrets.filter(
      (s: PushSecret) => s.ciphertextKey !== '' && s.ciphertextValue !== ''
    );

    await push({
      userId: req.user._id,
      workspaceId,
      environment,
      secrets
    });

    await pushKeys({
      userId: req.user._id,
      workspaceId,
      keys
    });


    if (postHogClient) {
      postHogClient.capture({
        event: 'secrets pushed',
        distinctId: req.user.email,
        properties: {
          numberOfSecrets: secrets.length,
          environment,
          workspaceId,
          channel: channel ? channel : 'cli'
        }
      });
    }

    // trigger event - push secrets
    EventService.handleEvent({
      event: eventPushSecrets({
        workspaceId: new Types.ObjectId(workspaceId),
        environment
      })
    });

  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to upload workspace secrets'
    });
  }

  return res.status(200).send({
    message: 'Successfully uploaded workspace secrets'
  });
};

/**
 * Return (encrypted) secrets for workspace with id [workspaceId]
 * for environment [environment] and (encrypted) workspace key
 * @param req
 * @param res
 * @returns
 */
export const pullSecrets = async (req: Request, res: Response) => {
  let secrets;
  let key;
  try {
    const postHogClient = await TelemetryService.getPostHogClient();
    const environment: string = req.query.environment as string;
    const channel: string = req.query.channel as string;
    const { workspaceId } = req.params;

    // validate environment
    const workspaceEnvs = req.membership.workspace.environments;
    if (!workspaceEnvs.find(({ slug }: { slug: string }) => slug === environment)) {
      throw new Error('Failed to validate environment');
    }

    secrets = await pull({
      userId: req.user._id.toString(),
      workspaceId,
      environment,
      channel: channel ? channel : 'cli',
      ipAddress: req.ip
    });

    key = await Key.findOne({
      workspace: workspaceId,
      receiver: req.user._id
    })
      .sort({ createdAt: -1 })
      .populate('sender', '+publicKey');

    if (channel !== 'cli') {
      secrets = reformatPullSecrets({ secrets });
    }

    if (postHogClient) {
      // capture secrets pushed event in production
      postHogClient.capture({
        distinctId: req.user.email,
        event: 'secrets pulled',
        properties: {
          numberOfSecrets: secrets.length,
          environment,
          workspaceId,
          channel: channel ? channel : 'cli'
        }
      });
    }
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to pull workspace secrets'
    });
  }

  return res.status(200).send({
    secrets,
    key
  });
};

/**
 * Return (encrypted) secrets for workspace with id [workspaceId]
 * for environment [environment] and (encrypted) workspace key
 * via service token
 * @param req
 * @param res
 * @returns
 */
export const pullSecretsServiceToken = async (req: Request, res: Response) => {
  let secrets;
  let key;
  try {
    const postHogClient = await TelemetryService.getPostHogClient();
    const environment: string = req.query.environment as string;
    const channel: string = req.query.channel as string;
    const { workspaceId } = req.params;

    // validate environment
    const workspaceEnvs = req.membership.workspace.environments;
    if (!workspaceEnvs.find(({ slug }: { slug: string }) => slug === environment)) {
      throw new Error('Failed to validate environment');
    }

    secrets = await pull({
      userId: req.serviceToken.user._id.toString(),
      workspaceId,
      environment,
      channel: 'cli',
      ipAddress: req.ip
    });

    key = {
      encryptedKey: req.serviceToken.encryptedKey,
      nonce: req.serviceToken.nonce,
      sender: {
        publicKey: req.serviceToken.publicKey
      },
      receiver: req.serviceToken.user,
      workspace: req.serviceToken.workspace
    };

    if (postHogClient) {
      // capture secrets pulled event in production
      postHogClient.capture({
        distinctId: req.serviceToken.user.email,
        event: 'secrets pulled',
        properties: {
          numberOfSecrets: secrets.length,
          environment,
          workspaceId,
          channel: channel ? channel : 'cli'
        }
      });
    }
  } catch (err) {
    Sentry.setUser({ email: req.serviceToken.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to pull workspace secrets'
    });
  }

  return res.status(200).send({
    secrets: reformatPullSecrets({ secrets }),
    key
  });
};
backend/src/controllers/v1/secretsFolderController.ts (new file, 89 lines)
@@ -0,0 +1,89 @@
import { Request, Response } from 'express';
import { Secret } from '../../models';
import Folder from '../../models/folder';
import { BadRequestError } from '../../utils/errors';
import { ROOT_FOLDER_PATH, getFolderPath, getParentPath, normalizePath, validateFolderName } from '../../utils/folder';
import { ADMIN, MEMBER } from '../../variables';
import { validateMembership } from '../../helpers/membership';

// TODO
// verify workspace id/environment
export const createFolder = async (req: Request, res: Response) => {
  const { workspaceId, environment, folderName, parentFolderId } = req.body
  if (!validateFolderName(folderName)) {
    throw BadRequestError({ message: "Folder name cannot contain spaces. Only underscore and dashes" })
  }

  if (parentFolderId) {
    const parentFolder = await Folder.find({ environment: environment, workspace: workspaceId, id: parentFolderId });
    if (!parentFolder) {
      throw BadRequestError({ message: "The parent folder doesn't exist" })
    }
  }

  let completePath = await getFolderPath(parentFolderId)
  if (completePath == ROOT_FOLDER_PATH) {
    completePath = ""
  }

  const currentFolderPath = completePath + "/" + folderName // construct new path with current folder to be created
  const normalizedCurrentPath = normalizePath(currentFolderPath)
  const normalizedParentPath = getParentPath(normalizedCurrentPath)

  const existingFolder = await Folder.findOne({
    name: folderName,
    workspace: workspaceId,
    environment: environment,
    parent: parentFolderId,
    path: normalizedCurrentPath
  });

  if (existingFolder) {
    return res.json(existingFolder)
  }

  const newFolder = new Folder({
    name: folderName,
    workspace: workspaceId,
    environment: environment,
    parent: parentFolderId,
    path: normalizedCurrentPath,
    parentPath: normalizedParentPath
  });

  await newFolder.save();

  return res.json(newFolder)
}

export const deleteFolder = async (req: Request, res: Response) => {
  const { folderId } = req.params
  const queue: any[] = [folderId];

  const folder = await Folder.findById(folderId);
  if (!folder) {
    throw BadRequestError({ message: "The folder doesn't exist" })
  }

  // check that user is a member of the workspace
  await validateMembership({
    userId: req.user._id.toString(),
    workspaceId: folder.workspace as any,
    acceptedRoles: [ADMIN, MEMBER]
  });

  while (queue.length > 0) {
    const currentFolderId = queue.shift();

    const childFolders = await Folder.find({ parent: currentFolderId });
    for (const childFolder of childFolders) {
      queue.push(childFolder._id);
    }

    await Secret.deleteMany({ folder: currentFolderId });

    await Folder.deleteOne({ _id: currentFolderId });
  }

  res.send()
}
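deleteFolder above walks the folder tree breadth-first, deleting each folder's secrets before removing the folder itself and enqueueing its children. The traversal in isolation looks like this; a self-contained sketch over an in-memory parent-to-children map rather than Mongoose queries:

// Breadth-first deletion order for a folder subtree, mirroring the queue loop in deleteFolder.
function collectSubtree(rootId: string, childrenByParent: Map<string, string[]>): string[] {
  const queue: string[] = [rootId];
  const toDelete: string[] = [];

  while (queue.length > 0) {
    const currentId = queue.shift() as string;
    toDelete.push(currentId);                                // delete secrets + folder for this id
    queue.push(...(childrenByParent.get(currentId) ?? []));  // then enqueue its child folders
  }

  return toDelete;
}

// Example: root -> [a, b], a -> [c]  =>  ['root', 'a', 'b', 'c']
const order = collectSubtree('root', new Map([['root', ['a', 'b']], ['a', ['c']]]));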
75
backend/src/controllers/v1/serviceTokenController.ts
Normal file
75
backend/src/controllers/v1/serviceTokenController.ts
Normal file
@ -0,0 +1,75 @@
import { Request, Response } from 'express';
import { ServiceToken } from '../../models';
import { createToken } from '../../helpers/auth';
import { getJwtServiceSecret } from '../../config';

/**
 * Return service token on request
 * @param req
 * @param res
 * @returns
 */
export const getServiceToken = async (req: Request, res: Response) => {
  return res.status(200).send({
    serviceToken: req.serviceToken
  });
};

/**
 * Create and return a new service token
 * @param req
 * @param res
 * @returns
 */
export const createServiceToken = async (req: Request, res: Response) => {
  let token;
  try {
    const {
      name,
      workspaceId,
      environment,
      expiresIn,
      publicKey,
      encryptedKey,
      nonce
    } = req.body;

    // validate environment
    const workspaceEnvs = req.membership.workspace.environments;
    if (!workspaceEnvs.find(({ slug }: { slug: string }) => slug === environment)) {
      throw new Error('Failed to validate environment');
    }

    // compute access token expiration date
    const expiresAt = new Date();
    expiresAt.setSeconds(expiresAt.getSeconds() + expiresIn);

    const serviceToken = await new ServiceToken({
      name,
      user: req.user._id,
      workspace: workspaceId,
      environment,
      expiresAt,
      publicKey,
      encryptedKey,
      nonce
    }).save();

    token = createToken({
      payload: {
        serviceTokenId: serviceToken._id.toString(),
        workspaceId
      },
      expiresIn: expiresIn,
      secret: await getJwtServiceSecret()
    });
  } catch (err) {
    return res.status(400).send({
      message: 'Failed to create service token'
    });
  }

  return res.status(200).send({
    token
  });
};
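For context, a minimal sketch of how a token produced by createToken might be verified later, assuming createToken wraps jsonwebtoken's sign with the JWT service secret (an assumption about the helper, which is not shown in this diff); verifyServiceToken is a hypothetical name.

// Hedged sketch: assumes the service token is a standard JWT signed with
// the service secret returned by getJwtServiceSecret.
import jwt from 'jsonwebtoken';
import { getJwtServiceSecret } from '../../config';

export const verifyServiceToken = async (token: string) => {
  // throws if the signature is invalid or the token has expired
  const payload = jwt.verify(token, await getJwtServiceSecret()) as {
    serviceTokenId: string;
    workspaceId: string;
  };
  return payload;
};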
112  backend/src/controllers/v1/signupController.ts  Normal file
@@ -0,0 +1,112 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import { User } from '../../models';
import {
  sendEmailVerification,
  checkEmailVerification,
} from '../../helpers/signup';
import { createToken } from '../../helpers/auth';
import { BadRequestError } from '../../utils/errors';
import { getInviteOnlySignup, getJwtSignupLifetime, getJwtSignupSecret, getSmtpConfigured } from '../../config';

/**
 * Signup step 1: Initialize account for user under email [email] and send a verification code
 * to that email
 * @param req
 * @param res
 * @returns
 */
export const beginEmailSignup = async (req: Request, res: Response) => {
  let email: string;
  try {
    email = req.body.email;

    if (await getInviteOnlySignup()) {
      // Only the first user can create an account without being invited; everyone else needs an invite
      const userCount = await User.countDocuments({})
      if (userCount != 0) {
        throw BadRequestError({ message: "New user sign ups are not allowed at this time. You must be invited to sign up." })
      }
    }

    const user = await User.findOne({ email }).select('+publicKey');
    if (user && user?.publicKey) {
      // case: user has already completed account

      return res.status(403).send({
        error: 'Failed to send email verification code for complete account'
      });
    }

    // send verification email
    await sendEmailVerification({ email });
  } catch (err) {
    Sentry.setUser(null);
    Sentry.captureException(err);
    return res.status(400).send({
      error: 'Failed to send email verification code'
    });
  }

  return res.status(200).send({
    message: `Sent an email verification code to ${email}`
  });
};

/**
 * Signup step 2: Verify that code [code] was sent to email [email] and issue
 * a temporary signup token for user to complete setting up their account
 * @param req
 * @param res
 * @returns
 */
export const verifyEmailSignup = async (req: Request, res: Response) => {
  let user, token;
  try {
    const { email, code } = req.body;

    // initialize user account
    user = await User.findOne({ email }).select('+publicKey');
    if (user && user?.publicKey) {
      // case: user has already completed account
      return res.status(403).send({
        error: 'Failed email verification for complete user'
      });
    }

    // verify email
    if (await getSmtpConfigured()) {
      await checkEmailVerification({
        email,
        code
      });
    }

    if (!user) {
      user = await new User({
        email
      }).save();
    }

    // generate temporary signup token
    token = createToken({
      payload: {
        userId: user._id.toString()
      },
      expiresIn: await getJwtSignupLifetime(),
      secret: await getJwtSignupSecret()
    });
  } catch (err) {
    Sentry.setUser(null);
    Sentry.captureException(err);
    return res.status(400).send({
      error: 'Failed email verification'
    });
  }

  return res.status(200).send({
    message: 'Successfully verified email',
    user,
    token
  });
};
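To make the two-step flow concrete, here is a hedged sketch of the client-side calls. The endpoint paths, base URL, and example values are assumptions for illustration only, not taken from this diff.

// Hedged sketch of the two-step email signup flow from a client.
const BASE_URL = 'http://localhost:8080'; // assumed

async function signupExample() {
  // Step 1: request a verification code for the email address
  await fetch(`${BASE_URL}/api/v1/signup/email/signup`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ email: 'jane@example.com' })
  });

  // Step 2: exchange the emailed code for a temporary signup token
  const res = await fetch(`${BASE_URL}/api/v1/signup/email/verify`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ email: 'jane@example.com', code: '123456' })
  });
  const { token } = await res.json(); // used to complete account setup
  return token;
}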
41  backend/src/controllers/v1/stripeController.ts  Normal file
@@ -0,0 +1,41 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import Stripe from 'stripe';
import { getStripeSecretKey, getStripeWebhookSecret } from '../../config';

/**
 * Handle service provisioning/un-provisioning via Stripe
 * @param req
 * @param res
 * @returns
 */
export const handleWebhook = async (req: Request, res: Response) => {
  let event;
  try {
    // check request for valid stripe signature
    const stripe = new Stripe(await getStripeSecretKey(), {
      apiVersion: '2022-08-01'
    });

    const sig = req.headers['stripe-signature'] as string;
    event = stripe.webhooks.constructEvent(
      req.body,
      sig,
      await getStripeWebhookSecret()
    );
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      error: 'Failed to process webhook'
    });
  }

  switch (event.type) {
    case '':
      break;
    default:
  }

  return res.json({ received: true });
};
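One practical note: stripe.webhooks.constructEvent needs the raw, unparsed request body to check the signature, so a webhook route like this is typically mounted with a raw body parser rather than express.json(). A minimal sketch follows; the route path is an assumption for illustration.

// Hedged sketch: the raw Buffer body is required for Stripe signature
// verification, so this route bypasses the JSON body parser.
import express from 'express';
import { handleWebhook } from '../../controllers/v1/stripeController';

const router = express.Router();

router.post(
  '/webhook',
  express.raw({ type: 'application/json' }), // keep body as a Buffer for constructEvent
  handleWebhook
);

export default router;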
70  backend/src/controllers/v1/userActionController.ts  Normal file
@@ -0,0 +1,70 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import { UserAction } from '../../models';

/**
 * Add user action [action]
 * @param req
 * @param res
 * @returns
 */
export const addUserAction = async (req: Request, res: Response) => {
  // add/record new action [action] for user with id [req.user._id]

  let userAction;
  try {
    const { action } = req.body;

    userAction = await UserAction.findOneAndUpdate(
      {
        user: req.user._id,
        action
      },
      { user: req.user._id, action },
      {
        new: true,
        upsert: true
      }
    );
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to record user action'
    });
  }

  return res.status(200).send({
    message: 'Successfully recorded user action',
    userAction
  });
};

/**
 * Return user action [action] for user
 * @param req
 * @param res
 * @returns
 */
export const getUserAction = async (req: Request, res: Response) => {
  // get user action [action] for user with id [req.user._id]
  let userAction;
  try {
    const action: string = req.query.action as string;

    userAction = await UserAction.findOne({
      user: req.user._id,
      action
    });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to get user action'
    });
  }

  return res.status(200).send({
    userAction
  });
};
13  backend/src/controllers/v1/userController.ts  Normal file
@@ -0,0 +1,13 @@
import { Request, Response } from 'express';

/**
 * Return user on request
 * @param req
 * @param res
 * @returns
 */
export const getUser = async (req: Request, res: Response) => {
  return res.status(200).send({
    user: req.user
  });
};
335  backend/src/controllers/v1/workspaceController.ts  Normal file
@@ -0,0 +1,335 @@
import { Request, Response } from "express";
import * as Sentry from "@sentry/node";
import {
  Workspace,
  Membership,
  MembershipOrg,
  Integration,
  IntegrationAuth,
  IUser,
  ServiceToken,
  ServiceTokenData,
} from "../../models";
import {
  createWorkspace as create,
  deleteWorkspace as deleteWork,
} from "../../helpers/workspace";
import { addMemberships } from "../../helpers/membership";
import { ADMIN } from "../../variables";

/**
 * Return public keys of members of workspace with id [workspaceId]
 * @param req
 * @param res
 * @returns
 */
export const getWorkspacePublicKeys = async (req: Request, res: Response) => {
  let publicKeys;
  try {
    const { workspaceId } = req.params;

    publicKeys = (
      await Membership.find({
        workspace: workspaceId,
      }).populate<{ user: IUser }>("user", "publicKey")
    ).map((member) => {
      return {
        publicKey: member.user.publicKey,
        userId: member.user._id,
      };
    });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: "Failed to get workspace member public keys",
    });
  }

  return res.status(200).send({
    publicKeys,
  });
};

/**
 * Return memberships for workspace with id [workspaceId]
 * @param req
 * @param res
 * @returns
 */
export const getWorkspaceMemberships = async (req: Request, res: Response) => {
  let users;
  try {
    const { workspaceId } = req.params;

    users = await Membership.find({
      workspace: workspaceId,
    }).populate("user", "+publicKey");
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: "Failed to get workspace members",
    });
  }

  return res.status(200).send({
    users,
  });
};

/**
 * Return workspaces that user is part of
 * @param req
 * @param res
 * @returns
 */
export const getWorkspaces = async (req: Request, res: Response) => {
  let workspaces;
  try {
    workspaces = (
      await Membership.find({
        user: req.user._id,
      }).populate("workspace")
    ).map((m) => m.workspace);
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: "Failed to get workspaces",
    });
  }

  return res.status(200).send({
    workspaces,
  });
};

/**
 * Return workspace with id [workspaceId]
 * @param req
 * @param res
 * @returns
 */
export const getWorkspace = async (req: Request, res: Response) => {
  let workspace;
  try {
    const { workspaceId } = req.params;

    workspace = await Workspace.findOne({
      _id: workspaceId,
    });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: "Failed to get workspace",
    });
  }

  return res.status(200).send({
    workspace,
  });
};

/**
 * Create new workspace named [workspaceName] under organization with id
 * [organizationId] and add user as admin
 * @param req
 * @param res
 * @returns
 */
export const createWorkspace = async (req: Request, res: Response) => {
  let workspace;
  try {
    const { workspaceName, organizationId } = req.body;

    // validate organization membership
    const membershipOrg = await MembershipOrg.findOne({
      user: req.user._id,
      organization: organizationId,
    });

    if (!membershipOrg) {
      throw new Error("Failed to validate organization membership");
    }

    if (workspaceName.length < 1) {
      throw new Error("Workspace names must be at least 1 character long");
    }

    // create workspace and add user as member
    workspace = await create({
      name: workspaceName,
      organizationId,
    });

    await addMemberships({
      userIds: [req.user._id],
      workspaceId: workspace._id.toString(),
      roles: [ADMIN],
    });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: "Failed to create workspace",
    });
  }

  return res.status(200).send({
    workspace,
  });
};

/**
 * Delete workspace with id [workspaceId]
 * @param req
 * @param res
 * @returns
 */
export const deleteWorkspace = async (req: Request, res: Response) => {
  try {
    const { workspaceId } = req.params;

    // delete workspace
    await deleteWork({
      id: workspaceId,
    });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: "Failed to delete workspace",
    });
  }

  return res.status(200).send({
    message: "Successfully deleted workspace",
  });
};

/**
 * Change name of workspace with id [workspaceId] to [name]
 * @param req
 * @param res
 * @returns
 */
export const changeWorkspaceName = async (req: Request, res: Response) => {
  let workspace;
  try {
    const { workspaceId } = req.params;
    const { name } = req.body;

    workspace = await Workspace.findOneAndUpdate(
      {
        _id: workspaceId,
      },
      {
        name,
      },
      {
        new: true,
      }
    );
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: "Failed to change workspace name",
    });
  }

  return res.status(200).send({
    message: "Successfully changed workspace name",
    workspace,
  });
};

/**
 * Return integrations for workspace with id [workspaceId]
 * @param req
 * @param res
 * @returns
 */
export const getWorkspaceIntegrations = async (req: Request, res: Response) => {
  let integrations;
  try {
    const { workspaceId } = req.params;

    integrations = await Integration.find({
      workspace: workspaceId,
    });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: "Failed to get workspace integrations",
    });
  }

  return res.status(200).send({
    integrations,
  });
};

/**
 * Return (integration) authorizations for workspace with id [workspaceId]
 * @param req
 * @param res
 * @returns
 */
export const getWorkspaceIntegrationAuthorizations = async (
  req: Request,
  res: Response
) => {
  let authorizations;
  try {
    const { workspaceId } = req.params;

    authorizations = await IntegrationAuth.find({
      workspace: workspaceId,
    });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: "Failed to get workspace integration authorizations",
    });
  }

  return res.status(200).send({
    authorizations,
  });
};

/**
 * Return service tokens for workspace [workspaceId] belonging to user
 * @param req
 * @param res
 * @returns
 */
export const getWorkspaceServiceTokens = async (
  req: Request,
  res: Response
) => {
  let serviceTokens;
  try {
    const { workspaceId } = req.params;
    // ?? FIX.
    serviceTokens = await ServiceToken.find({
      user: req.user._id,
      workspace: workspaceId,
    });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: "Failed to get workspace service tokens",
    });
  }

  return res.status(200).send({
    serviceTokens,
  });
};
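For orientation, a hedged sketch of how these workspace handlers might be mounted on a router. The paths below are illustrative assumptions, not taken from this diff, and authentication/membership middleware that populates req.user is assumed to run first.

// Hypothetical route wiring for the workspace controller; paths are assumptions.
import express from 'express';
import {
  getWorkspaces,
  createWorkspace,
  getWorkspace,
  deleteWorkspace,
  changeWorkspaceName,
  getWorkspacePublicKeys,
  getWorkspaceMemberships,
  getWorkspaceIntegrations,
  getWorkspaceServiceTokens
} from '../controllers/v1/workspaceController';

const router = express.Router();

router.get('/', getWorkspaces);
router.post('/', createWorkspace);
router.get('/:workspaceId', getWorkspace);
router.delete('/:workspaceId', deleteWorkspace);
router.post('/:workspaceId/name', changeWorkspaceName);
router.get('/:workspaceId/keys', getWorkspacePublicKeys);
router.get('/:workspaceId/users', getWorkspaceMemberships);
router.get('/:workspaceId/integrations', getWorkspaceIntegrations);
router.get('/:workspaceId/service-tokens', getWorkspaceServiceTokens);

export default router;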
104  backend/src/controllers/v2/apiKeyDataController.ts  Normal file
@@ -0,0 +1,104 @@
import * as Sentry from '@sentry/node';
import { Request, Response } from 'express';
import crypto from 'crypto';
import bcrypt from 'bcrypt';
import {
  APIKeyData
} from '../../models';
import { getSaltRounds } from '../../config';

/**
 * Return API key data for user with id [req.user._id]
 * @param req
 * @param res
 * @returns
 */
export const getAPIKeyData = async (req: Request, res: Response) => {
  let apiKeyData;
  try {
    apiKeyData = await APIKeyData.find({
      user: req.user._id
    });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to get API key data'
    });
  }

  return res.status(200).send({
    apiKeyData
  });
}

/**
 * Create new API key data for user with id [req.user._id]
 * @param req
 * @param res
 */
export const createAPIKeyData = async (req: Request, res: Response) => {
  let apiKey, apiKeyData;
  try {
    const { name, expiresIn } = req.body;

    const secret = crypto.randomBytes(16).toString('hex');
    const secretHash = await bcrypt.hash(secret, await getSaltRounds());

    const expiresAt = new Date();
    expiresAt.setSeconds(expiresAt.getSeconds() + expiresIn);

    apiKeyData = await new APIKeyData({
      name,
      lastUsed: new Date(),
      expiresAt,
      user: req.user._id,
      secretHash
    }).save();

    // return api key data without sensitive data
    apiKeyData = await APIKeyData.findById(apiKeyData._id);

    if (!apiKeyData) throw new Error('Failed to find API key data');

    apiKey = `ak.${apiKeyData._id.toString()}.${secret}`;

  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to create API key data'
    });
  }

  return res.status(200).send({
    apiKey,
    apiKeyData
  });
}

/**
 * Delete API key data with id [apiKeyDataId].
 * @param req
 * @param res
 * @returns
 */
export const deleteAPIKeyData = async (req: Request, res: Response) => {
  let apiKeyData;
  try {
    const { apiKeyDataId } = req.params;

    apiKeyData = await APIKeyData.findByIdAndDelete(apiKeyDataId);

  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to delete API key data'
    });
  }

  return res.status(200).send({
    apiKeyData
  });
}
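Since the key is issued as ak.<apiKeyDataId>.<secret> and only the bcrypt hash of the secret is stored, a later request can be authenticated by re-hashing the presented secret. A minimal sketch follows; the helper name, the select('+secretHash') call, and the error handling are assumptions for illustration, not taken from this diff.

// Hedged sketch: verifying an "ak.<apiKeyDataId>.<secret>" API key against
// the stored bcrypt hash. validateAPIKey is a hypothetical helper name.
import bcrypt from 'bcrypt';
import { APIKeyData } from '../../models';

export const validateAPIKey = async (apiKey: string) => {
  const [prefix, apiKeyDataId, secret] = apiKey.split('.');
  if (prefix !== 'ak' || !apiKeyDataId || !secret) {
    throw new Error('Malformed API key');
  }

  // select the hash explicitly in case the schema hides it by default (assumption)
  const apiKeyData = await APIKeyData.findById(apiKeyDataId).select('+secretHash');
  if (!apiKeyData || apiKeyData.expiresAt < new Date()) {
    throw new Error('API key not found or expired');
  }

  const isValid = await bcrypt.compare(secret, apiKeyData.secretHash);
  if (!isValid) throw new Error('Invalid API key');

  return apiKeyData;
};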
Some files were not shown because too many files have changed in this diff.