mirror of https://github.com/Infisical/infisical.git
synced 2025-03-20 22:32:28 +00:00

Compare commits (1 commit): add-system...sso

Author | SHA1 | Date
---|---|---
| 5ed3965c79 | |
@@ -1,10 +0,0 @@
backend/node_modules
frontend/node_modules
backend/frontend-build
**/node_modules
**/.next
.dockerignore
.git
README.md
.dockerignore
**/Dockerfile
130  .env.example
@@ -1,34 +1,48 @@
# Keys
# Required key for platform encryption/decryption ops
# THIS IS A SAMPLE ENCRYPTION KEY AND SHOULD NEVER BE USED FOR PRODUCTION
ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218
# Required keys for platform encryption/decryption ops
PRIVATE_KEY=replace_with_nacl_sk
PUBLIC_KEY=replace_with_nacl_pk
ENCRYPTION_KEY=replace_with_lengthy_secure_hex

# JWT
# Required secrets to sign JWT tokens
# THIS IS A SAMPLE AUTH_SECRET KEY AND SHOULD NEVER BE USED FOR PRODUCTION
AUTH_SECRET=5lrMXKKWCVocS/uerPsl7V+TX/aaUaI7iDkgl3tSmLE=
JWT_SIGNUP_SECRET=replace_with_lengthy_secure_hex
JWT_REFRESH_SECRET=replace_with_lengthy_secure_hex
JWT_AUTH_SECRET=replace_with_lengthy_secure_hex

# Postgres creds
POSTGRES_PASSWORD=infisical
POSTGRES_USER=infisical
POSTGRES_DB=infisical
# JWT lifetime
# Optional lifetimes for JWT tokens expressed in seconds or a string
# describing a time span (e.g. 60, "2 days", "10h", "7d")
JWT_AUTH_LIFETIME=
JWT_REFRESH_LIFETIME=
JWT_SERVICE_SECRET=
JWT_SIGNUP_LIFETIME=

# Optional lifetimes for OTP expressed in seconds
EMAIL_TOKEN_LIFETIME=

# MongoDB
# Backend will connect to the MongoDB instance at connection string MONGO_URL which can either be a ref
# to the MongoDB container instance or Mongo Cloud
# Required
DB_CONNECTION_URI=postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB}
MONGO_URL=mongodb://root:example@mongo:27017/?authSource=admin

# Redis
REDIS_URL=redis://redis:6379
# Optional credentials for MongoDB container instance and Mongo-Express
MONGO_USERNAME=root
MONGO_PASSWORD=example

# Website URL
# Required

SITE_URL=http://localhost:8080

# Mail/SMTP
SMTP_HOST=
SMTP_PORT=
SMTP_FROM_ADDRESS=
SMTP_FROM_NAME=
SMTP_USERNAME=
# Mail/SMTP
# Required to send emails
# By default, SMTP_HOST is set to smtp.gmail.com
SMTP_HOST=smtp.gmail.com
SMTP_PORT=587
SMTP_NAME=Team
SMTP_USERNAME=team@infisical.com
SMTP_PASSWORD=

# Integration
@@ -36,22 +50,9 @@ SMTP_PASSWORD=
CLIENT_ID_HEROKU=
CLIENT_ID_VERCEL=
CLIENT_ID_NETLIFY=
CLIENT_ID_GITHUB=
CLIENT_ID_GITHUB_APP=
CLIENT_SLUG_GITHUB_APP=
CLIENT_ID_GITLAB=
CLIENT_ID_BITBUCKET=
CLIENT_SECRET_HEROKU=
CLIENT_SECRET_VERCEL=
CLIENT_SECRET_NETLIFY=
CLIENT_SECRET_GITHUB=
CLIENT_SECRET_GITHUB_APP=
CLIENT_SECRET_GITLAB=
CLIENT_SECRET_BITBUCKET=
CLIENT_SLUG_VERCEL=

CLIENT_PRIVATE_KEY_GITHUB_APP=
CLIENT_APP_ID_GITHUB_APP=

# Sentry (optional) for monitoring errors
SENTRY_DSN=
@@ -60,63 +61,10 @@ SENTRY_DSN=
# Ignore - Not applicable for self-hosted version
POSTHOG_HOST=
POSTHOG_PROJECT_API_KEY=

# SSO-specific variables
CLIENT_ID_GOOGLE_LOGIN=
CLIENT_SECRET_GOOGLE_LOGIN=

CLIENT_ID_GITHUB_LOGIN=
CLIENT_SECRET_GITHUB_LOGIN=

CLIENT_ID_GITLAB_LOGIN=
CLIENT_SECRET_GITLAB_LOGIN=

CAPTCHA_SECRET=

NEXT_PUBLIC_CAPTCHA_SITE_KEY=

OTEL_TELEMETRY_COLLECTION_ENABLED=false
OTEL_EXPORT_TYPE=prometheus
OTEL_EXPORT_OTLP_ENDPOINT=
OTEL_OTLP_PUSH_INTERVAL=

OTEL_COLLECTOR_BASIC_AUTH_USERNAME=
OTEL_COLLECTOR_BASIC_AUTH_PASSWORD=

PLAIN_API_KEY=
PLAIN_WISH_LABEL_IDS=

SSL_CLIENT_CERTIFICATE_HEADER_KEY=

ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=true

# App Connections

# aws assume-role connection
INF_APP_CONNECTION_AWS_ACCESS_KEY_ID=
INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY=

# github oauth connection
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET=

#github app connection
INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET=
INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_APP_SLUG=
INF_APP_CONNECTION_GITHUB_APP_ID=

#gcp app connection
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=

# azure app connection
INF_APP_CONNECTION_AZURE_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRET=

# datadog
SHOULD_USE_DATADOG_TRACER=
DATADOG_PROFILING_ENABLED=
DATADOG_ENV=
DATADOG_SERVICE=
DATADOG_HOSTNAME=
STRIPE_SECRET_KEY=
STRIPE_PUBLISHABLE_KEY=
STRIPE_WEBHOOK_SECRET=
STRIPE_PRODUCT_CARD_AUTH=
STRIPE_PRODUCT_PRO=
STRIPE_PRODUCT_STARTER=
NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=
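The example file above mixes required and optional settings. As a quick sanity check before booting the stack, a script along these lines can flag unset required values. This is a minimal sketch: the "required" list is an assumption drawn from the comments in the file above, not something the repository ships.

```python
# Minimal sketch: warn about unset required settings from .env.example.
# The REQUIRED list is an assumption based on the "# Required" comments
# above; it is not part of the repository.
import os

REQUIRED = ["ENCRYPTION_KEY", "AUTH_SECRET", "DB_CONNECTION_URI", "SITE_URL"]

missing = [name for name in REQUIRED if not os.environ.get(name)]
if missing:
    raise SystemExit(f"missing required settings: {', '.join(missing)}")
print("all required settings present")
```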
@@ -1,2 +0,0 @@
DB_CONNECTION_URI=
AUDIT_LOGS_DB_CONNECTION_URI=
@@ -1,4 +0,0 @@
REDIS_URL=redis://localhost:6379
DB_CONNECTION_URI=postgres://infisical:infisical@localhost/infisical?sslmode=disable
AUTH_SECRET=4bnfe4e407b8921c104518903515b218
ENCRYPTION_KEY=4bnfe4e407b8921c104518903515b218
@@ -1,4 +1,3 @@
node_modules
built
healthcheck.js
tailwind.config.js
2  .github/ISSUE_TEMPLATE/feature_request.md (vendored)
@@ -8,7 +8,7 @@ assignees: ''
---

### Feature description
A clear and concise description of what the feature should be.
A clear and concise description of what the the feature should be.

### Why would it be useful?
Why would this feature be useful for Infisical users?
BIN  .github/images/Deploy to AWS.png (vendored): binary file not shown (before: 2.3 KiB)
BIN  .github/images/deploy-aws-button.png (vendored): binary file not shown (before: 19 KiB)
BIN  .github/images/deploy-to-aws.png (vendored): binary file not shown (before: 2.8 KiB)
BIN  .github/images/do-k8-install-btn.png (vendored): binary file not shown (before: 28 KiB)
25  .github/pull_request_template.md (vendored)
@@ -1,25 +0,0 @@
# Description 📣

<!-- Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change. Here's how we expect a pull request to be : https://infisical.com/docs/contributing/getting-started/pull-requests -->

## Type ✨

- [ ] Bug fix
- [ ] New feature
- [ ] Improvement
- [ ] Breaking change
- [ ] Documentation

# Tests 🛠️

<!-- Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration. You may want to add screenshots when relevant and possible -->

```sh
# Here's some code block to paste some code snippets
```

---

- [ ] I have read the [contributing guide](https://infisical.com/docs/contributing/getting-started/overview), agreed and acknowledged the [code of conduct](https://infisical.com/docs/contributing/getting-started/code-of-conduct). 📝

<!-- If you have any questions regarding contribution, here's the FAQ : https://infisical.com/docs/contributing/getting-started/faq -->
190  .github/resources/changelog-generator.py (vendored)
@@ -1,190 +0,0 @@
# inspired by https://www.photoroom.com/inside-photoroom/how-we-automated-our-changelog-thanks-to-chatgpt
import os
import requests
import re
from openai import OpenAI
import subprocess
from datetime import datetime

import uuid

# Constants
REPO_OWNER = "infisical"
REPO_NAME = "infisical"
TOKEN = os.environ["GITHUB_TOKEN"]
SLACK_WEBHOOK_URL = os.environ["SLACK_WEBHOOK_URL"]
OPENAI_API_KEY = os.environ["OPENAI_API_KEY"]
SLACK_MSG_COLOR = "#36a64f"

headers = {
    "Authorization": f"Bearer {TOKEN}",
    "Accept": "application/vnd.github+json",
    "X-GitHub-Api-Version": "2022-11-28",
}


def set_multiline_output(name, value):
    with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
        delimiter = uuid.uuid1()
        print(f'{name}<<{delimiter}', file=fh)
        print(value, file=fh)
        print(delimiter, file=fh)

def post_changelog_to_slack(changelog, tag):
    slack_payload = {
        "text": "Hey team, it's changelog time! :wave:",
        "attachments": [
            {
                "color": SLACK_MSG_COLOR,
                "title": f"🗓️Infisical Changelog - {tag}",
                "text": changelog,
            }
        ],
    }

    response = requests.post(SLACK_WEBHOOK_URL, json=slack_payload)

    if response.status_code != 200:
        raise Exception("Failed to post changelog to Slack.")

def find_previous_release_tag(release_tag:str):
    previous_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0", f"{release_tag}^"]).decode("utf-8").strip()
    while not(previous_tag.startswith("infisical/")):
        previous_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0", f"{previous_tag}^"]).decode("utf-8").strip()
    return previous_tag

def get_tag_creation_date(tag_name):
    url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/git/refs/tags/{tag_name}"
    response = requests.get(url, headers=headers)
    response.raise_for_status()
    commit_sha = response.json()['object']['sha']

    commit_url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/commits/{commit_sha}"
    commit_response = requests.get(commit_url, headers=headers)
    commit_response.raise_for_status()
    creation_date = commit_response.json()['commit']['author']['date']

    return datetime.strptime(creation_date, '%Y-%m-%dT%H:%M:%SZ')


def fetch_prs_between_tags(previous_tag_date:datetime, release_tag_date:datetime):
    # Use GitHub API to fetch PRs merged between the commits
    url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/pulls?state=closed&merged=true"
    response = requests.get(url, headers=headers)

    if response.status_code != 200:
        raise Exception("Error fetching PRs from GitHub API!")

    prs = []
    for pr in response.json():
        # the idea is as tags happen recently we get last 100 closed PRs and then filter by tag creation date
        if pr["merged_at"] and datetime.strptime(pr["merged_at"],'%Y-%m-%dT%H:%M:%SZ') < release_tag_date and datetime.strptime(pr["merged_at"],'%Y-%m-%dT%H:%M:%SZ') > previous_tag_date:
            prs.append(pr)

    return prs


def extract_commit_details_from_prs(prs):
    commit_details = []
    for pr in prs:
        commit_message = pr["title"]
        commit_url = pr["html_url"]
        pr_number = pr["number"]
        branch_name = pr["head"]["ref"]
        issue_numbers = re.findall(r"(www-\d+|web-\d+)", branch_name)

        # If no issue numbers are found, add the PR details without issue numbers and URLs
        if not issue_numbers:
            commit_details.append(
                {
                    "message": commit_message,
                    "pr_number": pr_number,
                    "pr_url": commit_url,
                    "issue_number": None,
                    "issue_url": None,
                }
            )
            continue

        for issue in issue_numbers:
            commit_details.append(
                {
                    "message": commit_message,
                    "pr_number": pr_number,
                    "pr_url": commit_url,
                    "issue_number": issue,
                }
            )

    return commit_details

# Function to generate changelog using OpenAI
def generate_changelog_with_openai(commit_details):
    commit_messages = []
    for details in commit_details:
        base_message = f"{details['pr_url']} - {details['message']}"
        # Add the issue URL if available
        # if details["issue_url"]:
        #     base_message += f" (Linear Issue: {details['issue_url']})"
        commit_messages.append(base_message)

    commit_list = "\n".join(commit_messages)
    prompt = """
Generate a changelog for Infisical, opensource secretops
The changelog should:
1. Be Informative: Using the provided list of GitHub commits, break them down into categories such as Features, Fixes & Improvements, and Technical Updates. Summarize each commit concisely, ensuring the key points are highlighted.
2. Have a Professional yet Friendly tone: The tone should be balanced, not too corporate or too informal.
3. Celebratory Introduction and Conclusion: Start the changelog with a celebratory note acknowledging the team's hard work and progress. End with a shoutout to the team and wishes for a pleasant weekend.
4. Formatting: you cannot use Markdown formatting, and you can only use emojis for the introductory paragraph or the conclusion paragraph, nowhere else.
5. Links: the syntax to create links is the following: `<http://www.example.com|This message is a link>`.
6. Linear Links: note that the Linear link is optional, include it only if provided.
7. Do not wrap your answer in a codeblock. Just output the text, nothing else
Here's a good example to follow, please try to match the formatting as closely as possible, only changing the content of the changelog and have some liberty with the introduction. Notice the importance of the formatting of a changelog item:
- <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>))
And here's an example of the full changelog:

*Features*
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
*Fixes & Improvements*
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
*Technical Updates*
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)

Stay tuned for more exciting updates coming soon!
And here are the commits:
{}
""".format(
        commit_list
    )

    client = OpenAI(api_key=OPENAI_API_KEY)
    messages = [{"role": "user", "content": prompt}]
    response = client.chat.completions.create(model="gpt-3.5-turbo", messages=messages)

    if "error" in response.choices[0].message:
        raise Exception("Error generating changelog with OpenAI!")

    return response.choices[0].message.content.strip()


if __name__ == "__main__":
    try:
        # Get the latest and previous release tags
        latest_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0"]).decode("utf-8").strip()
        previous_tag = find_previous_release_tag(latest_tag)

        latest_tag_date = get_tag_creation_date(latest_tag)
        previous_tag_date = get_tag_creation_date(previous_tag)

        prs = fetch_prs_between_tags(previous_tag_date,latest_tag_date)
        pr_details = extract_commit_details_from_prs(prs)

        # Generate changelog
        changelog = generate_changelog_with_openai(pr_details)

        post_changelog_to_slack(changelog,latest_tag)
        # Print or post changelog to Slack
        # set_multiline_output("changelog", changelog)

    except Exception as e:
        print(str(e))
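The script above is driven entirely by environment variables and the local git history. A hypothetical local dry run might look like the sketch below; the three variable names are the ones the script reads at import time, but the placeholder values are made up and would fail at the actual API calls.

```python
# Hypothetical local invocation of the changelog generator.
# Placeholder credentials shown here are not real.
import os
import subprocess

env = dict(os.environ,
           GITHUB_TOKEN="ghp_placeholder",
           SLACK_WEBHOOK_URL="https://hooks.slack.com/services/placeholder",
           OPENAI_API_KEY="sk-placeholder")

# Must run inside a clone that has the infisical/* release tags fetched,
# since find_previous_release_tag() walks `git describe` output.
subprocess.run(["python", ".github/resources/changelog-generator.py"],
               env=env, check=True)
```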
3  .github/resources/docker-compose.be-test.yml (vendored)
@@ -6,14 +6,13 @@ services:
    restart: unless-stopped
    depends_on:
      - mongo
    image: infisical/infisical:test
    image: infisical/backend:test
    command: npm run start
    environment:
      - NODE_ENV=production
      - MONGO_URL=mongodb://test:example@mongo:27017/?authSource=admin
      - MONGO_USERNAME=test
      - MONGO_PASSWORD=example
      - ENCRYPTION_KEY=a984ecdf82ec779e55dbcc21303a900f
    networks:
      - infisical-test
26  .github/resources/rename_migration_files.py (vendored)
@@ -1,26 +0,0 @@
import os
from datetime import datetime, timedelta

def rename_migrations():
    migration_folder = "./backend/src/db/migrations"
    with open("added_files.txt", "r") as file:
        changed_files = file.readlines()

    # Find the latest file among the changed files
    latest_timestamp = datetime.now() # utc time
    for file_path in changed_files:
        file_path = file_path.strip()
        # each new file bump by 1s
        latest_timestamp = latest_timestamp + timedelta(seconds=1)

        new_filename = os.path.join(migration_folder, latest_timestamp.strftime("%Y%m%d%H%M%S") + f"_{file_path.split('_')[1]}")
        old_filename = os.path.join(migration_folder, file_path)
        os.rename(old_filename, new_filename)
        print(f"Renamed {old_filename} to {new_filename}")

    if len(changed_files) == 0:
        print("No new files added to migration folder")

if __name__ == "__main__":
    rename_migrations()
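The renaming scheme is easiest to see with concrete values. A small illustration with made-up file names follows; note that `file_path.split('_')[1]` keeps only the segment after the first underscore up to the next one, so a migration whose descriptive suffix itself contains an underscore would be truncated.

```python
# Illustration of the rename scheme above; file names are made up.
from datetime import datetime, timedelta

changed_files = ["20240101000000_add-users-table.ts", "20240101000000_add-roles.ts"]
latest_timestamp = datetime.now()
for file_path in changed_files:
    latest_timestamp += timedelta(seconds=1)  # each new file bumped by 1s
    print(latest_timestamp.strftime("%Y%m%d%H%M%S") + "_" + file_path.split("_")[1])
# e.g. 20250102030405_add-users-table.ts, 20250102030406_add-roles.ts
```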
57  .github/values.yaml (vendored)
@@ -1,57 +0,0 @@
## @section Common parameters
##

## @param nameOverride Override release name
##
nameOverride: ""
## @param fullnameOverride Override release fullname
##
fullnameOverride: ""

## @section Infisical backend parameters
## Documentation : https://infisical.com/docs/self-hosting/deployments/kubernetes
##

infisical:
  autoDatabaseSchemaMigration: false

  enabled: false

  name: infisical
  replicaCount: 3
  image:
    repository: infisical/staging_infisical
    tag: "latest"
    pullPolicy: Always

  deploymentAnnotations:
    secrets.infisical.com/auto-reload: "true"

  kubeSecretRef: "managed-secret"

ingress:
  ## @param ingress.enabled Enable ingress
  ##
  enabled: true
  ## @param ingress.ingressClassName Ingress class name
  ##
  ingressClassName: nginx
  ## @param ingress.nginx.enabled Ingress controller
  ##
  # nginx:
  #   enabled: true
  ## @param ingress.annotations Ingress annotations
  ##
  annotations:
    cert-manager.io/cluster-issuer: "letsencrypt-prod"
  hostName: "gamma.infisical.com"
  tls:
    - secretName: letsencrypt-prod
      hosts:
        - gamma.infisical.com

postgresql:
  enabled: false

redis:
  enabled: false
41  .github/workflows/be-test-report.yml (vendored)
@@ -1,41 +0,0 @@
name: "Backend Test Report"

on:
  workflow_run:
    workflows: ["Check Backend Pull Request"]
    types:
      - completed

jobs:
  be-report:
    name: Backend test report
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - name: 📁 Download test results
        id: download-artifact
        uses: dawidd6/action-download-artifact@v2
        with:
          name: be-test-results
          path: backend
          workflow: check-be-pull-request.yml
          workflow_conclusion: success
      - name: 📋 Publish test results
        uses: dorny/test-reporter@v1
        with:
          name: Test Results
          path: reports/jest-*.xml
          reporter: jest-junit
          working-directory: backend
      - name: 📋 Publish coverage
        uses: ArtiomTr/jest-coverage-report-action@v2
        id: coverage
        with:
          output: comment, report-markdown
          coverage-file: coverage/report.json
          github-token: ${{ secrets.GITHUB_TOKEN }}
          working-directory: backend
      - uses: marocchino/sticky-pull-request-comment@v2
        with:
          message: ${{ steps.coverage.outputs.report }}
189  .github/workflows/build-binaries.yml (vendored)
@@ -1,189 +0,0 @@
name: Build Binaries and Deploy

on:
  workflow_dispatch:
    inputs:
      version:
        description: "Version number"
        required: true
        type: string
defaults:
  run:
    working-directory: ./backend

jobs:
  build-and-deploy:
    strategy:
      matrix:
        arch: [x64, arm64]
        os: [linux, win]
        include:
          - os: linux
            target: node20-linux
          - os: win
            target: node20-win
    runs-on: ${{ (matrix.arch == 'arm64' && matrix.os == 'linux') && 'ubuntu24-arm64' || 'ubuntu-latest' }}

    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Set up Node.js
        uses: actions/setup-node@v3
        with:
          node-version: 20

      - name: Install pkg
        run: npm install -g @yao-pkg/pkg

      - name: Install dependencies (backend)
        run: npm install

      - name: Install dependencies (frontend)
        run: npm install --prefix ../frontend

      - name: Prerequisites for pkg
        run: npm run binary:build

      - name: Package into node binary
        run: |
          if [ "${{ matrix.os }}" != "linux" ]; then
            pkg --no-bytecode --public-packages "*" --public --compress GZip --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }} .
          else
            pkg --no-bytecode --public-packages "*" --public --compress GZip --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core .
          fi

      # Set up .deb package structure (Debian/Ubuntu only)
      - name: Set up .deb package structure
        if: matrix.os == 'linux'
        run: |
          mkdir -p infisical-core/DEBIAN
          mkdir -p infisical-core/usr/local/bin
          cp ./binary/infisical-core infisical-core/usr/local/bin/
          chmod +x infisical-core/usr/local/bin/infisical-core

      - name: Create control file
        if: matrix.os == 'linux'
        run: |
          cat <<EOF > infisical-core/DEBIAN/control
          Package: infisical-core
          Version: ${{ github.event.inputs.version }}
          Section: base
          Priority: optional
          Architecture: ${{ matrix.arch == 'x64' && 'amd64' || matrix.arch }}
          Maintainer: Infisical <daniel@infisical.com>
          Description: Infisical Core standalone executable (app.infisical.com)
          EOF

      # Build .deb file (Debian/Ubuntu only)
      - name: Build .deb package
        if: matrix.os == 'linux'
        run: |
          dpkg-deb --build infisical-core
          mv infisical-core.deb ./binary/infisical-core-${{matrix.arch}}.deb

      ### RPM

      # Set up .rpm package structure
      - name: Set up .rpm package structure
        if: matrix.os == 'linux'
        run: |
          mkdir -p infisical-core-rpm/usr/local/bin
          cp ./binary/infisical-core infisical-core-rpm/usr/local/bin/
          chmod +x infisical-core-rpm/usr/local/bin/infisical-core

      # Install RPM build tools
      - name: Install RPM build tools
        if: matrix.os == 'linux'
        run: sudo apt-get update && sudo apt-get install -y rpm

      # Create .spec file for RPM
      - name: Create .spec file for RPM
        if: matrix.os == 'linux'
        run: |
          cat <<EOF > infisical-core.spec

          %global _enable_debug_package 0
          %global debug_package %{nil}
          %global __os_install_post /usr/lib/rpm/brp-compress %{nil}

          Name: infisical-core
          Version: ${{ github.event.inputs.version }}
          Release: 1%{?dist}
          Summary: Infisical Core standalone executable
          License: Proprietary
          URL: https://app.infisical.com

          %description
          Infisical Core standalone executable (app.infisical.com)

          %install
          mkdir -p %{buildroot}/usr/local/bin
          cp %{_sourcedir}/infisical-core %{buildroot}/usr/local/bin/

          %files
          /usr/local/bin/infisical-core

          %pre

          %post

          %preun

          %postun
          EOF

      # Build .rpm file
      - name: Build .rpm package
        if: matrix.os == 'linux'
        run: |
          # Create necessary directories
          mkdir -p rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS}

          # Copy the binary directly to SOURCES
          cp ./binary/infisical-core rpmbuild/SOURCES/

          # Run rpmbuild with verbose output
          rpmbuild -vv -bb \
            --define "_topdir $(pwd)/rpmbuild" \
            --define "_sourcedir $(pwd)/rpmbuild/SOURCES" \
            --define "_rpmdir $(pwd)/rpmbuild/RPMS" \
            --target ${{ matrix.arch == 'x64' && 'x86_64' || 'aarch64' }} \
            infisical-core.spec

          # Try to find the RPM file
          find rpmbuild -name "*.rpm"

          # Move the RPM file if found
          if [ -n "$(find rpmbuild -name '*.rpm')" ]; then
            mv $(find rpmbuild -name '*.rpm') ./binary/infisical-core-${{matrix.arch}}.rpm
          else
            echo "RPM file not found!"
            exit 1
          fi

      - uses: actions/setup-python@v4
        with:
          python-version: "3.x" # Specify the Python version you need
      - name: Install Python dependencies
        run: |
          python -m pip install --upgrade pip
          pip install --upgrade cloudsmith-cli

      # Publish .deb file to Cloudsmith (Debian/Ubuntu only)
      - name: Publish to Cloudsmith (Debian/Ubuntu)
        if: matrix.os == 'linux'
        working-directory: ./backend
        run: cloudsmith push deb --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.deb

      # Publish .rpm file to Cloudsmith (Red Hat-based systems only)
      - name: Publish .rpm to Cloudsmith
        if: matrix.os == 'linux'
        working-directory: ./backend
        run: cloudsmith push rpm --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.rpm

      # Publish .exe file to Cloudsmith (Windows only)
      - name: Publish to Cloudsmith (Windows)
        if: matrix.os == 'win'
        working-directory: ./backend
        run: cloudsmith push raw infisical/infisical-core ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }}.exe --republish --no-wait-for-sync --version ${{ github.event.inputs.version }} --api-key ${{ secrets.CLOUDSMITH_API_KEY }}
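Read together, the matrix and the publish steps imply the following artifact layout per matrix leg. This is a summary sketch derived from the run steps above, not a file in the repository, and the `.exe` suffix on the Windows artifact assumes pkg's usual behavior of appending it for win targets.

```python
# Sketch: map each matrix leg to the artifacts the workflow publishes.
for os_ in ("linux", "win"):
    for arch in ("x64", "arm64"):
        if os_ == "linux":
            # pkg writes ./binary/infisical-core, then .deb and .rpm are built
            print(f"{os_}/{arch}: infisical-core-{arch}.deb, infisical-core-{arch}.rpm")
        else:
            # pushed to Cloudsmith as a raw package
            print(f"{os_}/{arch}: infisical-core-{os_}-{arch}.exe")
```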
123  .github/workflows/build-docker-image-to-prod.yml (vendored)
@@ -1,123 +0,0 @@
name: Release production images (frontend, backend)
on:
  push:
    tags:
      - "infisical/v*.*.*"
      - "!infisical/v*.*.*-postgres"

jobs:
  backend-image:
    name: Build backend image
    runs-on: ubuntu-latest
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
      - name: 📦 Install dependencies to test all dependencies
        run: npm ci --only-production
        working-directory: backend
      # - name: 🧪 Run tests
      #   run: npm run test:ci
      #   working-directory: backend
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 📦 Build backend and export to Docker
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          load: true
          context: backend
          tags: infisical/infisical:test
          platforms: linux/amd64,linux/arm64
      - name: ⏻ Spawn backend container and dependencies
        run: |
          docker compose -f .github/resources/docker-compose.be-test.yml up --wait --quiet-pull
      - name: 🧪 Test backend image
        run: |
          ./.github/resources/healthcheck.sh infisical-backend-test
      - name: ⏻ Shut down backend container and dependencies
        run: |
          docker compose -f .github/resources/docker-compose.be-test.yml down
      - name: 🏗️ Build backend and push
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          push: true
          context: backend
          tags: |
            infisical/backend:${{ steps.commit.outputs.short }}
            infisical/backend:latest
            infisical/backend:${{ steps.extract_version.outputs.version }}
          platforms: linux/amd64,linux/arm64

  frontend-image:
    name: Build frontend image
    runs-on: ubuntu-latest
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 📦 Build frontend and export to Docker
        uses: depot/build-push-action@v1
        with:
          load: true
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          project: 64mmf0n610
          context: frontend
          tags: infisical/frontend:test
          platforms: linux/amd64,linux/arm64
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
            NEXT_INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
      - name: ⏻ Spawn frontend container
        run: |
          docker run -d --rm --name infisical-frontend-test infisical/frontend:test
      - name: 🧪 Test frontend image
        run: |
          ./.github/resources/healthcheck.sh infisical-frontend-test
      - name: ⏻ Shut down frontend container
        run: |
          docker stop infisical-frontend-test
      - name: 🏗️ Build frontend and push
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          push: true
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          context: frontend
          tags: |
            infisical/frontend:${{ steps.commit.outputs.short }}
            infisical/frontend:latest
            infisical/frontend:${{ steps.extract_version.outputs.version }}
          platforms: linux/amd64,linux/arm64
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
            NEXT_INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
38  .github/workflows/build-patroni-docker-img.yml (vendored)
@@ -1,38 +0,0 @@
name: Build patroni
on: [workflow_dispatch]

jobs:
  patroni-image:
    name: Build patroni
    runs-on: ubuntu-latest
    steps:
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
        with:
          repository: 'zalando/patroni'
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 🏗️ Build backend and push to docker hub
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          push: true
          context: .
          file: Dockerfile
          tags: |
            infisical/patroni:${{ steps.commit.outputs.short }}
            infisical/patroni:latest
          platforms: linux/amd64,linux/arm64
103  .github/workflows/check-api-for-breaking-changes.yml (vendored)
@@ -1,103 +0,0 @@
name: "Check API For Breaking Changes"

on:
  pull_request:
    types: [opened, synchronize]
    paths:
      - "backend/src/server/routes/**"
      - "backend/src/ee/routes/**"

jobs:
  check-be-api-changes:
    name: Check API Changes
    runs-on: ubuntu-latest
    timeout-minutes: 15
    steps:
      - name: Checkout source
        uses: actions/checkout@v3
      # - name: Setup Node 20
      #   uses: actions/setup-node@v3
      #   with:
      #     node-version: "20"
      # uncomment this when testing locally using nektos/act
      - uses: KengoTODA/actions-setup-docker-compose@v1
        if: ${{ env.ACT }}
        name: Install `docker compose` for local simulations
        with:
          version: "2.14.2"
      - name: 📦Build the latest image
        run: docker build --tag infisical-api .
        working-directory: backend
      - name: Start postgres and redis
        run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
      - name: Start the server
        run: |
          echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
          echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
          echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env

          echo "Examining built image:"
          docker image inspect infisical-api | grep -A 5 "Entrypoint"

          docker run --name infisical-api -d -p 4000:4000 \
            -e DB_CONNECTION_URI=$DB_CONNECTION_URI \
            -e REDIS_URL=$REDIS_URL \
            -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET \
            -e ENCRYPTION_KEY=$ENCRYPTION_KEY \
            --env-file .env \
            infisical-api

          echo "Container status right after creation:"
          docker ps -a | grep infisical-api
        env:
          REDIS_URL: redis://172.17.0.1:6379
          DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
          JWT_AUTH_SECRET: something-random
          ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
      - uses: actions/setup-go@v5
        with:
          go-version: "1.21.5"
      - name: Wait for container to be stable and check logs
        run: |
          SECONDS=0
          HEALTHY=0
          while [ $SECONDS -lt 60 ]; do
            # Check if container is running
            if docker ps | grep infisical-api; then
              # Try to access the API endpoint
              if curl -s -f http://localhost:4000/api/docs/json > /dev/null 2>&1; then
                echo "API endpoint is responding. Container seems healthy."
                HEALTHY=1
                break
              fi
            else
              echo "Container is not running!"
              docker ps -a | grep infisical-api
              break
            fi

            echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"
            sleep 5
            SECONDS=$((SECONDS+5))
          done

          if [ $HEALTHY -ne 1 ]; then
            echo "Container did not become healthy in time"
            echo "Container status:"
            docker ps -a | grep infisical-api
            echo "Container logs (if any):"
            docker logs infisical-api || echo "No logs available"
            echo "Container inspection:"
            docker inspect infisical-api | grep -A 5 "State"
            exit 1
          fi
      - name: Install openapi-diff
        run: go install github.com/tufin/oasdiff@latest
      - name: Running OpenAPI Spec diff action
        run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
      - name: cleanup
        if: always()
        run: |
          docker compose -f "docker-compose.dev.yml" down
          docker stop infisical-api || true
          docker rm infisical-api || true
41  .github/workflows/check-be-pull-request.yml (vendored, Normal file)
@@ -0,0 +1,41 @@
name: Check Backend Pull Request

on:
  pull_request:
    types: [ opened, synchronize ]
    paths:
      - 'backend/**'
      - '!backend/README.md'
      - '!backend/.*'
      - 'backend/.eslintrc.js'


jobs:

  check-be-pr:
    name: Check
    runs-on: ubuntu-latest

    steps:
      -
        name: ☁️ Checkout source
        uses: actions/checkout@v3
      -
        name: 🔧 Setup Node 16
        uses: actions/setup-node@v3
        with:
          node-version: '16'
          cache: 'npm'
          cache-dependency-path: backend/package-lock.json
      -
        name: 📦 Install dependencies
        run: npm ci --only-production --ignore-scripts
        working-directory: backend
      # -
      #   name: 🧪 Run tests
      #   run: npm run test:ci
      #   working-directory: backend
      -
        name: 🏗️ Run build
        run: npm run build
        working-directory: backend
35  .github/workflows/check-be-ts-and-lint.yml (vendored)
@@ -1,35 +0,0 @@
name: "Check Backend PR types and lint"

on:
  pull_request:
    types: [opened, synchronize]
    paths:
      - "backend/**"
      - "!backend/README.md"
      - "!backend/.*"
      - "backend/.eslintrc.js"

jobs:
  check-be-pr:
    name: Check TS and Lint
    runs-on: ubuntu-latest
    timeout-minutes: 15

    steps:
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
      - name: 🔧 Setup Node 20
        uses: actions/setup-node@v3
        with:
          node-version: "20"
          cache: "npm"
          cache-dependency-path: backend/package-lock.json
      - name: Install dependencies
        run: npm install
        working-directory: backend
      - name: Run type check
        run: npm run type:check
        working-directory: backend
      - name: Run lint check
        run: npm run lint
        working-directory: backend
41  .github/workflows/check-fe-pull-request.yml (vendored, Normal file)
@@ -0,0 +1,41 @@
name: Check Frontend Pull Request

on:
  pull_request:
    types: [ opened, synchronize ]
    paths:
      - 'frontend/**'
      - '!frontend/README.md'
      - '!frontend/.*'
      - 'frontend/.eslintrc.js'


jobs:

  check-fe-pr:
    name: Check
    runs-on: ubuntu-latest

    steps:
      -
        name: ☁️ Checkout source
        uses: actions/checkout@v3
      -
        name: 🔧 Setup Node 16
        uses: actions/setup-node@v3
        with:
          node-version: '16'
          cache: 'npm'
          cache-dependency-path: frontend/package-lock.json
      -
        name: 📦 Install dependencies
        run: npm ci --only-production --ignore-scripts
        working-directory: frontend
      # -
      #   name: 🧪 Run tests
      #   run: npm run test:ci
      #   working-directory: frontend
      -
        name: 🏗️ Run build
        run: npm run build
        working-directory: frontend
35  .github/workflows/check-fe-ts-and-lint.yml (vendored)
@@ -1,35 +0,0 @@
name: Check Frontend Type and Lint check

on:
  pull_request:
    types: [opened, synchronize]
    paths:
      - "frontend/**"
      - "!frontend/README.md"
      - "!frontend/.*"
      - "frontend/.eslintrc.js"

jobs:
  check-fe-ts-lint:
    name: Check Frontend Type and Lint check
    runs-on: ubuntu-latest
    timeout-minutes: 15

    steps:
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
      - name: 🔧 Setup Node 20
        uses: actions/setup-node@v3
        with:
          node-version: "20"
          cache: "npm"
          cache-dependency-path: frontend/package-lock.json
      - name: 📦 Install dependencies
        run: npm install
        working-directory: frontend
      - name: 🏗️ Run Type check
        run: npm run type:check
        working-directory: frontend
      - name: 🏗️ Run Lint check
        run: npm run lint:fix
        working-directory: frontend
@@ -1,25 +0,0 @@
name: Check migration file edited

on:
  pull_request:
    types: [opened, synchronize]
    paths:
      - 'backend/src/db/migrations/**'

jobs:
  rename:
    runs-on: ubuntu-latest
    steps:
      - name: Check out repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Check any migration files are modified, renamed or duplicated.
        run: |
          { git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^M\|^R\|^C' || true; } | cut -f2 | xargs -r -n1 basename > edited_files.txt
          if [ -s edited_files.txt ]; then
            echo "Existing migration files cannot be modified."
            cat edited_files.txt
            exit 1
          fi
22  .github/workflows/close_inactive_issues.yml (vendored, Normal file)
@@ -0,0 +1,22 @@
name: Close inactive issues
on:
  schedule:
    - cron: "30 1 * * *"

jobs:
  close-issues:
    runs-on: ubuntu-latest
    permissions:
      issues: write
      pull-requests: write
    steps:
      - uses: actions/stale@v4
        with:
          days-before-issue-stale: 30
          days-before-issue-close: 14
          stale-issue-label: "stale"
          stale-issue-message: "This issue is stale because it has been open for 30 days with no activity."
          close-issue-message: "This issue was closed because it has been inactive for 14 days since being marked as stale."
          days-before-pr-stale: -1
          days-before-pr-close: -1
          repo-token: ${{ secrets.GITHUB_TOKEN }}
86  .github/workflows/docker-image.yml (vendored, Normal file)
@@ -0,0 +1,86 @@
name: Push to Docker Hub

on: [workflow_dispatch]

jobs:
  backend-image:
    name: Build backend image
    runs-on: ubuntu-latest

    steps:
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
      - name: 🔧 Set up QEMU
        uses: docker/setup-qemu-action@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: 📦 Build backend and export to Docker
        uses: docker/build-push-action@v3
        with:
          load: true
          context: backend
          tags: infisical/backend:test
      - name: ⏻ Spawn backend container and dependencies
        run: |
          docker compose -f .github/resources/docker-compose.be-test.yml up --wait --quiet-pull
      - name: 🧪 Test backend image
        run: |
          ./.github/resources/healthcheck.sh infisical-backend-test
      - name: ⏻ Shut down backend container and dependencies
        run: |
          docker compose -f .github/resources/docker-compose.be-test.yml down
      - name: 🏗️ Build backend and push
        uses: docker/build-push-action@v3
        with:
          push: true
          context: backend
          tags: infisical/backend:latest
          platforms: linux/amd64,linux/arm64

  frontend-image:
    name: Build frontend image
    runs-on: ubuntu-latest

    steps:
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
      - name: 🔧 Set up QEMU
        uses: docker/setup-qemu-action@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: 📦 Build frontend and export to Docker
        uses: docker/build-push-action@v3
        with:
          load: true
          context: frontend
          tags: infisical/frontend:test
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
      - name: ⏻ Spawn frontend container
        run: |
          docker run -d --rm --name infisical-frontend-test infisical/frontend:test
      - name: 🧪 Test frontend image
        run: |
          ./.github/resources/healthcheck.sh infisical-frontend-test
      - name: ⏻ Shut down frontend container
        run: |
          docker stop infisical-frontend-test
      - name: 🏗️ Build frontend and push
        uses: docker/build-push-action@v3
        with:
          push: true
          context: frontend
          tags: infisical/frontend:latest
          platforms: linux/amd64,linux/arm64
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
34  .github/workflows/generate-release-changelog.yml (vendored)
@@ -1,34 +0,0 @@
name: Generate Changelog
permissions:
  contents: write

on:
  workflow_dispatch:
  push:
    tags:
      - "infisical/v*.*.*-postgres"

jobs:
  generate_changelog:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-tags: true
          fetch-depth: 0
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.12.0"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install requests openai
      - name: Generate Changelog and Post to Slack
        id: gen-changelog
        run: python .github/resources/changelog-generator.py
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
@@ -1,22 +0,0 @@
name: Release Infisical Core Helm chart

on: [workflow_dispatch]

jobs:
  release:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Install Helm
        uses: azure/setup-helm@v3
        with:
          version: v3.10.0
      - name: Install python
        uses: actions/setup-python@v4
      - name: Install Cloudsmith CLI
        run: pip install --upgrade cloudsmith-cli
      - name: Build and push helm package to Cloudsmith
        run: cd helm-charts && sh upload-infisical-core-helm-cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
22  .github/workflows/helm_chart_release.yml (vendored, Normal file)
@@ -0,0 +1,22 @@
name: Release Helm Charts

on: [workflow_dispatch]

jobs:
  release:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Install Helm
        uses: azure/setup-helm@v3
        with:
          version: v3.10.0
      - name: Install python
        uses: actions/setup-python@v4
      - name: Install Cloudsmith CLI
        run: pip install --upgrade cloudsmith-cli
      - name: Build and push helm package to Cloudsmith
        run: cd helm-charts && sh upload-to-cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
@@ -1,115 +0,0 @@
name: Release standalone docker image
on:
  push:
    tags:
      - "infisical/v*.*.*-postgres"

jobs:
  infisical-tests:
    name: Run tests before deployment
    # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
    uses: ./.github/workflows/run-backend-tests.yml

  infisical-standalone:
    name: Build infisical standalone image postgres
    runs-on: ubuntu-latest
    needs: [infisical-tests]
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: 📦 Install dependencies to test all dependencies
        run: npm ci --only-production
        working-directory: backend
      - name: version output
        run: |
          echo "Output Value: ${{ steps.version.outputs.major }}"
          echo "Output Value: ${{ steps.version.outputs.minor }}"
          echo "Output Value: ${{ steps.version.outputs.patch }}"
          echo "Output Value: ${{ steps.version.outputs.version }}"
          echo "Output Value: ${{ steps.version.outputs.version_type }}"
          echo "Output Value: ${{ steps.version.outputs.increment }}"
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 📦 Build backend and export to Docker
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          push: true
          context: .
          tags: |
            infisical/infisical:latest-postgres
            infisical/infisical:${{ steps.commit.outputs.short }}
            infisical/infisical:${{ steps.extract_version.outputs.version }}
          platforms: linux/amd64,linux/arm64
          file: Dockerfile.standalone-infisical
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
            INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}

  infisical-fips-standalone:
    name: Build infisical standalone image postgres
    runs-on: ubuntu-latest
    needs: [infisical-tests]
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: 📦 Install dependencies to test all dependencies
        run: npm ci --only-production
        working-directory: backend
      - name: version output
        run: |
          echo "Output Value: ${{ steps.version.outputs.major }}"
          echo "Output Value: ${{ steps.version.outputs.minor }}"
          echo "Output Value: ${{ steps.version.outputs.patch }}"
          echo "Output Value: ${{ steps.version.outputs.version }}"
          echo "Output Value: ${{ steps.version.outputs.version_type }}"
          echo "Output Value: ${{ steps.version.outputs.increment }}"
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 📦 Build backend and export to Docker
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          push: true
          context: .
          tags: |
            infisical/infisical-fips:latest-postgres
            infisical/infisical-fips:${{ steps.commit.outputs.short }}
            infisical/infisical-fips:${{ steps.extract_version.outputs.version }}
          platforms: linux/amd64,linux/arm64
          file: Dockerfile.fips.standalone-infisical
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
            INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
41  .github/workflows/release_build.yml (vendored, Normal file)
@@ -0,0 +1,41 @@
name: Go releaser

on:
  push:
    # run only against tags
    tags:
      - 'v*'

permissions:
  contents: write
  # packages: write
  # issues: write

jobs:
  goreleaser:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - run: git fetch --force --tags
      - uses: actions/setup-go@v3
        with:
          go-version: '>=1.19.3'
          cache: true
          cache-dependency-path: cli/go.sum
      - uses: goreleaser/goreleaser-action@v2
        with:
          distribution: goreleaser
          version: latest
          args: release --rm-dist
        env:
          GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
          FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
      - uses: actions/setup-python@v4
      - run: pip install --upgrade cloudsmith-cli
      - name: Publish to CloudSmith
        run: sh cli/upload_to_cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
.github/workflows/release_build_infisical_cli.yml
vendored
132
.github/workflows/release_build_infisical_cli.yml
vendored
@ -1,132 +0,0 @@
|
||||
name: Build and release CLI
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
push:
|
||||
# run only against tags
|
||||
tags:
|
||||
- "infisical-cli/v*.*.*"
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
cli-integration-tests:
|
||||
name: Run tests before deployment
|
||||
uses: ./.github/workflows/run-cli-tests.yml
|
||||
secrets:
|
||||
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
|
||||
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
|
||||
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
|
||||
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
|
||||
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
|
||||
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
|
||||
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
|
||||
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
|
||||
|
||||
npm-release:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
working-directory: ./npm
|
||||
needs:
|
||||
- cli-integration-tests
|
||||
- goreleaser
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Extract version
|
||||
run: |
|
||||
VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
|
||||
echo "Version extracted: $VERSION"
|
||||
echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV
|
||||
|
||||
- name: Print version
|
||||
run: echo ${{ env.CLI_VERSION }}
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
|
||||
with:
|
||||
node-version: 20
|
||||
cache: "npm"
|
||||
cache-dependency-path: ./npm/package-lock.json
|
||||
- name: Install dependencies
|
||||
working-directory: ${{ env.working-directory }}
|
||||
run: npm install --ignore-scripts
|
||||
|
||||
- name: Set NPM version
|
||||
working-directory: ${{ env.working-directory }}
|
||||
run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version
|
||||
|
||||
- name: Setup NPM
|
||||
working-directory: ${{ env.working-directory }}
|
||||
run: |
|
||||
echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
|
||||
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc
|
||||
|
||||
echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
|
||||
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
|
||||
env:
|
||||
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
|
||||
- name: Pack NPM
|
||||
working-directory: ${{ env.working-directory }}
|
||||
run: npm pack
|
||||
|
||||
- name: Publish NPM
|
||||
working-directory: ${{ env.working-directory }}
|
||||
run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
|
||||
env:
|
||||
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
|
||||
goreleaser:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [cli-integration-tests]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: 🔧 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- run: git fetch --force --tags
|
||||
- run: echo "Ref name ${{github.ref_name}}"
|
||||
- uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: ">=1.19.3"
|
||||
cache: true
|
||||
cache-dependency-path: cli/go.sum
|
||||
- name: Setup for libssl1.0-dev
|
||||
run: |
|
||||
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
|
||||
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
|
||||
sudo apt update
|
||||
sudo apt-get install -y libssl1.0-dev
|
||||
- name: OSXCross for CGO Support
|
||||
run: |
|
||||
mkdir ../../osxcross
|
||||
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
|
||||
- uses: goreleaser/goreleaser-action@v4
|
||||
with:
|
||||
distribution: goreleaser-pro
|
||||
version: v1.26.2-pro
|
||||
args: release --clean
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
|
||||
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
|
||||
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
|
||||
AUR_KEY: ${{ secrets.AUR_KEY }}
|
||||
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
|
||||
- uses: actions/setup-python@v4
|
||||
- run: pip install --upgrade cloudsmith-cli
|
||||
- name: Publish to CloudSmith
|
||||
run: sh cli/upload_to_cloudsmith.sh
|
||||
env:
|
||||
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
|
@@ -1,16 +1,10 @@
name: Release image + Helm chart K8s Operator
on:
  push:
    tags:
      - "infisical-k8-operator/v*.*.*"
name: Release Docker image for K8 operator
on: [workflow_dispatch]

jobs:
  release:
    runs-on: ubuntu-latest
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"
      - uses: actions/checkout@v2

      - name: 🔧 Set up QEMU
@@ -32,21 +26,4 @@ jobs:
          context: k8-operator
          push: true
          platforms: linux/amd64,linux/arm64
          tags: |
            infisical/kubernetes-operator:latest
            infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}

      - name: Checkout
        uses: actions/checkout@v2
      - name: Install Helm
        uses: azure/setup-helm@v3
        with:
          version: v3.10.0
      - name: Install python
        uses: actions/setup-python@v4
      - name: Install Cloudsmith CLI
        run: pip install --upgrade cloudsmith-cli
      - name: Build and push helm package to Cloudsmith
        run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
          tags: infisical/kubernetes-operator:latest
47 .github/workflows/run-backend-tests.yml vendored
@@ -1,47 +0,0 @@
name: "Run backend tests"

on:
  pull_request:
    types: [opened, synchronize]
    paths:
      - "backend/**"
      - "!backend/README.md"
      - "!backend/.*"
      - "backend/.eslintrc.js"
  workflow_call:

jobs:
  check-be-pr:
    name: Run integration test
    runs-on: ubuntu-latest
    timeout-minutes: 15
    steps:
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
      - uses: KengoTODA/actions-setup-docker-compose@v1
        if: ${{ env.ACT }}
        name: Install `docker compose` for local simulations
        with:
          version: "2.14.2"
      - name: 🔧 Setup Node 20
        uses: actions/setup-node@v3
        with:
          node-version: "20"
          cache: "npm"
          cache-dependency-path: backend/package-lock.json
      - name: Install dependencies
        run: npm install
        working-directory: backend
      - name: Start postgres and redis
        run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
      - name: Start integration test
        run: npm run test:e2e
        working-directory: backend
        env:
          REDIS_URL: redis://172.17.0.1:6379
          DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
          AUTH_SECRET: something-random
          ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
      - name: cleanup
        run: |
          docker compose -f "docker-compose.dev.yml" down
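The same integration-test job can be reproduced by hand on a local machine. A minimal sketch, assuming Docker, Node 20, and a Linux docker bridge at 172.17.0.1 (the values simply mirror the workflow env above):

```
# Start the backing stores, then run the e2e suite against them
touch .env && docker compose -f docker-compose.dev.yml up -d db redis
cd backend && npm install
export REDIS_URL=redis://172.17.0.1:6379
export DB_CONNECTION_URI='postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable'
export AUTH_SECRET=something-random
export ENCRYPTION_KEY=4bnfe4e407b8921c104518903515b218
npm run test:e2e
```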
55 .github/workflows/run-cli-tests.yml vendored
@@ -1,55 +0,0 @@
name: Go CLI Tests

on:
  pull_request:
    types: [opened, synchronize]
    paths:
      - "cli/**"

  workflow_dispatch:

  workflow_call:
    secrets:
      CLI_TESTS_UA_CLIENT_ID:
        required: true
      CLI_TESTS_UA_CLIENT_SECRET:
        required: true
      CLI_TESTS_SERVICE_TOKEN:
        required: true
      CLI_TESTS_PROJECT_ID:
        required: true
      CLI_TESTS_ENV_SLUG:
        required: true
      CLI_TESTS_USER_EMAIL:
        required: true
      CLI_TESTS_USER_PASSWORD:
        required: true
      CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE:
        required: true
jobs:
  test:
    defaults:
      run:
        working-directory: ./cli
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
      - name: Setup Go
        uses: actions/setup-go@v4
        with:
          go-version: "1.21.x"
      - name: Install dependencies
        run: go get .
      - name: Test with the Go CLI
        env:
          CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
          CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
          CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
          CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
          CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
          CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
          CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
          # INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}

        run: go test -v -count=1 ./test
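Locally, the same suite runs with the stock Go toolchain. A sketch, assuming Go 1.21+ and the CLI_TESTS_* variables above already exported in your shell:

```
# Run the CLI test suite exactly as the workflow does
cd cli
go get .
go test -v -count=1 ./test
```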
27 .gitignore vendored
@@ -1,12 +1,10 @@
# backend
node_modules
.env
.env.test
.env.dev
.env.gamma
.env.prod
.env.infisical
.env.migration

*~
*.swp
*.swo
@@ -14,8 +12,6 @@ node_modules
.DS_Store

/dist
/completions/
/manpages/

# frontend

@@ -29,12 +25,10 @@ node_modules
.env

# testing
coverage
reports
junit.xml
/coverage

# next.js
.next/
/.next/
/out/

# production
@@ -58,18 +52,3 @@ yarn-error.log*

# Infisical init
.infisical.json

.infisicalignore

# Editor specific
.vscode/*
.idea/*

frontend-build

*.tgz
cli/infisical-merge
cli/test/infisical-merge
/backend/binary

/npm/bin
210 .goreleaser.yaml
@@ -6,83 +6,49 @@
# - cd cli && go mod tidy
# # you may remove this if you don't need go generate
# - cd cli && go generate ./...
before:
  hooks:
    - ./cli/scripts/completions.sh
    - ./cli/scripts/manpages.sh

monorepo:
  tag_prefix: infisical-cli/
  dir: cli

builds:
  - id: darwin-build
    binary: infisical
    ldflags:
      - -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
      - -X github.com/Infisical/infisical-merge/packages/telemetry.POSTHOG_API_KEY_FOR_CLI={{ .Env.POSTHOG_API_KEY_FOR_CLI }}
    flags:
      - -trimpath
    env:
      - CGO_ENABLED=1
      - CC=/home/runner/work/osxcross/target/bin/o64-clang
      - CXX=/home/runner/work/osxcross/target/bin/o64-clang++
    goos:
      - darwin
    ignore:
      - goos: darwin
        goarch: "386"
    dir: ./cli

  - id: all-other-builds
    env:
  - env:
      - CGO_ENABLED=0
    binary: infisical
    ldflags:
      - -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
      - -X github.com/Infisical/infisical-merge/packages/telemetry.POSTHOG_API_KEY_FOR_CLI={{ .Env.POSTHOG_API_KEY_FOR_CLI }}
    flags:
      - -trimpath
    id: infisical
    goos:
      - darwin
      - freebsd
      - linux
      - netbsd
      - openbsd
      - windows
    goarch:
      - "386"
      - 386
      - amd64
      - arm
      - arm64
    goarm:
      - "6"
      - "7"
      - 6
      - 7
    ignore:
      - goos: darwin
        goarch: "386"
      - goos: windows
        goarch: "386"
      - goos: freebsd
        goarch: "386"
    dir: ./cli

archives:
  - format_overrides:
      - goos: windows
        format: zip
    files:
      - ../README*
      - ../LICENSE*
      - ../manpages/*
      - ../completions/*

release:
  replace_existing_draft: true
  mode: "replace"
  mode: 'replace'

checksum:
  name_template: "checksums.txt"

  name_template: 'checksums.txt'
snapshot:
  name_template: "{{ .Version }}-devel"
  name_template: "{{ incpatch .Version }}"
changelog:
  sort: asc
  filters:
    exclude:
      - '^docs:'
      - '^test:'

# publishers:
#   - name: fury.io
@@ -90,7 +56,6 @@ snapshot:
#     - infisical
#     dir: "{{ dir .ArtifactPath }}"
#     cmd: curl -F package=@{{ .ArtifactName }} https://{{ .Env.FURY_TOKEN }}@push.fury.io/infisical/

brews:
  - name: infisical
    tap:
@@ -102,55 +67,22 @@ brews:
    folder: Formula
    homepage: "https://infisical.com"
    description: "The official Infisical CLI"
    install: |-
      bin.install "infisical"
      bash_completion.install "completions/infisical.bash" => "infisical"
      zsh_completion.install "completions/infisical.zsh" => "_infisical"
      fish_completion.install "completions/infisical.fish"
      man1.install "manpages/infisical.1.gz"
  - name: "infisical@{{.Version}}"
    tap:
      owner: Infisical
      name: homebrew-get-cli
    commit_author:
      name: "Infisical"
      email: ai@infisical.com
    folder: Formula
    homepage: "https://infisical.com"
    description: "The official Infisical CLI"
    install: |-
      bin.install "infisical"
      bash_completion.install "completions/infisical.bash" => "infisical"
      zsh_completion.install "completions/infisical.zsh" => "_infisical"
      fish_completion.install "completions/infisical.fish"
      man1.install "manpages/infisical.1.gz"

nfpms:
  - id: infisical
    package_name: infisical
    builds:
      - all-other-builds
    vendor: Infisical, Inc
    homepage: https://infisical.com/
    maintainer: Infisical, Inc
    description: The official Infisical CLI
    license: MIT
    formats:
      - rpm
      - deb
      - apk
      - archlinux
    bindir: /usr/bin
    contents:
      - src: ./completions/infisical.bash
        dst: /etc/bash_completion.d/infisical
      - src: ./completions/infisical.fish
        dst: /usr/share/fish/vendor_completions.d/infisical.fish
      - src: ./completions/infisical.zsh
        dst: /usr/share/zsh/site-functions/_infisical
      - src: ./manpages/infisical.1.gz
        dst: /usr/share/man/man1/infisical.1.gz

  - id: infisical
    package_name: infisical
    builds:
      - infisical
    vendor: Infisical, Inc
    homepage: https://infisical.com/
    maintainer: Infisical, Inc
    description: The official Infisical CLI
    license: Apache 2.0
    formats:
      - rpm
      - deb
      - apk
      - archlinux
    bindir: /usr/bin
scoop:
  bucket:
    owner: Infisical
@@ -160,64 +92,20 @@ scoop:
    email: ai@infisical.com
  homepage: "https://infisical.com"
  description: "The official Infisical CLI"
  license: MIT

aurs:
  - name: infisical-bin
    homepage: "https://infisical.com"
    description: "The official Infisical CLI"
    maintainers:
      - Infisical, Inc <support@infisical.com>
    license: MIT
    private_key: "{{ .Env.AUR_KEY }}"
    git_url: "ssh://aur@aur.archlinux.org/infisical-bin.git"
    package: |-
      # bin
      install -Dm755 "./infisical" "${pkgdir}/usr/bin/infisical"
      # license
      install -Dm644 "./LICENSE" "${pkgdir}/usr/share/licenses/infisical/LICENSE"
      # completions
      mkdir -p "${pkgdir}/usr/share/bash-completion/completions/"
      mkdir -p "${pkgdir}/usr/share/zsh/site-functions/"
      mkdir -p "${pkgdir}/usr/share/fish/vendor_completions.d/"
      install -Dm644 "./completions/infisical.bash" "${pkgdir}/usr/share/bash-completion/completions/infisical"
      install -Dm644 "./completions/infisical.zsh" "${pkgdir}/usr/share/zsh/site-functions/_infisical"
      install -Dm644 "./completions/infisical.fish" "${pkgdir}/usr/share/fish/vendor_completions.d/infisical.fish"
      # man pages
      install -Dm644 "./manpages/infisical.1.gz" "${pkgdir}/usr/share/man/man1/infisical.1.gz"

dockers:
  - dockerfile: docker/alpine
    goos: linux
    goarch: amd64
    use: buildx
    ids:
      - all-other-builds
    image_templates:
      - "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
      - "infisical/cli:latest-amd64"
    build_flag_templates:
      - "--pull"
      - "--platform=linux/amd64"
  - dockerfile: docker/alpine
    goos: linux
    goarch: amd64
    use: buildx
    ids:
      - all-other-builds
    image_templates:
      - "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
      - "infisical/cli:latest-arm64"
    build_flag_templates:
      - "--pull"
      - "--platform=linux/arm64"

docker_manifests:
  - name_template: "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}"
    image_templates:
      - "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
      - "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
  - name_template: "infisical/cli:latest"
    image_templates:
      - "infisical/cli:latest-amd64"
      - "infisical/cli:latest-arm64"
  license: Apache-2.0
# dockers:
#   - dockerfile: goreleaser.dockerfile
#     goos: linux
#     goarch: amd64
#     ids:
#       - infisical
#     image_templates:
#       - "infisical/cli:{{ .Version }}"
#       - "infisical/cli:{{ .Major }}.{{ .Minor }}"
#       - "infisical/cli:{{ .Major }}"
#       - "infisical/cli:latest"
#     build_flag_templates:
#       - "--label=org.label-schema.schema-version=1.0"
#       - "--label=org.label-schema.version={{.Version}}"
#       - "--label=org.label-schema.name={{.ProjectName}}"
#       - "--platform=linux/amd64"
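A local dry run of a config like this can be done with a snapshot build. A sketch only: the monorepo block above is a GoReleaser Pro feature, so this assumes a Pro binary on PATH and a throwaway POSTHOG_API_KEY_FOR_CLI value; nothing is published in snapshot mode.

```
# Build all targets into dist/ without tagging or publishing
POSTHOG_API_KEY_FOR_CLI=dummy goreleaser release --snapshot --clean
```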
@@ -1,12 +1,5 @@

#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"

# Check if infisical is installed
if ! command -v infisical >/dev/null 2>&1; then
    echo "\nError: Infisical CLI is not installed. Please install the Infisical CLI before committing.\n You can refer to the documentation at https://infisical.com/docs/cli/overview\n\n"
    exit 1
fi

npx lint-staged

infisical scan git-changes --staged -v
@@ -1,10 +0,0 @@
.github/resources/docker-compose.be-test.yml:generic-api-key:16
frontend/src/views/Project/MembersPage/components/IdentityTab/components/IdentityRoleForm/IdentityRbacSection.tsx:generic-api-key:206
frontend/src/views/Project/MembersPage/components/IdentityTab/components/IdentityRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:304
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/MemberRbacSection.tsx:generic-api-key:206
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:292
docs/self-hosting/configuration/envars.mdx:generic-api-key:106
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
docs/mint.json:generic-api-key:651
backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134
docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:104
@@ -2,6 +2,6 @@

Thanks for taking the time to contribute! 😃 🚀

Please refer to our [Contributing Guide](https://infisical.com/docs/contributing/getting-started/overview) for instructions on how to contribute.
Please refer to our [Contributing Guide](https://infisical.com/docs/contributing/overview) for instructions on how to contribute.

We also have some 🔥amazing🔥 merch for our contributors. Please reach out to tony@infisical.com for more info 👀
@@ -1,184 +0,0 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key

FROM node:20-slim AS base

FROM base AS frontend-dependencies
WORKDIR /app

COPY frontend/package.json frontend/package-lock.json ./

# Install dependencies
RUN npm ci --only-production --ignore-scripts

# Rebuild the source code only when needed
FROM base AS frontend-builder
WORKDIR /app

# Copy dependencies
COPY --from=frontend-dependencies /app/node_modules ./node_modules
# Copy all files
COPY /frontend .

ENV NODE_ENV production
ARG POSTHOG_HOST
ENV VITE_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV VITE_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV VITE_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY

# Build
RUN npm run build

# Production image
FROM base AS frontend-runner
WORKDIR /app

RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user

COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./

USER non-root-user

##
## BACKEND
##
FROM base AS backend-build

ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/

RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user

WORKDIR /app

# Required for pkcs11js and ODBC
RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++ \
    unixodbc \
    unixodbc-dev \
    freetds-dev \
    freetds-bin \
    tdsodbc \
    && rm -rf /var/lib/apt/lists/*

# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

COPY backend/package*.json ./
RUN npm ci --only-production

COPY /backend .
COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
RUN npm i -D tsconfig-paths
RUN npm run build

# Production stage
FROM base AS backend-runner

ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/

WORKDIR /app

# Required for pkcs11js and ODBC
RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++ \
    unixodbc \
    unixodbc-dev \
    freetds-dev \
    freetds-bin \
    tdsodbc \
    && rm -rf /var/lib/apt/lists/*

# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

COPY backend/package*.json ./
RUN npm ci --only-production

COPY --from=backend-build /app .

RUN mkdir frontend-build

# Production stage
FROM base AS production

# Install necessary packages including ODBC
RUN apt-get update && apt-get install -y \
    ca-certificates \
    curl \
    git \
    python3 \
    make \
    g++ \
    unixodbc \
    unixodbc-dev \
    freetds-dev \
    freetds-bin \
    tdsodbc \
    openssh-client \
    && rm -rf /var/lib/apt/lists/*

# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
    && apt-get update && apt-get install -y infisical=0.31.1 \
    && rm -rf /var/lib/apt/lists/*

RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user

# Give non-root-user permission to update SSL certs
RUN chown -R non-root-user /etc/ssl/certs
RUN chown non-root-user /etc/ssl/certs/ca-certificates.crt
RUN chmod -R u+rwx /etc/ssl/certs
RUN chmod u+rw /etc/ssl/certs/ca-certificates.crt
RUN chown non-root-user /usr/sbin/update-ca-certificates
RUN chmod u+rx /usr/sbin/update-ca-certificates

## set pre baked keys
ARG POSTHOG_API_KEY
ENV POSTHOG_API_KEY=$POSTHOG_API_KEY
ARG INTERCOM_ID=intercom-id
ENV INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY

WORKDIR /

COPY --from=backend-runner /app /backend

COPY --from=frontend-runner /app ./backend/frontend-build

ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION

ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/

WORKDIR /backend

ENV TELEMETRY_ENABLED true

EXPOSE 8080
EXPOSE 443

USER non-root-user

CMD ["./standalone-entrypoint.sh"]
@@ -1,181 +0,0 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key

FROM node:20-slim AS base

FROM base AS frontend-dependencies

WORKDIR /app

COPY frontend/package.json frontend/package-lock.json ./

# Install dependencies
RUN npm ci --only-production --ignore-scripts

# Rebuild the source code only when needed
FROM base AS frontend-builder
WORKDIR /app

# Copy dependencies
COPY --from=frontend-dependencies /app/node_modules ./node_modules
# Copy all files
COPY /frontend .

ENV NODE_ENV production
ARG POSTHOG_HOST
ENV VITE_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV VITE_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV VITE_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY

# Build
RUN npm run build

# Production image
FROM base AS frontend-runner
WORKDIR /app

RUN groupadd --system --gid 1001 nodejs
RUN useradd --system --uid 1001 --gid nodejs non-root-user

COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./

USER non-root-user

##
## BACKEND
##
FROM base AS backend-build

WORKDIR /app

# Install all required dependencies for build
RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++ \
    unixodbc \
    freetds-bin \
    unixodbc-dev \
    libc-dev \
    freetds-dev \
    && rm -rf /var/lib/apt/lists/*

RUN groupadd --system --gid 1001 nodejs
RUN useradd --system --uid 1001 --gid nodejs non-root-user

COPY backend/package*.json ./
RUN npm ci --only-production

COPY /backend .
COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
RUN npm i -D tsconfig-paths
RUN npm run build

# Production stage
FROM base AS backend-runner

WORKDIR /app

# Install all required dependencies for runtime
RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++ \
    unixodbc \
    freetds-bin \
    unixodbc-dev \
    libc-dev \
    freetds-dev \
    && rm -rf /var/lib/apt/lists/*

# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

COPY backend/package*.json ./
RUN npm ci --only-production

COPY --from=backend-build /app .

RUN mkdir frontend-build

# Production stage
FROM base AS production

RUN apt-get update && apt-get install -y \
    ca-certificates \
    bash \
    curl \
    git \
    python3 \
    make \
    g++ \
    unixodbc \
    freetds-bin \
    unixodbc-dev \
    libc-dev \
    freetds-dev \
    wget \
    openssh-client \
    && rm -rf /var/lib/apt/lists/*

# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
    && apt-get update && apt-get install -y infisical=0.31.1 \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /

# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# Setup user permissions
RUN groupadd --system --gid 1001 nodejs \
    && useradd --system --uid 1001 --gid nodejs non-root-user

# Give non-root-user permission to update SSL certs
RUN chown -R non-root-user /etc/ssl/certs
RUN chown non-root-user /etc/ssl/certs/ca-certificates.crt
RUN chmod -R u+rwx /etc/ssl/certs
RUN chmod u+rw /etc/ssl/certs/ca-certificates.crt
RUN chown non-root-user /usr/sbin/update-ca-certificates
RUN chmod u+rx /usr/sbin/update-ca-certificates

## set pre baked keys
ARG POSTHOG_API_KEY
ENV POSTHOG_API_KEY=$POSTHOG_API_KEY
ARG INTERCOM_ID=intercom-id
ENV INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY

COPY --from=backend-runner /app /backend
COPY --from=frontend-runner /app ./backend/frontend-build

ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION

ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true

WORKDIR /backend

ENV TELEMETRY_ENABLED true

EXPOSE 8080
EXPOSE 443

USER non-root-user

CMD ["./standalone-entrypoint.sh"]
27 Makefile
@@ -5,31 +5,10 @@ push:
	docker-compose -f docker-compose.yml push

up-dev:
	docker compose -f docker-compose.dev.yml up --build

up-dev-ldap:
	docker compose -f docker-compose.dev.yml --profile ldap up --build

up-dev-metrics:
	docker compose -f docker-compose.dev.yml --profile metrics up --build
	docker-compose -f docker-compose.dev.yml up --build

up-prod:
	docker-compose -f docker-compose.prod.yml up --build
	docker-compose -f docker-compose.yml up --build

down:
	docker compose -f docker-compose.dev.yml down

reviewable-ui:
	cd frontend && \
	npm run lint:fix && \
	npm run type:check

reviewable-api:
	cd backend && \
	npm run lint:fix && \
	npm run type:check

reviewable: reviewable-ui reviewable-api

up-dev-sso:
	docker compose -f docker-compose.dev.yml --profile sso up --build
	docker-compose down
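Typical usage of the targets above from the repository root, assuming Docker and the referenced compose files are present (an illustrative sketch, not project documentation):

```
make up-dev        # start the local dev stack
make reviewable    # lint and type-check frontend and backend
make down          # tear the dev stack down
```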
393 README.md
@@ -1,33 +1,30 @@
<h1 align="center">
  <img width="300" src="/img/logoname-black.svg#gh-light-mode-only" alt="infisical">
  <img width="300" src="/img/logoname-white.svg#gh-dark-mode-only" alt="infisical">
</h1>
<p align="center">
  <p align="center"><b>The open-source secret management platform</b>: Sync secrets/configs across your team/infrastructure and prevent secret leaks.</p>
  <p align="center">Open-source, E2EE, simple tool to manage and sync environment variables across your team and infrastructure.</p>
</p>

<h4 align="center">
  <a href="https://infisical.com/slack">Slack</a> |
  <a href="https://join.slack.com/t/infisical-users/shared_invite/zt-1kdbk07ro-RtoyEt_9E~fyzGo_xQYP6g">Slack</a> |
  <a href="https://infisical.com/">Infisical Cloud</a> |
  <a href="https://infisical.com/docs/self-hosting/overview">Self-Hosting</a> |
  <a href="https://infisical.com/docs/documentation/getting-started/introduction">Docs</a> |
  <a href="https://www.infisical.com">Website</a> |
  <a href="https://infisical.com/careers">Hiring (Remote/SF)</a>
  <a href="https://infisical.com/docs/getting-started/introduction">Docs</a> |
  <a href="https://www.infisical.com">Website</a>
</h4>

<h4 align="center">
  <a href="https://github.com/Infisical/infisical/blob/main/LICENSE">
    <img src="https://img.shields.io/badge/license-MIT-blue.svg" alt="Infisical is released under the MIT license." />
  <a href="https://github.com/medusajs/medusa/blob/master/LICENSE">
    <img src="https://img.shields.io/badge/license-MIT-blue.svg" alt="Medusa is released under the MIT license." />
  </a>
  <a href="https://github.com/infisical/infisical/blob/main/CONTRIBUTING.md">
    <img src="https://img.shields.io/badge/PRs-Welcome-brightgreen" alt="PRs welcome!" />
  </a>
  <a href="https://github.com/Infisical/infisical/issues">
  <a href="">
    <img src="https://img.shields.io/github/commit-activity/m/infisical/infisical" alt="git commit activity" />
  </a>
  <a href="https://cloudsmith.io/~infisical/repos/">
    <img src="https://img.shields.io/badge/Downloads-6.95M-orange" alt="Cloudsmith downloads" />
  </a>
  <a href="https://infisical.com/slack">
  <a href="https://join.slack.com/t/infisical-users/shared_invite/zt-1kdbk07ro-RtoyEt_9E~fyzGo_xQYP6g">
    <img src="https://img.shields.io/badge/chat-on%20Slack-blueviolet" alt="Slack community channel" />
  </a>
  <a href="https://twitter.com/infisical">
@@ -35,125 +32,289 @@
  </a>
</h4>

<img src="/img/infisical_github_repo2.png" width="100%" alt="Dashboard" />
<img src="/img/infisical_github_repo.png" width="100%" alt="Dashboard" />

## Introduction
**[Infisical](https://infisical.com)** is an open source, E2EE tool to help teams manage and sync environment variables across their development workflow and infrastructure. It's designed to be simple and take minutes to get going.

**[Infisical](https://infisical.com)** is the open source secret management platform that teams use to centralize their application configuration and secrets like API keys and database credentials as well as manage their internal PKI.
- **[User-Friendly Dashboard](https://infisical.com/docs/getting-started/dashboard/project)** to manage your team's environment variables within projects
- **[Language-Agnostic CLI](https://infisical.com/docs/cli/overview)** that pulls and injects environment variables into your local workflow
- **[Complete control over your data](https://infisical.com/docs/self-hosting/overview)** - host it yourself on any infrastructure
- **Navigate Multiple Environments** per project (e.g. development, staging, production, etc.)
- **Personal/Shared** scoping for environment variables
- **[Integrations](https://infisical.com/docs/integrations/overview)** with CI/CD and production infrastructure (Heroku available, more coming soon)
- 🔜 **1-Click Deploy** to Digital Ocean and Heroku
- 🔜 **Authentication/Authorization** for projects (read/write controls soon)
- 🔜 **Automatic Secret Rotation**
- 🔜 **2FA**
- 🔜 **Access Logs**
- 🔜 **Slack Integration & MS Teams** integrations

We're on a mission to make security tooling more accessible to everyone, not just security teams, and that means redesigning the entire developer experience from ground up.
And more.

## Features
## 🚀 Get started

### Secrets Management:
To quickly get started, visit our [get started guide](https://infisical.com/docs/getting-started/introduction).

- **[Dashboard](https://infisical.com/docs/documentation/platform/project)**: Manage secrets across projects and environments (e.g. development, production, etc.) through a user-friendly interface.
- **[Native Integrations](https://infisical.com/docs/integrations/overview)**: Sync secrets to platforms like [GitHub](https://infisical.com/docs/integrations/cicd/githubactions), [Vercel](https://infisical.com/docs/integrations/cloud/vercel), [AWS](https://infisical.com/docs/integrations/cloud/aws-secret-manager), and use tools like [Terraform](https://infisical.com/docs/integrations/frameworks/terraform), [Ansible](https://infisical.com/docs/integrations/platforms/ansible), and more.
- **[Secret versioning](https://infisical.com/docs/documentation/platform/secret-versioning)** and **[Point-in-Time Recovery](https://infisical.com/docs/documentation/platform/pit-recovery)**: Keep track of every secret and project state; roll back when needed.
- **[Secret Rotation](https://infisical.com/docs/documentation/platform/secret-rotation/overview)**: Rotate secrets at regular intervals for services like [PostgreSQL](https://infisical.com/docs/documentation/platform/secret-rotation/postgres), [MySQL](https://infisical.com/docs/documentation/platform/secret-rotation/mysql), [AWS IAM](https://infisical.com/docs/documentation/platform/secret-rotation/aws-iam), and more.
- **[Dynamic Secrets](https://infisical.com/docs/documentation/platform/dynamic-secrets/overview)**: Generate ephemeral secrets on-demand for services like [PostgreSQL](https://infisical.com/docs/documentation/platform/dynamic-secrets/postgresql), [MySQL](https://infisical.com/docs/documentation/platform/dynamic-secrets/mysql), [RabbitMQ](https://infisical.com/docs/documentation/platform/dynamic-secrets/rabbit-mq), and more.
- **[Secret Scanning and Leak Prevention](https://infisical.com/docs/cli/scanning-overview)**: Prevent secrets from leaking to git.
- **[Infisical Kubernetes Operator](https://infisical.com/docs/documentation/getting-started/kubernetes)**: Deliver secrets to your Kubernetes workloads and automatically reload deployments.
- **[Infisical Agent](https://infisical.com/docs/infisical-agent/overview)**: Inject secrets into applications without modifying any code logic.
<p>
  <a href="https://infisical.com/docs/self-hosting/overview" target="_blank"><img src="https://user-images.githubusercontent.com/78047717/206356882-2b773eed-b0da-4725-ae2f-83e3cd7f2713.png" height=120 /> </a>
  <a href="https://www.youtube.com/watch?v=JS3OKYU2078" target="_blank"><img src="https://user-images.githubusercontent.com/78047717/206356600-8833b128-6cae-408c-a703-07b2fc6aff4b.png" height=120 /> </a>
  <a href="https://app.infisical.com/signup" target="_blank"><img src="https://user-images.githubusercontent.com/78047717/206355970-f4c09062-b88f-452a-94e0-9c61a0651170.png" height=120></a>
</p>

### Infisical (Internal) PKI:
## 🔥 What's cool about this?

- **[Private Certificate Authority](https://infisical.com/docs/documentation/platform/pki/private-ca)**: Create CA hierarchies, configure [certificate templates](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-issuing-certificates) for policy enforcement, and start issuing X.509 certificates.
- **[Certificate Management](https://infisical.com/docs/documentation/platform/pki/certificates)**: Manage the certificate lifecycle from [issuance](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-issuing-certificates) to [revocation](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-revoking-certificates) with support for CRL.
- **[Alerting](https://infisical.com/docs/documentation/platform/pki/alerting)**: Configure alerting for expiring CA and end-entity certificates.
- **[Infisical PKI Issuer for Kubernetes](https://infisical.com/docs/documentation/platform/pki/pki-issuer)**: Deliver TLS certificates to your Kubernetes workloads with automatic renewal.
- **[Enrollment over Secure Transport](https://infisical.com/docs/documentation/platform/pki/est)**: Enroll and manage certificates via EST protocol.
Infisical makes secret management simple and end-to-end encrypted by default. We're on a mission to make it more accessible to all developers, <i>not just security teams</i>.

### Infisical Key Management System (KMS):
According to a [report](https://www.ekransystem.com/en/blog/secrets-management) in 2019, only 10% of organizations use secret management solutions despite all using digital secrets to some extent.

- **[Cryptographic Keys](https://infisical.com/docs/documentation/platform/kms)**: Centrally manage keys across projects through a user-friendly interface or via the API.
- **[Encrypt and Decrypt Data](https://infisical.com/docs/documentation/platform/kms#guide-to-encrypting-data)**: Use symmetric keys to encrypt and decrypt data.
If you care about efficiency and security, then Infisical is right for you.

### Infisical SSH
We are currently working hard to make Infisical more extensive. Need any integrations or want a new feature? Feel free to [create an issue](https://github.com/Infisical/infisical/issues) or [contribute](https://infisical.com/docs/contributing/overview) directly to the repository.

- **[Signed SSH Certificates](https://infisical.com/docs/documentation/platform/ssh)**: Issue ephemeral SSH credentials for secure, short-lived, and centralized access to infrastructure.
## 🌱 Contributing

### General Platform:

- **Authentication Methods**: Authenticate machine identities with Infisical using a cloud-native or platform agnostic authentication method ([Kubernetes Auth](https://infisical.com/docs/documentation/platform/identities/kubernetes-auth), [GCP Auth](https://infisical.com/docs/documentation/platform/identities/gcp-auth), [Azure Auth](https://infisical.com/docs/documentation/platform/identities/azure-auth), [AWS Auth](https://infisical.com/docs/documentation/platform/identities/aws-auth), [OIDC Auth](https://infisical.com/docs/documentation/platform/identities/oidc-auth/general), [Universal Auth](https://infisical.com/docs/documentation/platform/identities/universal-auth)).
- **[Access Controls](https://infisical.com/docs/documentation/platform/access-controls/overview)**: Define advanced authorization controls for users and machine identities with [RBAC](https://infisical.com/docs/documentation/platform/access-controls/role-based-access-controls), [additional privileges](https://infisical.com/docs/documentation/platform/access-controls/additional-privileges), [temporary access](https://infisical.com/docs/documentation/platform/access-controls/temporary-access), [access requests](https://infisical.com/docs/documentation/platform/access-controls/access-requests), [approval workflows](https://infisical.com/docs/documentation/platform/pr-workflows), and more.
- **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)**: Track every action taken on the platform.
- **[Self-hosting](https://infisical.com/docs/self-hosting/overview)**: Deploy Infisical on-prem or cloud with ease; keep data on your own infrastructure.
- **[Infisical SDK](https://infisical.com/docs/sdks/overview)**: Interact with Infisical via client SDKs ([Node](https://infisical.com/docs/sdks/languages/node), [Python](https://github.com/Infisical/python-sdk-official?tab=readme-ov-file#infisical-python-sdk), [Go](https://infisical.com/docs/sdks/languages/go), [Ruby](https://infisical.com/docs/sdks/languages/ruby), [Java](https://infisical.com/docs/sdks/languages/java), [.NET](https://infisical.com/docs/sdks/languages/csharp))
- **[Infisical CLI](https://infisical.com/docs/cli/overview)**: Interact with Infisical via CLI; useful for injecting secrets into local development and CI/CD pipelines (see the sketch after this list).
- **[Infisical API](https://infisical.com/docs/api-reference/overview/introduction)**: Interact with Infisical via API.

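As a concrete illustration of the CLI item above, secrets can be injected into a local process without touching application code. A sketch only; the environment slug and the wrapped command are placeholders, not fixed values:

```
infisical run --env=dev -- npm run dev
```
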
## Getting started

Check out the [Quickstart Guides](https://infisical.com/docs/getting-started/introduction)

| Use Infisical Cloud | Deploy Infisical on premise |
| --- | --- |
| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |

### Run Infisical locally

To set up and run Infisical locally, make sure you have Git and Docker installed on your system. Then run the command for your system:

Linux/macOS:

```console
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker compose -f docker-compose.prod.yml up
```

Windows Command Prompt:

```console
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker compose -f docker-compose.prod.yml up
```

Create an account at `http://localhost:80`

### Scan and prevent secret leaks

On top of managing secrets with Infisical, you can also [scan for over 140 secret types](https://infisical.com/docs/cli/scanning-overview) in your files, directories and git repositories.

To scan your full git history, run:

```
infisical scan --verbose
```

Install the pre-commit hook to scan each commit before you push to your repository:

```
infisical scan install --pre-commit-hook
```

Learn about Infisical's code scanning feature [here](https://infisical.com/docs/cli/scanning-overview)

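The same scanner can also be pointed at only the changes you are about to commit; this mirrors the repository's own husky pre-commit hook shown earlier in this diff:

```
infisical scan git-changes --staged -v
```
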
## Open-source vs. paid

This repo is available under the [MIT expat license](https://github.com/Infisical/infisical/blob/main/LICENSE), with the exception of the `ee` directory, which will contain premium enterprise features requiring an Infisical license.

If you are interested in the managed Infisical Cloud or the self-hosted Enterprise Offering, take a look at [our website](https://infisical.com/) or [book a meeting with us](https://infisical.cal.com/vlad/infisical-demo).

## Security

Please do not file GitHub issues or post on our public forum for security vulnerabilities, as they are public!

Infisical takes security issues very seriously. If you have any concerns about Infisical or believe you have uncovered a vulnerability, please get in touch via the e-mail address security@infisical.com. In the message, try to provide a description of the issue and ideally a way of reproducing it. The security team will get back to you as soon as possible.

Note that this security address should be used only for undisclosed vulnerabilities. Please report any security problems to us before disclosing it publicly.

## Contributing

Whether it's big or small, we love contributions. Check out our guide to see how to [get started](https://infisical.com/docs/contributing/getting-started).
Whether it's big or small, we love contributions ❤️ Check out our guide to see how to [get started](https://infisical.com/docs/contributing/overview).

Not sure where to get started? You can:

- Join our <a href="https://infisical.com/slack">Slack</a>, and ask us any questions there.
- [Book a free, non-pressure pairing session with one of our teammates](mailto:tony@infisical.com?subject=Pairing%20session&body=I'd%20like%20to%20do%20a%20pairing%20session!)!
- Join our <a href="https://join.slack.com/t/infisical-users/shared_invite/zt-1kdbk07ro-RtoyEt_9E~fyzGo_xQYP6g">Slack</a>, and ask us any questions there.

## Resources
## 💚 Community & Support

- [Docs](https://infisical.com/docs/documentation/getting-started/introduction) for comprehensive documentation and guides
- [Slack](https://infisical.com/slack) for discussion with the community and Infisical team.
- [GitHub](https://github.com/Infisical/infisical) for code, issues, and pull requests
- [Twitter](https://twitter.com/infisical) for fast news
- [YouTube](https://www.youtube.com/@infisical_os) for videos on secret management
- [Blog](https://infisical.com/blog) for secret management insights, articles, tutorials, and updates
- [Slack](https://join.slack.com/t/infisical-users/shared_invite/zt-1kdbk07ro-RtoyEt_9E~fyzGo_xQYP6g) (For live discussion with the community and the Infisical team)
- [GitHub Discussions](https://github.com/Infisical/infisical/discussions) (For help with building and deeper conversations about features)
- [GitHub Issues](https://github.com/Infisical/infisical-cli/issues) (For any bugs and errors you encounter using Infisical)
- [Twitter](https://twitter.com/infisical) (Get news fast)

## 🐥 Status

- [x] Public Alpha: Anyone can sign up over at [infisical.com](https://infisical.com) but go easy on us, there are kinks and we're just getting started.
- [ ] Public Beta: Stable enough for most non-enterprise use-cases.
- [ ] Public: Production-ready.

We're currently in Public Alpha.

## 🔌 Integrations

We're currently setting the foundation and building [integrations](https://infisical.com/docs/integrations/overview) so secrets can be synced everywhere. Any help is welcome! :)

<table>
<tr>
  <th>Platforms </th>
  <th>Frameworks</th>
</tr>
<tr>
  <td>
    <table>
      <tbody>
        <tr>
          <td align="left" valign="middle">
            <a href="https://infisical.com/docs/integrations/platforms/docker?ref=github.com">
              ✔️ Docker
            </a>
          </td>
          <td align="left" valign="middle">
            <a href="https://infisical.com/docs/integrations/platforms/docker-compose?ref=github.com">
              ✔️ Docker Compose
            </a>
          </td>
          <td align="left" valign="middle">
            <a href="https://infisical.com/docs/integrations/cloud/heroku?ref=github.com">
              ✔️ Heroku
            </a>
          </td>
        </tr>
        <tr>
          <td align="left" valign="middle">
            <a href="https://infisical.com/docs/integrations/cloud/vercel?ref=github.com">
              ✔️ Vercel
            </a>
          </td>
          <td align="left" valign="middle">
            <a href="https://infisical.com/docs/integrations/platforms/kubernetes?ref=github.com">
              ✔️ Kubernetes
            </a>
          </td>
          <td align="left" valign="middle">
            🔜 Fly.io
          </td>
        </tr>
        <tr>
          <td align="left" valign="middle">
            🔜 AWS
          </td>
          <td align="left" valign="middle">
            🔜 GitHub Actions (https://github.com/Infisical/infisical/issues/54)
          </td>
          <td align="left" valign="middle">
            🔜 Railway
          </td>
        </tr>
        <tr>
          <td align="left" valign="middle">
            🔜 GCP
          </td>
          <td align="left" valign="middle">
            🔜 GitLab CI/CD
          </td>
          <td align="left" valign="middle">
            🔜 CircleCI
          </td>
        </tr>
        <tr>
          <td align="left" valign="middle">
            🔜 Jenkins
          </td>
          <td align="left" valign="middle">
            🔜 Digital Ocean
          </td>
          <td align="left" valign="middle">
            🔜 Azure
          </td>
        </tr>
        <tr>
          <td align="left" valign="middle">
            🔜 TravisCI
          </td>
          <td align="left" valign="middle">
            🔜 Netlify (https://github.com/Infisical/infisical/issues/55)
          </td>
          <td align="left" valign="middle">
            🔜 Railway
          </td>
        </tr>
        <tr>
          <td align="left" valign="middle">
            🔜 Bitbucket
          </td>
          <td align="left" valign="middle">
            🔜 Supabase
          </td>
          <td align="left" valign="middle">
            🔜 Serverless
          </td>
        </tr>
      </tbody>
    </table>
  </td>
  <td>
    <table>
      <tbody>
        <tr>
          <td align="left" valign="middle">
            <a href="https://infisical.com/docs/integrations/frameworks/react?ref=github.com">
              ✔️ React
            </a>
          </td>
          <td align="left" valign="middle">
            <a href="https://infisical.com/docs/integrations/frameworks/express?ref=github.com">
              ✔️ Express
            </a>
          </td>
        </tr>
        <tr>
          <td align="left" valign="middle">
            <a href="https://infisical.com/docs/integrations/frameworks/gatsby?ref=github.com">
              ✔️ Gatsby
            </a>
          </td>
          <td align="left" valign="middle">
            <a href="https://infisical.com/docs/integrations/frameworks/flask?ref=github.com">
              ✔️ Flask
            </a>
          </td>
        </tr>
        <tr>
          <td align="left" valign="middle">
            <a href="https://infisical.com/docs/integrations/frameworks/django?ref=github.com">
              ✔️ Django
            </a>
          </td>
          <td align="left" valign="middle">
            <a href="https://infisical.com/docs/integrations/frameworks/laravel?ref=github.com">
              ✔️ Laravel
            </a>
          </td>
        </tr>
        <tr>
          <td align="left" valign="middle">
            <a href="https://infisical.com/docs/integrations/frameworks/nestjs?ref=github.com">
              ✔️ NestJS
            </a>
          </td>
          <td align="left" valign="middle">
            <a href="https://infisical.com/docs/integrations/frameworks/remix?ref=github.com">
              ✔️ Remix
            </a>
          </td>
        </tr>
        <tr>
          <td align="left" valign="middle">
            <a href="https://infisical.com/docs/integrations/frameworks/nextjs?ref=github.com">
              ✔️ Next.js
            </a>
          </td>
          <td align="left" valign="middle">
            <a href="https://infisical.com/docs/integrations/frameworks/vite?ref=github.com">
              ✔️ Vite
            </a>
          </td>
        </tr>
        <tr>
          <td align="left" valign="middle">
            <a href="https://infisical.com/docs/integrations/frameworks/rails?ref=github.com">
              ✔️ Ruby on Rails
            </a>
          </td>
          <td align="left" valign="middle">
            <a href="https://infisical.com/docs/integrations/frameworks/vue?ref=github.com">
              ✔️ Vue
            </a>
          </td>
        </tr>
        <tr>
          <td align="left" valign="middle">
            <a href="https://infisical.com/docs/integrations/frameworks/fiber?ref=github.com">
              ✔️ Fiber
            </a>
          </td>
          <td align="left" valign="middle">
            <a href="https://infisical.com/docs/integrations/frameworks/nuxt?ref=github.com">
              ✔️ Nuxt
            </a>
          </td>
        </tr>
      </tbody>
    </table>
  </td>
</tr>
</table>

## 🏘 Open-source vs. paid

This repo is entirely MIT licensed, with the exception of the `ee` directory, which will contain premium enterprise features requiring an Infisical license in the future. We're currently focused on developing non-enterprise offerings first that should suit most use-cases.

## 🛡 Security

Looking to report a security vulnerability? Please don't post about it in a GitHub issue. Instead, refer to our [SECURITY.md](./SECURITY.md) file.

## 🚨 Stay Up-to-Date

Infisical officially launched as v.1.0 on November 21st, 2022. However, a lot of new features are coming very quickly. Watch **releases** of this repository to be notified about future updates:

![infisical-star-github](https://github.com/Infisical/infisical/blob/main/.github/images/star-infisical.gif?raw=true)

## 🦸 Contributors

[//]: contributor-faces

<!-- ALL-CONTRIBUTORS-LIST:START - Do not remove or modify this section -->
<!-- prettier-ignore-start -->
<!-- markdownlint-disable -->

<a href="https://github.com/dangtony98"><img src="https://avatars.githubusercontent.com/u/25857006?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/mv-turtle"><img src="https://avatars.githubusercontent.com/u/78047717?s=96&v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/maidul98"><img src="https://avatars.githubusercontent.com/u/9300960?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/gangjun06"><img src="https://avatars.githubusercontent.com/u/50910815?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/reginaldbondoc"><img src="https://avatars.githubusercontent.com/u/7693108?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/SH5H"><img src="https://avatars.githubusercontent.com/u/25437192?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/asharonbaltazar"><img src="https://avatars.githubusercontent.com/u/58940073?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/edgarrmondragon"><img src="https://avatars.githubusercontent.com/u/16805946?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/arjunyel"><img src="https://avatars.githubusercontent.com/u/11153289?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/LemmyMwaura"><img src="https://avatars.githubusercontent.com/u/20738858?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/adrianmarinwork"><img src="https://avatars.githubusercontent.com/u/118568289?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/hanywang2"><img src="https://avatars.githubusercontent.com/u/44352119?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/tobias-mintlify"><img src="https://avatars.githubusercontent.com/u/110702161?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/0xflotus"><img src="https://avatars.githubusercontent.com/u/26602940?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/wanjohiryan"><img src="https://avatars.githubusercontent.com/u/71614375?v=4" width="50" height="50" alt=""/></a>
|
||||
|
10 SECURITY.md
@@ -1,13 +1,9 @@
 # Security Policy
 
-## Supported versions
+## Supported Versions
 
 We always recommend using the latest version of Infisical to ensure you get all security updates.
 
-## Reporting vulnerabilities
+## Reporting a Vulnerability
 
-Please do not file GitHub issues or post on our public forum for security vulnerabilities, as they are public!
-
-Infisical takes security issues very seriously. If you have any concerns about Infisical or believe you have uncovered a vulnerability, please get in touch via the e-mail address security@infisical.com. In the message, try to provide a description of the issue and ideally a way of reproducing it. The security team will get back to you as soon as possible.
-
-Note that this security address should be used only for undisclosed vulnerabilities. Please report any security problems to us before disclosing it publicly.
+Please report security vulnerabilities or concerns to team@infisical.com.

@@ -1,3 +1,2 @@
-vitest-environment-infisical.ts
-vitest.config.ts
-vitest.e2e.config.ts
+node_modules
+built

12 backend/.eslintrc Normal file
@@ -0,0 +1,12 @@
{
  "parser": "@typescript-eslint/parser",
  "plugins": ["@typescript-eslint"],
  "extends": [
    "eslint:recommended",
    "plugin:@typescript-eslint/eslint-recommended",
    "plugin:@typescript-eslint/recommended"
  ],
  "rules": {
    "no-console": 2
  }
}

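One note on the new `backend/.eslintrc` above: the numeric severity `2` on `no-console` means "error", not "warn", so any stray `console` call fails the lint run outright. For instance:

```ts
// Rejected by "no-console": 2 — ESLint reports an error, failing the lint run.
console.log("debug output");
```
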
@@ -1,74 +0,0 @@
/* eslint-env node */
module.exports = {
  env: {
    es6: true,
    node: true
  },
  extends: [
    "eslint:recommended",
    "plugin:@typescript-eslint/recommended",
    "plugin:@typescript-eslint/recommended-type-checked",
    "airbnb-base",
    "airbnb-typescript/base",
    "plugin:prettier/recommended",
    "prettier"
  ],
  plugins: ["@typescript-eslint", "simple-import-sort", "import"],
  parser: "@typescript-eslint/parser",
  parserOptions: {
    project: true,
    sourceType: "module",
    tsconfigRootDir: __dirname
  },
  root: true,
  overrides: [
    {
      files: ["./e2e-test/**/*", "./src/db/migrations/**/*"],
      rules: {
        "@typescript-eslint/no-unsafe-member-access": "off",
        "@typescript-eslint/no-unsafe-assignment": "off",
        "@typescript-eslint/no-unsafe-argument": "off",
        "@typescript-eslint/no-unsafe-return": "off",
        "@typescript-eslint/no-unsafe-call": "off"
      }
    }
  ],

  rules: {
    "@typescript-eslint/no-empty-function": "off",
    "@typescript-eslint/no-unsafe-enum-comparison": "off",
    "no-void": "off",
    "consistent-return": "off", // my style
    "import/order": "off", // for simple-import-sort
    "import/prefer-default-export": "off", // why
    "no-restricted-syntax": "off",
    // importing rules
    "simple-import-sort/exports": "error",
    "import/first": "error",
    "import/newline-after-import": "error",
    "import/no-duplicates": "error",
    "simple-import-sort/imports": [
      "warn",
      {
        groups: [
          // Side effect imports.
          ["^\\u0000"],
          // Node.js builtins prefixed with `node:`.
          ["^node:"],
          // Packages.
          // Things that start with a letter (or digit or underscore), or `@` followed by a letter.
          ["^@?\\w"],
          ["^@app"],
          ["@lib"],
          ["@server"],
          // Absolute imports and other imports such as Vue-style `@/foo`.
          // Anything not matched in another group.
          ["^"],
          // Relative imports.
          // Anything that starts with a dot.
          ["^\\."]
        ]
      }
    ]
  }
};

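For context on the `simple-import-sort/imports` groups configured above, here is a sketch of an import block that already satisfies that ordering. The module names are hypothetical, picked only to exercise one group each (the `@lib` and `@server` groups behave like `@app`); simple-import-sort assigns each import to the group whose regex produces the longest match:

```ts
// Side-effect imports first ("^\\u0000").
import "dotenv/config";
// Node.js builtins prefixed with `node:` ("^node:").
import path from "node:path";
// External packages ("^@?\\w").
import { z } from "zod";
// Internal alias imports ("^@app" matches longer than "^@?\\w", so it wins).
import { logger } from "@app/lib/logger";
// Relative imports last ("^\\.").
import { helper } from "./helper";
```
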
1 backend/.gitignore vendored
@@ -1 +0,0 @@
dist

@@ -1,7 +0,0 @@
{
  "singleQuote": false,
  "printWidth": 120,
  "trailingComma": "none",
  "tabWidth": 2,
  "semi": true
}

@@ -1,66 +1,15 @@
-# Build stage
-FROM node:20-slim AS build
+FROM node:16-bullseye-slim
 
 WORKDIR /app
 
-# Required for pkcs11js
-RUN apt-get update && apt-get install -y \
-    python3 \
-    make \
-    g++ \
-    openssh-client
+COPY package.json package-lock.json ./
 
-# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
-RUN apt-get install -y \
-    unixodbc \
-    freetds-bin \
-    freetds-dev \
-    unixodbc-dev \
-    libc-dev
-
-COPY package*.json ./
-RUN npm ci --only-production
+RUN npm ci --only-production --ignore-scripts
 
 COPY . .
-RUN npm run build
-
-# Production stage
-FROM node:20-slim
-WORKDIR /app
-
-ENV npm_config_cache /home/node/.npm
-
-COPY package*.json ./
-
-RUN apt-get update && apt-get install -y \
-    python3 \
-    make \
-    g++
-
-# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
-RUN apt-get install -y \
-    unixodbc \
-    freetds-bin \
-    freetds-dev \
-    unixodbc-dev \
-    libc-dev
-
-RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
-
-RUN npm ci --only-production && npm cache clean --force
-
-COPY --from=build /app .
-
-# Install Infisical CLI
-RUN apt-get install -y curl bash && \
-    curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
-    apt-get update && apt-get install -y infisical=0.8.1 git
-
-HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
-  CMD node healthcheck.js
-
 ENV HOST=0.0.0.0
 
 EXPOSE 4000
 
-CMD ["npm", "start"]
+CMD ["npm", "run", "start"]

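For readers who don't want to decode the escaped `printf` in the Dockerfile diff above: it registers the FreeTDS ODBC driver by writing exactly this `/etc/odbcinst.ini`, which is what lets unixODBC locate the TDS driver for the SAP ASE dynamic-secret connections:

```ini
[FreeTDS]
Description = FreeTDS Driver
Driver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so
Setup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so
FileUsage = 1
```
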
@@ -1,72 +0,0 @@
FROM node:20-slim

# ? Setup a test SoftHSM module. In production a real HSM is used.

ARG SOFTHSM2_VERSION=2.5.0

ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
    SOFTHSM2_SOURCES=/tmp/softhsm2

# Install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apt-get update && apt-get install -y \
    build-essential \
    autoconf \
    automake \
    git \
    libtool \
    libssl-dev \
    python3 \
    make \
    g++ \
    openssh-client \
    curl \
    pkg-config

# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
    unixodbc \
    unixodbc-dev \
    freetds-dev \
    freetds-bin \
    tdsodbc

RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# Build and install SoftHSM2
RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
WORKDIR ${SOFTHSM2_SOURCES}

RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
    && sh autogen.sh \
    && ./configure --prefix=/usr/local --disable-gost \
    && make \
    && make install

WORKDIR /root
RUN rm -fr ${SOFTHSM2_SOURCES}

# Install pkcs11-tool
RUN apt-get install -y opensc

RUN mkdir -p /etc/softhsm2/tokens && \
    softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000

# ? App setup

# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
    apt-get update && \
    apt-get install -y infisical=0.8.1

WORKDIR /app

COPY package.json package.json
COPY package-lock.json package-lock.json

RUN npm install

COPY . .

ENV HOST=0.0.0.0

CMD ["npm", "run", "dev:docker"]

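To sanity-check the SoftHSM token that this dev Dockerfile initializes, a PKCS#11 session can be opened against it from Node. Below is a minimal sketch with pkcs11js; the module path is an assumption based on the `--prefix=/usr/local` build above, while the slot, label, and PIN come from the `softhsm2-util` call:

```ts
import * as pkcs11js from "pkcs11js";

const pkcs11 = new pkcs11js.PKCS11();
// Assumed module path for a SoftHSM2 build configured with --prefix=/usr/local.
pkcs11.load("/usr/local/lib/softhsm/libsofthsm2.so");
pkcs11.C_Initialize();

// Open a read-write session on the first slot (where --slot 0 holds the "auth-app" token).
const slots = pkcs11.C_GetSlotList(true);
const session = pkcs11.C_OpenSession(slots[0], pkcs11js.CKF_SERIAL_SESSION | pkcs11js.CKF_RW_SESSION);

// Log in with the user PIN set by softhsm2-util above.
pkcs11.C_Login(session, pkcs11js.CKU_USER, "1234");

pkcs11.C_Logout(session);
pkcs11.C_CloseSession(session);
pkcs11.C_Finalize();
```
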
@@ -1,4 +0,0 @@
{
  "presets": ["@babel/preset-env", "@babel/preset-react"],
  "plugins": ["@babel/plugin-syntax-import-attributes", "babel-plugin-transform-import-meta"]
}

@@ -1,37 +0,0 @@
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { Lock } from "@app/lib/red-lock";

export const mockKeyStore = (): TKeyStoreFactory => {
  const store: Record<string, string | number | Buffer> = {};

  return {
    setItem: async (key, value) => {
      store[key] = value;
      return "OK";
    },
    setItemWithExpiry: async (key, value) => {
      store[key] = value;
      return "OK";
    },
    deleteItem: async (key) => {
      delete store[key];
      return 1;
    },
    getItem: async (key) => {
      const value = store[key];
      if (typeof value === "string") {
        return value;
      }
      return null;
    },
    incrementBy: async () => {
      return 1;
    },
    acquireLock: () => {
      return Promise.resolve({
        release: () => {}
      }) as Promise<Lock>;
    },
    waitTillReady: async () => {}
  };
};

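A minimal sketch of how the in-memory keystore mock above is consumed in a test; the key name is made up, but the calls are the mock's own API:

```ts
const keyStore = mockKeyStore();

// Same contract as the Redis-backed keystore, but everything stays in memory.
await keyStore.setItem("signup-token:user-1", "abc123");
const token = await keyStore.getItem("signup-token:user-1"); // "abc123"
await keyStore.deleteItem("signup-token:user-1");
```
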
@@ -1,31 +0,0 @@
import { TQueueServiceFactory } from "@app/queue";

export const mockQueue = (): TQueueServiceFactory => {
  const queues: Record<string, unknown> = {};
  const workers: Record<string, unknown> = {};
  const job: Record<string, unknown> = {};
  const events: Record<string, unknown> = {};

  return {
    queue: async (name, jobData) => {
      job[name] = jobData;
    },
    queuePg: async () => {},
    initialize: async () => {},
    shutdown: async () => undefined,
    stopRepeatableJob: async () => true,
    start: (name, jobFn) => {
      queues[name] = jobFn;
      workers[name] = jobFn;
    },
    startPg: async () => {},
    listen: (name, event) => {
      events[name] = event;
    },
    getRepeatableJobs: async () => [],
    clearQueue: async () => {},
    stopJobById: async () => {},
    stopRepeatableJobByJobId: async () => true,
    stopRepeatableJobByKey: async () => true
  };
};

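And the round trip the queue mock supports: `start` records a handler under a queue name, and `queue` records the latest job payload instead of executing anything. The queue name and payload below are illustrative and may not match the repo's actual queue-name union:

```ts
const queueService = mockQueue();

// Register a worker; the mock only stores the handler, it never runs it.
queueService.start("secret-replication", async (job) => {
  // would process `job` in the real queue service
});

// Enqueue a payload; the mock just remembers the latest one for inspection.
await queueService.queue("secret-replication", { secretId: "123" });
```
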
@@ -1,13 +0,0 @@
import { TSmtpSendMail, TSmtpService } from "@app/services/smtp/smtp-service";

export const mockSmtpServer = (): TSmtpService => {
  const storage: TSmtpSendMail[] = [];
  return {
    sendMail: async (data) => {
      storage.push(data);
    },
    verify: async () => {
      return true;
    }
  };
};

@@ -1,71 +0,0 @@
import { OrgMembershipRole } from "@app/db/schemas";
import { seedData1 } from "@app/db/seed-data";

export const createIdentity = async (name: string, role: string) => {
  const createIdentityRes = await testServer.inject({
    method: "POST",
    url: "/api/v1/identities",
    body: {
      name,
      role,
      organizationId: seedData1.organization.id
    },
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    }
  });
  expect(createIdentityRes.statusCode).toBe(200);
  return createIdentityRes.json().identity;
};

export const deleteIdentity = async (id: string) => {
  const deleteIdentityRes = await testServer.inject({
    method: "DELETE",
    url: `/api/v1/identities/${id}`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    }
  });
  expect(deleteIdentityRes.statusCode).toBe(200);
  return deleteIdentityRes.json().identity;
};

describe("Identity v1", async () => {
  test("Create identity", async () => {
    const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
    expect(newIdentity.name).toBe("mac1");
    expect(newIdentity.authMethods).toEqual([]);

    await deleteIdentity(newIdentity.id);
  });

  test("Update identity", async () => {
    const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
    expect(newIdentity.name).toBe("mac1");
    expect(newIdentity.authMethods).toEqual([]);

    const updatedIdentity = await testServer.inject({
      method: "PATCH",
      url: `/api/v1/identities/${newIdentity.id}`,
      headers: {
        authorization: `Bearer ${jwtAuthToken}`
      },
      body: {
        name: "updated-mac-1",
        role: OrgMembershipRole.Member
      }
    });

    expect(updatedIdentity.statusCode).toBe(200);
    expect(updatedIdentity.json().identity.name).toBe("updated-mac-1");

    await deleteIdentity(newIdentity.id);
  });

  test("Delete Identity", async () => {
    const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);

    const deletedIdentity = await deleteIdentity(newIdentity.id);
    expect(deletedIdentity.name).toBe("mac1");
  });
});

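Note that `testServer` and `jwtAuthToken` are used above without imports: they are globals provided by the e2e test environment (`vitest-environment-infisical.ts` appears in the ignore-file diff earlier). An ambient declaration along these lines is what makes that pattern type-check; this is a sketch, not the repo's exact file:

```ts
import { FastifyInstance } from "fastify";

declare global {
  // Fastify instance booted once for the whole e2e run; `inject` issues in-process requests.
  const testServer: FastifyInstance;
  // JWT for the seeded test user, passed as the Authorization bearer token.
  const jwtAuthToken: string;
}

export {};
```
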
@@ -1,44 +0,0 @@
import jsrp from "jsrp";

import { seedData1 } from "@app/db/seed-data";

describe("Login V1 Router", async () => {
  // eslint-disable-next-line
  const client = new jsrp.client();
  await new Promise((resolve) => {
    client.init({ username: seedData1.email, password: seedData1.password }, () => resolve(null));
  });
  let clientProof: string;

  test("Login first phase", async () => {
    const res = await testServer.inject({
      method: "POST",
      url: "/api/v3/auth/login1",
      body: {
        email: "test@localhost.local",
        clientPublicKey: client.getPublicKey()
      }
    });
    expect(res.statusCode).toBe(200);
    const payload = JSON.parse(res.payload);
    expect(payload).toHaveProperty("serverPublicKey");
    expect(payload).toHaveProperty("salt");
    client.setSalt(payload.salt);
    client.setServerPublicKey(payload.serverPublicKey);
    clientProof = client.getProof(); // called M1
  });

  test("Login second phase", async () => {
    const res = await testServer.inject({
      method: "POST",
      url: "/api/v3/auth/login2",
      body: {
        email: seedData1.email,
        clientProof
      }
    });
    expect(res.statusCode).toBe(200);
    const payload = JSON.parse(res.payload);
    expect(payload).toHaveProperty("token");
  });
});

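For readers unfamiliar with SRP, the two requests above map onto the classic handshake: phase one exchanges public values and the salt, phase two proves knowledge of the password without ever sending it. A condensed client-side sketch using the same jsrp calls as the test (the `declare const` placeholders stand in for the login1 response):

```ts
import jsrp from "jsrp";

// Stand-ins for the fields returned by POST /api/v3/auth/login1.
declare const saltFromServer: string;
declare const serverPublicKeyFromServer: string;

const client = new jsrp.client();
await new Promise((resolve) => {
  client.init({ username: "user@example.com", password: "password" }, () => resolve(null));
});

// Phase 1: send client.getPublicKey() to login1, receive salt + server public key.
client.setSalt(saltFromServer);
client.setServerPublicKey(serverPublicKeyFromServer);

// Phase 2: send the proof (M1) to login2; a valid proof yields a JWT.
const clientProof = client.getProof();
```
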
@@ -1,19 +0,0 @@
import { seedData1 } from "@app/db/seed-data";

describe("Org V1 Router", async () => {
  test("GET Org list", async () => {
    const res = await testServer.inject({
      method: "GET",
      url: "/api/v1/organization",
      headers: {
        authorization: `Bearer ${jwtAuthToken}`
      }
    });
    expect(res.statusCode).toBe(200);
    const payload = JSON.parse(res.payload);
    expect(payload).toHaveProperty("organizations");
    expect(payload).toEqual({
      organizations: [expect.objectContaining({ name: seedData1.organization.name })]
    });
  });
});

@@ -1,132 +0,0 @@
import { seedData1 } from "@app/db/seed-data";
import { DEFAULT_PROJECT_ENVS } from "@app/db/seeds/3-project";

const createProjectEnvironment = async (name: string, slug: string) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/workspace/${seedData1.project.id}/environments`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: {
      name,
      slug
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("environment");
  return payload.environment;
};

const deleteProjectEnvironment = async (envId: string) => {
  const res = await testServer.inject({
    method: "DELETE",
    url: `/api/v1/workspace/${seedData1.project.id}/environments/${envId}`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("environment");
  return payload.environment;
};

describe("Project Environment Router", async () => {
  test("Get default environments", async () => {
    const res = await testServer.inject({
      method: "GET",
      url: `/api/v1/workspace/${seedData1.project.id}`,
      headers: {
        authorization: `Bearer ${jwtAuthToken}`
      }
    });

    expect(res.statusCode).toBe(200);
    const payload = JSON.parse(res.payload);
    expect(payload).toHaveProperty("workspace");
    // check for default environments
    expect(payload).toEqual({
      workspace: expect.objectContaining({
        name: seedData1.project.name,
        id: seedData1.project.id,
        slug: seedData1.project.slug,
        environments: expect.arrayContaining([
          expect.objectContaining(DEFAULT_PROJECT_ENVS[0]),
          expect.objectContaining(DEFAULT_PROJECT_ENVS[1]),
          expect.objectContaining(DEFAULT_PROJECT_ENVS[2])
        ])
      })
    });
    // ensure only the three default environments exist
    expect(payload.workspace.environments.length).toBe(3);
  });

  const mockProjectEnv = { name: "temp", slug: "temp" }; // id will be filled in create op
  test("Create environment", async () => {
    const newEnvironment = await createProjectEnvironment(mockProjectEnv.name, mockProjectEnv.slug);
    expect(newEnvironment).toEqual(
      expect.objectContaining({
        id: expect.any(String),
        name: mockProjectEnv.name,
        slug: mockProjectEnv.slug,
        projectId: seedData1.project.id,
        position: DEFAULT_PROJECT_ENVS.length + 1,
        createdAt: expect.any(String),
        updatedAt: expect.any(String)
      })
    );
    await deleteProjectEnvironment(newEnvironment.id);
  });

  test("Update environment", async () => {
    const newEnvironment = await createProjectEnvironment(mockProjectEnv.name, mockProjectEnv.slug);
    const updatedName = { name: "temp#2", slug: "temp2" };
    const res = await testServer.inject({
      method: "PATCH",
      url: `/api/v1/workspace/${seedData1.project.id}/environments/${newEnvironment.id}`,
      headers: {
        authorization: `Bearer ${jwtAuthToken}`
      },
      body: {
        name: updatedName.name,
        slug: updatedName.slug,
        position: 1
      }
    });

    expect(res.statusCode).toBe(200);
    const payload = JSON.parse(res.payload);
    expect(payload).toHaveProperty("environment");
    expect(payload.environment).toEqual(
      expect.objectContaining({
        id: newEnvironment.id,
        name: updatedName.name,
        slug: updatedName.slug,
        projectId: seedData1.project.id,
        position: 1,
        createdAt: expect.any(String),
        updatedAt: expect.any(String)
      })
    );
    await deleteProjectEnvironment(newEnvironment.id);
  });

  test("Delete environment", async () => {
    const newEnvironment = await createProjectEnvironment(mockProjectEnv.name, mockProjectEnv.slug);
    const deletedProjectEnvironment = await deleteProjectEnvironment(newEnvironment.id);
    expect(deletedProjectEnvironment).toEqual(
      expect.objectContaining({
        id: deletedProjectEnvironment.id,
        name: mockProjectEnv.name,
        slug: mockProjectEnv.slug,
        position: 5,
        createdAt: expect.any(String),
        updatedAt: expect.any(String)
      })
    );
  });
});

@@ -1,36 +0,0 @@
import { seedData1 } from "@app/db/seed-data";
import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";

const createPolicy = async (dto: {
  name: string;
  secretPath: string;
  approvers: { type: ApproverType.User; id: string }[];
  approvals: number;
}) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/secret-approvals`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: {
      workspaceId: seedData1.project.id,
      environment: seedData1.environment.slug,
      name: dto.name,
      secretPath: dto.secretPath,
      approvers: dto.approvers,
      approvals: dto.approvals
    }
  });

  expect(res.statusCode).toBe(200);
  return res.json().approval;
};

describe("Secret approval policy router", async () => {
  test("Create policy", async () => {
    const policy = await createPolicy({
      secretPath: "/",
      approvals: 1,
      approvers: [{ id: seedData1.id, type: ApproverType.User }],
      name: "test-policy"
    });

    expect(policy.name).toBe("test-policy");
  });
});

@@ -1,165 +0,0 @@
import { seedData1 } from "@app/db/seed-data";

const createFolder = async (dto: { path: string; name: string }) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/folders`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: {
      workspaceId: seedData1.project.id,
      environment: seedData1.environment.slug,
      name: dto.name,
      path: dto.path
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folder;
};

const deleteFolder = async (dto: { path: string; id: string }) => {
  const res = await testServer.inject({
    method: "DELETE",
    url: `/api/v1/folders/${dto.id}`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: {
      workspaceId: seedData1.project.id,
      environment: seedData1.environment.slug,
      path: dto.path
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folder;
};

describe("Secret Folder Router", async () => {
  test.each([
    { name: "folder1", path: "/" }, // one in root
    { name: "folder1", path: "/level1/level2" }, // then a deep one, creating intermediate folders
    { name: "folder2", path: "/" },
    { name: "folder1", path: "/level1/level2" } // creating the same folder again should return the existing one
  ])("Create folder $name in $path", async ({ name, path }) => {
    const createdFolder = await createFolder({ path, name });
    // check the created folder's shape
    expect(createdFolder).toEqual(
      expect.objectContaining({
        name,
        id: expect.any(String)
      })
    );
    await deleteFolder({ path, id: createdFolder.id });
  });

  test.each([
    {
      path: "/",
      expected: {
        folders: [{ name: "folder1" }, { name: "level1" }, { name: "folder2" }],
        length: 3
      }
    },
    { path: "/level1/level2", expected: { folders: [{ name: "folder1" }], length: 1 } }
  ])("Get folders $path", async ({ path, expected }) => {
    const newFolders = await Promise.all(expected.folders.map(({ name }) => createFolder({ name, path })));

    const res = await testServer.inject({
      method: "GET",
      url: `/api/v1/folders`,
      headers: {
        authorization: `Bearer ${jwtAuthToken}`
      },
      query: {
        workspaceId: seedData1.project.id,
        environment: seedData1.environment.slug,
        path
      }
    });

    expect(res.statusCode).toBe(200);
    const payload = JSON.parse(res.payload);
    expect(payload).toHaveProperty("folders");
    expect(payload.folders.length >= expected.folders.length).toBeTruthy();
    expect(payload).toEqual({
      folders: expect.arrayContaining(expected.folders.map((el) => expect.objectContaining(el)))
    });

    await Promise.all(newFolders.map(({ id }) => deleteFolder({ path, id })));
  });

  test("Update a deep folder", async () => {
    const newFolder = await createFolder({ name: "folder-updated", path: "/level1/level2" });
    expect(newFolder).toEqual(
      expect.objectContaining({
        id: expect.any(String),
        name: "folder-updated"
      })
    );

    const resUpdatedFolders = await testServer.inject({
      method: "GET",
      url: `/api/v1/folders`,
      headers: {
        authorization: `Bearer ${jwtAuthToken}`
      },
      query: {
        workspaceId: seedData1.project.id,
        environment: seedData1.environment.slug,
        path: "/level1/level2"
      }
    });

    expect(resUpdatedFolders.statusCode).toBe(200);
    const updatedFolderList = JSON.parse(resUpdatedFolders.payload);
    expect(updatedFolderList).toHaveProperty("folders");
    expect(updatedFolderList.folders[0].name).toEqual("folder-updated");

    await deleteFolder({ path: "/level1/level2", id: newFolder.id });
  });

  test("Delete a deep folder", async () => {
    const newFolder = await createFolder({ name: "folder-updated", path: "/level1/level2" });
    const res = await testServer.inject({
      method: "DELETE",
      url: `/api/v1/folders/${newFolder.id}`,
      headers: {
        authorization: `Bearer ${jwtAuthToken}`
      },
      body: {
        workspaceId: seedData1.project.id,
        environment: seedData1.environment.slug,
        path: "/level1/level2"
      }
    });

    expect(res.statusCode).toBe(200);
    const payload = JSON.parse(res.payload);
    expect(payload).toHaveProperty("folder");
    expect(payload.folder).toEqual(
      expect.objectContaining({
        id: expect.any(String),
        name: "folder-updated"
      })
    );

    const resUpdatedFolders = await testServer.inject({
      method: "GET",
      url: `/api/v1/folders`,
      headers: {
        authorization: `Bearer ${jwtAuthToken}`
      },
      query: {
        workspaceId: seedData1.project.id,
        environment: seedData1.environment.slug,
        path: "/level1/level2"
      }
    });

    expect(resUpdatedFolders.statusCode).toBe(200);
    const updatedFolderList = JSON.parse(resUpdatedFolders.payload);
    expect(updatedFolderList).toHaveProperty("folders");
    expect(updatedFolderList.folders.length).toEqual(0);
  });
});

@@ -1,808 +0,0 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";

import { seedData1 } from "@app/db/seed-data";

describe("Secret Import Router", async () => {
  test.each([
    { importEnv: "prod", importPath: "/" }, // import from prod at root
    { importEnv: "staging", importPath: "/" } // import from staging at root
  ])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
    // create the import and verify the response shape
    const payload = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath,
      importEnv
    });
    expect(payload).toEqual(
      expect.objectContaining({
        id: expect.any(String),
        importPath,
        importEnv: expect.objectContaining({
          name: expect.any(String),
          slug: importEnv,
          id: expect.any(String)
        })
      })
    );

    await deleteSecretImport({
      id: payload.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
  });

  test("Get secret imports", async () => {
    const createdImport1 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: "/",
      importEnv: "prod"
    });
    const createdImport2 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: "/",
      importEnv: "staging"
    });
    const res = await testServer.inject({
      method: "GET",
      url: `/api/v1/secret-imports`,
      headers: {
        authorization: `Bearer ${jwtAuthToken}`
      },
      query: {
        workspaceId: seedData1.project.id,
        environment: seedData1.environment.slug,
        path: "/"
      }
    });

    expect(res.statusCode).toBe(200);
    const payload = JSON.parse(res.payload);
    expect(payload).toHaveProperty("secretImports");
    expect(payload.secretImports.length).toBe(2);
    expect(payload.secretImports).toEqual(
      expect.arrayContaining([
        expect.objectContaining({
          id: expect.any(String),
          importPath: "/",
          importEnv: expect.objectContaining({
            name: expect.any(String),
            slug: "prod",
            id: expect.any(String)
          })
        }),
        expect.objectContaining({
          id: expect.any(String),
          importPath: "/",
          importEnv: expect.objectContaining({
            name: expect.any(String),
            slug: "staging",
            id: expect.any(String)
          })
        })
      ])
    );
    await deleteSecretImport({
      id: createdImport1.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    await deleteSecretImport({
      id: createdImport2.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
  });

  test("Update secret import position", async () => {
    const prodImportDetails = { path: "/", envSlug: "prod" };
    const stagingImportDetails = { path: "/", envSlug: "staging" };

    const createdImport1 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: prodImportDetails.path,
      importEnv: prodImportDetails.envSlug
    });
    const createdImport2 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: stagingImportDetails.path,
      importEnv: stagingImportDetails.envSlug
    });

    const updateImportRes = await testServer.inject({
      method: "PATCH",
      url: `/api/v1/secret-imports/${createdImport1.id}`,
      headers: {
        authorization: `Bearer ${jwtAuthToken}`
      },
      body: {
        workspaceId: seedData1.project.id,
        environment: seedData1.environment.slug,
        path: "/",
        import: {
          position: 2
        }
      }
    });

    expect(updateImportRes.statusCode).toBe(200);
    const payload = JSON.parse(updateImportRes.payload);
    expect(payload).toHaveProperty("secretImport");
    // verify the import was moved to position 2
    expect(payload.secretImport).toEqual(
      expect.objectContaining({
        id: expect.any(String),
        importPath: expect.any(String),
        position: 2,
        importEnv: expect.objectContaining({
          name: expect.any(String),
          slug: expect.stringMatching(prodImportDetails.envSlug),
          id: expect.any(String)
        })
      })
    );

    const secretImportsListRes = await testServer.inject({
      method: "GET",
      url: `/api/v1/secret-imports`,
      headers: {
        authorization: `Bearer ${jwtAuthToken}`
      },
      query: {
        workspaceId: seedData1.project.id,
        environment: seedData1.environment.slug,
        path: "/"
      }
    });

    expect(secretImportsListRes.statusCode).toBe(200);
    const secretImportList = JSON.parse(secretImportsListRes.payload);
    expect(secretImportList).toHaveProperty("secretImports");
    expect(secretImportList.secretImports[1].id).toEqual(createdImport1.id);
    expect(secretImportList.secretImports[0].id).toEqual(createdImport2.id);

    await deleteSecretImport({
      id: createdImport1.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    await deleteSecretImport({
      id: createdImport2.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
  });

  test("Delete secret import position", async () => {
    const createdImport1 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: "/",
      importEnv: "prod"
    });
    const createdImport2 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: "/",
      importEnv: "staging"
    });
    const deletedImport = await deleteSecretImport({
      id: createdImport1.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });

    // verify the deleted import is returned
    expect(deletedImport).toEqual(
      expect.objectContaining({
        id: expect.any(String),
        importPath: "/",
        importEnv: expect.objectContaining({
          name: expect.any(String),
          slug: "prod",
          id: expect.any(String)
        })
      })
    );

    const secretImportsListRes = await testServer.inject({
      method: "GET",
      url: `/api/v1/secret-imports`,
      headers: {
        authorization: `Bearer ${jwtAuthToken}`
      },
      query: {
        workspaceId: seedData1.project.id,
        environment: seedData1.environment.slug,
        path: "/"
      }
    });

    expect(secretImportsListRes.statusCode).toBe(200);
    const secretImportList = JSON.parse(secretImportsListRes.payload);
    expect(secretImportList).toHaveProperty("secretImports");
    expect(secretImportList.secretImports.length).toEqual(1);
    expect(secretImportList.secretImports[0].position).toEqual(1);

    await deleteSecretImport({
      id: createdImport2.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
  });
});

// dev <- stage <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
  "Secret import waterfall pattern testing - %path",
  ({ path: testSuitePath }) => {
    beforeAll(async () => {
      let prodFolder: { id: string };
      let stagingFolder: { id: string };
      let devFolder: { id: string };

      if (testSuitePath !== "/") {
        prodFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "prod",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        stagingFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "staging",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        devFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: seedData1.environment.slug,
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });
      }

      const devImportFromStage = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "staging"
      });

      const stageImportFromProd = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "prod"
      });

      return async () => {
        await deleteSecretImport({
          id: stageImportFromProd.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: "staging",
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        await deleteSecretImport({
          id: devImportFromStage.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        if (prodFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: prodFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "prod"
          });
        }

        if (stagingFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: stagingFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "staging"
          });
        }

        if (devFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: devFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: seedData1.environment.slug
          });
        }
      };
    });

    test("Check one level imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY",
        value: "stage-value"
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });

      expect(secret.secretKey).toBe("STAGING_KEY");
      expect(secret.secretValue).toBe("stage-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "STAGING_KEY",
                secretValue: "stage-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });
    });

    test("Check two level imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY",
        value: "prod-value"
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });

      expect(secret.secretKey).toBe("PROD_KEY");
      expect(secret.secretValue).toBe("prod-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "PROD_KEY",
                secretValue: "prod-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });
    });
  }
);

// dev <- stage, dev <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
  "Secret import multiple destination to one source pattern testing - %path",
  ({ path: testSuitePath }) => {
    beforeAll(async () => {
      let prodFolder: { id: string };
      let stagingFolder: { id: string };
      let devFolder: { id: string };

      if (testSuitePath !== "/") {
        prodFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "prod",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        stagingFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "staging",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        devFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: seedData1.environment.slug,
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });
      }

      const devImportFromStage = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "staging"
      });

      const devImportFromProd = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "prod"
      });

      return async () => {
        await deleteSecretImport({
          id: devImportFromProd.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        await deleteSecretImport({
          id: devImportFromStage.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        if (prodFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: prodFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "prod"
          });
        }

        if (stagingFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: stagingFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "staging"
          });
        }

        if (devFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: devFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: seedData1.environment.slug
          });
        }
      };
    });

    test("Check imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY",
        value: "stage-value"
      });

      await createSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY",
        value: "prod-value"
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });

      expect(secret.secretKey).toBe("STAGING_KEY");
      expect(secret.secretValue).toBe("stage-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "STAGING_KEY",
                secretValue: "stage-value"
              })
            ])
          }),
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "PROD_KEY",
                secretValue: "prod-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });
      await deleteSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });
    });
  }
);

// dev -> stage, prod
describe.each([{ path: "/" }, { path: "/deep" }])(
  "Secret import one source to multiple destination pattern testing - %path",
  ({ path: testSuitePath }) => {
    beforeAll(async () => {
      let prodFolder: { id: string };
      let stagingFolder: { id: string };
      let devFolder: { id: string };

      if (testSuitePath !== "/") {
        prodFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "prod",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        stagingFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "staging",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        devFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: seedData1.environment.slug,
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });
      }

      const stageImportFromDev = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: seedData1.environment.slug
      });

      const prodImportFromDev = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: seedData1.environment.slug
      });

      return async () => {
        await deleteSecretImport({
          id: prodImportFromDev.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: "prod",
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        await deleteSecretImport({
          id: stageImportFromDev.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: "staging",
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        if (prodFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: prodFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "prod"
          });
        }

        if (stagingFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: stagingFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "staging"
          });
        }

        if (devFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: devFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: seedData1.environment.slug
          });
        }
      };
    });

    test("Check imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY",
        value: "stage-value"
      });

      await createSecretV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY",
        value: "prod-value"
      });

      const stagingSecret = await getSecretByNameV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });

      expect(stagingSecret.secretKey).toBe("STAGING_KEY");
      expect(stagingSecret.secretValue).toBe("stage-value");

      const prodSecret = await getSecretByNameV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });

      expect(prodSecret.secretKey).toBe("PROD_KEY");
      expect(prodSecret.secretValue).toBe("prod-value");

      await deleteSecretV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });
      await deleteSecretV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });
    });
  }
);

@ -1,406 +0,0 @@
|
||||
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
|
||||
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
|
||||
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
|
||||
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
// dev <- stage <- prod
|
||||
describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])(
|
||||
"Secret replication waterfall pattern testing - %secretPath",
|
||||
({ secretPath: testSuitePath }) => {
|
||||
beforeAll(async () => {
|
||||
let prodFolder: { id: string };
|
||||
let stagingFolder: { id: string };
|
||||
let devFolder: { id: string };
|
||||
|
||||
if (testSuitePath !== "/") {
|
||||
prodFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
stagingFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
devFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
}
|
||||
|
||||
const devImportFromStage = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "staging",
|
||||
isReplication: true
|
||||
});
|
||||
|
||||
const stageImportFromProd = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "prod",
|
||||
isReplication: true
|
||||
      });

      return async () => {
        await deleteSecretImport({
          id: stageImportFromProd.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: "staging",
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        await deleteSecretImport({
          id: devImportFromStage.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        if (prodFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: prodFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "prod"
          });
        }

        if (stagingFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: stagingFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "staging"
          });
        }

        if (devFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: devFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: seedData1.environment.slug
          });
        }
      };
    });

    test("Check one level imported secret exists", async () => {
      await createSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY",
        value: "stage-value"
      });

      // wait 10 seconds for replication to finish
      await new Promise((resolve) => {
        setTimeout(resolve, 10000); // time to breathe for db
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });

      expect(secret.secretKey).toBe("STAGING_KEY");
      expect(secret.secretValue).toBe("stage-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });

      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "STAGING_KEY",
                secretValue: "stage-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });
    });

    test("Check two level imported secret exists", async () => {
      await createSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY",
        value: "prod-value"
      });

      // wait 10 seconds for replication to finish
      await new Promise((resolve) => {
        setTimeout(resolve, 10000); // time to breathe for db
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });

      expect(secret.secretKey).toBe("PROD_KEY");
      expect(secret.secretValue).toBe("prod-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "PROD_KEY",
                secretValue: "prod-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });
    });
  },
  { timeout: 30000 }
);
// dev <- stage, dev <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
  "Secret replication 1-N pattern testing - %path",
  ({ path: testSuitePath }) => {
    beforeAll(async () => {
      let prodFolder: { id: string };
      let stagingFolder: { id: string };
      let devFolder: { id: string };

      if (testSuitePath !== "/") {
        prodFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "prod",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        stagingFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "staging",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        devFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: seedData1.environment.slug,
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });
      }

      const devImportFromStage = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "staging",
        isReplication: true
      });

      const devImportFromProd = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "prod",
        isReplication: true
      });

      return async () => {
        await deleteSecretImport({
          id: devImportFromProd.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        await deleteSecretImport({
          id: devImportFromStage.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        if (prodFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: prodFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "prod"
          });
        }

        if (stagingFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: stagingFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "staging"
          });
        }

        if (devFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: devFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: seedData1.environment.slug
          });
        }
      };
    });

    test("Check imported secrets exist", async () => {
      await createSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY",
        value: "stage-value"
      });

      await createSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY",
        value: "prod-value"
      });

      // wait 10 seconds for replication to finish
      await new Promise((resolve) => {
        setTimeout(resolve, 10000); // time to breathe for db
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });

      expect(secret.secretKey).toBe("STAGING_KEY");
      expect(secret.secretValue).toBe("stage-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "STAGING_KEY",
                secretValue: "stage-value"
              })
            ])
          }),
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "PROD_KEY",
                secretValue: "prod-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });
      await deleteSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });
    });
  },
  { timeout: 30000 }
);
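The fixed 10-second sleeps above exist only to outlast asynchronous replication, which makes these tests slow and potentially flaky on a loaded database. A minimal polling sketch, assuming the existing getSecretByNameV2 helper rejects while the secret is still missing; waitForReplicatedSecret is a hypothetical helper, not part of the test utilities:

// Sketch: poll until the replicated secret appears instead of sleeping a fixed 10s.
const waitForReplicatedSecret = async (
  dto: Parameters<typeof getSecretByNameV2>[0],
  timeoutMs = 10000,
  intervalMs = 500
) => {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    try {
      // resolves as soon as replication has landed (assumption: helper rejects otherwise)
      return await getSecretByNameV2(dto);
    } catch {
      await new Promise((resolve) => {
        setTimeout(resolve, intervalMs);
      });
    }
  }
  throw new Error("secret was not replicated in time");
};

With such a helper, each test would return as soon as replication completes, keeping the same 10-second worst case while usually finishing much faster.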
@ -1,9 +0,0 @@
describe("Status V1 Router", async () => {
  test("Simple check", async () => {
    const res = await testServer.inject({
      method: "GET",
      url: "/api/status"
    });
    expect(res.statusCode).toBe(200);
  });
});
@ -1,579 +0,0 @@
import crypto from "node:crypto";

import { SecretType, TSecrets } from "@app/db/schemas";
import { decryptSecret, encryptSecret, getUserPrivateKey, seedData1 } from "@app/db/seed-data";
import { decryptAsymmetric, decryptSymmetric128BitHexKeyUTF8, encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";

const createServiceToken = async (
  scopes: { environment: string; secretPath: string }[],
  permissions: ("read" | "write")[]
) => {
  const projectKeyRes = await testServer.inject({
    method: "GET",
    url: `/api/v2/workspace/${seedData1.project.id}/encrypted-key`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    }
  });
  const projectKeyEnc = JSON.parse(projectKeyRes.payload);

  const userInfoRes = await testServer.inject({
    method: "GET",
    url: "/api/v2/users/me",
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    }
  });
  const { user: userInfo } = JSON.parse(userInfoRes.payload);
  const privateKey = await getUserPrivateKey(seedData1.password, userInfo);
  const projectKey = decryptAsymmetric({
    ciphertext: projectKeyEnc.encryptedKey,
    nonce: projectKeyEnc.nonce,
    publicKey: projectKeyEnc.sender.publicKey,
    privateKey
  });

  const randomBytes = crypto.randomBytes(16).toString("hex");
  const { ciphertext, iv, tag } = encryptSymmetric128BitHexKeyUTF8(projectKey, randomBytes);
  const serviceTokenRes = await testServer.inject({
    method: "POST",
    url: "/api/v2/service-token",
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: {
      name: "test-token",
      workspaceId: seedData1.project.id,
      scopes,
      encryptedKey: ciphertext,
      iv,
      tag,
      permissions,
      expiresIn: null
    }
  });
  expect(serviceTokenRes.statusCode).toBe(200);
  const serviceTokenInfo = serviceTokenRes.json();
  expect(serviceTokenInfo).toHaveProperty("serviceToken");
  expect(serviceTokenInfo).toHaveProperty("serviceTokenData");
  return `${serviceTokenInfo.serviceToken}.${randomBytes}`;
};

const deleteServiceToken = async () => {
  const serviceTokenListRes = await testServer.inject({
    method: "GET",
    url: `/api/v1/workspace/${seedData1.project.id}/service-token-data`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    }
  });
  expect(serviceTokenListRes.statusCode).toBe(200);
  const serviceTokens = JSON.parse(serviceTokenListRes.payload).serviceTokenData as { name: string; id: string }[];
  expect(serviceTokens.length).toBeGreaterThan(0);
  const serviceTokenInfo = serviceTokens.find(({ name }) => name === "test-token");
  expect(serviceTokenInfo).toBeDefined();

  const deleteTokenRes = await testServer.inject({
    method: "DELETE",
    url: `/api/v2/service-token/${serviceTokenInfo?.id}`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    }
  });
  expect(deleteTokenRes.statusCode).toBe(200);
};

const createSecret = async (dto: {
  projectKey: string;
  path: string;
  key: string;
  value: string;
  comment: string;
  type?: SecretType;
  token: string;
}) => {
  const createSecretReqBody = {
    workspaceId: seedData1.project.id,
    environment: seedData1.environment.slug,
    type: dto.type || SecretType.Shared,
    secretPath: dto.path,
    ...encryptSecret(dto.projectKey, dto.key, dto.value, dto.comment)
  };
  const createSecRes = await testServer.inject({
    method: "POST",
    url: `/api/v3/secrets/${dto.key}`,
    headers: {
      authorization: `Bearer ${dto.token}`
    },
    body: createSecretReqBody
  });
  expect(createSecRes.statusCode).toBe(200);
  const createdSecretPayload = JSON.parse(createSecRes.payload);
  expect(createdSecretPayload).toHaveProperty("secret");
  return createdSecretPayload.secret;
};

const deleteSecret = async (dto: { path: string; key: string; token: string }) => {
  const deleteSecRes = await testServer.inject({
    method: "DELETE",
    url: `/api/v3/secrets/${dto.key}`,
    headers: {
      authorization: `Bearer ${dto.token}`
    },
    body: {
      workspaceId: seedData1.project.id,
      environment: seedData1.environment.slug,
      secretPath: dto.path
    }
  });
  expect(deleteSecRes.statusCode).toBe(200);
  const deletedSecretPayload = JSON.parse(deleteSecRes.payload);
  expect(deletedSecretPayload).toHaveProperty("secret");
  return deletedSecretPayload.secret;
};
describe("Service token secret ops", async () => {
|
||||
let serviceToken = "";
|
||||
let projectKey = "";
|
||||
let folderId = "";
|
||||
beforeAll(async () => {
|
||||
serviceToken = await createServiceToken(
|
||||
[{ secretPath: "/**", environment: seedData1.environment.slug }],
|
||||
["read", "write"]
|
||||
);
|
||||
|
||||
// this is ensure cli service token decryptiong working fine
|
||||
const serviceTokenInfoRes = await testServer.inject({
|
||||
method: "GET",
|
||||
url: "/api/v2/service-token",
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
}
|
||||
});
|
||||
expect(serviceTokenInfoRes.statusCode).toBe(200);
|
||||
const serviceTokenInfo = serviceTokenInfoRes.json();
|
||||
const serviceTokenParts = serviceToken.split(".");
|
||||
projectKey = decryptSymmetric128BitHexKeyUTF8({
|
||||
key: serviceTokenParts[3],
|
||||
tag: serviceTokenInfo.tag,
|
||||
ciphertext: serviceTokenInfo.encryptedKey,
|
||||
iv: serviceTokenInfo.iv
|
||||
});
|
||||
|
||||
// create a deep folder
|
||||
const folderCreate = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v1/folders`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
name: "folder",
|
||||
path: "/nested1/nested2"
|
||||
}
|
||||
});
|
||||
expect(folderCreate.statusCode).toBe(200);
|
||||
folderId = folderCreate.json().folder.id;
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
await deleteServiceToken();
|
||||
|
||||
// create a deep folder
|
||||
const deleteFolder = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v1/folders/${folderId}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/nested1/nested2"
|
||||
}
|
||||
});
|
||||
expect(deleteFolder.statusCode).toBe(200);
|
||||
});
|
||||
|
||||
const testSecrets = [
|
||||
{
|
||||
path: "/",
|
||||
secret: {
|
||||
key: "ST-SEC",
|
||||
value: "something-secret",
|
||||
comment: "some comment"
|
||||
}
|
||||
},
|
||||
{
|
||||
path: "/nested1/nested2/folder",
|
||||
secret: {
|
||||
key: "NESTED-ST-SEC",
|
||||
value: "something-secret",
|
||||
comment: "some comment"
|
||||
}
|
||||
}
|
||||
];
|
||||
|
||||
const getSecrets = async (environment: string, secretPath = "/") => {
|
||||
const res = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v3/secrets`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
query: {
|
||||
secretPath,
|
||||
environment,
|
||||
workspaceId: seedData1.project.id
|
||||
}
|
||||
});
|
||||
const secrets: TSecrets[] = JSON.parse(res.payload).secrets || [];
|
||||
return secrets.map((el) => ({ ...decryptSecret(projectKey, el), type: el.type }));
|
||||
};
|
||||
|
||||
test.each(testSecrets)("Create secret in path $path", async ({ secret, path }) => {
|
||||
const createdSecret = await createSecret({ projectKey, path, ...secret, token: serviceToken });
|
||||
const decryptedSecret = decryptSecret(projectKey, createdSecret);
|
||||
expect(decryptedSecret.key).toEqual(secret.key);
|
||||
expect(decryptedSecret.value).toEqual(secret.value);
|
||||
expect(decryptedSecret.comment).toEqual(secret.comment);
|
||||
expect(decryptedSecret.version).toEqual(1);
|
||||
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: secret.key,
|
||||
value: secret.value,
|
||||
type: SecretType.Shared
|
||||
})
|
||||
])
|
||||
);
|
||||
await deleteSecret({ path, key: secret.key, token: serviceToken });
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Get secret by name in path $path", async ({ secret, path }) => {
|
||||
await createSecret({ projectKey, path, ...secret, token: serviceToken });
|
||||
|
||||
const getSecByNameRes = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v3/secrets/${secret.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
query: {
|
||||
secretPath: path,
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug
|
||||
}
|
||||
});
|
||||
expect(getSecByNameRes.statusCode).toBe(200);
|
||||
const getSecretByNamePayload = JSON.parse(getSecByNameRes.payload);
|
||||
expect(getSecretByNamePayload).toHaveProperty("secret");
|
||||
const decryptedSecret = decryptSecret(projectKey, getSecretByNamePayload.secret);
|
||||
expect(decryptedSecret.key).toEqual(secret.key);
|
||||
expect(decryptedSecret.value).toEqual(secret.value);
|
||||
expect(decryptedSecret.comment).toEqual(secret.comment);
|
||||
|
||||
await deleteSecret({ path, key: secret.key, token: serviceToken });
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Update secret in path $path", async ({ path, secret }) => {
|
||||
await createSecret({ projectKey, path, ...secret, token: serviceToken });
|
||||
const updateSecretReqBody = {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
type: SecretType.Shared,
|
||||
secretPath: path,
|
||||
...encryptSecret(projectKey, secret.key, "new-value", secret.comment)
|
||||
};
|
||||
const updateSecRes = await testServer.inject({
|
||||
method: "PATCH",
|
||||
url: `/api/v3/secrets/${secret.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
body: updateSecretReqBody
|
||||
});
|
||||
expect(updateSecRes.statusCode).toBe(200);
|
||||
const updatedSecretPayload = JSON.parse(updateSecRes.payload);
|
||||
expect(updatedSecretPayload).toHaveProperty("secret");
|
||||
const decryptedSecret = decryptSecret(projectKey, updatedSecretPayload.secret);
|
||||
expect(decryptedSecret.key).toEqual(secret.key);
|
||||
expect(decryptedSecret.value).toEqual("new-value");
|
||||
expect(decryptedSecret.comment).toEqual(secret.comment);
|
||||
|
||||
// list secret should have updated value
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: secret.key,
|
||||
value: "new-value",
|
||||
type: SecretType.Shared
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecret({ path, key: secret.key, token: serviceToken });
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Delete secret in path $path", async ({ secret, path }) => {
|
||||
await createSecret({ projectKey, path, ...secret, token: serviceToken });
|
||||
const deletedSecret = await deleteSecret({ path, key: secret.key, token: serviceToken });
|
||||
const decryptedSecret = decryptSecret(projectKey, deletedSecret);
|
||||
expect(decryptedSecret.key).toEqual(secret.key);
|
||||
|
||||
// shared secret deletion should delete personal ones also
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.not.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: secret.key,
|
||||
type: SecretType.Shared
|
||||
})
|
||||
])
|
||||
);
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Bulk create secrets in path $path", async ({ secret, path }) => {
|
||||
const createSharedSecRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v3/secrets/batch`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: path,
|
||||
secrets: Array.from(Array(5)).map((_e, i) => ({
|
||||
secretName: `BULK-${secret.key}-${i + 1}`,
|
||||
...encryptSecret(projectKey, `BULK-${secret.key}-${i + 1}`, secret.value, secret.comment)
|
||||
}))
|
||||
}
|
||||
});
|
||||
expect(createSharedSecRes.statusCode).toBe(200);
|
||||
const createSharedSecPayload = JSON.parse(createSharedSecRes.payload);
|
||||
expect(createSharedSecPayload).toHaveProperty("secrets");
|
||||
|
||||
// bulk ones should exist
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
expect.objectContaining({
|
||||
key: `BULK-${secret.key}-${i + 1}`,
|
||||
type: SecretType.Shared
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
await Promise.all(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
deleteSecret({ path, token: serviceToken, key: `BULK-${secret.key}-${i + 1}` })
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Bulk create fail on existing secret in path $path", async ({ secret, path }) => {
|
||||
await createSecret({ projectKey, ...secret, key: `BULK-${secret.key}-1`, path, token: serviceToken });
|
||||
|
||||
const createSharedSecRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v3/secrets/batch`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: path,
|
||||
secrets: Array.from(Array(5)).map((_e, i) => ({
|
||||
secretName: `BULK-${secret.key}-${i + 1}`,
|
||||
...encryptSecret(projectKey, `BULK-${secret.key}-${i + 1}`, secret.value, secret.comment)
|
||||
}))
|
||||
}
|
||||
});
|
||||
expect(createSharedSecRes.statusCode).toBe(400);
|
||||
|
||||
await deleteSecret({ path, key: `BULK-${secret.key}-1`, token: serviceToken });
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Bulk update secrets in path $path", async ({ secret, path }) => {
|
||||
await Promise.all(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
createSecret({ projectKey, token: serviceToken, ...secret, key: `BULK-${secret.key}-${i + 1}`, path })
|
||||
)
|
||||
);
|
||||
|
||||
const updateSharedSecRes = await testServer.inject({
|
||||
method: "PATCH",
|
||||
url: `/api/v3/secrets/batch`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: path,
|
||||
secrets: Array.from(Array(5)).map((_e, i) => ({
|
||||
secretName: `BULK-${secret.key}-${i + 1}`,
|
||||
...encryptSecret(projectKey, `BULK-${secret.key}-${i + 1}`, "update-value", secret.comment)
|
||||
}))
|
||||
}
|
||||
});
|
||||
expect(updateSharedSecRes.statusCode).toBe(200);
|
||||
const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
|
||||
expect(updateSharedSecPayload).toHaveProperty("secrets");
|
||||
|
||||
// bulk ones should exist
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
expect.objectContaining({
|
||||
key: `BULK-${secret.key}-${i + 1}`,
|
||||
value: "update-value",
|
||||
type: SecretType.Shared
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
await Promise.all(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}`, token: serviceToken })
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Bulk delete secrets in path $path", async ({ secret, path }) => {
|
||||
await Promise.all(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
createSecret({ projectKey, token: serviceToken, ...secret, key: `BULK-${secret.key}-${i + 1}`, path })
|
||||
)
|
||||
);
|
||||
|
||||
const deletedSharedSecRes = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v3/secrets/batch`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: path,
|
||||
secrets: Array.from(Array(5)).map((_e, i) => ({
|
||||
secretName: `BULK-${secret.key}-${i + 1}`
|
||||
}))
|
||||
}
|
||||
});
|
||||
|
||||
expect(deletedSharedSecRes.statusCode).toBe(200);
|
||||
const deletedSecretPayload = JSON.parse(deletedSharedSecRes.payload);
|
||||
expect(deletedSecretPayload).toHaveProperty("secrets");
|
||||
|
||||
// bulk ones should exist
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.not.arrayContaining(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
expect.objectContaining({
|
||||
key: `BULK-${secret.value}-${i + 1}`,
|
||||
type: SecretType.Shared
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Service token fail cases", async () => {
|
||||
test("Unauthorized secret path access", async () => {
|
||||
const serviceToken = await createServiceToken(
|
||||
[{ secretPath: "/", environment: seedData1.environment.slug }],
|
||||
["read", "write"]
|
||||
);
|
||||
const fetchSecrets = await testServer.inject({
|
||||
method: "GET",
|
||||
url: "/api/v3/secrets",
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: "/nested/deep"
|
||||
},
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
}
|
||||
});
|
||||
expect(fetchSecrets.statusCode).toBe(403);
|
||||
expect(fetchSecrets.json().error).toBe("PermissionDenied");
|
||||
await deleteServiceToken();
|
||||
});
|
||||
|
||||
test("Unauthorized secret environment access", async () => {
|
||||
const serviceToken = await createServiceToken(
|
||||
[{ secretPath: "/", environment: seedData1.environment.slug }],
|
||||
["read", "write"]
|
||||
);
|
||||
const fetchSecrets = await testServer.inject({
|
||||
method: "GET",
|
||||
url: "/api/v3/secrets",
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: "prod",
|
||||
secretPath: "/"
|
||||
},
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
}
|
||||
});
|
||||
expect(fetchSecrets.statusCode).toBe(403);
|
||||
expect(fetchSecrets.json().error).toBe("PermissionDenied");
|
||||
await deleteServiceToken();
|
||||
});
|
||||
|
||||
test("Unauthorized write operation", async () => {
|
||||
const serviceToken = await createServiceToken(
|
||||
[{ secretPath: "/", environment: seedData1.environment.slug }],
|
||||
["read"]
|
||||
);
|
||||
const writeSecrets = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v3/secrets/NEW`,
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
type: SecretType.Shared,
|
||||
secretPath: "/",
|
||||
// doesn't matter project key because this will fail before that due to read only access
|
||||
...encryptSecret(crypto.randomBytes(16).toString("hex"), "NEW", "value", "")
|
||||
},
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
}
|
||||
});
|
||||
expect(writeSecrets.statusCode).toBe(403);
|
||||
expect(writeSecrets.json().error).toBe("PermissionDenied");
|
||||
|
||||
// but read access should still work fine
|
||||
const fetchSecrets = await testServer.inject({
|
||||
method: "GET",
|
||||
url: "/api/v3/secrets",
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: "/"
|
||||
},
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
}
|
||||
});
|
||||
expect(fetchSecrets.statusCode).toBe(200);
|
||||
await deleteServiceToken();
|
||||
});
|
||||
});
|
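A note on the composite token these tests build: createServiceToken appends the locally generated key material as a fourth dot-delimited segment, and the beforeAll hook recovers the project key from it. A minimal sketch of that client-side derivation, assuming the same decryptSymmetric128BitHexKeyUTF8 helper from @app/lib/crypto; deriveProjectKey is a hypothetical name for illustration:

// Sketch: recover the project key from a composite service token, assuming the
// layout used above (three server-issued segments, then the appended randomBytes).
const deriveProjectKey = (
  compositeToken: string,
  tokenData: { encryptedKey: string; iv: string; tag: string }
) => {
  const keyPart = compositeToken.split(".")[3]; // the randomBytes appended by createServiceToken
  return decryptSymmetric128BitHexKeyUTF8({
    key: keyPart,
    ciphertext: tokenData.encryptedKey,
    iv: tokenData.iv,
    tag: tokenData.tag
  });
};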
@ -1,86 +0,0 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretV2, deleteSecretV2, getSecretsV2 } from "e2e-test/testUtils/secrets";

import { seedData1 } from "@app/db/seed-data";

describe("Secret Recursive Testing", async () => {
  const projectId = seedData1.projectV3.id;
  const folderAndSecretNames = [
    { name: "deep1", path: "/", expectedSecretCount: 4 },
    { name: "deep21", path: "/deep1", expectedSecretCount: 2 },
    { name: "deep3", path: "/deep1/deep2", expectedSecretCount: 1 },
    { name: "deep22", path: "/deep2", expectedSecretCount: 1 }
  ];

  beforeAll(async () => {
    const rootFolderIds: string[] = [];
    for (const folder of folderAndSecretNames) {
      // eslint-disable-next-line no-await-in-loop
      const createdFolder = await createFolder({
        authToken: jwtAuthToken,
        environmentSlug: "prod",
        workspaceId: projectId,
        secretPath: folder.path,
        name: folder.name
      });

      if (folder.path === "/") {
        rootFolderIds.push(createdFolder.id);
      }
      // eslint-disable-next-line no-await-in-loop
      await createSecretV2({
        secretPath: folder.path,
        authToken: jwtAuthToken,
        environmentSlug: "prod",
        workspaceId: projectId,
        key: folder.name,
        value: folder.name
      });
    }

    return async () => {
      await Promise.all(
        rootFolderIds.map((id) =>
          deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id,
            workspaceId: projectId,
            environmentSlug: "prod"
          })
        )
      );

      await deleteSecretV2({
        authToken: jwtAuthToken,
        secretPath: "/",
        workspaceId: projectId,
        environmentSlug: "prod",
        key: folderAndSecretNames[0].name
      });
    };
  });

  test.each(folderAndSecretNames)("$path recursive secret fetching", async ({ path, expectedSecretCount }) => {
    const secrets = await getSecretsV2({
      authToken: jwtAuthToken,
      secretPath: path,
      workspaceId: projectId,
      environmentSlug: "prod",
      recursive: true
    });

    expect(secrets.secrets.length).toEqual(expectedSecretCount);
    expect(secrets.secrets.sort((a, b) => a.secretKey.localeCompare(b.secretKey))).toEqual(
      folderAndSecretNames
        .filter((el) => el.path.startsWith(path))
        .sort((a, b) => a.name.localeCompare(b.name))
        .map((el) =>
          expect.objectContaining({
            secretKey: el.name,
            secretValue: el.name
          })
        )
    );
  });
});
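The hard-coded expectedSecretCount values simply encode the prefix rule the assertion already uses: a recursive fetch rooted at path P returns every seeded secret whose folder path starts with P. A sketch of deriving those counts from the same rule instead of hard-coding them, using the folderAndSecretNames fixture above:

// Sketch: derive the expected count for a path from the prefix rule.
const expectedCountFor = (path: string) =>
  folderAndSecretNames.filter((el) => el.path.startsWith(path)).length;
// expectedCountFor("/") === 4, expectedCountFor("/deep1") === 2, matching the fixture.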
@ -1,344 +0,0 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";

import { seedData1 } from "@app/db/seed-data";

describe("Secret expansion", () => {
  const projectId = seedData1.projectV3.id;

  beforeAll(async () => {
    const prodRootFolder = await createFolder({
      authToken: jwtAuthToken,
      environmentSlug: "prod",
      workspaceId: projectId,
      secretPath: "/",
      name: "deep"
    });

    await createFolder({
      authToken: jwtAuthToken,
      environmentSlug: "prod",
      workspaceId: projectId,
      secretPath: "/deep",
      name: "nested"
    });

    return async () => {
      await deleteFolder({
        authToken: jwtAuthToken,
        secretPath: "/",
        id: prodRootFolder.id,
        workspaceId: projectId,
        environmentSlug: "prod"
      });
    };
  });

  test("Local secret reference", async () => {
    const secrets = [
      {
        environmentSlug: seedData1.environment.slug,
        workspaceId: projectId,
        secretPath: "/",
        authToken: jwtAuthToken,
        key: "HELLO",
        value: "world"
      },
      {
        environmentSlug: seedData1.environment.slug,
        workspaceId: projectId,
        secretPath: "/",
        authToken: jwtAuthToken,
        key: "TEST",
        // eslint-disable-next-line
        value: "hello ${HELLO}"
      }
    ];

    for (const secret of secrets) {
      // eslint-disable-next-line no-await-in-loop
      await createSecretV2(secret);
    }

    const expandedSecret = await getSecretByNameV2({
      environmentSlug: seedData1.environment.slug,
      workspaceId: projectId,
      secretPath: "/",
      authToken: jwtAuthToken,
      key: "TEST"
    });
    expect(expandedSecret.secretValue).toBe("hello world");

    const listSecrets = await getSecretsV2({
      environmentSlug: seedData1.environment.slug,
      workspaceId: projectId,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    expect(listSecrets.secrets).toEqual(
      expect.arrayContaining([
        expect.objectContaining({
          secretKey: "TEST",
          secretValue: "hello world"
        })
      ])
    );

    await Promise.all(secrets.map((el) => deleteSecretV2(el)));
  });
test("Cross environment secret reference", async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
authToken: jwtAuthToken,
|
||||
key: "DEEP_KEY_1",
|
||||
value: "testing"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_1",
|
||||
value: "reference"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_2",
|
||||
// eslint-disable-next-line
|
||||
value: "secret ${NESTED_KEY_1}"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY",
|
||||
// eslint-disable-next-line
|
||||
value: "hello ${prod.deep.DEEP_KEY_1} ${prod.deep.nested.NESTED_KEY_2}"
|
||||
}
|
||||
];
|
||||
|
||||
for (const secret of secrets) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await createSecretV2(secret);
|
||||
}
|
||||
|
||||
const expandedSecret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY"
|
||||
});
|
||||
expect(expandedSecret.secretValue).toBe("hello testing secret reference");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "KEY",
|
||||
secretValue: "hello testing secret reference"
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
});
|
||||
|
||||
test("Non replicated secret import secret expansion on local reference and nested reference", async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
authToken: jwtAuthToken,
|
||||
key: "DEEP_KEY_1",
|
||||
value: "testing"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_1",
|
||||
value: "reference"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_2",
|
||||
// eslint-disable-next-line
|
||||
value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY",
|
||||
// eslint-disable-next-line
|
||||
value: "hello world"
|
||||
}
|
||||
];
|
||||
|
||||
for (const secret of secrets) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await createSecretV2(secret);
|
||||
}
|
||||
|
||||
const secretImportFromProdToDev = await createSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
importEnv: "prod",
|
||||
importPath: "/deep/nested"
|
||||
});
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretPath: "/deep/nested",
|
||||
environment: "prod",
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_1",
|
||||
secretValue: "reference"
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_2",
|
||||
secretValue: "secret reference testing"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
await deleteSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
authToken: jwtAuthToken,
|
||||
id: secretImportFromProdToDev.id,
|
||||
secretPath: "/"
|
||||
});
|
||||
});
|
||||
|
||||
  test(
    "Replicated secret import secret expansion on local reference and nested reference",
    async () => {
      const secrets = [
        {
          environmentSlug: "prod",
          workspaceId: projectId,
          secretPath: "/deep",
          authToken: jwtAuthToken,
          key: "DEEP_KEY_1",
          value: "testing"
        },
        {
          environmentSlug: "prod",
          workspaceId: projectId,
          secretPath: "/deep/nested",
          authToken: jwtAuthToken,
          key: "NESTED_KEY_1",
          value: "reference"
        },
        {
          environmentSlug: "prod",
          workspaceId: projectId,
          secretPath: "/deep/nested",
          authToken: jwtAuthToken,
          key: "NESTED_KEY_2",
          // eslint-disable-next-line
          value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
        },
        {
          environmentSlug: seedData1.environment.slug,
          workspaceId: projectId,
          secretPath: "/",
          authToken: jwtAuthToken,
          key: "KEY",
          value: "hello world"
        }
      ];

      for (const secret of secrets) {
        // eslint-disable-next-line no-await-in-loop
        await createSecretV2(secret);
      }

      const secretImportFromProdToDev = await createSecretImport({
        environmentSlug: seedData1.environment.slug,
        workspaceId: projectId,
        secretPath: "/",
        authToken: jwtAuthToken,
        importEnv: "prod",
        importPath: "/deep/nested",
        isReplication: true
      });

      // wait 5 seconds for replication to finish
      await new Promise((resolve) => {
        setTimeout(resolve, 5000); // time to breathe for db
      });

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: projectId,
        secretPath: "/",
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secretPath: `/__reserve_replication_${secretImportFromProdToDev.id}`,
            environment: seedData1.environment.slug,
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "NESTED_KEY_1",
                secretValue: "reference"
              }),
              expect.objectContaining({
                secretKey: "NESTED_KEY_2",
                secretValue: "secret reference testing"
              })
            ])
          })
        ])
      );

      await Promise.all(secrets.map((el) => deleteSecretV2(el)));
      await deleteSecretImport({
        environmentSlug: seedData1.environment.slug,
        workspaceId: projectId,
        authToken: jwtAuthToken,
        id: secretImportFromProdToDev.id,
        secretPath: "/"
      });
    },
    { timeout: 10000 }
  );
});
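The reference syntax exercised above is expanded server-side: a bare name such as "${NESTED_KEY_1}" resolves against the current folder, while dotted segments such as "${prod.deep.DEEP_KEY_1}" name an environment followed by a folder chain and a key. A minimal illustrative resolver under that reading, assuming a caller-supplied lookup; this is not Infisical's actual implementation:

// Sketch: expand "${KEY}" (local) and "${env.folder...KEY}" (cross-env) references
// given a lookup callback; purely illustrative of the semantics the tests assert.
const expandReferences = (
  value: string,
  lookup: (env: string | undefined, path: string | undefined, key: string) => string
) =>
  value.replace(/\$\{([^}]+)\}/g, (_match, ref: string) => {
    const segments = ref.split(".");
    if (segments.length === 1) return lookup(undefined, undefined, segments[0]); // local reference
    const [env, ...rest] = segments;
    const key = rest.pop() as string;
    return lookup(env, `/${rest.join("/")}`, key); // e.g. prod + /deep/nested + NESTED_KEY_1
  });

Applied recursively until no "${...}" remains, this reproduces the expected values above, e.g. "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}" becoming "secret reference testing".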
@ -1,678 +0,0 @@
import { SecretType } from "@app/db/schemas";
import { seedData1 } from "@app/db/seed-data";
import { AuthMode } from "@app/services/auth/auth-type";

type TRawSecret = {
  secretKey: string;
  secretValue: string;
  secretComment?: string;
  version: number;
};

const createSecret = async (dto: { path: string; key: string; value: string; comment: string; type?: SecretType }) => {
  const createSecretReqBody = {
    workspaceId: seedData1.projectV3.id,
    environment: seedData1.environment.slug,
    type: dto.type || SecretType.Shared,
    secretPath: dto.path,
    secretKey: dto.key,
    secretValue: dto.value,
    secretComment: dto.comment
  };
  const createSecRes = await testServer.inject({
    method: "POST",
    url: `/api/v3/secrets/raw/${dto.key}`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: createSecretReqBody
  });
  expect(createSecRes.statusCode).toBe(200);
  const createdSecretPayload = JSON.parse(createSecRes.payload);
  expect(createdSecretPayload).toHaveProperty("secret");
  return createdSecretPayload.secret as TRawSecret;
};

const deleteSecret = async (dto: { path: string; key: string }) => {
  const deleteSecRes = await testServer.inject({
    method: "DELETE",
    url: `/api/v3/secrets/raw/${dto.key}`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: {
      workspaceId: seedData1.projectV3.id,
      environment: seedData1.environment.slug,
      secretPath: dto.path
    }
  });
  expect(deleteSecRes.statusCode).toBe(200);
  const deletedSecretPayload = JSON.parse(deleteSecRes.payload);
  expect(deletedSecretPayload).toHaveProperty("secret");
  return deletedSecretPayload.secret as TRawSecret;
};
describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }])(
  "Secret V2 Architecture - $auth mode",
  async ({ auth }) => {
    let folderId = "";
    let authToken = "";
    const secretTestCases = [
      {
        path: "/",
        secret: {
          key: "SEC1",
          value: "something-secret",
          comment: "some comment"
        }
      },
      {
        path: "/nested1/nested2/folder",
        secret: {
          key: "NESTED-SEC1",
          value: "something-secret",
          comment: "some comment"
        }
      },
      {
        path: "/",
        secret: {
          key: "secret-key-2",
          value: `-----BEGIN PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCa6eeFk+cMVqFn
hoVQDYgn2Ptp5Azysr2UPq6P73pCL9BzUtOXKZROqDyGehzzfg3wE2KdYU1Jk5Uq
fP0ZOWDIlM2SaVCSI3FW32o5+ZiggjpqcVdLFc/PS0S/ZdSmpPd8h11iO2brtIAI
ugTW8fcKlGSNUwx9aFmE7A6JnTRliTxB1l6QaC+YAwTK39VgeVH2gDSWC407aS15
QobAkaBKKmFkzB5D7i2ZJwt+uXJV/rbLmyDmtnw0lubciGn7NX9wbYef180fisqT
aPNAz0nPKk0fFH2Wd5MZixNGbrrpDA+FCYvI5doThZyT2hpj08qWP07oXXCAqw46
IEupNSILAgMBAAECggEBAIJb5KzeaiZS3B3O8G4OBQ5rJB3WfyLYUHnoSWLsBbie
nc392/ovThLmtZAAQE6SO85Tsb93+t64Z2TKqv1H8G658UeMgfWIB78v4CcLJ2mi
TN/3opqXrzjkQOTDHzBgT7al/mpETHZ6fOdbCemK0fVALGFUioUZg4M8VXtuI4Jw
q28jAyoRKrCrzda4BeQ553NZ4G5RvwhX3O2I8B8upTbt5hLcisBKy8MPLYY5LUFj
YKAP+raf6QLliP6KYHuVxUlgzxjLTxVG41etcyqqZF+foyiKBO3PU3n8oh++tgQP
ExOxiR0JSkBG5b+oOBD0zxcvo3/SjBHn0dJOZCSU2SkCgYEAyCe676XnNyBZMRD7
6trsaoiCWBpA6M8H44+x3w4cQFtqV38RyLy60D+iMKjIaLqeBbnay61VMzo24Bz3
EuF2n4+9k/MetLJ0NCw8HmN5k0WSMD2BFsJWG8glVbzaqzehP4tIclwDTYc1jQVt
IoV2/iL7HGT+x2daUwbU5kN5hK0CgYEAxiLB+fmjxJW7VY4SHDLqPdpIW0q/kv4K
d/yZBrCX799vjmFb9vLh7PkQUfJhMJ/ttJOd7EtT3xh4mfkBeLfHwVU0d/ahbmSH
UJu/E9ZGxAW3PP0kxHZtPrLKQwBnfq8AxBauIhR3rPSorQTIOKtwz1jMlHFSUpuL
3KeK2YfDYJcCgYEAkQnJOlNcAuRb/WQzSHIvktssqK8NjiZHryy3Vc0hx7j2jES2
HGI2dSVHYD9OSiXA0KFm3OTTsnViwm/60iGzFdjRJV6tR39xGUVcoyCuPnvRfUd0
PYvBXgxgkYpyYlPDcwp5CvWGJy3tLi1acgOIwIuUr3S38sL//t4adGk8q1kCgYB8
Jbs1Tl53BvrimKpwUNbE+sjrquJu0A7vL68SqgQJoQ7dP9PH4Ff/i+/V6PFM7mib
BQOm02wyFbs7fvKVGVJoqWK+6CIucX732x7W5yRgHtS5ukQXdbzt1Ek3wkEW98Cb
HTruz7RNAt/NyXlLSODeit1lBbx3Vk9EaxZtRsv88QKBgGn7JwXgez9NOyobsNIo
QVO80rpUeenSjuFi+R0VmbLKe/wgAQbYJ0xTAsQ0btqViMzB27D6mJyC+KUIwWNX
MN8a+m46v4kqvZkKL2c4gmDibyURNe/vCtCHFuanJS/1mo2tr4XDyEeiuK52eTd9
omQDpP86RX/hIIQ+JyLSaWYa
-----END PRIVATE KEY-----`,
          comment:
            "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation"
        }
      },
      {
        path: "/nested1/nested2/folder",
        secret: {
          key: "secret-key-3",
          value: `-----BEGIN PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCa6eeFk+cMVqFn
hoVQDYgn2Ptp5Azysr2UPq6P73pCL9BzUtOXKZROqDyGehzzfg3wE2KdYU1Jk5Uq
fP0ZOWDIlM2SaVCSI3FW32o5+ZiggjpqcVdLFc/PS0S/ZdSmpPd8h11iO2brtIAI
ugTW8fcKlGSNUwx9aFmE7A6JnTRliTxB1l6QaC+YAwTK39VgeVH2gDSWC407aS15
QobAkaBKKmFkzB5D7i2ZJwt+uXJV/rbLmyDmtnw0lubciGn7NX9wbYef180fisqT
aPNAz0nPKk0fFH2Wd5MZixNGbrrpDA+FCYvI5doThZyT2hpj08qWP07oXXCAqw46
IEupNSILAgMBAAECggEBAIJb5KzeaiZS3B3O8G4OBQ5rJB3WfyLYUHnoSWLsBbie
nc392/ovThLmtZAAQE6SO85Tsb93+t64Z2TKqv1H8G658UeMgfWIB78v4CcLJ2mi
TN/3opqXrzjkQOTDHzBgT7al/mpETHZ6fOdbCemK0fVALGFUioUZg4M8VXtuI4Jw
q28jAyoRKrCrzda4BeQ553NZ4G5RvwhX3O2I8B8upTbt5hLcisBKy8MPLYY5LUFj
YKAP+raf6QLliP6KYHuVxUlgzxjLTxVG41etcyqqZF+foyiKBO3PU3n8oh++tgQP
ExOxiR0JSkBG5b+oOBD0zxcvo3/SjBHn0dJOZCSU2SkCgYEAyCe676XnNyBZMRD7
6trsaoiCWBpA6M8H44+x3w4cQFtqV38RyLy60D+iMKjIaLqeBbnay61VMzo24Bz3
EuF2n4+9k/MetLJ0NCw8HmN5k0WSMD2BFsJWG8glVbzaqzehP4tIclwDTYc1jQVt
IoV2/iL7HGT+x2daUwbU5kN5hK0CgYEAxiLB+fmjxJW7VY4SHDLqPdpIW0q/kv4K
d/yZBrCX799vjmFb9vLh7PkQUfJhMJ/ttJOd7EtT3xh4mfkBeLfHwVU0d/ahbmSH
UJu/E9ZGxAW3PP0kxHZtPrLKQwBnfq8AxBauIhR3rPSorQTIOKtwz1jMlHFSUpuL
3KeK2YfDYJcCgYEAkQnJOlNcAuRb/WQzSHIvktssqK8NjiZHryy3Vc0hx7j2jES2
HGI2dSVHYD9OSiXA0KFm3OTTsnViwm/60iGzFdjRJV6tR39xGUVcoyCuPnvRfUd0
PYvBXgxgkYpyYlPDcwp5CvWGJy3tLi1acgOIwIuUr3S38sL//t4adGk8q1kCgYB8
Jbs1Tl53BvrimKpwUNbE+sjrquJu0A7vL68SqgQJoQ7dP9PH4Ff/i+/V6PFM7mib
BQOm02wyFbs7fvKVGVJoqWK+6CIucX732x7W5yRgHtS5ukQXdbzt1Ek3wkEW98Cb
HTruz7RNAt/NyXlLSODeit1lBbx3Vk9EaxZtRsv88QKBgGn7JwXgez9NOyobsNIo
QVO80rpUeenSjuFi+R0VmbLKe/wgAQbYJ0xTAsQ0btqViMzB27D6mJyC+KUIwWNX
MN8a+m46v4kqvZkKL2c4gmDibyURNe/vCtCHFuanJS/1mo2tr4XDyEeiuK52eTd9
omQDpP86RX/hIIQ+JyLSaWYa
-----END PRIVATE KEY-----`,
          comment:
            "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation"
        }
      },
      {
        path: "/nested1/nested2/folder",
        secret: {
          key: "secret-key-3",
          value:
            "TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGRvIGVpdXNtb2QgdGVtcG9yIGluY2lkaWR1bnQgdXQgbGFib3JlIGV0IGRvbG9yZSBtYWduYSBhbGlxdWEuIFV0IGVuaW0gYWQgbWluaW0gdmVuaWFtLCBxdWlzIG5vc3RydWQgZXhlcmNpdGF0aW9uCg==",
          comment: ""
        }
      }
    ];
    beforeAll(async () => {
      if (auth === AuthMode.JWT) {
        authToken = jwtAuthToken;
      } else if (auth === AuthMode.IDENTITY_ACCESS_TOKEN) {
        const identityLogin = await testServer.inject({
          method: "POST",
          url: "/api/v1/auth/universal-auth/login",
          body: {
            clientSecret: seedData1.machineIdentity.clientCredentials.secret,
            clientId: seedData1.machineIdentity.clientCredentials.id
          }
        });
        expect(identityLogin.statusCode).toBe(200);
        authToken = identityLogin.json().accessToken;
      }
      // create a deep folder
      const folderCreate = await testServer.inject({
        method: "POST",
        url: `/api/v1/folders`,
        headers: {
          authorization: `Bearer ${jwtAuthToken}`
        },
        body: {
          workspaceId: seedData1.projectV3.id,
          environment: seedData1.environment.slug,
          name: "folder",
          path: "/nested1/nested2"
        }
      });
      expect(folderCreate.statusCode).toBe(200);
      folderId = folderCreate.json().folder.id;
    });

    afterAll(async () => {
      const deleteFolder = await testServer.inject({
        method: "DELETE",
        url: `/api/v1/folders/${folderId}`,
        headers: {
          authorization: `Bearer ${authToken}`
        },
        body: {
          workspaceId: seedData1.projectV3.id,
          environment: seedData1.environment.slug,
          path: "/nested1/nested2"
        }
      });
      expect(deleteFolder.statusCode).toBe(200);
    });
    const getSecrets = async (environment: string, secretPath = "/") => {
      const res = await testServer.inject({
        method: "GET",
        url: `/api/v3/secrets/raw`,
        headers: {
          authorization: `Bearer ${authToken}`
        },
        query: {
          secretPath,
          environment,
          workspaceId: seedData1.projectV3.id
        }
      });
      const secrets: TRawSecret[] = JSON.parse(res.payload).secrets || [];
      return secrets;
    };
test.each(secretTestCases)("Create secret in path $path", async ({ secret, path }) => {
|
||||
const createdSecret = await createSecret({ path, ...secret });
|
||||
expect(createdSecret.secretKey).toEqual(secret.key);
|
||||
expect(createdSecret.secretValue).toEqual(secret.value);
|
||||
expect(createdSecret.secretComment || "").toEqual(secret.comment);
|
||||
expect(createdSecret.version).toEqual(1);
|
||||
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: secret.key,
|
||||
secretValue: secret.value,
|
||||
type: SecretType.Shared
|
||||
})
|
||||
])
|
||||
);
|
||||
await deleteSecret({ path, key: secret.key });
|
||||
});
|
||||
|
||||
test.each(secretTestCases)("Get secret by name in path $path", async ({ secret, path }) => {
|
||||
await createSecret({ path, ...secret });
|
||||
|
||||
const getSecByNameRes = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v3/secrets/raw/${secret.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${authToken}`
|
||||
},
|
||||
query: {
|
||||
secretPath: path,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environment: seedData1.environment.slug
|
||||
}
|
||||
});
|
||||
expect(getSecByNameRes.statusCode).toBe(200);
|
||||
const getSecretByNamePayload = JSON.parse(getSecByNameRes.payload);
|
||||
expect(getSecretByNamePayload).toHaveProperty("secret");
|
||||
const decryptedSecret = getSecretByNamePayload.secret as TRawSecret;
|
||||
expect(decryptedSecret.secretKey).toEqual(secret.key);
|
||||
expect(decryptedSecret.secretValue).toEqual(secret.value);
|
||||
expect(decryptedSecret.secretComment || "").toEqual(secret.comment);
|
||||
|
||||
await deleteSecret({ path, key: secret.key });
|
||||
});
|
||||
|
||||
    if (auth === AuthMode.JWT) {
      test.each(secretTestCases)(
        "Creating personal secret without shared throws error in path $path",
        async ({ secret }) => {
          const createSecretReqBody = {
            workspaceId: seedData1.projectV3.id,
            environment: seedData1.environment.slug,
            type: SecretType.Personal,
            secretKey: secret.key,
            secretValue: secret.value,
            secretComment: secret.comment
          };
          const createSecRes = await testServer.inject({
            method: "POST",
            url: `/api/v3/secrets/raw/SEC2`,
            headers: {
              authorization: `Bearer ${authToken}`
            },
            body: createSecretReqBody
          });
          const payload = JSON.parse(createSecRes.payload);
          expect(createSecRes.statusCode).toBe(400);
          expect(payload.error).toEqual("BadRequest");
        }
      );

      test.each(secretTestCases)("Creating personal secret in path $path", async ({ secret, path }) => {
        await createSecret({ path, ...secret });

        const createSecretReqBody = {
          workspaceId: seedData1.projectV3.id,
          environment: seedData1.environment.slug,
          type: SecretType.Personal,
          secretPath: path,
          secretKey: secret.key,
          secretValue: "personal-value",
          secretComment: secret.comment
        };
        const createSecRes = await testServer.inject({
          method: "POST",
          url: `/api/v3/secrets/raw/${secret.key}`,
          headers: {
            authorization: `Bearer ${authToken}`
          },
          body: createSecretReqBody
        });
        expect(createSecRes.statusCode).toBe(200);

        // list secrets should contain the personal one and the shared one
        const secrets = await getSecrets(seedData1.environment.slug, path);
        expect(secrets).toEqual(
          expect.arrayContaining([
            expect.objectContaining({
              secretKey: secret.key,
              secretValue: secret.value,
              type: SecretType.Shared
            }),
            expect.objectContaining({
              secretKey: secret.key,
              secretValue: "personal-value",
              type: SecretType.Personal
            })
          ])
        );

        await deleteSecret({ path, key: secret.key });
      });

      test.each(secretTestCases)(
        "Deleting personal one should not delete shared secret in path $path",
        async ({ secret, path }) => {
          await createSecret({ path, ...secret }); // shared one
          await createSecret({ path, ...secret, type: SecretType.Personal });

          // the shared secret should remain in the list
          const secrets = await getSecrets(seedData1.environment.slug, path);
          expect(secrets).toEqual(
            expect.arrayContaining([
              expect.objectContaining({
                secretKey: secret.key,
                type: SecretType.Shared
              }),
              expect.not.objectContaining({
                secretKey: secret.key,
                type: SecretType.Personal
              })
            ])
          );
          await deleteSecret({ path, key: secret.key });
        }
      );
    }
test.each(secretTestCases)("Update secret in path $path", async ({ path, secret }) => {
|
||||
await createSecret({ path, ...secret });
|
||||
const updateSecretReqBody = {
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environment: seedData1.environment.slug,
|
||||
type: SecretType.Shared,
|
||||
secretPath: path,
|
||||
secretKey: secret.key,
|
||||
secretValue: "new-value",
|
||||
secretComment: secret.comment
|
||||
};
|
||||
const updateSecRes = await testServer.inject({
|
||||
method: "PATCH",
|
||||
url: `/api/v3/secrets/raw/${secret.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${authToken}`
|
||||
},
|
||||
body: updateSecretReqBody
|
||||
});
|
||||
expect(updateSecRes.statusCode).toBe(200);
|
||||
const updatedSecretPayload = JSON.parse(updateSecRes.payload);
|
||||
expect(updatedSecretPayload).toHaveProperty("secret");
|
||||
const decryptedSecret = updatedSecretPayload.secret;
|
||||
expect(decryptedSecret.secretKey).toEqual(secret.key);
|
||||
expect(decryptedSecret.secretValue).toEqual("new-value");
|
||||
expect(decryptedSecret.secretComment || "").toEqual(secret.comment);
|
||||
|
||||
// list secret should have updated value
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: secret.key,
|
||||
secretValue: "new-value",
|
||||
type: SecretType.Shared
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecret({ path, key: secret.key });
|
||||
});
|
||||
|
||||
test.each(secretTestCases)("Delete secret in path $path", async ({ secret, path }) => {
|
||||
await createSecret({ path, ...secret });
|
||||
const deletedSecret = await deleteSecret({ path, key: secret.key });
|
||||
expect(deletedSecret.secretKey).toEqual(secret.key);
|
||||
|
||||
// shared secret deletion should delete personal ones also
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.not.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: secret.key,
|
||||
type: SecretType.Shared
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secretKey: secret.key,
|
||||
type: SecretType.Personal
|
||||
})
|
||||
])
|
||||
);
|
||||
});
|
||||
|
||||
    test.each(secretTestCases)("Bulk create secrets in path $path", async ({ secret, path }) => {
      const createSharedSecRes = await testServer.inject({
        method: "POST",
        url: `/api/v3/secrets/batch/raw`,
        headers: {
          authorization: `Bearer ${authToken}`
        },
        body: {
          workspaceId: seedData1.projectV3.id,
          environment: seedData1.environment.slug,
          secretPath: path,
          secrets: Array.from(Array(5)).map((_e, i) => ({
            secretKey: `BULK-${secret.key}-${i + 1}`,
            secretValue: secret.value,
            secretComment: secret.comment
          }))
        }
      });
      expect(createSharedSecRes.statusCode).toBe(200);
      const createSharedSecPayload = JSON.parse(createSharedSecRes.payload);
      expect(createSharedSecPayload).toHaveProperty("secrets");

      // bulk ones should exist
      const secrets = await getSecrets(seedData1.environment.slug, path);
      expect(secrets).toEqual(
        expect.arrayContaining(
          Array.from(Array(5)).map((_e, i) =>
            expect.objectContaining({
              secretKey: `BULK-${secret.key}-${i + 1}`,
              secretValue: secret.value,
              type: SecretType.Shared
            })
          )
        )
      );

      await Promise.all(
        Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
      );
    });

    test.each(secretTestCases)("Bulk create fail on existing secret in path $path", async ({ secret, path }) => {
      await createSecret({ ...secret, key: `BULK-${secret.key}-1`, path });

      const createSharedSecRes = await testServer.inject({
        method: "POST",
        url: `/api/v3/secrets/batch/raw`,
        headers: {
          authorization: `Bearer ${authToken}`
        },
        body: {
          workspaceId: seedData1.projectV3.id,
          environment: seedData1.environment.slug,
          secretPath: path,
          secrets: Array.from(Array(5)).map((_e, i) => ({
            secretKey: `BULK-${secret.key}-${i + 1}`,
            secretValue: secret.value,
            secretComment: secret.comment
          }))
        }
      });
      expect(createSharedSecRes.statusCode).toBe(400);

      await deleteSecret({ path, key: `BULK-${secret.key}-1` });
    });

    test.each(secretTestCases)("Bulk update secrets in path $path", async ({ secret, path }) => {
      await Promise.all(
        Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path }))
      );

      const updateSharedSecRes = await testServer.inject({
        method: "PATCH",
        url: `/api/v3/secrets/batch/raw`,
        headers: {
          authorization: `Bearer ${authToken}`
        },
        body: {
          workspaceId: seedData1.projectV3.id,
          environment: seedData1.environment.slug,
          secretPath: path,
          secrets: Array.from(Array(5)).map((_e, i) => ({
            secretKey: `BULK-${secret.key}-${i + 1}`,
            secretValue: "update-value",
            secretComment: secret.comment
          }))
        }
      });
      expect(updateSharedSecRes.statusCode).toBe(200);
      const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
      expect(updateSharedSecPayload).toHaveProperty("secrets");

      // bulk ones should exist
      const secrets = await getSecrets(seedData1.environment.slug, path);
      expect(secrets).toEqual(
        expect.arrayContaining(
          Array.from(Array(5)).map((_e, i) =>
            expect.objectContaining({
              secretKey: `BULK-${secret.key}-${i + 1}`,
              secretValue: "update-value",
              type: SecretType.Shared
            })
          )
        )
      );
      await Promise.all(
        Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
      );
    });

    test.each(secretTestCases)("Bulk upsert secrets in path $path", async ({ secret, path }) => {
      const updateSharedSecRes = await testServer.inject({
        method: "PATCH",
        url: `/api/v3/secrets/batch/raw`,
        headers: {
          authorization: `Bearer ${authToken}`
        },
        body: {
          workspaceId: seedData1.projectV3.id,
          environment: seedData1.environment.slug,
          secretPath: path,
          mode: "upsert",
          secrets: Array.from(Array(5)).map((_e, i) => ({
            secretKey: `BULK-${secret.key}-${i + 1}`,
            secretValue: "update-value",
            secretComment: secret.comment
          }))
        }
      });
      expect(updateSharedSecRes.statusCode).toBe(200);
      const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
      expect(updateSharedSecPayload).toHaveProperty("secrets");

      // bulk ones should exist
      const secrets = await getSecrets(seedData1.environment.slug, path);
      expect(secrets).toEqual(
        expect.arrayContaining(
          Array.from(Array(5)).map((_e, i) =>
            expect.objectContaining({
              secretKey: `BULK-${secret.key}-${i + 1}`,
              secretValue: "update-value",
              type: SecretType.Shared
            })
          )
        )
      );
      await Promise.all(
        Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
      );
    });

    test("Bulk upsert secrets in path multiple paths", async () => {
      const firstBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
        secretKey: `BULK-KEY-${secretTestCases[0].secret.key}-${i + 1}`,
        secretValue: "update-value",
        secretComment: "comment",
        secretPath: secretTestCases[0].path
      }));
      const secondBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
        secretKey: `BULK-KEY-${secretTestCases[1].secret.key}-${i + 1}`,
        secretValue: "update-value",
        secretComment: "comment",
        secretPath: secretTestCases[1].path
      }));
      const testSecrets = [...firstBatchSecrets, ...secondBatchSecrets];

      const updateSharedSecRes = await testServer.inject({
        method: "PATCH",
        url: `/api/v3/secrets/batch/raw`,
        headers: {
          authorization: `Bearer ${authToken}`
        },
        body: {
          workspaceId: seedData1.projectV3.id,
          environment: seedData1.environment.slug,
          mode: "upsert",
          secrets: testSecrets
        }
      });
      expect(updateSharedSecRes.statusCode).toBe(200);
      const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
      expect(updateSharedSecPayload).toHaveProperty("secrets");

      // bulk ones should exist
      const firstBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[0].path);
      expect(firstBatchSecretsOnInfisical).toEqual(
        expect.arrayContaining(
          firstBatchSecrets.map((el) =>
            expect.objectContaining({
              secretKey: el.secretKey,
              secretValue: "update-value",
              type: SecretType.Shared
            })
          )
        )
      );
      const secondBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[1].path);
      expect(secondBatchSecretsOnInfisical).toEqual(
        expect.arrayContaining(
          secondBatchSecrets.map((el) =>
            expect.objectContaining({
              secretKey: el.secretKey,
              secretValue: "update-value",
              type: SecretType.Shared
            })
          )
        )
      );
      await Promise.all(testSecrets.map((el) => deleteSecret({ path: el.secretPath, key: el.secretKey })));
    });

    test.each(secretTestCases)("Bulk delete secrets in path $path", async ({ secret, path }) => {
      await Promise.all(
        Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path }))
      );

      const deletedSharedSecRes = await testServer.inject({
        method: "DELETE",
        url: `/api/v3/secrets/batch/raw`,
        headers: {
          authorization: `Bearer ${authToken}`
        },
        body: {
          workspaceId: seedData1.projectV3.id,
          environment: seedData1.environment.slug,
          secretPath: path,
          secrets: Array.from(Array(5)).map((_e, i) => ({
            secretKey: `BULK-${secret.key}-${i + 1}`
          }))
        }
      });

      expect(deletedSharedSecRes.statusCode).toBe(200);
      const deletedSecretPayload = JSON.parse(deletedSharedSecRes.payload);
      expect(deletedSecretPayload).toHaveProperty("secrets");

      // bulk ones should no longer exist
      const secrets = await getSecrets(seedData1.environment.slug, path);
      expect(secrets).toEqual(
        expect.not.arrayContaining(
          Array.from(Array(5)).map((_e, i) =>
            expect.objectContaining({
              secretKey: `BULK-${secret.key}-${i + 1}`,
              type: SecretType.Shared
            })
          )
        )
      );
    });
  }
);
File diff suppressed because it is too large
@ -1,73 +0,0 @@
type TFolder = {
  id: string;
  name: string;
};

export const createFolder = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  name: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/folders`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      name: dto.name,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folder as TFolder;
};

export const deleteFolder = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  id: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "DELETE",
    url: `/api/v1/folders/${dto.id}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folder as TFolder;
};

export const listFolders = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "GET",
    url: `/api/v1/folders`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folders as TFolder[];
};
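
For orientation, a minimal sketch of how these folder helpers might be driven from one of the e2e specs above. The `jwtAuthToken` global is the one declared in the test-only ambient types and signed by the test environment further down; the `seedData1` fixture is the same one the other specs use; the folder name is purely illustrative:

import { seedData1 } from "@app/db/seed-data";

test("folder lifecycle (illustrative)", async () => {
  const base = {
    workspaceId: seedData1.projectV3.id,
    environmentSlug: seedData1.environment.slug,
    secretPath: "/",
    authToken: jwtAuthToken // global provided by the vitest environment
  };
  const folder = await createFolder({ ...base, name: "nested-1" });
  const folders = await listFolders(base);
  expect(folders).toEqual(expect.arrayContaining([expect.objectContaining({ name: "nested-1" })]));
  await deleteFolder({ ...base, id: folder.id });
});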
@ -1,93 +0,0 @@
type TSecretImport = {
  id: string;
  importEnv: {
    name: string;
    slug: string;
    id: string;
  };
  importPath: string;
};

export const createSecretImport = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  isReplication?: boolean;
  secretPath: string;
  importPath: string;
  importEnv: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/secret-imports`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      isReplication: dto.isReplication,
      path: dto.secretPath,
      import: {
        environment: dto.importEnv,
        path: dto.importPath
      }
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImport");
  return payload.secretImport as TSecretImport;
};

export const deleteSecretImport = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
  id: string;
}) => {
  const res = await testServer.inject({
    method: "DELETE",
    url: `/api/v1/secret-imports/${dto.id}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImport");
  return payload.secretImport as TSecretImport;
};

export const listSecretImport = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "GET",
    url: `/api/v1/secret-imports`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    query: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImports");
  return payload.secretImports as TSecretImport[];
};
@ -1,130 +0,0 @@
import { SecretType } from "@app/db/schemas";

type TRawSecret = {
  secretKey: string;
  secretValue: string;
  secretComment?: string;
  version: number;
};

export const createSecretV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  key: string;
  value: string;
  comment?: string;
  authToken: string;
  type?: SecretType;
}) => {
  const createSecretReqBody = {
    workspaceId: dto.workspaceId,
    environment: dto.environmentSlug,
    type: dto.type || SecretType.Shared,
    secretPath: dto.secretPath,
    secretKey: dto.key,
    secretValue: dto.value,
    secretComment: dto.comment
  };
  const createSecRes = await testServer.inject({
    method: "POST",
    url: `/api/v3/secrets/raw/${dto.key}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: createSecretReqBody
  });
  expect(createSecRes.statusCode).toBe(200);
  const createdSecretPayload = JSON.parse(createSecRes.payload);
  expect(createdSecretPayload).toHaveProperty("secret");
  return createdSecretPayload.secret as TRawSecret;
};

export const deleteSecretV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  key: string;
  authToken: string;
}) => {
  const deleteSecRes = await testServer.inject({
    method: "DELETE",
    url: `/api/v3/secrets/raw/${dto.key}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      secretPath: dto.secretPath
    }
  });
  expect(deleteSecRes.statusCode).toBe(200);
  const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
  expect(updatedSecretPayload).toHaveProperty("secret");
  return updatedSecretPayload.secret as TRawSecret;
};

export const getSecretByNameV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  key: string;
  authToken: string;
}) => {
  const response = await testServer.inject({
    method: "GET",
    url: `/api/v3/secrets/raw/${dto.key}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    query: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      secretPath: dto.secretPath,
      expandSecretReferences: "true",
      include_imports: "true"
    }
  });
  expect(response.statusCode).toBe(200);
  const payload = JSON.parse(response.payload);
  expect(payload).toHaveProperty("secret");
  return payload.secret as TRawSecret;
};

export const getSecretsV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
  recursive?: boolean;
}) => {
  const getSecretsResponse = await testServer.inject({
    method: "GET",
    url: `/api/v3/secrets/raw`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    query: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      secretPath: dto.secretPath,
      expandSecretReferences: "true",
      include_imports: "true",
      recursive: String(dto.recursive || false)
    }
  });
  expect(getSecretsResponse.statusCode).toBe(200);
  const getSecretsPayload = JSON.parse(getSecretsResponse.payload);
  expect(getSecretsPayload).toHaveProperty("secrets");
  expect(getSecretsPayload).toHaveProperty("imports");
  return getSecretsPayload as {
    secrets: TRawSecret[];
    imports: {
      secretPath: string;
      environment: string;
      folderId: string;
      secrets: TRawSecret[];
    }[];
  };
};
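
A short usage sketch under the same assumptions as above (the `seedData1` fixture and the `jwtAuthToken` global from the test environment below); it round-trips a single raw secret through the v3 endpoints. The key and value are illustrative:

test("raw secret round trip (illustrative)", async () => {
  const base = {
    workspaceId: seedData1.projectV3.id,
    environmentSlug: seedData1.environment.slug,
    secretPath: "/",
    authToken: jwtAuthToken
  };
  await createSecretV2({ ...base, key: "DB_HOST", value: "localhost" });
  const found = await getSecretByNameV2({ ...base, key: "DB_HOST" });
  expect(found.secretValue).toEqual("localhost");
  const { secrets, imports } = await getSecretsV2(base);
  expect(secrets).toEqual(expect.arrayContaining([expect.objectContaining({ secretKey: "DB_HOST" })]));
  expect(imports).toBeDefined();
  await deleteSecretV2({ ...base, key: "DB_HOST" });
});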
@ -1,123 +0,0 @@
// eslint-disable-next-line
import "ts-node/register";

import dotenv from "dotenv";
import jwt from "jsonwebtoken";
import path from "path";

import { seedData1 } from "@app/db/seed-data";
import { initEnvConfig } from "@app/lib/config/env";
import { initLogger } from "@app/lib/logger";
import { main } from "@app/server/app";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";

import { mockSmtpServer } from "./mocks/smtp";
import { initDbConnection } from "@app/db";
import { queueServiceFactory } from "@app/queue";
import { keyStoreFactory } from "@app/keystore/keystore";
import { Redis } from "ioredis";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";

dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
  name: "knex-env",
  transformMode: "ssr",
  async setup() {
    const logger = initLogger();
    const envConfig = initEnvConfig(logger);
    const db = initDbConnection({
      dbConnectionUri: envConfig.DB_CONNECTION_URI,
      dbRootCert: envConfig.DB_ROOT_CERT
    });

    const redis = new Redis(envConfig.REDIS_URL);
    await redis.flushdb("SYNC");

    try {
      await db.migrate.rollback(
        {
          directory: path.join(__dirname, "../src/db/migrations"),
          extension: "ts",
          tableName: "infisical_migrations"
        },
        true
      );

      await db.migrate.latest({
        directory: path.join(__dirname, "../src/db/migrations"),
        extension: "ts",
        tableName: "infisical_migrations"
      });

      await db.seed.run({
        directory: path.join(__dirname, "../src/db/seeds"),
        extension: "ts"
      });

      const smtp = mockSmtpServer();
      const queue = queueServiceFactory(envConfig.REDIS_URL, { dbConnectionUrl: envConfig.DB_CONNECTION_URI });
      const keyStore = keyStoreFactory(envConfig.REDIS_URL);

      const hsmModule = initializeHsmModule(envConfig);
      hsmModule.initialize();

      const server = await main({
        db,
        smtp,
        logger,
        queue,
        keyStore,
        hsmModule: hsmModule.getModule(),
        redis,
        envConfig
      });

      // @ts-expect-error type
      globalThis.testServer = server;
      // @ts-expect-error type
      globalThis.jwtAuthToken = jwt.sign(
        {
          authTokenType: AuthTokenType.ACCESS_TOKEN,
          userId: seedData1.id,
          tokenVersionId: seedData1.token.id,
          authMethod: AuthMethod.EMAIL,
          organizationId: seedData1.organization.id,
          accessVersion: 1
        },
        envConfig.AUTH_SECRET,
        { expiresIn: envConfig.JWT_AUTH_LIFETIME }
      );
    } catch (error) {
      // eslint-disable-next-line
      console.log("[TEST] Error setting up environment", error);
      await db.destroy();
      throw error;
    }

    // custom setup
    return {
      async teardown() {
        // @ts-expect-error type
        await globalThis.testServer.close();
        // @ts-expect-error type
        delete globalThis.testServer;
        // @ts-expect-error type
        delete globalThis.jwtAuthToken;
        // called after all tests with this env have been run
        await db.migrate.rollback(
          {
            directory: path.join(__dirname, "../src/db/migrations"),
            extension: "ts",
            tableName: "infisical_migrations"
          },
          true
        );

        await redis.flushdb("ASYNC");
        redis.disconnect();
        await db.destroy();
      }
    };
  }
};
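
This module is consumed by vitest as a custom test environment. The committed vitest config is not part of this diff, so the following is only a plausible sketch of the wiring referenced by the `test:e2e` script in package.json; the option values are assumptions:

// vitest.e2e.config.ts (sketch, not the committed file)
import { defineConfig } from "vitest/config";
import tsconfigPaths from "vite-tsconfig-paths";

export default defineConfig({
  test: {
    globals: true,
    // points vitest at the custom knex environment defined above
    environment: "./e2e-test/vitest-environment-knex.ts",
    include: ["./e2e-test/**/*.spec.ts"]
  },
  plugins: [tsconfigPaths()]
});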
41 backend/environment.d.ts vendored Normal file
@ -0,0 +1,41 @@
export {};

declare global {
  namespace NodeJS {
    interface ProcessEnv {
      EMAIL_TOKEN_LIFETIME: string;
      ENCRYPTION_KEY: string;
      JWT_AUTH_LIFETIME: string;
      JWT_AUTH_SECRET: string;
      JWT_REFRESH_LIFETIME: string;
      JWT_REFRESH_SECRET: string;
      JWT_SERVICE_SECRET: string;
      JWT_SIGNUP_LIFETIME: string;
      JWT_SIGNUP_SECRET: string;
      MONGO_URL: string;
      NODE_ENV: 'development' | 'staging' | 'testing' | 'production';
      CLIENT_ID_HEROKU: string;
      CLIENT_ID_VERCEL: string;
      CLIENT_ID_NETLIFY: string;
      CLIENT_SECRET_HEROKU: string;
      CLIENT_SECRET_VERCEL: string;
      CLIENT_SECRET_NETLIFY: string;
      POSTHOG_HOST: string;
      POSTHOG_PROJECT_API_KEY: string;
      PRIVATE_KEY: string;
      PUBLIC_KEY: string;
      SENTRY_DSN: string;
      SITE_URL: string;
      SMTP_HOST: string;
      SMTP_NAME: string;
      SMTP_PASSWORD: string;
      SMTP_USERNAME: string;
      STRIPE_PRODUCT_CARD_AUTH: string;
      STRIPE_PRODUCT_PRO: string;
      STRIPE_PRODUCT_STARTER: string;
      STRIPE_PUBLISHABLE_KEY: string;
      STRIPE_SECRET_KEY: string;
      STRIPE_WEBHOOK_SECRET: string;
    }
  }
}
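
With these ambient declarations in place, `process.env` reads are typed without casts; a small illustration using fields from the interface above:

// illustration only: no `as string` assertion is needed
const smtpHost: string = process.env.SMTP_HOST;
const nodeEnv: "development" | "staging" | "testing" | "production" = process.env.NODE_ENV;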
24 backend/healthcheck.js Normal file
@ -0,0 +1,24 @@
const http = require('http');
const PORT = process.env.PORT || 4000;
const options = {
  host: 'localhost',
  port: PORT,
  timeout: 2000,
  path: '/healthcheck'
};

const healthCheck = http.request(options, (res) => {
  console.log(`HEALTHCHECK STATUS: ${res.statusCode}`);
  if (res.statusCode == 200) {
    process.exit(0);
  } else {
    process.exit(1);
  }
});

healthCheck.on('error', function (err) {
  console.error(`HEALTH CHECK ERROR: ${err}`);
  process.exit(1);
});

healthCheck.end();
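
The script exits with 0 on a 200 response and 1 otherwise, so a container runtime can poll it. A typical wiring is shown here as an illustration only; this diff does not include the Dockerfile, so the file name and interval values are assumptions:

# Dockerfile excerpt (hypothetical)
HEALTHCHECK --interval=10s --timeout=5s --start-period=10s --retries=3 \
  CMD node healthcheck.js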
BIN backend/img/dashboard.png Normal file
Binary file not shown. (After: 493 KiB)
@ -1,6 +1,6 @@
{
  "watch": ["src"],
  "ext": ".ts,.js",
  "ignore": [],
  "exec": "tsx ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine"
}
  "watch": ["src"],
  "ext": ".ts,.js",
  "ignore": [],
  "exec": "ts-node ./src/index.ts"
}
34899 backend/package-lock.json generated
File diff suppressed because it is too large
@ -1,235 +1,72 @@
{
  "name": "backend",
  "version": "1.0.0",
  "description": "",
  "main": "./dist/main.mjs",
  "bin": "dist/main.js",
  "pkg": {
    "scripts": [
      "dist/**/*.js",
      "../frontend/node_modules/next/**/*.js",
      "../frontend/.next/*/**/*.js",
      "../frontend/node_modules/next/dist/server/**/*.js",
      "../frontend/node_modules/@fortawesome/fontawesome-svg-core/**/*.js"
    ],
    "assets": [
      "dist/**",
      "!dist/**/*.js",
      "node_modules/**",
      "../frontend/node_modules/**",
      "../frontend/.next/**",
      "!../frontend/node_modules/next/dist/server/**/*.js",
      "../frontend/node_modules/@fortawesome/fontawesome-svg-core/**/*",
      "../frontend/public/**"
    ],
    "outputPath": "binary"
  },
  "scripts": {
    "binary:build": "npm run binary:clean && npm run build:frontend && npm run build && npm run binary:babel-frontend && npm run binary:babel-backend && npm run binary:rename-imports",
    "binary:package": "pkg --no-bytecode --public-packages \"*\" --public --target host .",
    "binary:babel-backend": " babel ./dist -d ./dist",
    "binary:babel-frontend": "babel --copy-files ../frontend/.next/server -d ../frontend/.next/server",
    "binary:clean": "rm -rf ./dist && rm -rf ./binary",
    "binary:rename-imports": "ts-node ./scripts/rename-mjs.ts",
    "test": "echo \"Error: no test specified\" && exit 1",
    "dev": "tsx watch --clear-screen=false ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine",
    "dev:docker": "nodemon",
    "build": "tsup --sourcemap",
    "build:frontend": "npm run build --prefix ../frontend",
    "start": "node --enable-source-maps dist/main.mjs",
    "type:check": "tsc --noEmit",
    "lint:fix": "eslint --fix --ext js,ts ./src",
    "lint": "eslint 'src/**/*.ts'",
    "test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1",
    "test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
    "test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
    "generate:component": "tsx ./scripts/create-backend-file.ts",
    "generate:schema": "tsx ./scripts/generate-schema-types.ts && eslint --fix --ext ts ./src/db/schemas",
    "auditlog-migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:latest",
    "auditlog-migration:up": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:up",
    "auditlog-migration:down": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:down",
    "auditlog-migration:list": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:list",
    "auditlog-migration:status": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:status",
    "auditlog-migration:unlock": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:unlock",
    "auditlog-migration:rollback": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:rollback",
    "migration:new": "tsx ./scripts/create-migration.ts",
    "migration:up": "npm run auditlog-migration:up && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:up",
    "migration:down": "npm run auditlog-migration:down && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:down",
    "migration:list": "npm run auditlog-migration:list && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:list",
    "migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && npm run auditlog-migration:latest && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:latest",
    "migration:status": "npm run auditlog-migration:status && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:status",
    "migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./dist/db/knexfile.mjs migrate:rollback",
    "migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./dist/db/knexfile.mjs migrate:unlock",
    "migration:up-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
    "migration:down-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
    "migration:list-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
    "migration:latest-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
    "migration:status-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
    "migration:rollback-dev": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
    "migration:unlock-dev": "knex --knexfile ./src/db/knexfile.ts migrate:unlock",
    "migrate:org": "tsx ./scripts/migrate-organization.ts",
    "seed:new": "tsx ./scripts/create-seed-file.ts",
    "seed": "knex --knexfile ./dist/db/knexfile.ts --client pg seed:run",
    "db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
  },
  "keywords": [],
  "author": "",
  "license": "ISC",
  "devDependencies": {
    "@babel/cli": "^7.18.10",
    "@babel/core": "^7.18.10",
    "@babel/plugin-syntax-import-attributes": "^7.24.7",
    "@babel/preset-env": "^7.18.10",
    "@babel/preset-react": "^7.24.7",
    "@types/bcrypt": "^5.0.2",
    "@types/jmespath": "^0.15.2",
    "@types/jsonwebtoken": "^9.0.5",
    "@types/jsrp": "^0.2.6",
    "@types/libsodium-wrappers": "^0.7.13",
    "@types/lodash.isequal": "^4.5.8",
    "@types/node": "^20.9.5",
    "@types/nodemailer": "^6.4.14",
    "@types/passport-github": "^1.1.12",
    "@types/passport-google-oauth20": "^2.0.14",
    "@types/pg": "^8.10.9",
    "@types/picomatch": "^2.3.3",
    "@types/pkcs11js": "^1.0.4",
    "@types/prompt-sync": "^4.2.3",
    "@types/resolve": "^1.20.6",
    "@types/safe-regex": "^1.1.6",
    "@types/sjcl": "^1.0.34",
    "@types/uuid": "^9.0.7",
    "@typescript-eslint/eslint-plugin": "^6.20.0",
    "@typescript-eslint/parser": "^6.20.0",
    "@yao-pkg/pkg": "^5.12.0",
    "babel-plugin-transform-import-meta": "^2.2.1",
    "eslint": "^8.56.0",
    "eslint-config-airbnb-base": "^15.0.0",
    "eslint-config-airbnb-typescript": "^17.1.0",
    "eslint-config-prettier": "^9.1.0",
    "eslint-import-resolver-typescript": "^3.6.1",
    "eslint-plugin-import": "^2.29.1",
    "eslint-plugin-prettier": "^5.1.3",
    "eslint-plugin-simple-import-sort": "^10.0.0",
    "nodemon": "^3.0.2",
    "pino-pretty": "^10.2.3",
    "prompt-sync": "^4.2.0",
    "rimraf": "^5.0.5",
    "ts-node": "^10.9.2",
    "tsc-alias": "^1.8.8",
    "tsconfig-paths": "^4.2.0",
    "tsup": "^8.0.1",
    "tsx": "^4.4.0",
    "typescript": "^5.3.2",
    "vitest": "^1.2.2"
  },
  "dependencies": {
    "@aws-sdk/client-elasticache": "^3.637.0",
    "@aws-sdk/client-iam": "^3.525.0",
    "@aws-sdk/client-kms": "^3.609.0",
    "@aws-sdk/client-secrets-manager": "^3.504.0",
    "@aws-sdk/client-sts": "^3.600.0",
    "@casl/ability": "^6.5.0",
    "@elastic/elasticsearch": "^8.15.0",
    "@fastify/cookie": "^9.3.1",
    "@fastify/cors": "^8.5.0",
    "@fastify/etag": "^5.1.0",
    "@fastify/formbody": "^7.4.0",
    "@fastify/helmet": "^11.1.1",
    "@fastify/multipart": "8.3.1",
    "@fastify/passport": "^2.4.0",
    "@fastify/rate-limit": "^9.0.0",
    "@fastify/request-context": "^5.1.0",
    "@fastify/session": "^10.7.0",
    "@fastify/static": "^7.0.4",
    "@fastify/swagger": "^8.14.0",
    "@fastify/swagger-ui": "^2.1.0",
    "@google-cloud/kms": "^4.5.0",
    "@infisical/quic": "^1.0.8",
    "@node-saml/passport-saml": "^4.0.4",
    "@octokit/auth-app": "^7.1.1",
    "@octokit/plugin-retry": "^5.0.5",
    "@octokit/rest": "^20.0.2",
    "@octokit/webhooks-types": "^7.3.1",
    "@octopusdeploy/api-client": "^3.4.1",
    "@opentelemetry/api": "^1.9.0",
    "@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
    "@opentelemetry/exporter-prometheus": "^0.55.0",
    "@opentelemetry/instrumentation": "^0.55.0",
    "@opentelemetry/instrumentation-http": "^0.57.2",
    "@opentelemetry/resources": "^1.28.0",
    "@opentelemetry/sdk-metrics": "^1.28.0",
    "@opentelemetry/semantic-conventions": "^1.27.0",
    "@peculiar/asn1-schema": "^2.3.8",
    "@peculiar/x509": "^1.12.1",
    "@serdnam/pino-cloudwatch-transport": "^1.0.4",
    "@sindresorhus/slugify": "1.1.0",
    "@slack/oauth": "^3.0.2",
    "@slack/web-api": "^7.8.0",
    "@ucast/mongo2js": "^1.3.4",
    "ajv": "^8.12.0",
    "argon2": "^0.31.2",
    "aws-sdk": "^2.1553.0",
    "axios": "^1.6.7",
    "axios-retry": "^4.0.0",
    "bcrypt": "^5.1.1",
    "bullmq": "^5.4.2",
    "cassandra-driver": "^4.7.2",
    "connect-redis": "^7.1.1",
    "cron": "^3.1.7",
    "dd-trace": "^5.40.0",
    "dotenv": "^16.4.1",
    "fastify": "^4.28.1",
    "fastify-plugin": "^4.5.1",
    "google-auth-library": "^9.9.0",
    "googleapis": "^137.1.0",
    "handlebars": "^4.7.8",
    "hdb": "^0.19.10",
    "ioredis": "^5.3.2",
    "isomorphic-dompurify": "^2.22.0",
    "jmespath": "^0.16.0",
    "jsonwebtoken": "^9.0.2",
    "@godaddy/terminus": "^4.11.2",
    "@sentry/node": "^7.14.0",
    "@sentry/tracing": "^7.19.0",
    "@types/crypto-js": "^4.1.1",
    "axios": "^1.1.3",
    "bigint-conversion": "^2.2.2",
    "cookie-parser": "^1.4.6",
    "cors": "^2.8.5",
    "crypto-js": "^4.1.1",
    "dotenv": "^16.0.1",
    "express": "^4.18.1",
    "express-rate-limit": "^6.7.0",
    "express-validator": "^6.14.2",
    "handlebars": "^4.7.7",
    "helmet": "^5.1.1",
    "jsonwebtoken": "^8.5.1",
    "jsrp": "^0.2.4",
    "jwks-rsa": "^3.1.0",
    "knex": "^3.0.1",
    "ldapjs": "^3.0.7",
    "ldif": "0.5.1",
    "libsodium-wrappers": "^0.7.13",
    "lodash.isequal": "^4.5.0",
    "mongodb": "^6.8.1",
    "ms": "^2.1.3",
    "mysql2": "^3.9.8",
    "nanoid": "^3.3.8",
    "nodemailer": "^6.9.9",
    "odbc": "^2.4.9",
    "openid-client": "^5.6.5",
    "ora": "^7.0.1",
    "oracledb": "^6.4.0",
    "otplib": "^12.0.1",
    "passport-github": "^1.1.0",
    "passport-gitlab2": "^5.0.0",
    "passport-google-oauth20": "^2.0.0",
    "passport-ldapauth": "^3.0.1",
    "pg": "^8.11.3",
    "pg-boss": "^10.1.5",
    "pg-query-stream": "^4.5.3",
    "picomatch": "^3.0.1",
    "pino": "^8.16.2",
    "pkcs11js": "^2.1.6",
    "pkijs": "^3.2.4",
    "posthog-node": "^3.6.2",
    "probot": "^13.3.8",
    "safe-regex": "^2.1.1",
    "scim-patch": "^0.8.3",
    "scim2-parse-filter": "^0.2.10",
    "sjcl": "^1.0.8",
    "smee-client": "^2.0.0",
    "snowflake-sdk": "^1.14.0",
    "tedious": "^18.2.1",
    "mongoose": "^6.7.2",
    "nodemailer": "^6.8.0",
    "posthog-node": "^2.1.0",
    "query-string": "^7.1.3",
    "rimraf": "^3.0.2",
    "stripe": "^10.7.0",
    "tweetnacl": "^1.0.3",
    "tweetnacl-util": "^0.15.1",
    "uuid": "^9.0.1",
    "zod": "^3.22.4",
    "zod-to-json-schema": "^3.22.4"
    "typescript": "^4.9.3"
  },
  "name": "infisical-api",
  "version": "1.0.0",
  "main": "src/index.js",
  "scripts": {
    "prepare": "cd .. && npm install",
    "start": "npm run build && node build/index.js",
    "dev": "nodemon",
    "build": "rimraf ./build && tsc && cp -R ./src/templates ./build",
    "lint": "eslint . --ext .ts",
    "lint-and-fix": "eslint . --ext .ts --fix",
    "lint-staged": "lint-staged"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/Infisical/infisical-api.git"
  },
  "author": "",
  "license": "ISC",
  "bugs": {
    "url": "https://github.com/Infisical/infisical-api/issues"
  },
  "homepage": "https://github.com/Infisical/infisical-api#readme",
  "description": "",
  "devDependencies": {
    "@posthog/plugin-scaffold": "^1.3.4",
    "@types/cookie-parser": "^1.4.3",
    "@types/cors": "^2.8.12",
    "@types/express": "^4.17.14",
    "@types/jsonwebtoken": "^8.5.9",
    "@types/node": "^18.11.3",
    "@types/nodemailer": "^6.4.6",
    "@types/swagger-jsdoc": "^6.0.1",
    "@types/swagger-ui-express": "^4.1.3",
    "@typescript-eslint/eslint-plugin": "^5.40.1",
    "@typescript-eslint/parser": "^5.40.1",
    "eslint": "^8.26.0",
    "install": "^0.13.0",
    "jest": "^29.3.1",
    "nodemon": "^2.0.19",
    "npm": "^8.19.3",
    "ts-node": "^10.9.1"
  }
}
@ -1,146 +0,0 @@
/* eslint-disable */
import { mkdirSync, writeFileSync } from "fs";
import path from "path";
import promptSync from "prompt-sync";

const prompt = promptSync({
  sigint: true
});

type ComponentType = 1 | 2 | 3;

console.log(`
Component List
--------------
0. Exit
1. Service component
2. DAL component
3. Router component
`);

function getComponentType(): ComponentType {
  while (true) {
    const input = prompt("Select a component (0-3): ");
    const componentType = parseInt(input, 10);

    if (componentType === 0) {
      console.log("Exiting the program. Goodbye!");
      process.exit(0);
    } else if (componentType === 1 || componentType === 2 || componentType === 3) {
      return componentType;
    } else {
      console.log("Invalid input. Please enter 0, 1, 2, or 3.");
    }
  }
}
const componentType = getComponentType();

if (componentType === 1) {
  const componentName = prompt("Enter service name: ");
  const dir = path.join(__dirname, `../src/services/${componentName}`);
  const pascalCase = componentName
    .split("-")
    .map((el) => `${el[0].toUpperCase()}${el.slice(1)}`)
    .join("");
  const camelCase = componentName
    .split("-")
    .map((el, index) => (index === 0 ? el : `${el[0].toUpperCase()}${el.slice(1)}`))
    .join("");
  const dalTypeName = `T${pascalCase}DALFactory`;
  const dalName = `${camelCase}DALFactory`;
  const serviceTypeName = `T${pascalCase}ServiceFactory`;
  const serviceName = `${camelCase}ServiceFactory`;

  mkdirSync(dir);

  writeFileSync(
    path.join(dir, `${componentName}-dal.ts`),
    `import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";

export type ${dalTypeName} = ReturnType<typeof ${dalName}>;

export const ${dalName} = (db: TDbClient) => {

  return { };
};
`
  );

  writeFileSync(
    path.join(dir, `${componentName}-service.ts`),
    `import { ${dalTypeName} } from "./${componentName}-dal";

type ${serviceTypeName}Dep = {
  ${camelCase}DAL: ${dalTypeName};
};

export type ${serviceTypeName} = ReturnType<typeof ${serviceName}>;

export const ${serviceName} = ({ ${camelCase}DAL }: ${serviceTypeName}Dep) => {
  return {};
};
`
  );
  writeFileSync(path.join(dir, `${componentName}-types.ts`), "");
} else if (componentType === 2) {
  const componentName = prompt("Enter service name: ");
  const componentPath = prompt("Path wrt service folder: ");
  const pascalCase = componentName
    .split("-")
    .map((el) => `${el[0].toUpperCase()}${el.slice(1)}`)
    .join("");
  const camelCase = componentName
    .split("-")
    .map((el, index) => (index === 0 ? el : `${el[0].toUpperCase()}${el.slice(1)}`))
    .join("");
  const dalTypeName = `T${pascalCase}DALFactory`;
  const dalName = `${camelCase}DALFactory`;

  writeFileSync(
    path.join(__dirname, "../src/services", componentPath, `${componentName}-dal.ts`),
    `import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";

export type ${dalTypeName} = ReturnType<typeof ${dalName}>;

export const ${dalName} = (db: TDbClient) => {

  return { };
};
`
  );
} else if (componentType === 3) {
  const name = prompt("Enter router name: ");
  const version = prompt("Version number: ");
  const pascalCase = name
    .split("-")
    .map((el) => `${el[0].toUpperCase()}${el.slice(1)}`)
    .join("");
  writeFileSync(
    path.join(__dirname, `../src/server/routes/v${Number(version)}/${name}-router.ts`),
    `import { z } from "zod";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { readLimit } from "@app/server/config/rateLimiter";

export const register${pascalCase}Router = async (server: FastifyZodProvider) => {
  server.route({
    method: "GET",
    url: "/",
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({}),
      response: {
        200: z.object({})
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {}
  });
};
`
  );
}
@ -1,17 +0,0 @@
/* eslint-disable */
import { execSync } from "child_process";
import path from "path";
import promptSync from "prompt-sync";
import slugify from "@sindresorhus/slugify"

const prompt = promptSync({ sigint: true });

const migrationName = prompt("Enter name for migration: ");

// Remove spaces from migration name and replace with hyphens
const formattedMigrationName = slugify(migrationName);

execSync(
  `npx knex migrate:make --knexfile ${path.join(__dirname, "../src/db/knexfile.ts")} -x ts ${formattedMigrationName}`,
  { stdio: "inherit" }
);
@ -1,16 +0,0 @@
/* eslint-disable */
import { execSync } from "child_process";
import { readdirSync } from "fs";
import path from "path";
import promptSync from "prompt-sync";

const prompt = promptSync({ sigint: true });

const migrationName = prompt("Enter name for seedfile: ");
const fileCounter = readdirSync(path.join(__dirname, "../src/db/seeds")).length || 1;
execSync(
  `npx knex seed:make --knexfile ${path.join(__dirname, "../src/db/knexfile.ts")} -x ts ${
    fileCounter + 1
  }-${migrationName}`,
  { stdio: "inherit" }
);
@ -1,164 +0,0 @@
/* eslint-disable */
import dotenv from "dotenv";
import path from "path";
import knex from "knex";
import { writeFileSync } from "fs";

dotenv.config({
  path: path.join(__dirname, "../../.env.migration")
});

const db = knex({
  client: "pg",
  connection: process.env.DB_CONNECTION_URI
});

const getZodPrimitiveType = (type: string) => {
  switch (type) {
    case "uuid":
      return "z.string().uuid()";
    case "character varying":
      return "z.string()";
    case "ARRAY":
      return "z.string().array()";
    case "boolean":
      return "z.boolean()";
    case "jsonb":
      return "z.unknown()";
    case "json":
      return "z.unknown()";
    case "timestamp with time zone":
      return "z.date()";
    case "integer":
      return "z.number()";
    case "bigint":
      return "z.coerce.number()";
    case "text":
      return "z.string()";
    case "bytea":
      return "zodBuffer";
    default:
      throw new Error(`Invalid type: ${type}`);
  }
};

const getZodDefaultValue = (type: unknown, value: string | number | boolean | Object) => {
  if (!value || value === "null") return;
  switch (type) {
    case "uuid":
      return `.default("00000000-0000-0000-0000-000000000000")`;
    case "character varying": {
      if (value === "gen_random_uuid()") return;
      if (typeof value === "string" && value.includes("::")) {
        return `.default(${value.split("::")[0]})`;
      }
      return `.default(${value})`;
    }
    case "ARRAY":
      return `.default(${value})`;
    case "boolean":
      return `.default(${value})`;
    case "jsonb":
      return "z.string()";
    case "json":
      return "z.string()";
    case "timestamp with time zone": {
      if (value === "CURRENT_TIMESTAMP") return;
      return "z.string().datetime()";
    }
    case "integer": {
      if ((value as string).includes("nextval")) return;
      return `.default(${value})`;
    }
    case "bigint": {
      if ((value as string).includes("nextval")) return;
      return `.default(${parseInt((value as string).split("::")[0].slice(1, -1), 10)})`;
    }
    case "text":
      if (typeof value === "string" && value.includes("::")) {
        return `.default(${value.split("::")[0]})`;
      }
      return `.default(${value})`;
    default:
      throw new Error(`Invalid type: ${type}`);
  }
};

const main = async () => {
  const tables = (
    await db("information_schema.tables")
      .whereRaw("table_schema = current_schema()")
      .select<{ tableName: string }[]>("table_name as tableName")
      .orderBy("table_name")
  ).filter(
    (el) =>
      !el.tableName.includes("_migrations") &&
      !el.tableName.includes("audit_logs_") &&
      el.tableName !== "intermediate_audit_logs"
  );

  for (let i = 0; i < tables.length; i += 1) {
    const { tableName } = tables[i];
    const columns = await db(tableName).columnInfo();
    const columnNames = Object.keys(columns);

    let schema = "";
    const zodImportSet = new Set<string>();
    for (let colNum = 0; colNum < columnNames.length; colNum++) {
      const columnName = columnNames[colNum];
      const colInfo = columns[columnName];
      let ztype = getZodPrimitiveType(colInfo.type);
      if (["zodBuffer"].includes(ztype)) {
        zodImportSet.add(ztype);
      }

      // don't put optional on id
      if (colInfo.defaultValue && columnName !== "id") {
        const { defaultValue } = colInfo;
        const zSchema = getZodDefaultValue(colInfo.type, defaultValue);
        if (zSchema) {
          ztype = ztype.concat(zSchema);
        }
      }
      if (colInfo.nullable) {
        ztype = ztype.concat(".nullable().optional()");
      }
      schema = schema.concat(
        `${!schema ? "\n" : ""} ${columnName}: ${ztype}${colNum === columnNames.length - 1 ? "" : ","}\n`
      );
    }

    const dashcase = tableName.split("_").join("-");
    const pascalCase = tableName
      .split("_")
      .reduce((prev, curr) => prev + `${curr.at(0)?.toUpperCase()}${curr.slice(1).toLowerCase()}`, "");

    const zodImports = Array.from(zodImportSet);

    // the insert and update are changed to zod input type to use default cases
    writeFileSync(
      path.join(__dirname, "../src/db/schemas", `${dashcase}.ts`),
      `// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

${zodImports.length ? `import { ${zodImports.join(",")} } from \"@app/lib/zod\";` : ""}

import { TImmutableDBKeys } from "./models";

export const ${pascalCase}Schema = z.object({${schema}});

export type T${pascalCase} = z.infer<typeof ${pascalCase}Schema>;
export type T${pascalCase}Insert = Omit<z.input<typeof ${pascalCase}Schema>, TImmutableDBKeys>;
export type T${pascalCase}Update = Partial<Omit<z.input<typeof ${pascalCase}Schema>, TImmutableDBKeys>>;
`
    );
  }

  process.exit(0);
};

main();
@ -1,103 +0,0 @@
/* eslint-disable */
import promptSync from "prompt-sync";
import { execSync } from "child_process";
import path from "path";
import { existsSync } from "fs";

const prompt = promptSync({
  sigint: true
});

const sanitizeInputParam = (value: string) => {
  // Escape double quotes and wrap the entire value in double quotes
  if (value) {
    return `"${value.replace(/"/g, '\\"')}"`;
  }
  return '""';
};

const exportDb = () => {
  const exportHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate from: "));
  const exportPort = sanitizeInputParam(
    prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432"
  );
  const exportUser = sanitizeInputParam(
    prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical"
  );
  const exportPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate from: "));
  const exportDatabase = sanitizeInputParam(
    prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical"
  );

  // we do not include the audit_log and secret_sharing entries
  execSync(
    `PGDATABASE=${exportDatabase} PGPASSWORD=${exportPassword} PGHOST=${exportHost} PGPORT=${exportPort} PGUSER=${exportUser} pg_dump -Fc infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
      __dirname,
      "../src/db/backup.dump"
    )}`,
    { stdio: "inherit" }
  );
};

const importDbForOrg = () => {
  const importHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate to: "));
  const importPort = sanitizeInputParam(prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432");
  const importUser = sanitizeInputParam(
    prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical"
  );
  const importPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate to: "));
  const importDatabase = sanitizeInputParam(
    prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical"
  );
  const orgId = sanitizeInputParam(prompt("Enter the organization ID to migrate: "));

  if (!existsSync(path.join(__dirname, "../src/db/backup.dump"))) {
    console.log("File not found, please export the database first.");
    return;
  }

  execSync(
    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} pg_restore -d ${importDatabase} --verbose ${path.join(
      __dirname,
      "../src/db/backup.dump"
    )}`,
    { maxBuffer: 1024 * 1024 * 4096 }
  );

  execSync(
    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
  );

  // delete global/instance-level resources not relevant to the organization to migrate
  // users
  execSync(
    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
  );

  // identities
  execSync(
    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
  );

  // reset slack configuration in superAdmin
  execSync(
    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
  );

  console.log("Organization migrated successfully.");
};

const main = () => {
  const action = prompt(
    "Enter the action to perform\n 1. Export from existing instance.\n 2. Import org to instance.\n \n Action: "
  );
  if (action === "1") {
    exportDb();
  } else if (action === "2") {
    importDbForOrg();
  } else {
    console.log("Invalid action");
  }
};

main();
@ -1,27 +0,0 @@
/* eslint-disable @typescript-eslint/no-shadow */
import fs from "node:fs";
import path from "node:path";

function replaceMjsOccurrences(directory: string) {
  fs.readdir(directory, (err, files) => {
    if (err) throw err;
    files.forEach((file) => {
      const filePath = path.join(directory, file);
      if (fs.statSync(filePath).isDirectory()) {
        replaceMjsOccurrences(filePath);
      } else {
        fs.readFile(filePath, "utf8", (err, data) => {
          if (err) throw err;
          const result = data.replace(/\.mjs/g, ".js");
          fs.writeFile(filePath, result, "utf8", (err) => {
            if (err) throw err;
            // eslint-disable-next-line no-console
            console.log(`Updated: ${filePath}`);
          });
        });
      }
    });
  });
}

replaceMjsOccurrences("dist");
@ -1,7 +0,0 @@
import "@fastify/request-context";

declare module "@fastify/request-context" {
  interface RequestContextData {
    reqId: string;
  }
}
18 backend/src/@types/fastify-zod.d.ts vendored
@ -1,18 +0,0 @@
import { FastifyInstance, RawReplyDefaultExpression, RawRequestDefaultExpression, RawServerDefault } from "fastify";

import { CustomLogger } from "@app/lib/logger/logger";
import { ZodTypeProvider } from "@app/server/plugins/fastify-zod";

declare global {
  type FastifyZodProvider = FastifyInstance<
    RawServerDefault,
    RawRequestDefaultExpression<RawServerDefault>,
    RawReplyDefaultExpression<RawServerDefault>,
    Readonly<CustomLogger>,
    ZodTypeProvider
  >;

  // used only for testing
  const testServer: FastifyZodProvider;
  const jwtAuthToken: string;
}
241 backend/src/@types/fastify.d.ts vendored
@ -1,241 +0,0 @@
import "fastify";

import { Redis } from "ioredis";

import { TUsers } from "@app/db/schemas";
import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-service";
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { TIdentityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
import { TKmipClientDALFactory } from "@app/ee/services/kmip/kmip-client-dal";
import { TKmipOperationServiceFactory } from "@app/ee/services/kmip/kmip-operation-service";
import { TKmipServiceFactory } from "@app/ee/services/kmip/kmip-service";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
import { RateLimitConfiguration } from "@app/ee/services/rate-limit/rate-limit-types";
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service";
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
import { TSshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
import { TAuthMode } from "@app/server/plugins/auth/inject-identity";
import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
import { TAuthLoginFactory } from "@app/services/auth/auth-login-service";
import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
import { TExternalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { THsmServiceFactory } from "@app/services/hsm/hsm-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
|
||||
import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
|
||||
import { TIdentityJwtAuthServiceFactory } from "@app/services/identity-jwt-auth/identity-jwt-auth-service";
|
||||
import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
|
||||
import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
|
||||
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
|
||||
import { TIdentityTokenAuthServiceFactory } from "@app/services/identity-token-auth/identity-token-auth-service";
|
||||
import { TIdentityUaServiceFactory } from "@app/services/identity-ua/identity-ua-service";
|
||||
import { TIntegrationServiceFactory } from "@app/services/integration/integration-service";
|
||||
import { TIntegrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service";
|
||||
import { TOrgRoleServiceFactory } from "@app/services/org/org-role-service";
|
||||
import { TOrgServiceFactory } from "@app/services/org/org-service";
|
||||
import { TOrgAdminServiceFactory } from "@app/services/org-admin/org-admin-service";
|
||||
import { TPkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-service";
|
||||
import { TPkiCollectionServiceFactory } from "@app/services/pki-collection/pki-collection-service";
|
||||
import { TProjectServiceFactory } from "@app/services/project/project-service";
|
||||
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
|
||||
import { TProjectEnvServiceFactory } from "@app/services/project-env/project-env-service";
|
||||
import { TProjectKeyServiceFactory } from "@app/services/project-key/project-key-service";
|
||||
import { TProjectMembershipServiceFactory } from "@app/services/project-membership/project-membership-service";
|
||||
import { TProjectRoleServiceFactory } from "@app/services/project-role/project-role-service";
|
||||
import { TSecretServiceFactory } from "@app/services/secret/secret-service";
|
||||
import { TSecretBlindIndexServiceFactory } from "@app/services/secret-blind-index/secret-blind-index-service";
|
||||
import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-folder-service";
|
||||
import { TSecretImportServiceFactory } from "@app/services/secret-import/secret-import-service";
|
||||
import { TSecretReplicationServiceFactory } from "@app/services/secret-replication/secret-replication-service";
|
||||
import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
|
||||
import { TSecretSyncServiceFactory } from "@app/services/secret-sync/secret-sync-service";
|
||||
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
|
||||
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
|
||||
import { TSlackServiceFactory } from "@app/services/slack/slack-service";
|
||||
import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
|
||||
import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
|
||||
import { TTotpServiceFactory } from "@app/services/totp/totp-service";
|
||||
import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||
import { TUserServiceFactory } from "@app/services/user/user-service";
|
||||
import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
|
||||
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
|
||||
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";
|
||||
|
||||
declare module "@fastify/request-context" {
|
||||
interface RequestContextData {
|
||||
reqId: string;
|
||||
}
|
||||
}
|
||||
|
||||
declare module "fastify" {
|
||||
interface Session {
|
||||
callbackPort: string;
|
||||
}
|
||||
|
||||
interface FastifyRequest {
|
||||
realIp: string;
|
||||
// used for mfa session authentication
|
||||
mfa: {
|
||||
userId: string;
|
||||
orgId?: string;
|
||||
user: TUsers;
|
||||
};
|
||||
// identity injection. depending on which kinda of token the information is filled in auth
|
||||
auth: TAuthMode;
|
||||
permission: {
|
||||
authMethod: ActorAuthMethod;
|
||||
type: ActorType;
|
||||
id: string;
|
||||
orgId: string;
|
||||
};
|
||||
rateLimits: RateLimitConfiguration;
|
||||
// passport data
|
||||
passportUser: {
|
||||
isUserCompleted: string;
|
||||
providerAuthToken: string;
|
||||
};
|
||||
kmipUser: {
|
||||
projectId: string;
|
||||
clientId: string;
|
||||
name: string;
|
||||
};
|
||||
auditLogInfo: Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
|
||||
ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
|
||||
ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>>;
|
||||
}
|
||||
|
||||
interface FastifyInstance {
|
||||
redis: Redis;
|
||||
services: {
|
||||
login: TAuthLoginFactory;
|
||||
password: TAuthPasswordFactory;
|
||||
signup: TAuthSignupFactory;
|
||||
authToken: TAuthTokenServiceFactory;
|
||||
permission: TPermissionServiceFactory;
|
||||
org: TOrgServiceFactory;
|
||||
orgRole: TOrgRoleServiceFactory;
|
||||
oidc: TOidcConfigServiceFactory;
|
||||
superAdmin: TSuperAdminServiceFactory;
|
||||
user: TUserServiceFactory;
|
||||
group: TGroupServiceFactory;
|
||||
groupProject: TGroupProjectServiceFactory;
|
||||
apiKey: TApiKeyServiceFactory;
|
||||
pkiAlert: TPkiAlertServiceFactory;
|
||||
project: TProjectServiceFactory;
|
||||
projectMembership: TProjectMembershipServiceFactory;
|
||||
projectEnv: TProjectEnvServiceFactory;
|
||||
projectKey: TProjectKeyServiceFactory;
|
||||
projectRole: TProjectRoleServiceFactory;
|
||||
secret: TSecretServiceFactory;
|
||||
secretReplication: TSecretReplicationServiceFactory;
|
||||
secretTag: TSecretTagServiceFactory;
|
||||
secretImport: TSecretImportServiceFactory;
|
||||
projectBot: TProjectBotServiceFactory;
|
||||
folder: TSecretFolderServiceFactory;
|
||||
integration: TIntegrationServiceFactory;
|
||||
integrationAuth: TIntegrationAuthServiceFactory;
|
||||
webhook: TWebhookServiceFactory;
|
||||
serviceToken: TServiceTokenServiceFactory;
|
||||
identity: TIdentityServiceFactory;
|
||||
identityAccessToken: TIdentityAccessTokenServiceFactory;
|
||||
identityProject: TIdentityProjectServiceFactory;
|
||||
identityTokenAuth: TIdentityTokenAuthServiceFactory;
|
||||
identityUa: TIdentityUaServiceFactory;
|
||||
identityKubernetesAuth: TIdentityKubernetesAuthServiceFactory;
|
||||
identityGcpAuth: TIdentityGcpAuthServiceFactory;
|
||||
identityAwsAuth: TIdentityAwsAuthServiceFactory;
|
||||
identityAzureAuth: TIdentityAzureAuthServiceFactory;
|
||||
identityOidcAuth: TIdentityOidcAuthServiceFactory;
|
||||
identityJwtAuth: TIdentityJwtAuthServiceFactory;
|
||||
accessApprovalPolicy: TAccessApprovalPolicyServiceFactory;
|
||||
accessApprovalRequest: TAccessApprovalRequestServiceFactory;
|
||||
secretApprovalPolicy: TSecretApprovalPolicyServiceFactory;
|
||||
secretApprovalRequest: TSecretApprovalRequestServiceFactory;
|
||||
secretRotation: TSecretRotationServiceFactory;
|
||||
snapshot: TSecretSnapshotServiceFactory;
|
||||
saml: TSamlConfigServiceFactory;
|
||||
scim: TScimServiceFactory;
|
||||
ldap: TLdapConfigServiceFactory;
|
||||
auditLog: TAuditLogServiceFactory;
|
||||
auditLogStream: TAuditLogStreamServiceFactory;
|
||||
certificate: TCertificateServiceFactory;
|
||||
certificateTemplate: TCertificateTemplateServiceFactory;
|
||||
sshCertificateAuthority: TSshCertificateAuthorityServiceFactory;
|
||||
sshCertificateTemplate: TSshCertificateTemplateServiceFactory;
|
||||
certificateAuthority: TCertificateAuthorityServiceFactory;
|
||||
certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
|
||||
certificateEst: TCertificateEstServiceFactory;
|
||||
pkiCollection: TPkiCollectionServiceFactory;
|
||||
secretScanning: TSecretScanningServiceFactory;
|
||||
license: TLicenseServiceFactory;
|
||||
trustedIp: TTrustedIpServiceFactory;
|
||||
secretBlindIndex: TSecretBlindIndexServiceFactory;
|
||||
telemetry: TTelemetryServiceFactory;
|
||||
dynamicSecret: TDynamicSecretServiceFactory;
|
||||
dynamicSecretLease: TDynamicSecretLeaseServiceFactory;
|
||||
projectUserAdditionalPrivilege: TProjectUserAdditionalPrivilegeServiceFactory;
|
||||
identityProjectAdditionalPrivilege: TIdentityProjectAdditionalPrivilegeServiceFactory;
|
||||
identityProjectAdditionalPrivilegeV2: TIdentityProjectAdditionalPrivilegeV2ServiceFactory;
|
||||
secretSharing: TSecretSharingServiceFactory;
|
||||
rateLimit: TRateLimitServiceFactory;
|
||||
userEngagement: TUserEngagementServiceFactory;
|
||||
externalKms: TExternalKmsServiceFactory;
|
||||
hsm: THsmServiceFactory;
|
||||
orgAdmin: TOrgAdminServiceFactory;
|
||||
slack: TSlackServiceFactory;
|
||||
workflowIntegration: TWorkflowIntegrationServiceFactory;
|
||||
cmek: TCmekServiceFactory;
|
||||
migration: TExternalMigrationServiceFactory;
|
||||
externalGroupOrgRoleMapping: TExternalGroupOrgRoleMappingServiceFactory;
|
||||
projectTemplate: TProjectTemplateServiceFactory;
|
||||
totp: TTotpServiceFactory;
|
||||
appConnection: TAppConnectionServiceFactory;
|
||||
secretSync: TSecretSyncServiceFactory;
|
||||
kmip: TKmipServiceFactory;
|
||||
kmipOperation: TKmipOperationServiceFactory;
|
||||
gateway: TGatewayServiceFactory;
|
||||
};
|
||||
// this is exclusive use for middlewares in which we need to inject data
|
||||
// everywhere else access using service layer
|
||||
store: {
|
||||
user: Pick<TUserDALFactory, "findById">;
|
||||
kmipClient: Pick<TKmipClientDALFactory, "findByProjectAndClientId">;
|
||||
};
|
||||
}
|
||||
}
|
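The augmentation above is what makes the dependency-injected service layer visible to the type checker: every route handler sees server.services and req.permission with full types and no casts. A minimal sketch of a consumer, assuming a hypothetical route and a hypothetical findAllOrganizationOfUser service method (neither is part of this diff):

export const registerOrgRouter = async (server: FastifyZodProvider) => {
  server.route({
    method: "GET",
    url: "/organizations",
    handler: async (req) => {
      // req.permission is typed by the FastifyRequest augmentation above
      const organizations = await server.services.org.findAllOrganizationOfUser(req.permission.id);
      return { organizations };
    }
  });
};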
4
backend/src/@types/hdb.d.ts
vendored
@ -1,4 +0,0 @@
declare module "hdb" {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, the function returns `any`.
  function createClient(options): any;
}
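One-line ambient declarations like this are the standard escape hatch for npm packages that ship no types: they give the module a shape so imports type-check, at the cost of `any`. A sketch of a consumer (the connection parameters are placeholders, not from this diff):

import { createClient } from "hdb";

// compiles only because of the declaration above; the resulting client is untyped (any)
const client = createClient({ host: "localhost", port: 30015 });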
954
backend/src/@types/knex.d.ts
vendored
@ -1,954 +0,0 @@
import { Knex as KnexOriginal } from "knex";

import {
  TableName,
  TAccessApprovalPolicies, TAccessApprovalPoliciesApprovers, TAccessApprovalPoliciesApproversInsert,
  TAccessApprovalPoliciesApproversUpdate, TAccessApprovalPoliciesInsert, TAccessApprovalPoliciesUpdate,
  TAccessApprovalRequests, TAccessApprovalRequestsInsert, TAccessApprovalRequestsReviewers,
  TAccessApprovalRequestsReviewersInsert, TAccessApprovalRequestsReviewersUpdate, TAccessApprovalRequestsUpdate,
  TApiKeys, TApiKeysInsert, TApiKeysUpdate,
  TAuditLogs, TAuditLogsInsert, TAuditLogStreams, TAuditLogStreamsInsert, TAuditLogStreamsUpdate, TAuditLogsUpdate,
  TAuthTokens, TAuthTokenSessions, TAuthTokenSessionsInsert, TAuthTokenSessionsUpdate, TAuthTokensInsert, TAuthTokensUpdate,
  TBackupPrivateKey, TBackupPrivateKeyInsert, TBackupPrivateKeyUpdate,
  TCertificateAuthorities, TCertificateAuthoritiesInsert, TCertificateAuthoritiesUpdate,
  TCertificateAuthorityCerts, TCertificateAuthorityCertsInsert, TCertificateAuthorityCertsUpdate,
  TCertificateAuthorityCrl, TCertificateAuthorityCrlInsert, TCertificateAuthorityCrlUpdate,
  TCertificateAuthoritySecret, TCertificateAuthoritySecretInsert, TCertificateAuthoritySecretUpdate,
  TCertificateBodies, TCertificateBodiesInsert, TCertificateBodiesUpdate,
  TCertificates, TCertificateSecrets, TCertificateSecretsInsert, TCertificateSecretsUpdate,
  TCertificatesInsert, TCertificatesUpdate,
  TCertificateTemplateEstConfigs, TCertificateTemplateEstConfigsInsert, TCertificateTemplateEstConfigsUpdate,
  TCertificateTemplates, TCertificateTemplatesInsert, TCertificateTemplatesUpdate,
  TDynamicSecretLeases, TDynamicSecretLeasesInsert, TDynamicSecretLeasesUpdate,
  TDynamicSecrets, TDynamicSecretsInsert, TDynamicSecretsUpdate,
  TExternalKms, TExternalKmsInsert, TExternalKmsUpdate,
  TGateways, TGatewaysInsert, TGatewaysUpdate,
  TGitAppInstallSessions, TGitAppInstallSessionsInsert, TGitAppInstallSessionsUpdate,
  TGitAppOrg, TGitAppOrgInsert, TGitAppOrgUpdate,
  TGroupProjectMembershipRoles, TGroupProjectMembershipRolesInsert, TGroupProjectMembershipRolesUpdate,
  TGroupProjectMemberships, TGroupProjectMembershipsInsert, TGroupProjectMembershipsUpdate,
  TGroups, TGroupsInsert, TGroupsUpdate,
  TIdentities, TIdentitiesInsert, TIdentitiesUpdate,
  TIdentityAccessTokens, TIdentityAccessTokensInsert, TIdentityAccessTokensUpdate,
  TIdentityAwsAuths, TIdentityAwsAuthsInsert, TIdentityAwsAuthsUpdate,
  TIdentityAzureAuths, TIdentityAzureAuthsInsert, TIdentityAzureAuthsUpdate,
  TIdentityGcpAuths, TIdentityGcpAuthsInsert, TIdentityGcpAuthsUpdate,
  TIdentityJwtAuths, TIdentityJwtAuthsInsert, TIdentityJwtAuthsUpdate,
  TIdentityKubernetesAuths, TIdentityKubernetesAuthsInsert, TIdentityKubernetesAuthsUpdate,
  TIdentityMetadata, TIdentityMetadataInsert, TIdentityMetadataUpdate,
  TIdentityOidcAuths, TIdentityOidcAuthsInsert, TIdentityOidcAuthsUpdate,
  TIdentityOrgMemberships, TIdentityOrgMembershipsInsert, TIdentityOrgMembershipsUpdate,
  TIdentityProjectAdditionalPrivilege, TIdentityProjectAdditionalPrivilegeInsert, TIdentityProjectAdditionalPrivilegeUpdate,
  TIdentityProjectMembershipRole, TIdentityProjectMembershipRoleInsert, TIdentityProjectMembershipRoleUpdate,
  TIdentityProjectMemberships, TIdentityProjectMembershipsInsert, TIdentityProjectMembershipsUpdate,
  TIdentityTokenAuths, TIdentityTokenAuthsInsert, TIdentityTokenAuthsUpdate,
  TIdentityUaClientSecrets, TIdentityUaClientSecretsInsert, TIdentityUaClientSecretsUpdate,
  TIdentityUniversalAuths, TIdentityUniversalAuthsInsert, TIdentityUniversalAuthsUpdate,
  TIncidentContacts, TIncidentContactsInsert, TIncidentContactsUpdate,
  TIntegrationAuths, TIntegrationAuthsInsert, TIntegrationAuthsUpdate,
  TIntegrations, TIntegrationsInsert, TIntegrationsUpdate,
  TInternalKms, TInternalKmsInsert, TInternalKmsUpdate,
  TKmipClientCertificates, TKmipClientCertificatesInsert, TKmipClientCertificatesUpdate,
  TKmipClients, TKmipClientsInsert, TKmipClientsUpdate,
  TKmipOrgConfigs, TKmipOrgConfigsInsert, TKmipOrgConfigsUpdate,
  TKmipOrgServerCertificates, TKmipOrgServerCertificatesInsert, TKmipOrgServerCertificatesUpdate,
  TKmsKeys, TKmsKeysInsert, TKmsKeysUpdate,
  TKmsKeyVersions, TKmsKeyVersionsInsert, TKmsKeyVersionsUpdate,
  TKmsRootConfig, TKmsRootConfigInsert, TKmsRootConfigUpdate,
  TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate,
  TLdapGroupMaps, TLdapGroupMapsInsert, TLdapGroupMapsUpdate,
  TOidcConfigs, TOidcConfigsInsert, TOidcConfigsUpdate,
  TOrganizations, TOrganizationsInsert, TOrganizationsUpdate,
  TOrgBots, TOrgBotsInsert, TOrgBotsUpdate,
  TOrgGatewayConfig, TOrgGatewayConfigInsert, TOrgGatewayConfigUpdate,
  TOrgMemberships, TOrgMembershipsInsert, TOrgMembershipsUpdate,
  TOrgRoles, TOrgRolesInsert, TOrgRolesUpdate,
  TPkiAlerts, TPkiAlertsInsert, TPkiAlertsUpdate,
  TPkiCollectionItems, TPkiCollectionItemsInsert, TPkiCollectionItemsUpdate,
  TPkiCollections, TPkiCollectionsInsert, TPkiCollectionsUpdate,
  TProjectBots, TProjectBotsInsert, TProjectBotsUpdate,
  TProjectEnvironments, TProjectEnvironmentsInsert, TProjectEnvironmentsUpdate,
  TProjectGateways, TProjectGatewaysInsert, TProjectGatewaysUpdate,
  TProjectKeys, TProjectKeysInsert, TProjectKeysUpdate,
  TProjectMemberships, TProjectMembershipsInsert, TProjectMembershipsUpdate,
  TProjectRoles, TProjectRolesInsert, TProjectRolesUpdate,
  TProjects, TProjectsInsert, TProjectSlackConfigs, TProjectSlackConfigsInsert, TProjectSlackConfigsUpdate,
  TProjectSplitBackfillIds, TProjectSplitBackfillIdsInsert, TProjectSplitBackfillIdsUpdate, TProjectsUpdate,
  TProjectTemplates, TProjectTemplatesInsert, TProjectTemplatesUpdate,
  TProjectUserAdditionalPrivilege, TProjectUserAdditionalPrivilegeInsert, TProjectUserAdditionalPrivilegeUpdate,
  TProjectUserMembershipRoles, TProjectUserMembershipRolesInsert, TProjectUserMembershipRolesUpdate,
  TRateLimit, TRateLimitInsert, TRateLimitUpdate,
  TResourceMetadata, TResourceMetadataInsert, TResourceMetadataUpdate,
  TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate,
  TScimTokens, TScimTokensInsert, TScimTokensUpdate,
  TSecretApprovalPolicies, TSecretApprovalPoliciesApprovers, TSecretApprovalPoliciesApproversInsert,
  TSecretApprovalPoliciesApproversUpdate, TSecretApprovalPoliciesInsert, TSecretApprovalPoliciesUpdate,
  TSecretApprovalRequests, TSecretApprovalRequestSecretTags, TSecretApprovalRequestSecretTagsInsert,
  TSecretApprovalRequestSecretTagsUpdate, TSecretApprovalRequestSecretTagsV2,
  TSecretApprovalRequestSecretTagsV2Insert, TSecretApprovalRequestSecretTagsV2Update,
  TSecretApprovalRequestsInsert, TSecretApprovalRequestsReviewers, TSecretApprovalRequestsReviewersInsert,
  TSecretApprovalRequestsReviewersUpdate, TSecretApprovalRequestsSecrets, TSecretApprovalRequestsSecretsInsert,
  TSecretApprovalRequestsSecretsUpdate, TSecretApprovalRequestsSecretsV2, TSecretApprovalRequestsSecretsV2Insert,
  TSecretApprovalRequestsSecretsV2Update, TSecretApprovalRequestsUpdate,
  TSecretBlindIndexes, TSecretBlindIndexesInsert, TSecretBlindIndexesUpdate,
  TSecretFolders, TSecretFoldersInsert, TSecretFoldersUpdate,
  TSecretFolderVersions, TSecretFolderVersionsInsert, TSecretFolderVersionsUpdate,
  TSecretImports, TSecretImportsInsert, TSecretImportsUpdate,
  TSecretReferences, TSecretReferencesInsert, TSecretReferencesUpdate,
  TSecretReferencesV2, TSecretReferencesV2Insert, TSecretReferencesV2Update,
  TSecretRotationOutputs, TSecretRotationOutputsInsert, TSecretRotationOutputsUpdate,
  TSecretRotationOutputV2, TSecretRotationOutputV2Insert, TSecretRotationOutputV2Update,
  TSecretRotations, TSecretRotationsInsert, TSecretRotationsUpdate,
  TSecrets, TSecretScanningGitRisks, TSecretScanningGitRisksInsert, TSecretScanningGitRisksUpdate,
  TSecretSharing, TSecretSharingInsert, TSecretSharingUpdate, TSecretsInsert,
  TSecretSnapshotFolders, TSecretSnapshotFoldersInsert, TSecretSnapshotFoldersUpdate,
  TSecretSnapshots, TSecretSnapshotSecrets, TSecretSnapshotSecretsInsert, TSecretSnapshotSecretsUpdate,
  TSecretSnapshotSecretsV2, TSecretSnapshotSecretsV2Insert, TSecretSnapshotSecretsV2Update,
  TSecretSnapshotsInsert, TSecretSnapshotsUpdate, TSecretsUpdate,
  TSecretTagJunction, TSecretTagJunctionInsert, TSecretTagJunctionUpdate,
  TSecretTags, TSecretTagsInsert, TSecretTagsUpdate,
  TSecretVersions, TSecretVersionsInsert, TSecretVersionsUpdate,
  TSecretVersionTagJunction, TSecretVersionTagJunctionInsert, TSecretVersionTagJunctionUpdate,
  TSecretVersionV2TagJunction, TSecretVersionV2TagJunctionInsert, TSecretVersionV2TagJunctionUpdate,
  TServiceTokens, TServiceTokensInsert, TServiceTokensUpdate,
  TSlackIntegrations, TSlackIntegrationsInsert, TSlackIntegrationsUpdate,
  TSshCertificateAuthorities, TSshCertificateAuthoritiesInsert, TSshCertificateAuthoritiesUpdate,
  TSshCertificateAuthoritySecrets, TSshCertificateAuthoritySecretsInsert, TSshCertificateAuthoritySecretsUpdate,
  TSshCertificateBodies, TSshCertificateBodiesInsert, TSshCertificateBodiesUpdate,
  TSshCertificates, TSshCertificatesInsert, TSshCertificatesUpdate,
  TSshCertificateTemplates, TSshCertificateTemplatesInsert, TSshCertificateTemplatesUpdate,
  TSuperAdmin, TSuperAdminInsert, TSuperAdminUpdate,
  TTotpConfigs, TTotpConfigsInsert, TTotpConfigsUpdate,
  TTrustedIps, TTrustedIpsInsert, TTrustedIpsUpdate,
  TUserActions, TUserActionsInsert, TUserActionsUpdate,
  TUserAliases, TUserAliasesInsert, TUserAliasesUpdate,
  TUserEncryptionKeys, TUserEncryptionKeysInsert, TUserEncryptionKeysUpdate,
  TUserGroupMembership, TUserGroupMembershipInsert, TUserGroupMembershipUpdate,
  TUsers, TUsersInsert, TUsersUpdate,
  TWebhooks, TWebhooksInsert, TWebhooksUpdate,
  TWorkflowIntegrations, TWorkflowIntegrationsInsert, TWorkflowIntegrationsUpdate
} from "@app/db/schemas";
import { TAppConnections, TAppConnectionsInsert, TAppConnectionsUpdate } from "@app/db/schemas/app-connections";
import {
  TExternalGroupOrgRoleMappings, TExternalGroupOrgRoleMappingsInsert, TExternalGroupOrgRoleMappingsUpdate
} from "@app/db/schemas/external-group-org-role-mappings";
import { TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate } from "@app/db/schemas/secret-syncs";
import {
  TSecretV2TagJunction, TSecretV2TagJunctionInsert, TSecretV2TagJunctionUpdate
} from "@app/db/schemas/secret-v2-tag-junction";
import { TSecretVersionsV2, TSecretVersionsV2Insert, TSecretVersionsV2Update } from "@app/db/schemas/secret-versions-v2";
import { TSecretsV2, TSecretsV2Insert, TSecretsV2Update } from "@app/db/schemas/secrets-v2";

declare module "knex" {
  namespace Knex {
    interface QueryInterface {
      primaryNode(): KnexOriginal;
      replicaNode(): KnexOriginal;
    }
  }
}

declare module "knex/types/tables" {
  interface Tables {
    [TableName.Users]: KnexOriginal.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
    [TableName.Groups]: KnexOriginal.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
    [TableName.SshCertificateAuthority]: KnexOriginal.CompositeTableType<TSshCertificateAuthorities, TSshCertificateAuthoritiesInsert, TSshCertificateAuthoritiesUpdate>;
    [TableName.SshCertificateAuthoritySecret]: KnexOriginal.CompositeTableType<TSshCertificateAuthoritySecrets, TSshCertificateAuthoritySecretsInsert, TSshCertificateAuthoritySecretsUpdate>;
    [TableName.SshCertificateTemplate]: KnexOriginal.CompositeTableType<TSshCertificateTemplates, TSshCertificateTemplatesInsert, TSshCertificateTemplatesUpdate>;
    [TableName.SshCertificate]: KnexOriginal.CompositeTableType<TSshCertificates, TSshCertificatesInsert, TSshCertificatesUpdate>;
    [TableName.SshCertificateBody]: KnexOriginal.CompositeTableType<TSshCertificateBodies, TSshCertificateBodiesInsert, TSshCertificateBodiesUpdate>;
    [TableName.CertificateAuthority]: KnexOriginal.CompositeTableType<TCertificateAuthorities, TCertificateAuthoritiesInsert, TCertificateAuthoritiesUpdate>;
    [TableName.CertificateAuthorityCert]: KnexOriginal.CompositeTableType<TCertificateAuthorityCerts, TCertificateAuthorityCertsInsert, TCertificateAuthorityCertsUpdate>;
    [TableName.CertificateAuthoritySecret]: KnexOriginal.CompositeTableType<TCertificateAuthoritySecret, TCertificateAuthoritySecretInsert, TCertificateAuthoritySecretUpdate>;
    [TableName.CertificateAuthorityCrl]: KnexOriginal.CompositeTableType<TCertificateAuthorityCrl, TCertificateAuthorityCrlInsert, TCertificateAuthorityCrlUpdate>;
    [TableName.Certificate]: KnexOriginal.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
    [TableName.CertificateTemplate]: KnexOriginal.CompositeTableType<TCertificateTemplates, TCertificateTemplatesInsert, TCertificateTemplatesUpdate>;
    [TableName.CertificateTemplateEstConfig]: KnexOriginal.CompositeTableType<TCertificateTemplateEstConfigs, TCertificateTemplateEstConfigsInsert, TCertificateTemplateEstConfigsUpdate>;
    [TableName.CertificateBody]: KnexOriginal.CompositeTableType<TCertificateBodies, TCertificateBodiesInsert, TCertificateBodiesUpdate>;
    [TableName.CertificateSecret]: KnexOriginal.CompositeTableType<TCertificateSecrets, TCertificateSecretsInsert, TCertificateSecretsUpdate>;
    [TableName.PkiAlert]: KnexOriginal.CompositeTableType<TPkiAlerts, TPkiAlertsInsert, TPkiAlertsUpdate>;
    [TableName.PkiCollection]: KnexOriginal.CompositeTableType<TPkiCollections, TPkiCollectionsInsert, TPkiCollectionsUpdate>;
    [TableName.PkiCollectionItem]: KnexOriginal.CompositeTableType<TPkiCollectionItems, TPkiCollectionItemsInsert, TPkiCollectionItemsUpdate>;
    [TableName.UserGroupMembership]: KnexOriginal.CompositeTableType<TUserGroupMembership, TUserGroupMembershipInsert, TUserGroupMembershipUpdate>;
    [TableName.GroupProjectMembership]: KnexOriginal.CompositeTableType<TGroupProjectMemberships, TGroupProjectMembershipsInsert, TGroupProjectMembershipsUpdate>;
    [TableName.GroupProjectMembershipRole]: KnexOriginal.CompositeTableType<TGroupProjectMembershipRoles, TGroupProjectMembershipRolesInsert, TGroupProjectMembershipRolesUpdate>;
    [TableName.UserAliases]: KnexOriginal.CompositeTableType<TUserAliases, TUserAliasesInsert, TUserAliasesUpdate>;
    [TableName.UserEncryptionKey]: KnexOriginal.CompositeTableType<TUserEncryptionKeys, TUserEncryptionKeysInsert, TUserEncryptionKeysUpdate>;
    [TableName.AuthTokens]: KnexOriginal.CompositeTableType<TAuthTokens, TAuthTokensInsert, TAuthTokensUpdate>;
    [TableName.AuthTokenSession]: KnexOriginal.CompositeTableType<TAuthTokenSessions, TAuthTokenSessionsInsert, TAuthTokenSessionsUpdate>;
    [TableName.BackupPrivateKey]: KnexOriginal.CompositeTableType<TBackupPrivateKey, TBackupPrivateKeyInsert, TBackupPrivateKeyUpdate>;
    [TableName.Organization]: KnexOriginal.CompositeTableType<TOrganizations, TOrganizationsInsert, TOrganizationsUpdate>;
    [TableName.OrgMembership]: KnexOriginal.CompositeTableType<TOrgMemberships, TOrgMembershipsInsert, TOrgMembershipsUpdate>;
    [TableName.OrgRoles]: KnexOriginal.CompositeTableType<TOrgRoles, TOrgRolesInsert, TOrgRolesUpdate>;
    [TableName.IncidentContact]: KnexOriginal.CompositeTableType<TIncidentContacts, TIncidentContactsInsert, TIncidentContactsUpdate>;
    [TableName.UserAction]: KnexOriginal.CompositeTableType<TUserActions, TUserActionsInsert, TUserActionsUpdate>;
    [TableName.SuperAdmin]: KnexOriginal.CompositeTableType<TSuperAdmin, TSuperAdminInsert, TSuperAdminUpdate>;
    [TableName.ApiKey]: KnexOriginal.CompositeTableType<TApiKeys, TApiKeysInsert, TApiKeysUpdate>;
    [TableName.Project]: KnexOriginal.CompositeTableType<TProjects, TProjectsInsert, TProjectsUpdate>;
    [TableName.ProjectMembership]: KnexOriginal.CompositeTableType<TProjectMemberships, TProjectMembershipsInsert, TProjectMembershipsUpdate>;
    [TableName.Environment]: KnexOriginal.CompositeTableType<TProjectEnvironments, TProjectEnvironmentsInsert, TProjectEnvironmentsUpdate>;
    [TableName.ProjectBot]: KnexOriginal.CompositeTableType<TProjectBots, TProjectBotsInsert, TProjectBotsUpdate>;
    [TableName.ProjectUserMembershipRole]: KnexOriginal.CompositeTableType<TProjectUserMembershipRoles, TProjectUserMembershipRolesInsert, TProjectUserMembershipRolesUpdate>;
    [TableName.ProjectRoles]: KnexOriginal.CompositeTableType<TProjectRoles, TProjectRolesInsert, TProjectRolesUpdate>;
    [TableName.ProjectUserAdditionalPrivilege]: KnexOriginal.CompositeTableType<TProjectUserAdditionalPrivilege, TProjectUserAdditionalPrivilegeInsert, TProjectUserAdditionalPrivilegeUpdate>;
    [TableName.ProjectKeys]: KnexOriginal.CompositeTableType<TProjectKeys, TProjectKeysInsert, TProjectKeysUpdate>;
    [TableName.Secret]: KnexOriginal.CompositeTableType<TSecrets, TSecretsInsert, TSecretsUpdate>;
    [TableName.SecretReference]: KnexOriginal.CompositeTableType<TSecretReferences, TSecretReferencesInsert, TSecretReferencesUpdate>;
    [TableName.SecretBlindIndex]: KnexOriginal.CompositeTableType<TSecretBlindIndexes, TSecretBlindIndexesInsert, TSecretBlindIndexesUpdate>;
    [TableName.SecretVersion]: KnexOriginal.CompositeTableType<TSecretVersions, TSecretVersionsInsert, TSecretVersionsUpdate>;
    [TableName.SecretFolder]: KnexOriginal.CompositeTableType<TSecretFolders, TSecretFoldersInsert, TSecretFoldersUpdate>;
    [TableName.SecretFolderVersion]: KnexOriginal.CompositeTableType<TSecretFolderVersions, TSecretFolderVersionsInsert, TSecretFolderVersionsUpdate>;
    [TableName.SecretSharing]: KnexOriginal.CompositeTableType<TSecretSharing, TSecretSharingInsert, TSecretSharingUpdate>;
    [TableName.RateLimit]: KnexOriginal.CompositeTableType<TRateLimit, TRateLimitInsert, TRateLimitUpdate>;
    [TableName.SecretTag]: KnexOriginal.CompositeTableType<TSecretTags, TSecretTagsInsert, TSecretTagsUpdate>;
    [TableName.SecretImport]: KnexOriginal.CompositeTableType<TSecretImports, TSecretImportsInsert, TSecretImportsUpdate>;
    [TableName.Integration]: KnexOriginal.CompositeTableType<TIntegrations, TIntegrationsInsert, TIntegrationsUpdate>;
    [TableName.Webhook]: KnexOriginal.CompositeTableType<TWebhooks, TWebhooksInsert, TWebhooksUpdate>;
    [TableName.ServiceToken]: KnexOriginal.CompositeTableType<TServiceTokens, TServiceTokensInsert, TServiceTokensUpdate>;
    [TableName.IntegrationAuth]: KnexOriginal.CompositeTableType<TIntegrationAuths, TIntegrationAuthsInsert, TIntegrationAuthsUpdate>;
    [TableName.Identity]: KnexOriginal.CompositeTableType<TIdentities, TIdentitiesInsert, TIdentitiesUpdate>;
    [TableName.IdentityTokenAuth]: KnexOriginal.CompositeTableType<TIdentityTokenAuths, TIdentityTokenAuthsInsert, TIdentityTokenAuthsUpdate>;
    [TableName.IdentityUniversalAuth]: KnexOriginal.CompositeTableType<TIdentityUniversalAuths, TIdentityUniversalAuthsInsert, TIdentityUniversalAuthsUpdate>;
    [TableName.IdentityMetadata]: KnexOriginal.CompositeTableType<TIdentityMetadata, TIdentityMetadataInsert, TIdentityMetadataUpdate>;
    [TableName.IdentityKubernetesAuth]: KnexOriginal.CompositeTableType<TIdentityKubernetesAuths, TIdentityKubernetesAuthsInsert, TIdentityKubernetesAuthsUpdate>;
    [TableName.IdentityGcpAuth]: KnexOriginal.CompositeTableType<TIdentityGcpAuths, TIdentityGcpAuthsInsert, TIdentityGcpAuthsUpdate>;
    [TableName.IdentityAwsAuth]: KnexOriginal.CompositeTableType<TIdentityAwsAuths, TIdentityAwsAuthsInsert, TIdentityAwsAuthsUpdate>;
    [TableName.IdentityAzureAuth]: KnexOriginal.CompositeTableType<TIdentityAzureAuths, TIdentityAzureAuthsInsert, TIdentityAzureAuthsUpdate>;
    [TableName.IdentityOidcAuth]: KnexOriginal.CompositeTableType<TIdentityOidcAuths, TIdentityOidcAuthsInsert, TIdentityOidcAuthsUpdate>;
    [TableName.IdentityJwtAuth]: KnexOriginal.CompositeTableType<TIdentityJwtAuths, TIdentityJwtAuthsInsert, TIdentityJwtAuthsUpdate>;
    [TableName.IdentityUaClientSecret]: KnexOriginal.CompositeTableType<TIdentityUaClientSecrets, TIdentityUaClientSecretsInsert, TIdentityUaClientSecretsUpdate>;
    [TableName.IdentityAccessToken]: KnexOriginal.CompositeTableType<TIdentityAccessTokens, TIdentityAccessTokensInsert, TIdentityAccessTokensUpdate>;
    [TableName.IdentityOrgMembership]: KnexOriginal.CompositeTableType<TIdentityOrgMemberships, TIdentityOrgMembershipsInsert, TIdentityOrgMembershipsUpdate>;
    [TableName.IdentityProjectMembership]: KnexOriginal.CompositeTableType<TIdentityProjectMemberships, TIdentityProjectMembershipsInsert, TIdentityProjectMembershipsUpdate>;
    [TableName.IdentityProjectMembershipRole]: KnexOriginal.CompositeTableType<TIdentityProjectMembershipRole, TIdentityProjectMembershipRoleInsert, TIdentityProjectMembershipRoleUpdate>;
    [TableName.IdentityProjectAdditionalPrivilege]: KnexOriginal.CompositeTableType<TIdentityProjectAdditionalPrivilege, TIdentityProjectAdditionalPrivilegeInsert, TIdentityProjectAdditionalPrivilegeUpdate>;

    [TableName.AccessApprovalPolicy]: KnexOriginal.CompositeTableType<TAccessApprovalPolicies, TAccessApprovalPoliciesInsert, TAccessApprovalPoliciesUpdate>;
    [TableName.AccessApprovalPolicyApprover]: KnexOriginal.CompositeTableType<TAccessApprovalPoliciesApprovers, TAccessApprovalPoliciesApproversInsert, TAccessApprovalPoliciesApproversUpdate>;
    [TableName.AccessApprovalRequest]: KnexOriginal.CompositeTableType<TAccessApprovalRequests, TAccessApprovalRequestsInsert, TAccessApprovalRequestsUpdate>;
    [TableName.AccessApprovalRequestReviewer]: KnexOriginal.CompositeTableType<TAccessApprovalRequestsReviewers, TAccessApprovalRequestsReviewersInsert, TAccessApprovalRequestsReviewersUpdate>;

    [TableName.ScimToken]: KnexOriginal.CompositeTableType<TScimTokens, TScimTokensInsert, TScimTokensUpdate>;
    [TableName.SecretApprovalPolicy]: KnexOriginal.CompositeTableType<TSecretApprovalPolicies, TSecretApprovalPoliciesInsert, TSecretApprovalPoliciesUpdate>;
    [TableName.SecretApprovalPolicyApprover]: KnexOriginal.CompositeTableType<TSecretApprovalPoliciesApprovers, TSecretApprovalPoliciesApproversInsert, TSecretApprovalPoliciesApproversUpdate>;
    [TableName.SecretApprovalRequest]: KnexOriginal.CompositeTableType<TSecretApprovalRequests, TSecretApprovalRequestsInsert, TSecretApprovalRequestsUpdate>;
    [TableName.SecretApprovalRequestReviewer]: KnexOriginal.CompositeTableType<TSecretApprovalRequestsReviewers, TSecretApprovalRequestsReviewersInsert, TSecretApprovalRequestsReviewersUpdate>;
    [TableName.SecretApprovalRequestSecret]: KnexOriginal.CompositeTableType<TSecretApprovalRequestsSecrets, TSecretApprovalRequestsSecretsInsert, TSecretApprovalRequestsSecretsUpdate>;
    [TableName.SecretApprovalRequestSecretTag]: KnexOriginal.CompositeTableType<TSecretApprovalRequestSecretTags, TSecretApprovalRequestSecretTagsInsert, TSecretApprovalRequestSecretTagsUpdate>;
    [TableName.SecretRotation]: KnexOriginal.CompositeTableType<TSecretRotations, TSecretRotationsInsert, TSecretRotationsUpdate>;
    [TableName.SecretRotationOutput]: KnexOriginal.CompositeTableType<TSecretRotationOutputs, TSecretRotationOutputsInsert, TSecretRotationOutputsUpdate>;
    [TableName.Snapshot]: KnexOriginal.CompositeTableType<TSecretSnapshots, TSecretSnapshotsInsert, TSecretSnapshotsUpdate>;
    [TableName.SnapshotSecret]: KnexOriginal.CompositeTableType<TSecretSnapshotSecrets, TSecretSnapshotSecretsInsert, TSecretSnapshotSecretsUpdate>;
    [TableName.SnapshotFolder]: KnexOriginal.CompositeTableType<TSecretSnapshotFolders, TSecretSnapshotFoldersInsert, TSecretSnapshotFoldersUpdate>;
    [TableName.DynamicSecret]: KnexOriginal.CompositeTableType<TDynamicSecrets, TDynamicSecretsInsert, TDynamicSecretsUpdate>;
    [TableName.DynamicSecretLease]: KnexOriginal.CompositeTableType<TDynamicSecretLeases, TDynamicSecretLeasesInsert, TDynamicSecretLeasesUpdate>;
    [TableName.SamlConfig]: KnexOriginal.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
    [TableName.OidcConfig]: KnexOriginal.CompositeTableType<TOidcConfigs, TOidcConfigsInsert, TOidcConfigsUpdate>;
    [TableName.LdapConfig]: KnexOriginal.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
    [TableName.LdapGroupMap]: KnexOriginal.CompositeTableType<TLdapGroupMaps, TLdapGroupMapsInsert, TLdapGroupMapsUpdate>;
    [TableName.OrgBot]: KnexOriginal.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
    [TableName.AuditLog]: KnexOriginal.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
    [TableName.AuditLogStream]: KnexOriginal.CompositeTableType<TAuditLogStreams, TAuditLogStreamsInsert, TAuditLogStreamsUpdate>;
    [TableName.GitAppInstallSession]: KnexOriginal.CompositeTableType<TGitAppInstallSessions, TGitAppInstallSessionsInsert, TGitAppInstallSessionsUpdate>;
    [TableName.GitAppOrg]: KnexOriginal.CompositeTableType<TGitAppOrg, TGitAppOrgInsert, TGitAppOrgUpdate>;
    [TableName.SecretScanningGitRisk]: KnexOriginal.CompositeTableType<TSecretScanningGitRisks, TSecretScanningGitRisksInsert, TSecretScanningGitRisksUpdate>;
    [TableName.TrustedIps]: KnexOriginal.CompositeTableType<TTrustedIps, TTrustedIpsInsert, TTrustedIpsUpdate>;
    [TableName.SecretV2]: KnexOriginal.CompositeTableType<TSecretsV2, TSecretsV2Insert, TSecretsV2Update>;
    [TableName.SecretVersionV2]: KnexOriginal.CompositeTableType<TSecretVersionsV2, TSecretVersionsV2Insert, TSecretVersionsV2Update>;
    [TableName.SecretReferenceV2]: KnexOriginal.CompositeTableType<TSecretReferencesV2, TSecretReferencesV2Insert, TSecretReferencesV2Update>;
    // Junction tables
    [TableName.SecretV2JnTag]: KnexOriginal.CompositeTableType<TSecretV2TagJunction, TSecretV2TagJunctionInsert, TSecretV2TagJunctionUpdate>;
    [TableName.JnSecretTag]: KnexOriginal.CompositeTableType<TSecretTagJunction, TSecretTagJunctionInsert, TSecretTagJunctionUpdate>;
    [TableName.SecretVersionTag]: KnexOriginal.CompositeTableType<TSecretVersionTagJunction, TSecretVersionTagJunctionInsert, TSecretVersionTagJunctionUpdate>;
    [TableName.SecretVersionV2Tag]: KnexOriginal.CompositeTableType<TSecretVersionV2TagJunction, TSecretVersionV2TagJunctionInsert, TSecretVersionV2TagJunctionUpdate>;
    [TableName.SnapshotSecretV2]: KnexOriginal.CompositeTableType<TSecretSnapshotSecretsV2, TSecretSnapshotSecretsV2Insert, TSecretSnapshotSecretsV2Update>;
    [TableName.SecretApprovalRequestSecretV2]: KnexOriginal.CompositeTableType<TSecretApprovalRequestsSecretsV2, TSecretApprovalRequestsSecretsV2Insert, TSecretApprovalRequestsSecretsV2Update>;
    [TableName.SecretApprovalRequestSecretTagV2]: KnexOriginal.CompositeTableType<TSecretApprovalRequestSecretTagsV2, TSecretApprovalRequestSecretTagsV2Insert, TSecretApprovalRequestSecretTagsV2Update>;
    [TableName.SecretRotationOutputV2]: KnexOriginal.CompositeTableType<TSecretRotationOutputV2, TSecretRotationOutputV2Insert, TSecretRotationOutputV2Update>;
    // KMS service
    [TableName.KmsServerRootConfig]: KnexOriginal.CompositeTableType<TKmsRootConfig, TKmsRootConfigInsert, TKmsRootConfigUpdate>;
    [TableName.InternalKms]: KnexOriginal.CompositeTableType<TInternalKms, TInternalKmsInsert, TInternalKmsUpdate>;
    [TableName.ExternalKms]: KnexOriginal.CompositeTableType<TExternalKms, TExternalKmsInsert, TExternalKmsUpdate>;
    [TableName.KmsKey]: KnexOriginal.CompositeTableType<TKmsKeys, TKmsKeysInsert, TKmsKeysUpdate>;
    [TableName.KmsKeyVersion]: KnexOriginal.CompositeTableType<TKmsKeyVersions, TKmsKeyVersionsInsert, TKmsKeyVersionsUpdate>;
    [TableName.SlackIntegrations]: KnexOriginal.CompositeTableType<TSlackIntegrations, TSlackIntegrationsInsert, TSlackIntegrationsUpdate>;
    [TableName.ProjectSlackConfigs]: KnexOriginal.CompositeTableType<TProjectSlackConfigs, TProjectSlackConfigsInsert, TProjectSlackConfigsUpdate>;
    [TableName.WorkflowIntegrations]: KnexOriginal.CompositeTableType<TWorkflowIntegrations, TWorkflowIntegrationsInsert, TWorkflowIntegrationsUpdate>;
    [TableName.ExternalGroupOrgRoleMapping]: KnexOriginal.CompositeTableType<TExternalGroupOrgRoleMappings, TExternalGroupOrgRoleMappingsInsert, TExternalGroupOrgRoleMappingsUpdate>;
    [TableName.ProjectTemplates]: KnexOriginal.CompositeTableType<TProjectTemplates, TProjectTemplatesInsert, TProjectTemplatesUpdate>;
    [TableName.TotpConfig]: KnexOriginal.CompositeTableType<TTotpConfigs, TTotpConfigsInsert, TTotpConfigsUpdate>;
    [TableName.ProjectSplitBackfillIds]: KnexOriginal.CompositeTableType<TProjectSplitBackfillIds, TProjectSplitBackfillIdsInsert, TProjectSplitBackfillIdsUpdate>;
    [TableName.ResourceMetadata]: KnexOriginal.CompositeTableType<TResourceMetadata, TResourceMetadataInsert, TResourceMetadataUpdate>;
    [TableName.AppConnection]: KnexOriginal.CompositeTableType<TAppConnections, TAppConnectionsInsert, TAppConnectionsUpdate>;
    [TableName.SecretSync]: KnexOriginal.CompositeTableType<TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate>;
    [TableName.KmipClient]: KnexOriginal.CompositeTableType<TKmipClients, TKmipClientsInsert, TKmipClientsUpdate>;
    [TableName.KmipOrgConfig]: KnexOriginal.CompositeTableType<TKmipOrgConfigs, TKmipOrgConfigsInsert, TKmipOrgConfigsUpdate>;
    [TableName.KmipOrgServerCertificates]: KnexOriginal.CompositeTableType<TKmipOrgServerCertificates, TKmipOrgServerCertificatesInsert, TKmipOrgServerCertificatesUpdate>;
    [TableName.KmipClientCertificates]: KnexOriginal.CompositeTableType<TKmipClientCertificates, TKmipClientCertificatesInsert, TKmipClientCertificatesUpdate>;
    [TableName.Gateway]: KnexOriginal.CompositeTableType<TGateways, TGatewaysInsert, TGatewaysUpdate>;
    [TableName.ProjectGateway]: KnexOriginal.CompositeTableType<TProjectGateways, TProjectGatewaysInsert, TProjectGatewaysUpdate>;
    [TableName.OrgGatewayConfig]: KnexOriginal.CompositeTableType<TOrgGatewayConfig, TOrgGatewayConfigInsert, TOrgGatewayConfigUpdate>;
  }
}
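The "knex/types/tables" augmentation above is what turns db(TableName.X) into a typed query: rows, inserts, and updates are all checked against the schema types. A minimal sketch of the effect, assuming TUsers has an email column (the helper itself is not part of this diff):

import knex from "knex";
import { TableName } from "@app/db/schemas";

const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });

// inferred as TUsers | undefined via the Tables interface above
const findUserByEmail = (email: string) => db(TableName.Users).where({ email }).first();

The QueryInterface augmentation (primaryNode() / replicaNode()) similarly lets call sites route a query to the primary or a read replica without losing these table types.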
4
backend/src/@types/ldif.d.ts
vendored
@ -1,4 +0,0 @@
declare module "ldif" {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, the function returns `any`.
  function parse(input: string, ...args: any[]): any;
}
1
backend/src/@types/passport-gitlab2.d.ts
vendored
@ -1 +0,0 @@
declare module "passport-gitlab2";
@ -1,105 +0,0 @@
import path from "node:path";

import dotenv from "dotenv";
import { Knex } from "knex";
import { Logger } from "pino";

import { PgSqlLock } from "./keystore/keystore";

dotenv.config();

type TArgs = {
  auditLogDb?: Knex;
  applicationDb: Knex;
  logger: Logger;
};

const isProduction = process.env.NODE_ENV === "production";
const migrationConfig = {
  directory: path.join(__dirname, "./db/migrations"),
  loadExtensions: [".mjs", ".ts"],
  tableName: "infisical_migrations"
};

const migrationStatusCheckErrorHandler = (err: Error) => {
  // happens on first boot, when the migration table itself has not been created yet
  // error: select * from "infisical_migrations" - relation "infisical_migrations" does not exist
  if (err?.message?.includes("does not exist")) {
    return true;
  }
  throw err;
};

export const runMigrations = async ({ applicationDb, auditLogDb, logger }: TArgs) => {
  try {
    // akhilmhdh(Feb 10 2025): 2 years from now remove this
    if (isProduction) {
      const migrationTable = migrationConfig.tableName;
      const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
      if (hasMigrationTable) {
        const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
        if (firstFile?.name?.includes(".ts")) {
          await applicationDb(migrationTable).update({
            name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
          });
        }
      }
      if (auditLogDb) {
        const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
        if (hasMigrationTableInAuditLog) {
          const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
          if (firstFile?.name?.includes(".ts")) {
            await auditLogDb(migrationTable).update({
              name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
            });
          }
        }
      }
    }

    const shouldRunMigration = Boolean(
      await applicationDb.migrate.status(migrationConfig).catch(migrationStatusCheckErrorHandler)
    ); // db.length - code.length
    if (!shouldRunMigration) {
      logger.info("No migrations pending: Skipping migration process.");
      return;
    }

    if (auditLogDb) {
      await auditLogDb.transaction(async (tx) => {
        await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
        logger.info("Running audit log migrations.");

        const didPreviousInstanceRunMigration = !(await auditLogDb.migrate
          .status(migrationConfig)
          .catch(migrationStatusCheckErrorHandler));
        if (didPreviousInstanceRunMigration) {
          logger.info("No audit log migrations pending: Applied by previous instance. Skipping migration process.");
          return;
        }

        await auditLogDb.migrate.latest(migrationConfig);
        logger.info("Finished audit log migrations.");
      });
    }

    await applicationDb.transaction(async (tx) => {
      await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
      logger.info("Running application migrations.");

      const didPreviousInstanceRunMigration = !(await applicationDb.migrate
        .status(migrationConfig)
        .catch(migrationStatusCheckErrorHandler));
      if (didPreviousInstanceRunMigration) {
        logger.info("No application migrations pending: Applied by previous instance. Skipping migration process.");
        return;
      }

      await applicationDb.migrate.latest(migrationConfig);
      logger.info("Finished application migrations.");
    });
  } catch (err) {
    logger.error(err, "Boot up migration failed");
    process.exit(1);
  }
};
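The pg_advisory_xact_lock calls are the coordination mechanism here: when several instances boot at once, each opens a transaction and requests the same lock ID, so exactly one runs migrations while the rest block and then observe an up-to-date migration status. A minimal standalone sketch of the pattern (the lock key below is a placeholder; the real IDs live in PgSqlLock):

import { Knex } from "knex";

const BOOT_LOCK_ID = 2001; // hypothetical key; any integer shared by all instances works

export const withBootLock = (db: Knex, criticalSection: () => Promise<void>) =>
  db.transaction(async (tx) => {
    // blocks until the lock is free; released automatically at COMMIT/ROLLBACK
    await tx.raw("SELECT pg_advisory_xact_lock(?)", [BOOT_LOCK_ID]);
    await criticalSection();
  });

Because the lock is transaction-scoped, a crashed instance can never leave it held.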
79
backend/src/config/index.ts
Normal file
@ -0,0 +1,79 @@
const PORT = process.env.PORT || 4000;
const EMAIL_TOKEN_LIFETIME = process.env.EMAIL_TOKEN_LIFETIME! || '86400';
const ENCRYPTION_KEY = process.env.ENCRYPTION_KEY!;
const JWT_AUTH_LIFETIME = process.env.JWT_AUTH_LIFETIME! || '10d';
const JWT_AUTH_SECRET = process.env.JWT_AUTH_SECRET!;
const JWT_REFRESH_LIFETIME = process.env.JWT_REFRESH_LIFETIME! || '90d';
const JWT_REFRESH_SECRET = process.env.JWT_REFRESH_SECRET!;
const JWT_SERVICE_SECRET = process.env.JWT_SERVICE_SECRET!;
const JWT_SIGNUP_LIFETIME = process.env.JWT_SIGNUP_LIFETIME! || '15m';
const JWT_SIGNUP_SECRET = process.env.JWT_SIGNUP_SECRET!;
const MONGO_URL = process.env.MONGO_URL!;
const NODE_ENV = process.env.NODE_ENV! || 'production';
const CLIENT_SECRET_HEROKU = process.env.CLIENT_SECRET_HEROKU!;
const CLIENT_ID_HEROKU = process.env.CLIENT_ID_HEROKU!;
const CLIENT_ID_VERCEL = process.env.CLIENT_ID_VERCEL!;
const CLIENT_ID_NETLIFY = process.env.CLIENT_ID_NETLIFY!;
const CLIENT_SECRET_VERCEL = process.env.CLIENT_SECRET_VERCEL!;
const CLIENT_SECRET_NETLIFY = process.env.CLIENT_SECRET_NETLIFY!;
const CLIENT_SLUG_VERCEL = process.env.CLIENT_SLUG_VERCEL!;
const POSTHOG_HOST = process.env.POSTHOG_HOST! || 'https://app.posthog.com';
const POSTHOG_PROJECT_API_KEY =
  process.env.POSTHOG_PROJECT_API_KEY! ||
  'phc_nSin8j5q2zdhpFDI1ETmFNUIuTG4DwKVyIigrY10XiE';
const PRIVATE_KEY = process.env.PRIVATE_KEY!;
const PUBLIC_KEY = process.env.PUBLIC_KEY!;
const SENTRY_DSN = process.env.SENTRY_DSN!;
const SITE_URL = process.env.SITE_URL!;
const SMTP_HOST = process.env.SMTP_HOST! || 'smtp.gmail.com';
const SMTP_PORT = process.env.SMTP_PORT! || 587;
const SMTP_NAME = process.env.SMTP_NAME!;
const SMTP_USERNAME = process.env.SMTP_USERNAME!;
const SMTP_PASSWORD = process.env.SMTP_PASSWORD!;
const STRIPE_PRODUCT_CARD_AUTH = process.env.STRIPE_PRODUCT_CARD_AUTH!;
const STRIPE_PRODUCT_PRO = process.env.STRIPE_PRODUCT_PRO!;
const STRIPE_PRODUCT_STARTER = process.env.STRIPE_PRODUCT_STARTER!;
const STRIPE_PUBLISHABLE_KEY = process.env.STRIPE_PUBLISHABLE_KEY!;
const STRIPE_SECRET_KEY = process.env.STRIPE_SECRET_KEY!;
const STRIPE_WEBHOOK_SECRET = process.env.STRIPE_WEBHOOK_SECRET!;
const TELEMETRY_ENABLED = process.env.TELEMETRY_ENABLED !== 'false';

export {
  PORT,
  EMAIL_TOKEN_LIFETIME,
  ENCRYPTION_KEY,
  JWT_AUTH_LIFETIME,
  JWT_AUTH_SECRET,
  JWT_REFRESH_LIFETIME,
  JWT_REFRESH_SECRET,
  JWT_SERVICE_SECRET,
  JWT_SIGNUP_LIFETIME,
  JWT_SIGNUP_SECRET,
  MONGO_URL,
  NODE_ENV,
  CLIENT_ID_HEROKU,
  CLIENT_ID_VERCEL,
  CLIENT_ID_NETLIFY,
  CLIENT_SECRET_HEROKU,
  CLIENT_SECRET_VERCEL,
  CLIENT_SECRET_NETLIFY,
  CLIENT_SLUG_VERCEL,
  POSTHOG_HOST,
  POSTHOG_PROJECT_API_KEY,
  PRIVATE_KEY,
  PUBLIC_KEY,
  SENTRY_DSN,
  SITE_URL,
  SMTP_HOST,
  SMTP_PORT,
  SMTP_NAME,
  SMTP_USERNAME,
  SMTP_PASSWORD,
  STRIPE_PRODUCT_CARD_AUTH,
  STRIPE_PRODUCT_PRO,
  STRIPE_PRODUCT_STARTER,
  STRIPE_PUBLISHABLE_KEY,
  STRIPE_SECRET_KEY,
  STRIPE_WEBHOOK_SECRET,
  TELEMETRY_ENABLED
};
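One caveat worth noting: the non-null assertions (`!`) above only silence the TypeScript compiler; they perform no runtime check, so a missing required variable surfaces only when it is first used. A minimal fail-fast guard could look like this — the variable list and helper name are illustrative assumptions, not part of the commit:

// Hypothetical startup guard, not part of this commit.
const REQUIRED_VARS = [
  'ENCRYPTION_KEY',
  'JWT_AUTH_SECRET',
  'JWT_REFRESH_SECRET',
  'JWT_SIGNUP_SECRET',
  'MONGO_URL',
  'SITE_URL'
] as const;

export const validateEnv = (): void => {
  // Collect every missing variable so the operator sees the full list at once.
  const missing = REQUIRED_VARS.filter((name) => !process.env[name]);
  if (missing.length > 0) {
    throw new Error(`Missing required environment variables: ${missing.join(', ')}`);
  }
};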
222
backend/src/controllers/authController.ts
Normal file
@ -0,0 +1,222 @@
/* eslint-disable @typescript-eslint/no-var-requires */
import { Request, Response } from 'express';
import jwt from 'jsonwebtoken';
import * as Sentry from '@sentry/node';
import * as bigintConversion from 'bigint-conversion';
const jsrp = require('jsrp');
import { User } from '../models';
import { createToken, issueTokens, clearTokens } from '../helpers/auth';
import {
  NODE_ENV,
  JWT_AUTH_LIFETIME,
  JWT_AUTH_SECRET,
  JWT_REFRESH_SECRET
} from '../config';

declare module 'jsonwebtoken' {
  export interface UserIDJwtPayload extends jwt.JwtPayload {
    userId: string;
  }
}

// in-memory store mapping email -> SRP session state between login steps 1 and 2
const clientPublicKeys: any = {};

/**
 * Log in user step 1: return [salt] and [serverPublicKey] as part of step 1 of the SRP protocol
 * @param req
 * @param res
 * @returns
 */
export const login1 = async (req: Request, res: Response) => {
  try {
    const {
      email,
      clientPublicKey
    }: { email: string; clientPublicKey: string } = req.body;

    const user = await User.findOne({
      email
    }).select('+salt +verifier');

    if (!user) throw new Error('Failed to find user');

    const server = new jsrp.server();
    server.init(
      {
        salt: user.salt,
        verifier: user.verifier
      },
      () => {
        // generate server-side public key
        const serverPublicKey = server.getPublicKey();
        clientPublicKeys[email] = {
          clientPublicKey,
          serverBInt: bigintConversion.bigintToBuf(server.bInt)
        };

        return res.status(200).send({
          serverPublicKey,
          salt: user.salt
        });
      }
    );
  } catch (err) {
    Sentry.setUser(null);
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to start authentication process'
    });
  }
};

/**
 * Log in user step 2: complete step 2 of the SRP protocol and return the token and the user's
 * (encrypted) private key
 * @param req
 * @param res
 * @returns
 */
export const login2 = async (req: Request, res: Response) => {
  try {
    const { email, clientProof } = req.body;
    const user = await User.findOne({
      email
    }).select('+salt +verifier +publicKey +encryptedPrivateKey +iv +tag');

    if (!user) throw new Error('Failed to find user');

    const server = new jsrp.server();
    server.init(
      {
        salt: user.salt,
        verifier: user.verifier,
        b: clientPublicKeys[email].serverBInt
      },
      async () => {
        server.setClientPublicKey(clientPublicKeys[email].clientPublicKey);

        // compare server and client shared keys
        if (server.checkClientProof(clientProof)) {
          // issue tokens
          const tokens = await issueTokens({ userId: user._id.toString() });

          // store (refresh) token in httpOnly cookie
          res.cookie('jid', tokens.refreshToken, {
            httpOnly: true,
            path: '/',
            sameSite: 'strict',
            secure: NODE_ENV === 'production'
          });

          // return (access) token in response
          return res.status(200).send({
            token: tokens.token,
            publicKey: user.publicKey,
            encryptedPrivateKey: user.encryptedPrivateKey,
            iv: user.iv,
            tag: user.tag
          });
        }

        return res.status(400).send({
          message: 'Failed to authenticate. Try again?'
        });
      }
    );
  } catch (err) {
    Sentry.setUser(null);
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to authenticate. Try again?'
    });
  }
};
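For context, the two endpoints above implement the server half of SRP; the password itself never crosses the wire. A rough sketch of the matching client flow, assuming jsrp's client API and illustrative endpoint paths (not taken from this commit):

/* eslint-disable @typescript-eslint/no-var-requires */
const jsrp = require('jsrp');
import axios from 'axios';

// Hypothetical client counterpart to login1/login2; the routes are assumptions.
const srpLogin = async (email: string, password: string) => {
  const client = new jsrp.client();
  await new Promise<void>((resolve) => client.init({ username: email, password }, resolve));

  // Step 1: exchange public ephemeral values and receive the salt.
  const { data: step1 } = await axios.post('/api/v1/auth/login1', {
    email,
    clientPublicKey: client.getPublicKey()
  });

  client.setSalt(step1.salt);
  client.setServerPublicKey(step1.serverPublicKey);

  // Step 2: prove knowledge of the password without ever sending it.
  const { data: step2 } = await axios.post('/api/v1/auth/login2', {
    email,
    clientProof: client.getProof()
  });

  return step2; // { token, publicKey, encryptedPrivateKey, iv, tag }
};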
/**
 * Log out user
 * @param req
 * @param res
 * @returns
 */
export const logout = async (req: Request, res: Response) => {
  try {
    await clearTokens({
      userId: req.user._id.toString()
    });

    // clear httpOnly cookie
    res.cookie('jid', '', {
      httpOnly: true,
      path: '/',
      sameSite: 'strict',
      secure: NODE_ENV === 'production'
    });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to log out'
    });
  }

  return res.status(200).send({
    message: 'Successfully logged out.'
  });
};

/**
 * Return whether the user is authenticated
 * @param req
 * @param res
 * @returns
 */
export const checkAuth = async (req: Request, res: Response) =>
  res.status(200).send({
    message: 'Authenticated'
  });

/**
 * Return a new (access) token by redeeming the refresh token
 * @param req
 * @param res
 * @returns
 */
export const getNewToken = async (req: Request, res: Response) => {
  try {
    const refreshToken = req.cookies.jid;

    if (!refreshToken) {
      throw new Error('Failed to find token in request cookies');
    }

    const decodedToken = <jwt.UserIDJwtPayload>(
      jwt.verify(refreshToken, JWT_REFRESH_SECRET)
    );

    const user = await User.findOne({
      _id: decodedToken.userId
    }).select('+publicKey');

    if (!user) throw new Error('Failed to authenticate: user not found');
    if (!user?.publicKey)
      throw new Error('Failed to authenticate: account not fully set up');

    const token = createToken({
      payload: {
        userId: decodedToken.userId
      },
      expiresIn: JWT_AUTH_LIFETIME,
      secret: JWT_AUTH_SECRET
    });

    return res.status(200).send({
      token
    });
  } catch (err) {
    Sentry.setUser(null);
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Invalid request'
    });
  }
};
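Because the refresh token lives in an httpOnly cookie, a client cannot read it directly; it simply replays the cookie against this endpoint. A minimal sketch — the route path is an assumption, while the response shape mirrors getNewToken above:

import axios from 'axios';

// withCredentials makes the browser send the httpOnly 'jid' cookie set by login2.
const refreshAccessToken = async (): Promise<string> => {
  const { data } = await axios.post(
    '/api/v1/auth/token', // hypothetical route
    {},
    { withCredentials: true }
  );
  return data.token;
};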
107
backend/src/controllers/botController.ts
Normal file
@ -0,0 +1,107 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import { Bot, BotKey } from '../models';
import { createBot } from '../helpers/bot';

// named BotKeyPayload to avoid clashing with the BotKey model imported above
interface BotKeyPayload {
  encryptedKey: string;
  nonce: string;
}

/**
 * Return bot for workspace with id [workspaceId]. If a workspace bot doesn't exist,
 * then create and return a new bot.
 * @param req
 * @param res
 * @returns
 */
export const getBotByWorkspaceId = async (req: Request, res: Response) => {
  let bot;
  try {
    const { workspaceId } = req.params;

    bot = await Bot.findOne({
      workspace: workspaceId
    });

    if (!bot) {
      // case: bot doesn't exist for workspace with id [workspaceId]
      // -> create a new bot and return it
      bot = await createBot({
        name: 'Infisical Bot',
        workspaceId
      });
    }
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to get bot for workspace'
    });
  }

  return res.status(200).send({
    bot
  });
};

/**
 * Return bot with id [req.bot._id] with active state set to [isActive].
 * @param req
 * @param res
 * @returns
 */
export const setBotActiveState = async (req: Request, res: Response) => {
  let bot;
  try {
    const { isActive, botKey }: { isActive: boolean, botKey: BotKeyPayload } = req.body;

    if (isActive) {
      // case: bot state set to active -> share workspace key with bot
      if (!botKey?.encryptedKey || !botKey?.nonce) {
        return res.status(400).send({
          message: 'Failed to set bot state to active - missing bot key'
        });
      }

      await BotKey.findOneAndUpdate({
        workspace: req.bot.workspace
      }, {
        encryptedKey: botKey.encryptedKey,
        nonce: botKey.nonce,
        sender: req.user._id,
        bot: req.bot._id,
        workspace: req.bot.workspace
      }, {
        upsert: true,
        new: true
      });
    } else {
      // case: bot state set to inactive -> delete bot's workspace key
      await BotKey.deleteOne({
        bot: req.bot._id
      });
    }

    bot = await Bot.findOneAndUpdate({
      _id: req.bot._id
    }, {
      isActive
    }, {
      new: true
    });

    if (!bot) throw new Error('Failed to update bot active state');
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to update bot active state'
    });
  }

  return res.status(200).send({
    bot
  });
};
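The encryptedKey/nonce pair consumed above is produced client-side by encrypting the workspace key to the bot's public key, so only the bot's private key can recover it. A rough sketch of building that payload with tweetnacl — the library choice and field sources are assumptions, not shown in this commit:

import nacl from 'tweetnacl';
import { encodeBase64, decodeBase64, decodeUTF8 } from 'tweetnacl-util';

// Hypothetical helper: encrypt the plaintext workspace key under the bot's public key.
const buildBotKey = (
  workspaceKey: string,       // plaintext workspace key held by the user
  botPublicKey: string,       // base64 public key returned alongside the bot
  userPrivateKey: Uint8Array  // user's decrypted private key
) => {
  const nonce = nacl.randomBytes(nacl.box.nonceLength); // 24 random bytes
  const encryptedKey = nacl.box(
    decodeUTF8(workspaceKey),
    nonce,
    decodeBase64(botPublicKey),
    userPrivateKey
  );
  return {
    encryptedKey: encodeBase64(encryptedKey),
    nonce: encodeBase64(nonce)
  };
};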
35
backend/src/controllers/index.ts
Normal file
@ -0,0 +1,35 @@
import * as authController from './authController';
import * as botController from './botController';
import * as integrationAuthController from './integrationAuthController';
import * as integrationController from './integrationController';
import * as keyController from './keyController';
import * as membershipController from './membershipController';
import * as membershipOrgController from './membershipOrgController';
import * as organizationController from './organizationController';
import * as passwordController from './passwordController';
import * as secretController from './secretController';
import * as serviceTokenController from './serviceTokenController';
import * as signupController from './signupController';
import * as stripeController from './stripeController';
import * as userActionController from './userActionController';
import * as userController from './userController';
import * as workspaceController from './workspaceController';

export {
  authController,
  botController,
  integrationAuthController,
  integrationController,
  keyController,
  membershipController,
  membershipOrgController,
  organizationController,
  passwordController,
  secretController,
  serviceTokenController,
  signupController,
  stripeController,
  userActionController,
  userController,
  workspaceController
};
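This barrel export keeps each route file down to a single controllers import. A hypothetical wiring of the auth routes (the actual route files are outside this excerpt, and auth middleware is omitted for brevity):

import express from 'express';
import { authController } from '../controllers';

const router = express.Router();

// logout and getNewToken would sit behind auth middleware in practice.
router.post('/login1', authController.login1);
router.post('/login2', authController.login2);
router.post('/logout', authController.logout);
router.post('/token', authController.getNewToken);

export default router;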
104
backend/src/controllers/integrationAuthController.ts
Normal file
@ -0,0 +1,104 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import axios from 'axios';
import { readFileSync } from 'fs';
import { IntegrationAuth, Integration } from '../models';
import { INTEGRATION_SET, INTEGRATION_OPTIONS, ENV_DEV } from '../variables';
import { IntegrationService } from '../services';
import { getApps, revokeAccess } from '../integrations';

/**
 * Return the list of supported integration options
 * @param req
 * @param res
 * @returns
 */
export const getIntegrationOptions = async (
  req: Request,
  res: Response
) => {
  return res.status(200).send({
    integrationOptions: INTEGRATION_OPTIONS
  });
};

/**
 * Perform OAuth2 code-token exchange as part of integration [integration] for workspace with id [workspaceId]
 * @param req
 * @param res
 * @returns
 */
export const oAuthExchange = async (
  req: Request,
  res: Response
) => {
  try {
    const { workspaceId, code, integration } = req.body;

    if (!INTEGRATION_SET.has(integration))
      throw new Error('Failed to validate integration');

    await IntegrationService.handleOAuthExchange({
      workspaceId,
      integration,
      code
    });
  } catch (err) {
    Sentry.setUser(null);
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to complete OAuth2 code-token exchange'
    });
  }

  return res.status(200).send({
    message: 'Successfully enabled integration authorization'
  });
};
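On the frontend, the provider's OAuth callback page would forward the authorization code to oAuthExchange. A sketch, where the route path and page logic are assumptions rather than part of this commit:

import axios from 'axios';

// Hypothetical handler for the integration's OAuth callback page, invoked after
// the provider redirects back with ?code=... in the URL.
const completeOAuthExchange = async (workspaceId: string, integration: string) => {
  const code = new URLSearchParams(window.location.search).get('code');
  if (!code) throw new Error('Missing authorization code in callback URL');

  await axios.post('/api/v1/integration-auth/oauth-token', { // assumed route
    workspaceId,
    integration,
    code
  });
};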
/**
 * Return list of applications allowed for integration with integration authorization id [integrationAuthId]
 * @param req
 * @param res
 * @returns
 */
export const getIntegrationAuthApps = async (req: Request, res: Response) => {
  let apps;
  try {
    apps = await getApps({
      integrationAuth: req.integrationAuth,
      accessToken: req.accessToken
    });
  } catch (err) {
    Sentry.setUser(null);
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to get integration authorization applications'
    });
  }

  return res.status(200).send({
    apps
  });
};

/**
 * Delete integration authorization with id [integrationAuthId]
 * @param req
 * @param res
 * @returns
 */
export const deleteIntegrationAuth = async (req: Request, res: Response) => {
  try {
    const { integrationAuthId } = req.params;

    await revokeAccess({
      integrationAuth: req.integrationAuth,
      accessToken: req.accessToken
    });
  } catch (err) {
    Sentry.setUser(null);
    Sentry.captureException(err);
    return res.status(400).send({
      message: 'Failed to delete integration authorization'
    });
  }

  return res.status(200).send({
    message: 'Successfully deleted integration authorization'
  });
};