mirror of
https://github.com/Infisical/infisical.git
synced 2025-03-24 00:15:26 +00:00
Compare commits
195 Commits
add-cert-t
...
ldap
Author | SHA1 | Date | |
---|---|---|---|
5c8b886d7b | |||
51a5bf8181 | |||
822d0692db | |||
e527d99654 | |||
628c641580 | |||
40ccab6576 | |||
9cc3e58561 | |||
1f3fded404 | |||
74b5e8cbeb | |||
522a03c2ad | |||
56bf82e4f6 | |||
163ccd6cdb | |||
06f3a6d262 | |||
b641bbf229 | |||
feb7563eab | |||
7594929042 | |||
f1b7653a52 | |||
0cb6d052e0 | |||
ceb135fc94 | |||
b75289f074 | |||
de86705e64 | |||
6ca56143d9 | |||
ef0e652557 | |||
89e109e404 | |||
48062d9680 | |||
d11fda3be5 | |||
0df5f845fb | |||
ca59488b62 | |||
3a05ae4b27 | |||
dd009182e8 | |||
8ac7a29893 | |||
8a17cd3f5d | |||
99fe43f459 | |||
2e3b10ccfc | |||
79196b0081 | |||
b76ff28414 | |||
2894cf791a | |||
c040b0ca9a | |||
15f60aa7dd | |||
6f68d304ea | |||
0b98feea50 | |||
43d40d7475 | |||
309a106f13 | |||
74d73590a1 | |||
b42b5614c9 | |||
72b89cb989 | |||
36d8b22598 | |||
201dcd971c | |||
ab90745312 | |||
622106045e | |||
e64302b789 | |||
901a7fc294 | |||
359694dd47 | |||
57489a7578 | |||
a4205a8662 | |||
dbf177d667 | |||
f078aec54c | |||
5dfe62e306 | |||
b89925c61c | |||
440a58a49b | |||
6d0bea6d5f | |||
10a40c8ab2 | |||
b910ceacfc | |||
cb66386e13 | |||
889df3dcb1 | |||
ae53f03f71 | |||
7ae024724d | |||
0b2bc1d345 | |||
da5eca3e68 | |||
3375d3ff85 | |||
35a5c9a67f | |||
7d495cfea5 | |||
2eca9d8200 | |||
4d707eee8a | |||
76bd85efa7 | |||
d140e4f3c9 | |||
80623c03f4 | |||
ed6c6e8d1e | |||
7e044ad9ff | |||
8f2b54514c | |||
5f5f46eddf | |||
3174896d37 | |||
919e184305 | |||
c7d08745fc | |||
d6d780a7b4 | |||
03e965ec5a | |||
cd0df2d617 | |||
e72e6dd6ee | |||
327c5e2429 | |||
f29dd6effa | |||
7987a1ea2b | |||
e6036175c1 | |||
171a70ddc1 | |||
a845f4ee5c | |||
71cd4425b4 | |||
deb22bf8ad | |||
8e25631fb0 | |||
0912903e0d | |||
1b1a95ab78 | |||
cf4f26ab90 | |||
84249f535b | |||
c7bbe82f4a | |||
d8d2741868 | |||
f45074a2dd | |||
564b6b8ef6 | |||
fafd963a8a | |||
9e38076d45 | |||
d3a6da187b | |||
7a90fa472d | |||
756c1e5098 | |||
0dd34eae60 | |||
846e2f21cc | |||
d8860e1ce3 | |||
68296c1b99 | |||
2192985291 | |||
16acace648 | |||
e3e4a98cd6 | |||
4afb20ad0d | |||
60134cf8ac | |||
22d5f97793 | |||
3fa529dcb0 | |||
d12c4b7580 | |||
5feb942d79 | |||
b6f3cf512e | |||
4dbee7df06 | |||
323c412f5e | |||
ae2706542c | |||
d5861493bf | |||
53044f3d39 | |||
93268f5767 | |||
318dedb987 | |||
291edf71aa | |||
342665783e | |||
6a7241d7d1 | |||
51fb680f9c | |||
0710c9a84a | |||
e46bce1520 | |||
3919393d33 | |||
c8b7c37aee | |||
2641fccce5 | |||
213f2ed29b | |||
4dcd000dd1 | |||
c2fe6eb90c | |||
f64cb10282 | |||
a0ea2627ed | |||
5c40b538af | |||
8dd94a4e10 | |||
041c4a20a0 | |||
4a2a5f42a8 | |||
9fcdf17a04 | |||
97ac8cb45a | |||
e952659415 | |||
1f3f061a06 | |||
5096ce3bdc | |||
621683f787 | |||
f63850e9e9 | |||
4ee0a2ec6c | |||
9569d3971a | |||
443b8f747b | |||
803393c385 | |||
8e95189fd2 | |||
c5f38b6ade | |||
30a1c5ac86 | |||
bbad2ba047 | |||
1445df7015 | |||
ae4a2089d5 | |||
0b924b6e45 | |||
1fcac4cadf | |||
155e315347 | |||
3dce03180f | |||
4748b546c2 | |||
fb8c4bd415 | |||
48bf41ac8c | |||
1ad916a784 | |||
c91456838e | |||
79efe64504 | |||
3641875b24 | |||
a04a9a1bd3 | |||
04d729df92 | |||
5ca1b1d77e | |||
2d9526ad8d | |||
768cc64af6 | |||
a28431bfe7 | |||
91068229bf | |||
9ba4b939a4 | |||
1c088b3a58 | |||
a33c50b75a | |||
8c31566e17 | |||
bfee74ff4e | |||
97a7b66c6c | |||
639c78358f | |||
5053069bfc | |||
b1d049c677 | |||
9012012503 | |||
a8678c14e8 |
.env.example
.github
.goreleaser.yamlMakefilebackend
package-lock.jsonpackage.json
src
@types
db
ee
routes/v1
services
audit-log
ldap-config
license
permission
saml-config
scim
secret-approval-request
secret-scanning/secret-scanning-queue
server
lib
plugins
routes
services
auth
identity-ua
integration-auth
integration-app-list.tsintegration-auth-service.tsintegration-auth-types.tsintegration-list.tsintegration-sync-secret.ts
org
project-bot
project-membership
project
secret-folder
secret
smtp/templates
super-admin
telemetry
user-alias
user
cli
agent-config.yaml
docker-compose.dev.ymlpackages
docs
changelog
cli/commands
documentation/platform
images
infisical-agent
integrations/cloud
internals
mint.jsonself-hosting
frontend
package-lock.jsonpackage.json
package-lock.jsonpackage.jsonsrc
components
basic/table
v2
Checkbox
Menu
RadioGroup
UpgradeProjectAlert
context/OrgPermissionContext
hooks/api
layouts
pages
views
IntegrationsPage/components/IntegrationsSection
Login
Org/MembersPage/components
OrgMembersTab/components/OrgMembersSection
OrgRoleTabSection/OrgRoleModifySection
Project/MembersPage/components/MemberListTab
SecretMainPage
SecretMainPage.tsx
components
SecretOverviewPage
Settings
OrgSettingsPage/components/OrgAuthTab
PersonalSettingsPage
ChangePasswordSection
PersonalAuthTab
SecuritySection
ProjectSettingsPage/components/ProjectNameChangeSection
Signup
@ -4,7 +4,7 @@
|
||||
ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218
|
||||
|
||||
# Required
|
||||
DB_CONNECTION_URI=postgres://infisical:infisical@db:5432/infisical
|
||||
DB_CONNECTION_URI=postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB}
|
||||
|
||||
# JWT
|
||||
# Required secrets to sign JWT tokens
|
||||
|
190
.github/resources/changelog-generator.py
vendored
Normal file
190
.github/resources/changelog-generator.py
vendored
Normal file
@ -0,0 +1,190 @@
|
||||
# inspired by https://www.photoroom.com/inside-photoroom/how-we-automated-our-changelog-thanks-to-chatgpt
|
||||
import os
|
||||
import requests
|
||||
import re
|
||||
from openai import OpenAI
|
||||
import subprocess
|
||||
from datetime import datetime
|
||||
|
||||
import uuid
|
||||
|
||||
# Constants
|
||||
REPO_OWNER = "infisical"
|
||||
REPO_NAME = "infisical"
|
||||
TOKEN = os.environ["GITHUB_TOKEN"]
|
||||
SLACK_WEBHOOK_URL = os.environ["SLACK_WEBHOOK_URL"]
|
||||
OPENAI_API_KEY = os.environ["OPENAI_API_KEY"]
|
||||
SLACK_MSG_COLOR = "#36a64f"
|
||||
|
||||
headers = {
|
||||
"Authorization": f"Bearer {TOKEN}",
|
||||
"Accept": "application/vnd.github+json",
|
||||
"X-GitHub-Api-Version": "2022-11-28",
|
||||
}
|
||||
|
||||
|
||||
def set_multiline_output(name, value):
|
||||
with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
|
||||
delimiter = uuid.uuid1()
|
||||
print(f'{name}<<{delimiter}', file=fh)
|
||||
print(value, file=fh)
|
||||
print(delimiter, file=fh)
|
||||
|
||||
def post_changelog_to_slack(changelog, tag):
|
||||
slack_payload = {
|
||||
"text": "Hey team, it's changelog time! :wave:",
|
||||
"attachments": [
|
||||
{
|
||||
"color": SLACK_MSG_COLOR,
|
||||
"title": f"🗓️Infisical Changelog - {tag}",
|
||||
"text": changelog,
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
response = requests.post(SLACK_WEBHOOK_URL, json=slack_payload)
|
||||
|
||||
if response.status_code != 200:
|
||||
raise Exception("Failed to post changelog to Slack.")
|
||||
|
||||
def find_previous_release_tag(release_tag:str):
|
||||
previous_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0", f"{release_tag}^"]).decode("utf-8").strip()
|
||||
while not(previous_tag.startswith("infisical/")):
|
||||
previous_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0", f"{previous_tag}^"]).decode("utf-8").strip()
|
||||
return previous_tag
|
||||
|
||||
def get_tag_creation_date(tag_name):
|
||||
url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/git/refs/tags/{tag_name}"
|
||||
response = requests.get(url, headers=headers)
|
||||
response.raise_for_status()
|
||||
commit_sha = response.json()['object']['sha']
|
||||
|
||||
commit_url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/commits/{commit_sha}"
|
||||
commit_response = requests.get(commit_url, headers=headers)
|
||||
commit_response.raise_for_status()
|
||||
creation_date = commit_response.json()['commit']['author']['date']
|
||||
|
||||
return datetime.strptime(creation_date, '%Y-%m-%dT%H:%M:%SZ')
|
||||
|
||||
|
||||
def fetch_prs_between_tags(previous_tag_date:datetime, release_tag_date:datetime):
|
||||
# Use GitHub API to fetch PRs merged between the commits
|
||||
url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/pulls?state=closed&merged=true"
|
||||
response = requests.get(url, headers=headers)
|
||||
|
||||
if response.status_code != 200:
|
||||
raise Exception("Error fetching PRs from GitHub API!")
|
||||
|
||||
prs = []
|
||||
for pr in response.json():
|
||||
# the idea is as tags happen recently we get last 100 closed PRs and then filter by tag creation date
|
||||
if pr["merged_at"] and datetime.strptime(pr["merged_at"],'%Y-%m-%dT%H:%M:%SZ') < release_tag_date and datetime.strptime(pr["merged_at"],'%Y-%m-%dT%H:%M:%SZ') > previous_tag_date:
|
||||
prs.append(pr)
|
||||
|
||||
return prs
|
||||
|
||||
|
||||
def extract_commit_details_from_prs(prs):
|
||||
commit_details = []
|
||||
for pr in prs:
|
||||
commit_message = pr["title"]
|
||||
commit_url = pr["html_url"]
|
||||
pr_number = pr["number"]
|
||||
branch_name = pr["head"]["ref"]
|
||||
issue_numbers = re.findall(r"(www-\d+|web-\d+)", branch_name)
|
||||
|
||||
# If no issue numbers are found, add the PR details without issue numbers and URLs
|
||||
if not issue_numbers:
|
||||
commit_details.append(
|
||||
{
|
||||
"message": commit_message,
|
||||
"pr_number": pr_number,
|
||||
"pr_url": commit_url,
|
||||
"issue_number": None,
|
||||
"issue_url": None,
|
||||
}
|
||||
)
|
||||
continue
|
||||
|
||||
for issue in issue_numbers:
|
||||
commit_details.append(
|
||||
{
|
||||
"message": commit_message,
|
||||
"pr_number": pr_number,
|
||||
"pr_url": commit_url,
|
||||
"issue_number": issue,
|
||||
}
|
||||
)
|
||||
|
||||
return commit_details
|
||||
|
||||
# Function to generate changelog using OpenAI
|
||||
def generate_changelog_with_openai(commit_details):
|
||||
commit_messages = []
|
||||
for details in commit_details:
|
||||
base_message = f"{details['pr_url']} - {details['message']}"
|
||||
# Add the issue URL if available
|
||||
# if details["issue_url"]:
|
||||
# base_message += f" (Linear Issue: {details['issue_url']})"
|
||||
commit_messages.append(base_message)
|
||||
|
||||
commit_list = "\n".join(commit_messages)
|
||||
prompt = """
|
||||
Generate a changelog for Infisical, opensource secretops
|
||||
The changelog should:
|
||||
1. Be Informative: Using the provided list of GitHub commits, break them down into categories such as Features, Fixes & Improvements, and Technical Updates. Summarize each commit concisely, ensuring the key points are highlighted.
|
||||
2. Have a Professional yet Friendly tone: The tone should be balanced, not too corporate or too informal.
|
||||
3. Celebratory Introduction and Conclusion: Start the changelog with a celebratory note acknowledging the team's hard work and progress. End with a shoutout to the team and wishes for a pleasant weekend.
|
||||
4. Formatting: you cannot use Markdown formatting, and you can only use emojis for the introductory paragraph or the conclusion paragraph, nowhere else.
|
||||
5. Links: the syntax to create links is the following: `<http://www.example.com|This message is a link>`.
|
||||
6. Linear Links: note that the Linear link is optional, include it only if provided.
|
||||
7. Do not wrap your answer in a codeblock. Just output the text, nothing else
|
||||
Here's a good example to follow, please try to match the formatting as closely as possible, only changing the content of the changelog and have some liberty with the introduction. Notice the importance of the formatting of a changelog item:
|
||||
- <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>))
|
||||
And here's an example of the full changelog:
|
||||
|
||||
*Features*
|
||||
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
|
||||
*Fixes & Improvements*
|
||||
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
|
||||
*Technical Updates*
|
||||
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
|
||||
|
||||
Stay tuned for more exciting updates coming soon!
|
||||
And here are the commits:
|
||||
{}
|
||||
""".format(
|
||||
commit_list
|
||||
)
|
||||
|
||||
client = OpenAI(api_key=OPENAI_API_KEY)
|
||||
messages = [{"role": "user", "content": prompt}]
|
||||
response = client.chat.completions.create(model="gpt-3.5-turbo", messages=messages)
|
||||
|
||||
if "error" in response.choices[0].message:
|
||||
raise Exception("Error generating changelog with OpenAI!")
|
||||
|
||||
return response.choices[0].message.content.strip()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
# Get the latest and previous release tags
|
||||
latest_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0"]).decode("utf-8").strip()
|
||||
previous_tag = find_previous_release_tag(latest_tag)
|
||||
|
||||
latest_tag_date = get_tag_creation_date(latest_tag)
|
||||
previous_tag_date = get_tag_creation_date(previous_tag)
|
||||
|
||||
prs = fetch_prs_between_tags(previous_tag_date,latest_tag_date)
|
||||
pr_details = extract_commit_details_from_prs(prs)
|
||||
|
||||
# Generate changelog
|
||||
changelog = generate_changelog_with_openai(pr_details)
|
||||
|
||||
post_changelog_to_slack(changelog,latest_tag)
|
||||
# Print or post changelog to Slack
|
||||
# set_multiline_output("changelog", changelog)
|
||||
|
||||
except Exception as e:
|
||||
print(str(e))
|
2
.github/values.yaml
vendored
2
.github/values.yaml
vendored
@ -27,7 +27,7 @@ infisical:
|
||||
deploymentAnnotations:
|
||||
secrets.infisical.com/auto-reload: "true"
|
||||
|
||||
kubeSecretRef: "infisical-gamma-secrets"
|
||||
kubeSecretRef: "managed-secret"
|
||||
|
||||
ingress:
|
||||
## @param ingress.enabled Enable ingress
|
||||
|
34
.github/workflows/generate-release-changelog.yml
vendored
Normal file
34
.github/workflows/generate-release-changelog.yml
vendored
Normal file
@ -0,0 +1,34 @@
|
||||
name: Generate Changelog
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
tags:
|
||||
- "infisical/v*.*.*-postgres"
|
||||
|
||||
jobs:
|
||||
generate_changelog:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-tags: true
|
||||
fetch-depth: 0
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.12.0"
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install requests openai
|
||||
- name: Generate Changelog and Post to Slack
|
||||
id: gen-changelog
|
||||
run: python .github/resources/changelog-generator.py
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
|
||||
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
|
@ -23,6 +23,8 @@ jobs:
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: 🔧 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- run: git fetch --force --tags
|
||||
- run: echo "Ref name ${{github.ref_name}}"
|
||||
- uses: actions/setup-go@v3
|
||||
|
@ -190,10 +190,34 @@ dockers:
|
||||
- dockerfile: docker/alpine
|
||||
goos: linux
|
||||
goarch: amd64
|
||||
use: buildx
|
||||
ids:
|
||||
- all-other-builds
|
||||
image_templates:
|
||||
- "infisical/cli:{{ .Version }}"
|
||||
- "infisical/cli:{{ .Major }}.{{ .Minor }}"
|
||||
- "infisical/cli:{{ .Major }}"
|
||||
- "infisical/cli:latest"
|
||||
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
|
||||
- "infisical/cli:latest-amd64"
|
||||
build_flag_templates:
|
||||
- "--pull"
|
||||
- "--platform=linux/amd64"
|
||||
- dockerfile: docker/alpine
|
||||
goos: linux
|
||||
goarch: amd64
|
||||
use: buildx
|
||||
ids:
|
||||
- all-other-builds
|
||||
image_templates:
|
||||
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
|
||||
- "infisical/cli:latest-arm64"
|
||||
build_flag_templates:
|
||||
- "--pull"
|
||||
- "--platform=linux/arm64"
|
||||
|
||||
docker_manifests:
|
||||
- name_template: "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}"
|
||||
image_templates:
|
||||
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
|
||||
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
|
||||
- name_template: "infisical/cli:latest"
|
||||
image_templates:
|
||||
- "infisical/cli:latest-amd64"
|
||||
- "infisical/cli:latest-arm64"
|
||||
|
3
Makefile
3
Makefile
@ -7,6 +7,9 @@ push:
|
||||
up-dev:
|
||||
docker compose -f docker-compose.dev.yml up --build
|
||||
|
||||
up-dev-ldap:
|
||||
docker compose -f docker-compose.dev.yml --profile ldap up --build
|
||||
|
||||
up-prod:
|
||||
docker-compose -f docker-compose.prod.yml up --build
|
||||
|
||||
|
190
backend/package-lock.json
generated
190
backend/package-lock.json
generated
@ -47,16 +47,16 @@
|
||||
"lodash.isequal": "^4.5.0",
|
||||
"mysql2": "^3.9.1",
|
||||
"nanoid": "^5.0.4",
|
||||
"node-cache": "^5.1.2",
|
||||
"nodemailer": "^6.9.9",
|
||||
"ora": "^7.0.1",
|
||||
"passport-github": "^1.1.0",
|
||||
"passport-gitlab2": "^5.0.0",
|
||||
"passport-google-oauth20": "^2.0.0",
|
||||
"passport-ldapauth": "^3.0.1",
|
||||
"pg": "^8.11.3",
|
||||
"picomatch": "^3.0.1",
|
||||
"pino": "^8.16.2",
|
||||
"posthog-node": "^3.6.0",
|
||||
"posthog-node": "^3.6.2",
|
||||
"probot": "^13.0.0",
|
||||
"smee-client": "^2.0.0",
|
||||
"tweetnacl": "^1.0.3",
|
||||
@ -3974,6 +3974,14 @@
|
||||
"integrity": "sha512-2h3tFvkbHksiNcDiUdcJ08gXWG10fnahp30GJ2Tbt4vd4pfsbfkoKTaTbYykFoppaJ6DL3914nQ3PU1vVIlBRQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/ldapjs": {
|
||||
"version": "2.2.5",
|
||||
"resolved": "https://registry.npmjs.org/@types/ldapjs/-/ldapjs-2.2.5.tgz",
|
||||
"integrity": "sha512-Lv/nD6QDCmcT+V1vaTRnEKE8UgOilVv5pHcQuzkU1LcRe4mbHHuUo/KHi0LKrpdHhQY8FJzryF38fcVdeUIrzg==",
|
||||
"dependencies": {
|
||||
"@types/node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/libsodium-wrappers": {
|
||||
"version": "0.7.13",
|
||||
"resolved": "https://registry.npmjs.org/@types/libsodium-wrappers/-/libsodium-wrappers-0.7.13.tgz",
|
||||
@ -5121,6 +5129,22 @@
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/asn1": {
|
||||
"version": "0.2.6",
|
||||
"resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz",
|
||||
"integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==",
|
||||
"dependencies": {
|
||||
"safer-buffer": "~2.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/assert-plus": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz",
|
||||
"integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==",
|
||||
"engines": {
|
||||
"node": ">=0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/assertion-error": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz",
|
||||
@ -5287,6 +5311,17 @@
|
||||
"axios": "0.x || 1.x"
|
||||
}
|
||||
},
|
||||
"node_modules/backoff": {
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/backoff/-/backoff-2.5.0.tgz",
|
||||
"integrity": "sha512-wC5ihrnUXmR2douXmXLCe5O3zg3GKIyvRi/hi58a/XyRxVI+3/yM0PYueQOZXPXQ9pxBislYkw+sF9b7C/RuMA==",
|
||||
"dependencies": {
|
||||
"precond": "0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/balanced-match": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
|
||||
@ -5332,6 +5367,11 @@
|
||||
"node": ">= 10.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/bcryptjs": {
|
||||
"version": "2.4.3",
|
||||
"resolved": "https://registry.npmjs.org/bcryptjs/-/bcryptjs-2.4.3.tgz",
|
||||
"integrity": "sha512-V/Hy/X9Vt7f3BbPJEi8BdVFMByHi+jNXrYkW3huaybV/kQ0KJg0Y6PkEMbn+zeT+i+SiKZ/HMqJGIIt4LZDqNQ=="
|
||||
},
|
||||
"node_modules/before-after-hook": {
|
||||
"version": "2.2.3",
|
||||
"resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz",
|
||||
@ -5706,14 +5746,6 @@
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/clone": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz",
|
||||
"integrity": "sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w==",
|
||||
"engines": {
|
||||
"node": ">=0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/cluster-key-slot": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz",
|
||||
@ -5824,6 +5856,11 @@
|
||||
"node": ">=6.6.0"
|
||||
}
|
||||
},
|
||||
"node_modules/core-util-is": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz",
|
||||
"integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ=="
|
||||
},
|
||||
"node_modules/create-hash": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz",
|
||||
@ -6894,6 +6931,14 @@
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
||||
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="
|
||||
},
|
||||
"node_modules/extsprintf": {
|
||||
"version": "1.4.1",
|
||||
"resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.4.1.tgz",
|
||||
"integrity": "sha512-Wrk35e8ydCKDj/ArClo1VrPVmN8zph5V4AtHwIuHhvMXsKf73UT3BOD+azBIW+3wOJ4FhEH7zyaJCFvChjYvMA==",
|
||||
"engines": [
|
||||
"node >=0.6.0"
|
||||
]
|
||||
},
|
||||
"node_modules/fast-content-type-parse": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/fast-content-type-parse/-/fast-content-type-parse-1.1.0.tgz",
|
||||
@ -8632,6 +8677,57 @@
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/ldap-filter": {
|
||||
"version": "0.3.3",
|
||||
"resolved": "https://registry.npmjs.org/ldap-filter/-/ldap-filter-0.3.3.tgz",
|
||||
"integrity": "sha512-/tFkx5WIn4HuO+6w9lsfxq4FN3O+fDZeO9Mek8dCD8rTUpqzRa766BOBO7BcGkn3X86m5+cBm1/2S/Shzz7gMg==",
|
||||
"dependencies": {
|
||||
"assert-plus": "^1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/ldapauth-fork": {
|
||||
"version": "5.0.5",
|
||||
"resolved": "https://registry.npmjs.org/ldapauth-fork/-/ldapauth-fork-5.0.5.tgz",
|
||||
"integrity": "sha512-LWUk76+V4AOZbny/3HIPQtGPWZyA3SW2tRhsWIBi9imP22WJktKLHV1ofd8Jo/wY7Ve6vAT7FCI5mEn3blZTjw==",
|
||||
"dependencies": {
|
||||
"@types/ldapjs": "^2.2.2",
|
||||
"bcryptjs": "^2.4.0",
|
||||
"ldapjs": "^2.2.1",
|
||||
"lru-cache": "^7.10.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/ldapauth-fork/node_modules/lru-cache": {
|
||||
"version": "7.18.3",
|
||||
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
|
||||
"integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/ldapjs": {
|
||||
"version": "2.3.3",
|
||||
"resolved": "https://registry.npmjs.org/ldapjs/-/ldapjs-2.3.3.tgz",
|
||||
"integrity": "sha512-75QiiLJV/PQqtpH+HGls44dXweviFwQ6SiIK27EqzKQ5jU/7UFrl2E5nLdQ3IYRBzJ/AVFJI66u0MZ0uofKYwg==",
|
||||
"dependencies": {
|
||||
"abstract-logging": "^2.0.0",
|
||||
"asn1": "^0.2.4",
|
||||
"assert-plus": "^1.0.0",
|
||||
"backoff": "^2.5.0",
|
||||
"ldap-filter": "^0.3.3",
|
||||
"once": "^1.4.0",
|
||||
"vasync": "^2.2.0",
|
||||
"verror": "^1.8.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10.13.0"
|
||||
}
|
||||
},
|
||||
"node_modules/leven": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/leven/-/leven-2.1.0.tgz",
|
||||
@ -9258,17 +9354,6 @@
|
||||
"resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz",
|
||||
"integrity": "sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA=="
|
||||
},
|
||||
"node_modules/node-cache": {
|
||||
"version": "5.1.2",
|
||||
"resolved": "https://registry.npmjs.org/node-cache/-/node-cache-5.1.2.tgz",
|
||||
"integrity": "sha512-t1QzWwnk4sjLWaQAS8CHgOJ+RAfmHpxFWmc36IWTiWHQfs0w5JDMBS1b1ZxQteo0vVVuWJvIUKHDkkeK7vIGCg==",
|
||||
"dependencies": {
|
||||
"clone": "2.x"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 8.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/node-fetch": {
|
||||
"version": "2.7.0",
|
||||
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz",
|
||||
@ -9775,6 +9860,18 @@
|
||||
"node": ">= 0.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/passport-ldapauth": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/passport-ldapauth/-/passport-ldapauth-3.0.1.tgz",
|
||||
"integrity": "sha512-TRRx3BHi8GC8MfCT9wmghjde/EGeKjll7zqHRRfGRxXbLcaDce2OftbQrFG7/AWaeFhR6zpZHtBQ/IkINdLVjQ==",
|
||||
"dependencies": {
|
||||
"ldapauth-fork": "^5.0.1",
|
||||
"passport-strategy": "^1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/passport-oauth2": {
|
||||
"version": "1.7.0",
|
||||
"resolved": "https://registry.npmjs.org/passport-oauth2/-/passport-oauth2-1.7.0.tgz",
|
||||
@ -10318,9 +10415,9 @@
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/posthog-node": {
|
||||
"version": "3.6.0",
|
||||
"resolved": "https://registry.npmjs.org/posthog-node/-/posthog-node-3.6.0.tgz",
|
||||
"integrity": "sha512-N/4//SIQR4fhwbHnDdJ2rQCYdu9wo0EVPK4lVgZswp5R/E42RKlpuO6ZfPsBl+Bcg06OYiOd/WR/jLV90FCoSw==",
|
||||
"version": "3.6.2",
|
||||
"resolved": "https://registry.npmjs.org/posthog-node/-/posthog-node-3.6.2.tgz",
|
||||
"integrity": "sha512-tVIaShR3SxBx17AlAUS86jQTweKuJIFRedBB504fCz7YPnXJTYSrVcUHn5IINE2wu4jUQimQK6ihQr90Djrdrg==",
|
||||
"dependencies": {
|
||||
"axios": "^1.6.2",
|
||||
"rusha": "^0.8.14"
|
||||
@ -10329,6 +10426,14 @@
|
||||
"node": ">=15.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/precond": {
|
||||
"version": "0.2.3",
|
||||
"resolved": "https://registry.npmjs.org/precond/-/precond-0.2.3.tgz",
|
||||
"integrity": "sha512-QCYG84SgGyGzqJ/vlMsxeXd/pgL/I94ixdNFyh1PusWmTCyVfPJjZ1K1jvHtsbfnXQs2TSkEP2fR7QiMZAnKFQ==",
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/prelude-ls": {
|
||||
"version": "1.2.1",
|
||||
"resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz",
|
||||
@ -12779,6 +12884,43 @@
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/vasync": {
|
||||
"version": "2.2.1",
|
||||
"resolved": "https://registry.npmjs.org/vasync/-/vasync-2.2.1.tgz",
|
||||
"integrity": "sha512-Hq72JaTpcTFdWiNA4Y22Amej2GH3BFmBaKPPlDZ4/oC8HNn2ISHLkFrJU4Ds8R3jcUi7oo5Y9jcMHKjES+N9wQ==",
|
||||
"engines": [
|
||||
"node >=0.6.0"
|
||||
],
|
||||
"dependencies": {
|
||||
"verror": "1.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/vasync/node_modules/verror": {
|
||||
"version": "1.10.0",
|
||||
"resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz",
|
||||
"integrity": "sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==",
|
||||
"engines": [
|
||||
"node >=0.6.0"
|
||||
],
|
||||
"dependencies": {
|
||||
"assert-plus": "^1.0.0",
|
||||
"core-util-is": "1.0.2",
|
||||
"extsprintf": "^1.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/verror": {
|
||||
"version": "1.10.1",
|
||||
"resolved": "https://registry.npmjs.org/verror/-/verror-1.10.1.tgz",
|
||||
"integrity": "sha512-veufcmxri4e3XSrT0xwfUR7kguIkaxBeosDg00yDWhk49wdwkSUrvvsm7nc75e1PUyvIeZj6nS8VQRYz2/S4Xg==",
|
||||
"dependencies": {
|
||||
"assert-plus": "^1.0.0",
|
||||
"core-util-is": "1.0.2",
|
||||
"extsprintf": "^1.2.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.6.0"
|
||||
}
|
||||
},
|
||||
"node_modules/vite": {
|
||||
"version": "5.0.12",
|
||||
"resolved": "https://registry.npmjs.org/vite/-/vite-5.0.12.tgz",
|
||||
|
@ -108,16 +108,16 @@
|
||||
"lodash.isequal": "^4.5.0",
|
||||
"mysql2": "^3.9.1",
|
||||
"nanoid": "^5.0.4",
|
||||
"node-cache": "^5.1.2",
|
||||
"nodemailer": "^6.9.9",
|
||||
"ora": "^7.0.1",
|
||||
"passport-github": "^1.1.0",
|
||||
"passport-gitlab2": "^5.0.0",
|
||||
"passport-google-oauth20": "^2.0.0",
|
||||
"passport-ldapauth": "^3.0.1",
|
||||
"pg": "^8.11.3",
|
||||
"picomatch": "^3.0.1",
|
||||
"pino": "^8.16.2",
|
||||
"posthog-node": "^3.6.0",
|
||||
"posthog-node": "^3.6.2",
|
||||
"probot": "^13.0.0",
|
||||
"smee-client": "^2.0.0",
|
||||
"tweetnacl": "^1.0.3",
|
||||
|
3
backend/src/@types/fastify.d.ts
vendored
3
backend/src/@types/fastify.d.ts
vendored
@ -3,6 +3,7 @@ import "fastify";
|
||||
import { TUsers } from "@app/db/schemas";
|
||||
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
|
||||
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
|
||||
@ -69,6 +70,7 @@ declare module "fastify" {
|
||||
};
|
||||
auditLogInfo: Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
|
||||
ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
|
||||
ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>>;
|
||||
}
|
||||
|
||||
interface FastifyInstance {
|
||||
@ -107,6 +109,7 @@ declare module "fastify" {
|
||||
snapshot: TSecretSnapshotServiceFactory;
|
||||
saml: TSamlConfigServiceFactory;
|
||||
scim: TScimServiceFactory;
|
||||
ldap: TLdapConfigServiceFactory;
|
||||
auditLog: TAuditLogServiceFactory;
|
||||
secretScanning: TSecretScanningServiceFactory;
|
||||
license: TLicenseServiceFactory;
|
||||
|
8
backend/src/@types/knex.d.ts
vendored
8
backend/src/@types/knex.d.ts
vendored
@ -50,6 +50,9 @@ import {
|
||||
TIntegrations,
|
||||
TIntegrationsInsert,
|
||||
TIntegrationsUpdate,
|
||||
TLdapConfigs,
|
||||
TLdapConfigsInsert,
|
||||
TLdapConfigsUpdate,
|
||||
TOrganizations,
|
||||
TOrganizationsInsert,
|
||||
TOrganizationsUpdate,
|
||||
@ -161,6 +164,9 @@ import {
|
||||
TUserActions,
|
||||
TUserActionsInsert,
|
||||
TUserActionsUpdate,
|
||||
TUserAliases,
|
||||
TUserAliasesInsert,
|
||||
TUserAliasesUpdate,
|
||||
TUserEncryptionKeys,
|
||||
TUserEncryptionKeysInsert,
|
||||
TUserEncryptionKeysUpdate,
|
||||
@ -175,6 +181,7 @@ import {
|
||||
declare module "knex/types/tables" {
|
||||
interface Tables {
|
||||
[TableName.Users]: Knex.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
|
||||
[TableName.UserAliases]: Knex.CompositeTableType<TUserAliases, TUserAliasesInsert, TUserAliasesUpdate>;
|
||||
[TableName.UserEncryptionKey]: Knex.CompositeTableType<
|
||||
TUserEncryptionKeys,
|
||||
TUserEncryptionKeysInsert,
|
||||
@ -318,6 +325,7 @@ declare module "knex/types/tables" {
|
||||
TSecretSnapshotFoldersUpdate
|
||||
>;
|
||||
[TableName.SamlConfig]: Knex.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
|
||||
[TableName.LdapConfig]: Knex.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
|
||||
[TableName.OrgBot]: Knex.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
|
||||
[TableName.AuditLog]: Knex.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
|
||||
[TableName.GitAppInstallSession]: Knex.CompositeTableType<
|
||||
|
@ -1,3 +1,5 @@
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-nocheck
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
@ -8,10 +10,12 @@ export async function up(knex: Knex): Promise<void> {
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
|
||||
t.uuid("instanceId").notNullable().defaultTo(knex.fn.uuid());
|
||||
});
|
||||
// this is updated to avoid race condition on replication
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore
|
||||
await knex(TableName.SuperAdmin).update({ id: ADMIN_CONFIG_UUID }).whereNotNull("id").limit(1);
|
||||
|
||||
const superUserConfigExists = await knex(TableName.SuperAdmin).where("id", ADMIN_CONFIG_UUID).first();
|
||||
if (!superUserConfigExists) {
|
||||
// eslint-disable-next-line
|
||||
await knex(TableName.SuperAdmin).update({ id: ADMIN_CONFIG_UUID }).whereNotNull("id").limit(1);
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
|
@ -0,0 +1,15 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
await knex.schema.alterTable(TableName.Integration, (t) => {
|
||||
t.datetime("lastUsed");
|
||||
});
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.alterTable(TableName.Integration, (t) => {
|
||||
t.dropColumn("lastUsed");
|
||||
});
|
||||
}
|
68
backend/src/db/migrations/20240311210135_ldap-config.ts
Normal file
68
backend/src/db/migrations/20240311210135_ldap-config.ts
Normal file
@ -0,0 +1,68 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.LdapConfig))) {
|
||||
await knex.schema.createTable(TableName.LdapConfig, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.uuid("orgId").notNullable().unique();
|
||||
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
|
||||
t.boolean("isActive").notNullable();
|
||||
t.string("url").notNullable();
|
||||
t.string("encryptedBindDN").notNullable();
|
||||
t.string("bindDNIV").notNullable();
|
||||
t.string("bindDNTag").notNullable();
|
||||
t.string("encryptedBindPass").notNullable();
|
||||
t.string("bindPassIV").notNullable();
|
||||
t.string("bindPassTag").notNullable();
|
||||
t.string("searchBase").notNullable();
|
||||
t.text("encryptedCACert").notNullable();
|
||||
t.string("caCertIV").notNullable();
|
||||
t.string("caCertTag").notNullable();
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
}
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.LdapConfig);
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.UserAliases))) {
|
||||
await knex.schema.createTable(TableName.UserAliases, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.uuid("userId").notNullable();
|
||||
t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
|
||||
t.string("username").notNullable();
|
||||
t.string("aliasType").notNullable();
|
||||
t.string("externalId").notNullable();
|
||||
t.specificType("emails", "text[]");
|
||||
t.uuid("orgId").nullable();
|
||||
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
}
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.UserAliases);
|
||||
|
||||
await knex.schema.alterTable(TableName.Users, (t) => {
|
||||
t.string("username").unique();
|
||||
t.string("email").nullable().alter();
|
||||
t.dropUnique(["email"]);
|
||||
});
|
||||
|
||||
await knex(TableName.Users).update("username", knex.ref("email"));
|
||||
|
||||
await knex.schema.alterTable(TableName.Users, (t) => {
|
||||
t.string("username").notNullable().alter();
|
||||
});
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.dropTableIfExists(TableName.LdapConfig);
|
||||
await knex.schema.dropTableIfExists(TableName.UserAliases);
|
||||
await knex.schema.alterTable(TableName.Users, (t) => {
|
||||
t.dropColumn("username");
|
||||
// t.string("email").notNullable().alter();
|
||||
});
|
||||
await dropOnUpdateTrigger(knex, TableName.LdapConfig);
|
||||
}
|
@ -14,6 +14,7 @@ export * from "./identity-universal-auths";
|
||||
export * from "./incident-contacts";
|
||||
export * from "./integration-auths";
|
||||
export * from "./integrations";
|
||||
export * from "./ldap-configs";
|
||||
export * from "./models";
|
||||
export * from "./org-bots";
|
||||
export * from "./org-memberships";
|
||||
@ -52,6 +53,7 @@ export * from "./service-tokens";
|
||||
export * from "./super-admin";
|
||||
export * from "./trusted-ips";
|
||||
export * from "./user-actions";
|
||||
export * from "./user-aliases";
|
||||
export * from "./user-encryption-keys";
|
||||
export * from "./users";
|
||||
export * from "./webhooks";
|
||||
|
@ -27,7 +27,8 @@ export const IntegrationsSchema = z.object({
|
||||
envId: z.string().uuid(),
|
||||
secretPath: z.string().default("/"),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
updatedAt: z.date(),
|
||||
lastUsed: z.date().nullable().optional()
|
||||
});
|
||||
|
||||
export type TIntegrations = z.infer<typeof IntegrationsSchema>;
|
||||
|
31
backend/src/db/schemas/ldap-configs.ts
Normal file
31
backend/src/db/schemas/ldap-configs.ts
Normal file
@ -0,0 +1,31 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const LdapConfigsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
orgId: z.string().uuid(),
|
||||
isActive: z.boolean(),
|
||||
url: z.string(),
|
||||
encryptedBindDN: z.string(),
|
||||
bindDNIV: z.string(),
|
||||
bindDNTag: z.string(),
|
||||
encryptedBindPass: z.string(),
|
||||
bindPassIV: z.string(),
|
||||
bindPassTag: z.string(),
|
||||
searchBase: z.string(),
|
||||
encryptedCACert: z.string(),
|
||||
caCertIV: z.string(),
|
||||
caCertTag: z.string(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TLdapConfigs = z.infer<typeof LdapConfigsSchema>;
|
||||
export type TLdapConfigsInsert = Omit<z.input<typeof LdapConfigsSchema>, TImmutableDBKeys>;
|
||||
export type TLdapConfigsUpdate = Partial<Omit<z.input<typeof LdapConfigsSchema>, TImmutableDBKeys>>;
|
@ -2,6 +2,7 @@ import { z } from "zod";
|
||||
|
||||
export enum TableName {
|
||||
Users = "users",
|
||||
UserAliases = "user_aliases",
|
||||
UserEncryptionKey = "user_encryption_keys",
|
||||
AuthTokens = "auth_tokens",
|
||||
AuthTokenSession = "auth_token_sessions",
|
||||
@ -50,6 +51,7 @@ export enum TableName {
|
||||
SecretRotation = "secret_rotations",
|
||||
SecretRotationOutput = "secret_rotation_outputs",
|
||||
SamlConfig = "saml_configs",
|
||||
LdapConfig = "ldap_configs",
|
||||
AuditLog = "audit_logs",
|
||||
GitAppInstallSession = "git_app_install_sessions",
|
||||
GitAppOrg = "git_app_org",
|
||||
|
24
backend/src/db/schemas/user-aliases.ts
Normal file
24
backend/src/db/schemas/user-aliases.ts
Normal file
@ -0,0 +1,24 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const UserAliasesSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
userId: z.string().uuid(),
|
||||
username: z.string(),
|
||||
aliasType: z.string(),
|
||||
externalId: z.string(),
|
||||
emails: z.string().array().nullable().optional(),
|
||||
orgId: z.string().uuid().nullable().optional(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TUserAliases = z.infer<typeof UserAliasesSchema>;
|
||||
export type TUserAliasesInsert = Omit<z.input<typeof UserAliasesSchema>, TImmutableDBKeys>;
|
||||
export type TUserAliasesUpdate = Partial<Omit<z.input<typeof UserAliasesSchema>, TImmutableDBKeys>>;
|
@ -9,7 +9,7 @@ import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const UsersSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
email: z.string(),
|
||||
email: z.string().nullable().optional(),
|
||||
authMethods: z.string().array().nullable().optional(),
|
||||
superAdmin: z.boolean().default(false).nullable().optional(),
|
||||
firstName: z.string().nullable().optional(),
|
||||
@ -20,7 +20,8 @@ export const UsersSchema = z.object({
|
||||
devices: z.unknown().nullable().optional(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
isGhost: z.boolean().default(false)
|
||||
isGhost: z.boolean().default(false),
|
||||
username: z.string()
|
||||
});
|
||||
|
||||
export type TUsers = z.infer<typeof UsersSchema>;
|
||||
|
@ -21,6 +21,7 @@ export let userPublicKey: string | undefined;
|
||||
|
||||
export const seedData1 = {
|
||||
id: "3dafd81d-4388-432b-a4c5-f735616868c1",
|
||||
username: process.env.TEST_USER_USERNAME || "test@localhost.local",
|
||||
email: process.env.TEST_USER_EMAIL || "test@localhost.local",
|
||||
password: process.env.TEST_USER_PASSWORD || "testInfisical@1",
|
||||
organization: {
|
||||
|
@ -9,7 +9,12 @@ export async function seed(knex: Knex): Promise<void> {
|
||||
await knex(TableName.Users).del();
|
||||
await knex(TableName.UserEncryptionKey).del();
|
||||
await knex(TableName.SuperAdmin).del();
|
||||
await knex(TableName.SuperAdmin).insert([{ initialized: true, allowSignUp: true }]);
|
||||
|
||||
await knex(TableName.SuperAdmin).insert([
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore
|
||||
{ id: "00000000-0000-0000-0000-000000000000", initialized: true, allowSignUp: true }
|
||||
]);
|
||||
// Inserts seed entries
|
||||
const [user] = await knex(TableName.Users)
|
||||
.insert([
|
||||
@ -17,6 +22,7 @@ export async function seed(knex: Knex): Promise<void> {
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore
|
||||
id: seedData1.id,
|
||||
username: seedData1.username,
|
||||
email: seedData1.email,
|
||||
superAdmin: true,
|
||||
firstName: "test",
|
||||
|
@ -1,3 +1,4 @@
|
||||
import { registerLdapRouter } from "./ldap-router";
|
||||
import { registerLicenseRouter } from "./license-router";
|
||||
import { registerOrgRoleRouter } from "./org-role-router";
|
||||
import { registerProjectRoleRouter } from "./project-role-router";
|
||||
@ -35,6 +36,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
|
||||
});
|
||||
await server.register(registerSamlRouter, { prefix: "/sso" });
|
||||
await server.register(registerScimRouter, { prefix: "/scim" });
|
||||
await server.register(registerLdapRouter, { prefix: "/ldap" });
|
||||
await server.register(registerSecretScanningRouter, { prefix: "/secret-scanning" });
|
||||
await server.register(registerSecretRotationRouter, { prefix: "/secret-rotations" });
|
||||
await server.register(registerSecretVersionRouter, { prefix: "/secret" });
|
||||
|
192
backend/src/ee/routes/v1/ldap-router.ts
Normal file
192
backend/src/ee/routes/v1/ldap-router.ts
Normal file
@ -0,0 +1,192 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-return */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-call */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-argument */
|
||||
// All the any rules are disabled because passport typesense with fastify is really poor
|
||||
|
||||
import { IncomingMessage } from "node:http";
|
||||
|
||||
import { Authenticator } from "@fastify/passport";
|
||||
import fastifySession from "@fastify/session";
|
||||
import { FastifyRequest } from "fastify";
|
||||
import LdapStrategy from "passport-ldapauth";
|
||||
import { z } from "zod";
|
||||
|
||||
import { LdapConfigsSchema } from "@app/db/schemas";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
||||
const appCfg = getConfig();
|
||||
const passport = new Authenticator({ key: "ldap", userProperty: "passportUser" });
|
||||
await server.register(fastifySession, { secret: appCfg.COOKIE_SECRET_SIGN_KEY });
|
||||
await server.register(passport.initialize());
|
||||
await server.register(passport.secureSession());
|
||||
|
||||
const getLdapPassportOpts = (req: FastifyRequest, done: any) => {
|
||||
const { organizationSlug } = req.body as {
|
||||
organizationSlug: string;
|
||||
};
|
||||
|
||||
process.nextTick(async () => {
|
||||
try {
|
||||
const { opts, ldapConfig } = await server.services.ldap.bootLdap(organizationSlug);
|
||||
req.ldapConfig = ldapConfig;
|
||||
done(null, opts);
|
||||
} catch (err) {
|
||||
done(err);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
passport.use(
|
||||
new LdapStrategy(
|
||||
getLdapPassportOpts as any,
|
||||
// eslint-disable-next-line
|
||||
async (req: IncomingMessage, user, cb) => {
|
||||
try {
|
||||
const { isUserCompleted, providerAuthToken } = await server.services.ldap.ldapLogin({
|
||||
externalId: user.uidNumber,
|
||||
username: user.uid,
|
||||
firstName: user.givenName,
|
||||
lastName: user.sn,
|
||||
emails: user.mail ? [user.mail] : [],
|
||||
relayState: ((req as unknown as FastifyRequest).body as { RelayState?: string }).RelayState,
|
||||
orgId: (req as unknown as FastifyRequest).ldapConfig.organization
|
||||
});
|
||||
|
||||
return cb(null, { isUserCompleted, providerAuthToken });
|
||||
} catch (err) {
|
||||
logger.error(err);
|
||||
return cb(err, false);
|
||||
}
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
server.route({
|
||||
url: "/login",
|
||||
method: "POST",
|
||||
schema: {
|
||||
body: z.object({
|
||||
organizationSlug: z.string().trim()
|
||||
})
|
||||
},
|
||||
preValidation: passport.authenticate("ldapauth", {
|
||||
session: false
|
||||
// failureFlash: true,
|
||||
// failureRedirect: "/login/provider/error"
|
||||
// this is due to zod type difference
|
||||
}) as any,
|
||||
handler: (req, res) => {
|
||||
let nextUrl;
|
||||
if (req.passportUser.isUserCompleted) {
|
||||
nextUrl = `${appCfg.SITE_URL}/login/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}`;
|
||||
} else {
|
||||
nextUrl = `${appCfg.SITE_URL}/signup/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}`;
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
nextUrl
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/config",
|
||||
method: "GET",
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
querystring: z.object({
|
||||
organizationId: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
id: z.string(),
|
||||
organization: z.string(),
|
||||
isActive: z.boolean(),
|
||||
url: z.string(),
|
||||
bindDN: z.string(),
|
||||
bindPass: z.string(),
|
||||
searchBase: z.string(),
|
||||
caCert: z.string()
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const ldap = await server.services.ldap.getLdapCfgWithPermissionCheck({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
orgId: req.query.organizationId,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
return ldap;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/config",
|
||||
method: "POST",
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
body: z.object({
|
||||
organizationId: z.string().trim(),
|
||||
isActive: z.boolean(),
|
||||
url: z.string().trim(),
|
||||
bindDN: z.string().trim(),
|
||||
bindPass: z.string().trim(),
|
||||
searchBase: z.string().trim(),
|
||||
caCert: z.string().trim().default("")
|
||||
}),
|
||||
response: {
|
||||
200: LdapConfigsSchema
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const ldap = await server.services.ldap.createLdapCfg({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
orgId: req.body.organizationId,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
|
||||
return ldap;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/config",
|
||||
method: "PATCH",
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
body: z.object({
|
||||
organizationId: z.string().trim(),
|
||||
isActive: z.boolean().optional(),
|
||||
url: z.string().trim().optional(),
|
||||
bindDN: z.string().trim().optional(),
|
||||
bindPass: z.string().trim().optional(),
|
||||
searchBase: z.string().trim().optional(),
|
||||
caCert: z.string().trim().optional()
|
||||
}),
|
||||
response: {
|
||||
200: LdapConfigsSchema
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const ldap = await server.services.ldap.updateLdapCfg({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
orgId: req.body.organizationId,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
|
||||
return ldap;
|
||||
}
|
||||
});
|
||||
};
|
@ -27,6 +27,7 @@ type TSAMLConfig = {
|
||||
cert: string;
|
||||
audience: string;
|
||||
wantAuthnResponseSigned?: boolean;
|
||||
wantAssertionsSigned?: boolean;
|
||||
disableRequestedAuthnContext?: boolean;
|
||||
};
|
||||
|
||||
@ -82,6 +83,10 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
|
||||
samlConfig.audience = `spn:${ssoConfig.issuer}`;
|
||||
}
|
||||
}
|
||||
if (ssoConfig.authProvider === SamlProviders.GOOGLE_SAML) {
|
||||
samlConfig.wantAssertionsSigned = false;
|
||||
}
|
||||
|
||||
(req as unknown as FastifyRequest).ssoConfig = ssoConfig;
|
||||
done(null, samlConfig);
|
||||
} catch (error) {
|
||||
@ -94,14 +99,14 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
|
||||
async (req, profile, cb) => {
|
||||
try {
|
||||
if (!profile) throw new BadRequestError({ message: "Missing profile" });
|
||||
const { firstName } = profile;
|
||||
const email = profile?.email ?? (profile?.emailAddress as string); // emailRippling is added because in Rippling the field `email` reserved
|
||||
|
||||
if (!email || !firstName) {
|
||||
if (!profile.email || !profile.firstName) {
|
||||
throw new BadRequestError({ message: "Invalid request. Missing email or first name" });
|
||||
}
|
||||
|
||||
const { isUserCompleted, providerAuthToken } = await server.services.saml.samlLogin({
|
||||
username: profile.nameID ?? email,
|
||||
email,
|
||||
firstName: profile.firstName as string,
|
||||
lastName: profile.lastName as string,
|
||||
|
@ -122,7 +122,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
emails: z.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string().email(),
|
||||
value: z.string(),
|
||||
type: z.string().trim()
|
||||
})
|
||||
),
|
||||
@ -168,7 +168,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
emails: z.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string().email(),
|
||||
value: z.string(),
|
||||
type: z.string().trim()
|
||||
})
|
||||
),
|
||||
@ -198,13 +198,15 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
familyName: z.string().trim(),
|
||||
givenName: z.string().trim()
|
||||
}),
|
||||
// emails: z.array( // optional?
|
||||
// z.object({
|
||||
// primary: z.boolean(),
|
||||
// value: z.string().email(),
|
||||
// type: z.string().trim()
|
||||
// })
|
||||
// ),
|
||||
emails: z
|
||||
.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string().email(),
|
||||
type: z.string().trim()
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
// displayName: z.string().trim(),
|
||||
active: z.boolean()
|
||||
}),
|
||||
@ -231,8 +233,11 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const primaryEmail = req.body.emails?.find((email) => email.primary)?.value;
|
||||
|
||||
const user = await req.server.services.scim.createScimUser({
|
||||
email: req.body.userName,
|
||||
username: req.body.userName,
|
||||
email: primaryEmail,
|
||||
firstName: req.body.name.givenName,
|
||||
lastName: req.body.name.familyName,
|
||||
orgId: req.permission.orgId as string
|
||||
|
@ -24,7 +24,7 @@ export const auditLogQueueServiceFactory = ({
|
||||
const pushToLog = async (data: TCreateAuditLogDTO) => {
|
||||
await queueService.queue(QueueName.AuditLog, QueueJobs.AuditLog, data, {
|
||||
removeOnFail: {
|
||||
count: 5
|
||||
count: 3
|
||||
},
|
||||
removeOnComplete: true
|
||||
});
|
||||
@ -46,6 +46,7 @@ export const auditLogQueueServiceFactory = ({
|
||||
const ttl = plan.auditLogsRetentionDays * MS_IN_DAY;
|
||||
// skip inserting if audit log retention is 0 meaning its not supported
|
||||
if (ttl === 0) return;
|
||||
|
||||
await auditLogDAL.create({
|
||||
actor: actor.type,
|
||||
actorMetadata: actor.metadata,
|
||||
|
@ -92,7 +92,8 @@ export enum EventType {
|
||||
|
||||
interface UserActorMetadata {
|
||||
userId: string;
|
||||
email: string;
|
||||
email?: string | null;
|
||||
username: string;
|
||||
}
|
||||
|
||||
interface ServiceActorMetadata {
|
||||
|
11
backend/src/ee/services/ldap-config/ldap-config-dal.ts
Normal file
11
backend/src/ee/services/ldap-config/ldap-config-dal.ts
Normal file
@ -0,0 +1,11 @@
|
||||
import { TDbClient } from "@app/db";
|
||||
import { TableName } from "@app/db/schemas";
|
||||
import { ormify } from "@app/lib/knex";
|
||||
|
||||
export type TLdapConfigDALFactory = ReturnType<typeof ldapConfigDALFactory>;
|
||||
|
||||
export const ldapConfigDALFactory = (db: TDbClient) => {
|
||||
const ldapCfgOrm = ormify(db, TableName.LdapConfig);
|
||||
|
||||
return { ...ldapCfgOrm };
|
||||
};
|
backend/src/ee/services/ldap-config/ldap-config-service.ts (new file, 429 lines)
@@ -0,0 +1,429 @@
import { ForbiddenError } from "@casl/ability";
import jwt from "jsonwebtoken";

import { OrgMembershipRole, OrgMembershipStatus, SecretKeyEncoding, TLdapConfigsUpdate } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import {
  decryptSymmetric,
  encryptSymmetric,
  generateAsymmetricKeyPair,
  generateSymmetricKey,
  infisicalSymmetricDecrypt,
  infisicalSymmetricEncypt
} from "@app/lib/crypto/encryption";
import { BadRequestError } from "@app/lib/errors";
import { TOrgPermission } from "@app/lib/types";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { normalizeUsername } from "@app/services/user/user-fns";
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";

import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { TLdapConfigDALFactory } from "./ldap-config-dal";
import { TCreateLdapCfgDTO, TLdapLoginDTO, TUpdateLdapCfgDTO } from "./ldap-config-types";

type TLdapConfigServiceFactoryDep = {
  ldapConfigDAL: TLdapConfigDALFactory;
  orgDAL: Pick<
    TOrgDALFactory,
    "createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById"
  >;
  orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
  userDAL: Pick<TUserDALFactory, "create" | "findOne" | "transaction" | "updateById">;
  userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};

export type TLdapConfigServiceFactory = ReturnType<typeof ldapConfigServiceFactory>;

export const ldapConfigServiceFactory = ({
  ldapConfigDAL,
  orgDAL,
  orgBotDAL,
  userDAL,
  userAliasDAL,
  permissionService,
  licenseService
}: TLdapConfigServiceFactoryDep) => {
  const createLdapCfg = async ({
    actor,
    actorId,
    orgId,
    actorOrgId,
    isActive,
    url,
    bindDN,
    bindPass,
    searchBase,
    caCert
  }: TCreateLdapCfgDTO) => {
    const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorOrgId);
    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Ldap);

    const plan = await licenseService.getPlan(orgId);
    if (!plan.ldap)
      throw new BadRequestError({
        message:
          "Failed to create LDAP configuration due to plan restriction. Upgrade plan to create LDAP configuration."
      });

    const orgBot = await orgBotDAL.transaction(async (tx) => {
      const doc = await orgBotDAL.findOne({ orgId }, tx);
      if (doc) return doc;

      const { privateKey, publicKey } = generateAsymmetricKeyPair();
      const key = generateSymmetricKey();
      const {
        ciphertext: encryptedPrivateKey,
        iv: privateKeyIV,
        tag: privateKeyTag,
        encoding: privateKeyKeyEncoding,
        algorithm: privateKeyAlgorithm
      } = infisicalSymmetricEncypt(privateKey);
      const {
        ciphertext: encryptedSymmetricKey,
        iv: symmetricKeyIV,
        tag: symmetricKeyTag,
        encoding: symmetricKeyKeyEncoding,
        algorithm: symmetricKeyAlgorithm
      } = infisicalSymmetricEncypt(key);

      return orgBotDAL.create(
        {
          name: "Infisical org bot",
          publicKey,
          privateKeyIV,
          encryptedPrivateKey,
          symmetricKeyIV,
          symmetricKeyTag,
          encryptedSymmetricKey,
          symmetricKeyAlgorithm,
          orgId,
          privateKeyTag,
          privateKeyAlgorithm,
          privateKeyKeyEncoding,
          symmetricKeyKeyEncoding
        },
        tx
      );
    });

    const key = infisicalSymmetricDecrypt({
      ciphertext: orgBot.encryptedSymmetricKey,
      iv: orgBot.symmetricKeyIV,
      tag: orgBot.symmetricKeyTag,
      keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
    });

    const { ciphertext: encryptedBindDN, iv: bindDNIV, tag: bindDNTag } = encryptSymmetric(bindDN, key);
    const { ciphertext: encryptedBindPass, iv: bindPassIV, tag: bindPassTag } = encryptSymmetric(bindPass, key);
    const { ciphertext: encryptedCACert, iv: caCertIV, tag: caCertTag } = encryptSymmetric(caCert, key);

    const ldapConfig = await ldapConfigDAL.create({
      orgId,
      isActive,
      url,
      encryptedBindDN,
      bindDNIV,
      bindDNTag,
      encryptedBindPass,
      bindPassIV,
      bindPassTag,
      searchBase,
      encryptedCACert,
      caCertIV,
      caCertTag
    });

    return ldapConfig;
  };

  const updateLdapCfg = async ({
    actor,
    actorId,
    orgId,
    actorOrgId,
    isActive,
    url,
    bindDN,
    bindPass,
    searchBase,
    caCert
  }: TUpdateLdapCfgDTO) => {
    const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorOrgId);
    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Ldap);

    const plan = await licenseService.getPlan(orgId);
    if (!plan.ldap)
      throw new BadRequestError({
        message:
          "Failed to update LDAP configuration due to plan restriction. Upgrade plan to update LDAP configuration."
      });

    const updateQuery: TLdapConfigsUpdate = {
      isActive,
      url,
      searchBase
    };

    const orgBot = await orgBotDAL.findOne({ orgId });
    if (!orgBot) throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" });
    const key = infisicalSymmetricDecrypt({
      ciphertext: orgBot.encryptedSymmetricKey,
      iv: orgBot.symmetricKeyIV,
      tag: orgBot.symmetricKeyTag,
      keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
    });

    if (bindDN) {
      const { ciphertext: encryptedBindDN, iv: bindDNIV, tag: bindDNTag } = encryptSymmetric(bindDN, key);
      updateQuery.encryptedBindDN = encryptedBindDN;
      updateQuery.bindDNIV = bindDNIV;
      updateQuery.bindDNTag = bindDNTag;
    }

    if (bindPass) {
      const { ciphertext: encryptedBindPass, iv: bindPassIV, tag: bindPassTag } = encryptSymmetric(bindPass, key);
      updateQuery.encryptedBindPass = encryptedBindPass;
      updateQuery.bindPassIV = bindPassIV;
      updateQuery.bindPassTag = bindPassTag;
    }

    if (caCert) {
      const { ciphertext: encryptedCACert, iv: caCertIV, tag: caCertTag } = encryptSymmetric(caCert, key);
      updateQuery.encryptedCACert = encryptedCACert;
      updateQuery.caCertIV = caCertIV;
      updateQuery.caCertTag = caCertTag;
    }

    const [ldapConfig] = await ldapConfigDAL.update({ orgId }, updateQuery);

    return ldapConfig;
  };

  const getLdapCfg = async (filter: { orgId: string; isActive?: boolean }) => {
    const ldapConfig = await ldapConfigDAL.findOne(filter);
    if (!ldapConfig) throw new BadRequestError({ message: "Failed to find organization LDAP data" });

    const orgBot = await orgBotDAL.findOne({ orgId: ldapConfig.orgId });
    if (!orgBot) throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" });

    const key = infisicalSymmetricDecrypt({
      ciphertext: orgBot.encryptedSymmetricKey,
      iv: orgBot.symmetricKeyIV,
      tag: orgBot.symmetricKeyTag,
      keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
    });

    const {
      encryptedBindDN,
      bindDNIV,
      bindDNTag,
      encryptedBindPass,
      bindPassIV,
      bindPassTag,
      encryptedCACert,
      caCertIV,
      caCertTag
    } = ldapConfig;

    let bindDN = "";
    if (encryptedBindDN && bindDNIV && bindDNTag) {
      bindDN = decryptSymmetric({
        ciphertext: encryptedBindDN,
        key,
        tag: bindDNTag,
        iv: bindDNIV
      });
    }

    let bindPass = "";
    if (encryptedBindPass && bindPassIV && bindPassTag) {
      bindPass = decryptSymmetric({
        ciphertext: encryptedBindPass,
        key,
        tag: bindPassTag,
        iv: bindPassIV
      });
    }

    let caCert = "";
    if (encryptedCACert && caCertIV && caCertTag) {
      caCert = decryptSymmetric({
        ciphertext: encryptedCACert,
        key,
        tag: caCertTag,
        iv: caCertIV
      });
    }

    return {
      id: ldapConfig.id,
      organization: ldapConfig.orgId,
      isActive: ldapConfig.isActive,
      url: ldapConfig.url,
      bindDN,
      bindPass,
      searchBase: ldapConfig.searchBase,
      caCert
    };
  };

  const getLdapCfgWithPermissionCheck = async ({ actor, actorId, orgId, actorOrgId }: TOrgPermission) => {
    const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorOrgId);
    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Ldap);
    return getLdapCfg({
      orgId
    });
  };

  const bootLdap = async (organizationSlug: string) => {
    const organization = await orgDAL.findOne({ slug: organizationSlug });
    if (!organization) throw new BadRequestError({ message: "Org not found" });

    const ldapConfig = await getLdapCfg({
      orgId: organization.id,
      isActive: true
    });

    const opts = {
      server: {
        url: ldapConfig.url,
        bindDN: ldapConfig.bindDN,
        bindCredentials: ldapConfig.bindPass,
        searchBase: ldapConfig.searchBase,
        searchFilter: "(uid={{username}})",
        searchAttributes: ["uid", "uidNumber", "givenName", "sn", "mail"],
        ...(ldapConfig.caCert !== ""
          ? {
              tlsOptions: {
                ca: [ldapConfig.caCert]
              }
            }
          : {})
      },
      passReqToCallback: true
    };

    return { opts, ldapConfig };
  };

  const ldapLogin = async ({ externalId, username, firstName, lastName, emails, orgId, relayState }: TLdapLoginDTO) => {
    const appCfg = getConfig();
    let userAlias = await userAliasDAL.findOne({
      externalId,
      orgId,
      aliasType: AuthMethod.LDAP
    });

    const organization = await orgDAL.findOrgById(orgId);
    if (!organization) throw new BadRequestError({ message: "Org not found" });

    if (userAlias) {
      await userDAL.transaction(async (tx) => {
        const [orgMembership] = await orgDAL.findMembership({ userId: userAlias.userId }, { tx });
        if (!orgMembership) {
          await orgDAL.createMembership(
            {
              userId: userAlias.userId,
              orgId,
              role: OrgMembershipRole.Member,
              status: OrgMembershipStatus.Accepted
            },
            tx
          );
        } else if (orgMembership.status === OrgMembershipStatus.Invited) {
          await orgDAL.updateMembershipById(
            orgMembership.id,
            {
              status: OrgMembershipStatus.Accepted
            },
            tx
          );
        }
      });
    } else {
      userAlias = await userDAL.transaction(async (tx) => {
        const uniqueUsername = await normalizeUsername(username, userDAL);
        const newUser = await userDAL.create(
          {
            username: uniqueUsername,
            email: emails[0],
            firstName,
            lastName,
            authMethods: [AuthMethod.LDAP],
            isGhost: false
          },
          tx
        );
        const newUserAlias = await userAliasDAL.create(
          {
            userId: newUser.id,
            username,
            aliasType: AuthMethod.LDAP,
            externalId,
            emails,
            orgId
          },
          tx
        );

        await orgDAL.createMembership(
          {
            userId: newUser.id,
            orgId,
            role: OrgMembershipRole.Member,
            status: OrgMembershipStatus.Invited
          },
          tx
        );

        return newUserAlias;
      });
    }

    const user = await userDAL.findOne({ id: userAlias.userId });

    const isUserCompleted = Boolean(user.isAccepted);

    const providerAuthToken = jwt.sign(
      {
        authTokenType: AuthTokenType.PROVIDER_TOKEN,
        userId: user.id,
        username: user.username,
        firstName,
        lastName,
        organizationName: organization.name,
        organizationId: organization.id,
        authMethod: AuthMethod.LDAP,
        isUserCompleted,
        ...(relayState
          ? {
              callbackPort: (JSON.parse(relayState) as { callbackPort: string }).callbackPort
            }
          : {})
      },
      appCfg.AUTH_SECRET,
      {
        expiresIn: appCfg.JWT_PROVIDER_AUTH_LIFETIME
      }
    );

    return { isUserCompleted, providerAuthToken };
  };

  return {
    createLdapCfg,
    updateLdapCfg,
    getLdapCfgWithPermissionCheck,
    getLdapCfg,
    // getLdapPassportOpts,
    ldapLogin,
    bootLdap
  };
};
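For context, bootLdap returns an options object shaped for a passport LDAP strategy (the commented-out getLdapPassportOpts export points the same way). A rough sketch of how it might be wired up, assuming the passport-ldapauth package and its (req, user, done) verify callback; the attribute names on user follow the searchAttributes requested above, and none of this wiring is added by the diff itself:

import passport from "passport";
import LdapStrategy from "passport-ldapauth";

import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";

// Sketch: register a strategy for one organization and exchange a successful bind for a provider token.
const registerLdapStrategy = async (ldapService: TLdapConfigServiceFactory, organizationSlug: string) => {
  const { opts, ldapConfig } = await ldapService.bootLdap(organizationSlug);

  passport.use(
    new LdapStrategy(opts, (req, user, done) => {
      ldapService
        .ldapLogin({
          externalId: user.uid,
          username: user.uid,
          firstName: user.givenName,
          lastName: user.sn,
          emails: user.mail ? [user.mail] : [],
          orgId: ldapConfig.organization
        })
        .then(({ providerAuthToken, isUserCompleted }) => done(null, { providerAuthToken, isUserCompleted }))
        .catch((err) => done(err));
    })
  );
};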
backend/src/ee/services/ldap-config/ldap-config-types.ts (new file, 30 lines)
@@ -0,0 +1,30 @@
import { TOrgPermission } from "@app/lib/types";

export type TCreateLdapCfgDTO = {
  isActive: boolean;
  url: string;
  bindDN: string;
  bindPass: string;
  searchBase: string;
  caCert: string;
} & TOrgPermission;

export type TUpdateLdapCfgDTO = Partial<{
  isActive: boolean;
  url: string;
  bindDN: string;
  bindPass: string;
  searchBase: string;
  caCert: string;
}> &
  TOrgPermission;

export type TLdapLoginDTO = {
  externalId: string;
  username: string;
  firstName: string;
  lastName: string;
  emails: string[];
  orgId: string;
  relayState?: string;
};
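Since both config DTOs intersect TOrgPermission, every caller passes the actor context alongside the LDAP fields themselves. A small usage sketch with placeholder values (the actor enum value and IDs are illustrative, not taken from this diff):

import { ActorType } from "@app/services/auth/auth-type";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";

// All placeholder values; TOrgPermission contributes actor, actorId, orgId and actorOrgId.
const exampleCreateLdapCfg = (ldapService: TLdapConfigServiceFactory) =>
  ldapService.createLdapCfg({
    actor: ActorType.USER,
    actorId: "user-id-placeholder",
    actorOrgId: "org-id-placeholder",
    orgId: "org-id-placeholder",
    isActive: true,
    url: "ldaps://ldap.example.com:636",
    bindDN: "cn=infisical,ou=services,dc=example,dc=com",
    bindPass: "bind-password-placeholder",
    searchBase: "ou=people,dc=example,dc=com",
    caCert: ""
  });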
@@ -18,6 +18,8 @@ export const getDefaultOnPremFeatures = () => {
    auditLogs: false,
    auditLogsRetentionDays: 0,
    samlSSO: false,
    scim: false,
    ldap: false,
    status: null,
    trial_end: null,
    has_used_trial: true,

@@ -25,6 +25,7 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
  auditLogsRetentionDays: 0,
  samlSSO: false,
  scim: false,
  ldap: false,
  status: null,
  trial_end: null,
  has_used_trial: true,
@ -5,8 +5,8 @@
|
||||
// TODO(akhilmhdh): With tony find out the api structure and fill it here
|
||||
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import NodeCache from "node-cache";
|
||||
|
||||
import { TKeyStoreFactory } from "@app/keystore/keystore";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { logger } from "@app/lib/logger";
|
||||
@ -39,6 +39,7 @@ type TLicenseServiceFactoryDep = {
|
||||
orgDAL: Pick<TOrgDALFactory, "findOrgById">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
licenseDAL: TLicenseDALFactory;
|
||||
keyStore: Pick<TKeyStoreFactory, "setItemWithExpiry" | "getItem" | "deleteItem">;
|
||||
};
|
||||
|
||||
export type TLicenseServiceFactory = ReturnType<typeof licenseServiceFactory>;
|
||||
@ -46,12 +47,18 @@ export type TLicenseServiceFactory = ReturnType<typeof licenseServiceFactory>;
|
||||
const LICENSE_SERVER_CLOUD_LOGIN = "/api/auth/v1/license-server-login";
|
||||
const LICENSE_SERVER_ON_PREM_LOGIN = "/api/auth/v1/license-login";
|
||||
|
||||
const FEATURE_CACHE_KEY = (orgId: string, projectId?: string) => `${orgId}-${projectId || ""}`;
|
||||
export const licenseServiceFactory = ({ orgDAL, permissionService, licenseDAL }: TLicenseServiceFactoryDep) => {
|
||||
const LICENSE_SERVER_CLOUD_PLAN_TTL = 30; // 30 second
|
||||
const FEATURE_CACHE_KEY = (orgId: string) => `infisical-cloud-plan-${orgId}`;
|
||||
|
||||
export const licenseServiceFactory = ({
|
||||
orgDAL,
|
||||
permissionService,
|
||||
licenseDAL,
|
||||
keyStore
|
||||
}: TLicenseServiceFactoryDep) => {
|
||||
let isValidLicense = false;
|
||||
let instanceType = InstanceType.OnPrem;
|
||||
let onPremFeatures: TFeatureSet = getDefaultOnPremFeatures();
|
||||
const featureStore = new NodeCache({ stdTTL: 60 });
|
||||
|
||||
const appCfg = getConfig();
|
||||
const licenseServerCloudApi = setupLicenceRequestWithStore(
|
||||
@ -75,6 +82,7 @@ export const licenseServiceFactory = ({ orgDAL, permissionService, licenseDAL }:
|
||||
isValidLicense = true;
|
||||
return;
|
||||
}
|
||||
|
||||
if (appCfg.LICENSE_KEY) {
|
||||
const token = await licenseServerOnPremApi.refreshLicence();
|
||||
if (token) {
|
||||
@ -100,22 +108,21 @@ export const licenseServiceFactory = ({ orgDAL, permissionService, licenseDAL }:
|
||||
logger.info(`getPlan: attempting to fetch plan for [orgId=${orgId}] [projectId=${projectId}]`);
|
||||
try {
|
||||
if (instanceType === InstanceType.Cloud) {
|
||||
const cachedPlan = featureStore.get<TFeatureSet>(FEATURE_CACHE_KEY(orgId, projectId));
|
||||
if (cachedPlan) return cachedPlan;
|
||||
const cachedPlan = await keyStore.getItem(FEATURE_CACHE_KEY(orgId));
|
||||
if (cachedPlan) return JSON.parse(cachedPlan) as TFeatureSet;
|
||||
|
||||
const org = await orgDAL.findOrgById(orgId);
|
||||
if (!org) throw new BadRequestError({ message: "Org not found" });
|
||||
const {
|
||||
data: { currentPlan }
|
||||
} = await licenseServerCloudApi.request.get<{ currentPlan: TFeatureSet }>(
|
||||
`/api/license-server/v1/customers/${org.customerId}/cloud-plan`,
|
||||
{
|
||||
params: {
|
||||
workspaceId: projectId
|
||||
}
|
||||
}
|
||||
`/api/license-server/v1/customers/${org.customerId}/cloud-plan`
|
||||
);
|
||||
await keyStore.setItemWithExpiry(
|
||||
FEATURE_CACHE_KEY(org.id),
|
||||
LICENSE_SERVER_CLOUD_PLAN_TTL,
|
||||
JSON.stringify(currentPlan)
|
||||
);
|
||||
featureStore.set(FEATURE_CACHE_KEY(org.id, projectId), currentPlan);
|
||||
return currentPlan;
|
||||
}
|
||||
} catch (error) {
|
||||
@ -123,26 +130,31 @@ export const licenseServiceFactory = ({ orgDAL, permissionService, licenseDAL }:
|
||||
`getPlan: encountered an error when fetching pan [orgId=${orgId}] [projectId=${projectId}] [error]`,
|
||||
error
|
||||
);
|
||||
await keyStore.setItemWithExpiry(
|
||||
FEATURE_CACHE_KEY(orgId),
|
||||
LICENSE_SERVER_CLOUD_PLAN_TTL,
|
||||
JSON.stringify(onPremFeatures)
|
||||
);
|
||||
return onPremFeatures;
|
||||
}
|
||||
return onPremFeatures;
|
||||
};
|
||||
|
||||
const refreshPlan = async (orgId: string, projectId?: string) => {
|
||||
const refreshPlan = async (orgId: string) => {
|
||||
if (instanceType === InstanceType.Cloud) {
|
||||
featureStore.del(FEATURE_CACHE_KEY(orgId, projectId));
|
||||
await getPlan(orgId, projectId);
|
||||
await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId));
|
||||
await getPlan(orgId);
|
||||
}
|
||||
};
|
||||
|
||||
const generateOrgCustomerId = async (orgName: string, email: string) => {
|
||||
const generateOrgCustomerId = async (orgName: string, email?: string | null) => {
|
||||
if (instanceType === InstanceType.Cloud) {
|
||||
const {
|
||||
data: { customerId }
|
||||
} = await licenseServerCloudApi.request.post<{ customerId: string }>(
|
||||
"/api/license-server/v1/customers",
|
||||
{
|
||||
email,
|
||||
email: email ?? "",
|
||||
name: orgName
|
||||
},
|
||||
{ timeout: 5000, signal: AbortSignal.timeout(5000) }
|
||||
@ -166,7 +178,7 @@ export const licenseServiceFactory = ({ orgDAL, permissionService, licenseDAL }:
|
||||
quantity: count
|
||||
});
|
||||
}
|
||||
featureStore.del(orgId);
|
||||
await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId));
|
||||
} else if (instanceType === InstanceType.EnterpriseOnPrem) {
|
||||
const usedSeats = await licenseDAL.countOfOrgMembers(null);
|
||||
await licenseServerOnPremApi.request.patch(`/api/license/v1/license`, { usedSeats });
|
||||
@ -215,7 +227,7 @@ export const licenseServiceFactory = ({ orgDAL, permissionService, licenseDAL }:
|
||||
`/api/license-server/v1/customers/${organization.customerId}/session/trial`,
|
||||
{ success_url }
|
||||
);
|
||||
featureStore.del(FEATURE_CACHE_KEY(orgId));
|
||||
await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId));
|
||||
return { url };
|
||||
};
|
||||
|
||||
|
@ -26,6 +26,7 @@ export type TFeatureSet = {
|
||||
auditLogsRetentionDays: 0;
|
||||
samlSSO: false;
|
||||
scim: false;
|
||||
ldap: false;
|
||||
status: null;
|
||||
trial_end: null;
|
||||
has_used_trial: true;
|
||||
|
@@ -17,6 +17,7 @@ export enum OrgPermissionSubjects {
  IncidentAccount = "incident-contact",
  Sso = "sso",
  Scim = "scim",
  Ldap = "ldap",
  Billing = "billing",
  SecretScanning = "secret-scanning",
  Identity = "identity"

@@ -31,6 +32,7 @@ export type OrgPermissionSet =
  | [OrgPermissionActions, OrgPermissionSubjects.IncidentAccount]
  | [OrgPermissionActions, OrgPermissionSubjects.Sso]
  | [OrgPermissionActions, OrgPermissionSubjects.Scim]
  | [OrgPermissionActions, OrgPermissionSubjects.Ldap]
  | [OrgPermissionActions, OrgPermissionSubjects.SecretScanning]
  | [OrgPermissionActions, OrgPermissionSubjects.Billing]
  | [OrgPermissionActions, OrgPermissionSubjects.Identity];

@@ -76,6 +78,11 @@ const buildAdminPermission = () => {
  can(OrgPermissionActions.Edit, OrgPermissionSubjects.Scim);
  can(OrgPermissionActions.Delete, OrgPermissionSubjects.Scim);

  can(OrgPermissionActions.Read, OrgPermissionSubjects.Ldap);
  can(OrgPermissionActions.Create, OrgPermissionSubjects.Ldap);
  can(OrgPermissionActions.Edit, OrgPermissionSubjects.Ldap);
  can(OrgPermissionActions.Delete, OrgPermissionSubjects.Ldap);

  can(OrgPermissionActions.Read, OrgPermissionSubjects.Billing);
  can(OrgPermissionActions.Create, OrgPermissionSubjects.Billing);
  can(OrgPermissionActions.Edit, OrgPermissionSubjects.Billing);
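The new Ldap subject slots into the same CASL ability checks used elsewhere in this diff; the service enforces it with ForbiddenError.from(permission).throwUnlessCan(...). A non-throwing variant for illustration only, assuming it lives alongside org-permission.ts so OrgPermissionSet and friends are in scope (the exact ability type alias in the codebase may differ from MongoAbility):

import { MongoAbility } from "@casl/ability";

// Sketch: answer "may this actor manage LDAP settings?" without throwing.
const canManageLdap = (permission: MongoAbility<OrgPermissionSet>) =>
  permission.can(OrgPermissionActions.Read, OrgPermissionSubjects.Ldap) &&
  permission.can(OrgPermissionActions.Edit, OrgPermissionSubjects.Ldap);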
@ -5,6 +5,7 @@ import {
|
||||
OrgMembershipRole,
|
||||
OrgMembershipStatus,
|
||||
SecretKeyEncoding,
|
||||
TableName,
|
||||
TSamlConfigs,
|
||||
TSamlConfigsUpdate
|
||||
} from "@app/db/schemas";
|
||||
@ -31,7 +32,7 @@ import { TCreateSamlCfgDTO, TGetSamlCfgDTO, TSamlLoginDTO, TUpdateSamlCfgDTO } f
|
||||
|
||||
type TSamlConfigServiceFactoryDep = {
|
||||
samlConfigDAL: TSamlConfigDALFactory;
|
||||
userDAL: Pick<TUserDALFactory, "create" | "findUserByEmail" | "transaction" | "updateById">;
|
||||
userDAL: Pick<TUserDALFactory, "create" | "findOne" | "transaction" | "updateById">;
|
||||
orgDAL: Pick<
|
||||
TOrgDALFactory,
|
||||
"createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById"
|
||||
@ -69,7 +70,7 @@ export const samlConfigServiceFactory = ({
|
||||
if (!plan.samlSSO)
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to update SAML SSO configuration due to plan restriction. Upgrade plan to update SSO configuration."
|
||||
"Failed to create SAML SSO configuration due to plan restriction. Upgrade plan to create SSO configuration."
|
||||
});
|
||||
|
||||
const orgBot = await orgBotDAL.transaction(async (tx) => {
|
||||
@ -122,7 +123,6 @@ export const samlConfigServiceFactory = ({
|
||||
|
||||
const { ciphertext: encryptedEntryPoint, iv: entryPointIV, tag: entryPointTag } = encryptSymmetric(entryPoint, key);
|
||||
const { ciphertext: encryptedIssuer, iv: issuerIV, tag: issuerTag } = encryptSymmetric(issuer, key);
|
||||
|
||||
const { ciphertext: encryptedCert, iv: certIV, tag: certTag } = encryptSymmetric(cert, key);
|
||||
const samlConfig = await samlConfigDAL.create({
|
||||
orgId,
|
||||
@ -300,16 +300,30 @@ export const samlConfigServiceFactory = ({
|
||||
};
|
||||
};
|
||||
|
||||
const samlLogin = async ({ firstName, email, lastName, authProvider, orgId, relayState }: TSamlLoginDTO) => {
|
||||
const samlLogin = async ({
|
||||
username,
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
authProvider,
|
||||
orgId,
|
||||
relayState
|
||||
}: TSamlLoginDTO) => {
|
||||
const appCfg = getConfig();
|
||||
let user = await userDAL.findUserByEmail(email);
|
||||
let user = await userDAL.findOne({ username });
|
||||
|
||||
const organization = await orgDAL.findOrgById(orgId);
|
||||
if (!organization) throw new BadRequestError({ message: "Org not found" });
|
||||
|
||||
if (user) {
|
||||
await userDAL.transaction(async (tx) => {
|
||||
const [orgMembership] = await orgDAL.findMembership({ userId: user.id, orgId }, { tx });
|
||||
const [orgMembership] = await orgDAL.findMembership(
|
||||
{
|
||||
userId: user.id,
|
||||
[`${TableName.OrgMembership}.orgId` as "id"]: orgId
|
||||
},
|
||||
{ tx }
|
||||
);
|
||||
if (!orgMembership) {
|
||||
await orgDAL.createMembership(
|
||||
{
|
||||
@ -335,6 +349,7 @@ export const samlConfigServiceFactory = ({
|
||||
user = await userDAL.transaction(async (tx) => {
|
||||
const newUser = await userDAL.create(
|
||||
{
|
||||
username,
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
@ -357,7 +372,7 @@ export const samlConfigServiceFactory = ({
|
||||
{
|
||||
authTokenType: AuthTokenType.PROVIDER_TOKEN,
|
||||
userId: user.id,
|
||||
email: user.email,
|
||||
username: user.username,
|
||||
firstName,
|
||||
lastName,
|
||||
organizationName: organization.name,
|
||||
|
@ -4,7 +4,8 @@ import { ActorType } from "@app/services/auth/auth-type";
|
||||
export enum SamlProviders {
|
||||
OKTA_SAML = "okta-saml",
|
||||
AZURE_SAML = "azure-saml",
|
||||
JUMPCLOUD_SAML = "jumpcloud-saml"
|
||||
JUMPCLOUD_SAML = "jumpcloud-saml",
|
||||
GOOGLE_SAML = "google-saml"
|
||||
}
|
||||
|
||||
export type TCreateSamlCfgDTO = {
|
||||
@ -36,7 +37,8 @@ export type TGetSamlCfgDTO =
|
||||
};
|
||||
|
||||
export type TSamlLoginDTO = {
|
||||
email: string;
|
||||
username: string;
|
||||
email?: string;
|
||||
firstName: string;
|
||||
lastName?: string;
|
||||
authProvider: string;
|
||||
|
@ -20,34 +20,38 @@ export const buildScimUserList = ({
|
||||
|
||||
export const buildScimUser = ({
|
||||
userId,
|
||||
username,
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
email,
|
||||
active
|
||||
}: {
|
||||
userId: string;
|
||||
username: string;
|
||||
email?: string | null;
|
||||
firstName: string;
|
||||
lastName: string;
|
||||
email: string;
|
||||
active: boolean;
|
||||
}): TScimUser => {
|
||||
return {
|
||||
const scimUser = {
|
||||
schemas: ["urn:ietf:params:scim:schemas:core:2.0:User"],
|
||||
id: userId,
|
||||
userName: email,
|
||||
userName: username,
|
||||
displayName: `${firstName} ${lastName}`,
|
||||
name: {
|
||||
givenName: firstName,
|
||||
middleName: null,
|
||||
familyName: lastName
|
||||
},
|
||||
emails: [
|
||||
{
|
||||
primary: true,
|
||||
value: email,
|
||||
type: "work"
|
||||
}
|
||||
],
|
||||
emails: email
|
||||
? [
|
||||
{
|
||||
primary: true,
|
||||
value: email,
|
||||
type: "work"
|
||||
}
|
||||
]
|
||||
: [],
|
||||
active,
|
||||
groups: [],
|
||||
meta: {
|
||||
@ -55,4 +59,6 @@ export const buildScimUser = ({
|
||||
location: null
|
||||
}
|
||||
};
|
||||
|
||||
return scimUser;
|
||||
};
|
||||
|
@ -1,7 +1,7 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import jwt from "jsonwebtoken";
|
||||
|
||||
import { OrgMembershipRole, OrgMembershipStatus } from "@app/db/schemas";
|
||||
import { OrgMembershipRole, OrgMembershipStatus, TableName } from "@app/db/schemas";
|
||||
import { TScimDALFactory } from "@app/ee/services/scim/scim-dal";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, ScimRequestError, UnauthorizedError } from "@app/lib/errors";
|
||||
@ -146,15 +146,16 @@ export const scimServiceFactory = ({
|
||||
|
||||
const users = await orgDAL.findMembership(
|
||||
{
|
||||
orgId,
|
||||
[`${TableName.OrgMembership}.orgId` as "id"]: orgId,
|
||||
...parseFilter(filter)
|
||||
},
|
||||
findOpts
|
||||
);
|
||||
|
||||
const scimUsers = users.map(({ userId, firstName, lastName, email }) =>
|
||||
const scimUsers = users.map(({ userId, username, firstName, lastName, email }) =>
|
||||
buildScimUser({
|
||||
userId: userId ?? "",
|
||||
username,
|
||||
firstName: firstName ?? "",
|
||||
lastName: lastName ?? "",
|
||||
email,
|
||||
@ -173,7 +174,7 @@ export const scimServiceFactory = ({
|
||||
const [membership] = await orgDAL
|
||||
.findMembership({
|
||||
userId,
|
||||
orgId
|
||||
[`${TableName.OrgMembership}.orgId` as "id"]: orgId
|
||||
})
|
||||
.catch(() => {
|
||||
throw new ScimRequestError({
|
||||
@ -196,14 +197,15 @@ export const scimServiceFactory = ({
|
||||
|
||||
return buildScimUser({
|
||||
userId: membership.userId as string,
|
||||
username: membership.username,
|
||||
email: membership.email ?? "",
|
||||
firstName: membership.firstName as string,
|
||||
lastName: membership.lastName as string,
|
||||
email: membership.email,
|
||||
active: true
|
||||
});
|
||||
};
|
||||
|
||||
const createScimUser = async ({ firstName, lastName, email, orgId }: TCreateScimUserDTO) => {
|
||||
const createScimUser = async ({ username, email, firstName, lastName, orgId }: TCreateScimUserDTO) => {
|
||||
const org = await orgDAL.findById(orgId);
|
||||
|
||||
if (!org)
|
||||
@ -219,12 +221,18 @@ export const scimServiceFactory = ({
|
||||
});
|
||||
|
||||
let user = await userDAL.findOne({
|
||||
email
|
||||
username
|
||||
});
|
||||
|
||||
if (user) {
|
||||
await userDAL.transaction(async (tx) => {
|
||||
const [orgMembership] = await orgDAL.findMembership({ userId: user.id, orgId }, { tx });
|
||||
const [orgMembership] = await orgDAL.findMembership(
|
||||
{
|
||||
userId: user.id,
|
||||
[`${TableName.OrgMembership}.orgId` as "id"]: orgId
|
||||
},
|
||||
{ tx }
|
||||
);
|
||||
if (orgMembership)
|
||||
throw new ScimRequestError({
|
||||
detail: "User already exists in the database",
|
||||
@ -248,6 +256,7 @@ export const scimServiceFactory = ({
|
||||
user = await userDAL.transaction(async (tx) => {
|
||||
const newUser = await userDAL.create(
|
||||
{
|
||||
username,
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
@ -272,21 +281,25 @@ export const scimServiceFactory = ({
|
||||
}
|
||||
|
||||
const appCfg = getConfig();
|
||||
await smtpService.sendMail({
|
||||
template: SmtpTemplates.ScimUserProvisioned,
|
||||
subjectLine: "Infisical organization invitation",
|
||||
recipients: [email],
|
||||
substitutions: {
|
||||
organizationName: org.name,
|
||||
callback_url: `${appCfg.SITE_URL}/api/v1/sso/redirect/saml2/organizations/${org.slug}`
|
||||
}
|
||||
});
|
||||
|
||||
if (email) {
|
||||
await smtpService.sendMail({
|
||||
template: SmtpTemplates.ScimUserProvisioned,
|
||||
subjectLine: "Infisical organization invitation",
|
||||
recipients: [email],
|
||||
substitutions: {
|
||||
organizationName: org.name,
|
||||
callback_url: `${appCfg.SITE_URL}/api/v1/sso/redirect/saml2/organizations/${org.slug}`
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return buildScimUser({
|
||||
userId: user.id,
|
||||
username: user.username,
|
||||
firstName: user.firstName as string,
|
||||
lastName: user.lastName as string,
|
||||
email: user.email,
|
||||
email: user.email ?? "",
|
||||
active: true
|
||||
});
|
||||
};
|
||||
@ -295,7 +308,7 @@ export const scimServiceFactory = ({
|
||||
const [membership] = await orgDAL
|
||||
.findMembership({
|
||||
userId,
|
||||
orgId
|
||||
[`${TableName.OrgMembership}.orgId` as "id"]: orgId
|
||||
})
|
||||
.catch(() => {
|
||||
throw new ScimRequestError({
|
||||
@ -342,9 +355,10 @@ export const scimServiceFactory = ({
|
||||
|
||||
return buildScimUser({
|
||||
userId: membership.userId as string,
|
||||
username: membership.username,
|
||||
email: membership.email,
|
||||
firstName: membership.firstName as string,
|
||||
lastName: membership.lastName as string,
|
||||
email: membership.email,
|
||||
active
|
||||
});
|
||||
};
|
||||
@ -353,7 +367,7 @@ export const scimServiceFactory = ({
|
||||
const [membership] = await orgDAL
|
||||
.findMembership({
|
||||
userId,
|
||||
orgId
|
||||
[`${TableName.OrgMembership}.orgId` as "id"]: orgId
|
||||
})
|
||||
.catch(() => {
|
||||
throw new ScimRequestError({
|
||||
@ -387,9 +401,10 @@ export const scimServiceFactory = ({
|
||||
|
||||
return buildScimUser({
|
||||
userId: membership.userId as string,
|
||||
username: membership.username,
|
||||
email: membership.email,
|
||||
firstName: membership.firstName as string,
|
||||
lastName: membership.lastName as string,
|
||||
email: membership.email,
|
||||
active
|
||||
});
|
||||
};
|
||||
|
@@ -32,7 +32,8 @@ export type TGetScimUserDTO = {
};

export type TCreateScimUserDTO = {
  email: string;
  username: string;
  email?: string;
  firstName: string;
  lastName: string;
  orgId: string;
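The reshaped TCreateScimUserDTO pairs with the router change at the top of this section, where the SCIM userName now feeds username and the primary email travels separately. A sketch of that mapping from a SCIM 2.0 create payload; the primaryEmail derivation is an assumption, since the router code computing it is not shown here:

// Hypothetical mapping helper mirroring the createScimUser call shown earlier.
type ScimCreateBody = {
  userName: string;
  name: { givenName: string; familyName: string };
  emails?: { primary?: boolean; value: string; type?: string }[];
};

const toCreateScimUserDTO = (body: ScimCreateBody, orgId: string) => {
  const primaryEmail = body.emails?.find((e) => e.primary)?.value;
  return {
    username: body.userName,
    email: primaryEmail,
    firstName: body.name.givenName,
    lastName: body.name.familyName,
    orgId
  };
};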
@ -12,9 +12,11 @@ import { groupBy, pick, unique } from "@app/lib/fn";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { TSecretDALFactory } from "@app/services/secret/secret-dal";
|
||||
import { TSecretQueueFactory } from "@app/services/secret/secret-queue";
|
||||
import { TSecretServiceFactory } from "@app/services/secret/secret-service";
|
||||
import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal";
|
||||
import { TSecretVersionTagDALFactory } from "@app/services/secret/secret-version-tag-dal";
|
||||
import { TSecretBlindIndexDALFactory } from "@app/services/secret-blind-index/secret-blind-index-dal";
|
||||
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
|
||||
import { TSecretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal";
|
||||
@ -44,10 +46,12 @@ type TSecretApprovalRequestServiceFactoryDep = {
|
||||
secretApprovalRequestSecretDAL: TSecretApprovalRequestSecretDALFactory;
|
||||
secretApprovalRequestReviewerDAL: TSecretApprovalRequestReviewerDALFactory;
|
||||
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findById" | "findSecretPathByFolderIds">;
|
||||
secretTagDAL: Pick<TSecretTagDALFactory, "findManyTagsById">;
|
||||
secretDAL: TSecretDALFactory;
|
||||
secretTagDAL: Pick<TSecretTagDALFactory, "findManyTagsById" | "saveTagsToSecret" | "deleteTagsManySecret">;
|
||||
secretBlindIndexDAL: Pick<TSecretBlindIndexDALFactory, "findOne">;
|
||||
snapshotService: Pick<TSecretSnapshotServiceFactory, "performSnapshot">;
|
||||
secretVersionDAL: Pick<TSecretVersionDALFactory, "findLatestVersionMany">;
|
||||
secretVersionDAL: Pick<TSecretVersionDALFactory, "findLatestVersionMany" | "insertMany">;
|
||||
secretVersionTagDAL: Pick<TSecretVersionTagDALFactory, "insertMany">;
|
||||
projectDAL: Pick<TProjectDALFactory, "checkProjectUpgradeStatus">;
|
||||
secretService: Pick<
|
||||
TSecretServiceFactory,
|
||||
@ -64,8 +68,10 @@ export type TSecretApprovalRequestServiceFactory = ReturnType<typeof secretAppro
|
||||
|
||||
export const secretApprovalRequestServiceFactory = ({
|
||||
secretApprovalRequestDAL,
|
||||
secretDAL,
|
||||
folderDAL,
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL,
|
||||
secretApprovalRequestReviewerDAL,
|
||||
secretApprovalRequestSecretDAL,
|
||||
secretBlindIndexDAL,
|
||||
@ -335,7 +341,11 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
tags: el?.tags.map(({ id }) => id),
|
||||
version: 1,
|
||||
type: SecretType.Shared
|
||||
}))
|
||||
})),
|
||||
secretDAL,
|
||||
secretVersionDAL,
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL
|
||||
})
|
||||
: [];
|
||||
const updatedSecrets = secretUpdationCommits.length
|
||||
@ -367,7 +377,11 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
"secretBlindIndex"
|
||||
])
|
||||
}
|
||||
}))
|
||||
})),
|
||||
secretDAL,
|
||||
secretVersionDAL,
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL
|
||||
})
|
||||
: [];
|
||||
const deletedSecret = secretDeletionCommits.length
|
||||
@ -455,7 +469,8 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
inputSecrets: createdSecrets,
|
||||
folderId,
|
||||
isNew: true,
|
||||
blindIndexCfg
|
||||
blindIndexCfg,
|
||||
secretDAL
|
||||
});
|
||||
|
||||
commits.push(
|
||||
@ -482,7 +497,8 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
inputSecrets: updatedSecrets,
|
||||
folderId,
|
||||
isNew: false,
|
||||
blindIndexCfg
|
||||
blindIndexCfg,
|
||||
secretDAL
|
||||
});
|
||||
|
||||
// now find any secret that needs to update its name
|
||||
@ -492,7 +508,8 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
inputSecrets: nameUpdatedSecrets,
|
||||
folderId,
|
||||
isNew: true,
|
||||
blindIndexCfg
|
||||
blindIndexCfg,
|
||||
secretDAL
|
||||
});
|
||||
|
||||
const secsGroupedByBlindIndex = groupBy(secretsToBeUpdated, (el) => el.secretBlindIndex as string);
|
||||
@ -531,7 +548,8 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
inputSecrets: deletedSecrets,
|
||||
folderId,
|
||||
isNew: false,
|
||||
blindIndexCfg
|
||||
blindIndexCfg,
|
||||
secretDAL
|
||||
});
|
||||
const secretsGroupedByBlindIndex = groupBy(secrets, (i) => {
|
||||
if (!i.secretBlindIndex) throw new BadRequestError({ message: "Missing secret blind index" });
|
||||
|
@ -64,7 +64,7 @@ export const secretScanningQueueFactory = ({
|
||||
orgId: organizationId,
|
||||
role: OrgMembershipRole.Admin
|
||||
});
|
||||
return adminsOfWork.map((userObject) => userObject.email);
|
||||
return adminsOfWork.filter((userObject) => userObject.email).map((userObject) => userObject.email as string);
|
||||
};
|
||||
|
||||
queueService.start(QueueName.SecretPushEventScan, async (job) => {
|
||||
@ -149,7 +149,7 @@ export const secretScanningQueueFactory = ({
|
||||
await smtpService.sendMail({
|
||||
template: SmtpTemplates.SecretLeakIncident,
|
||||
subjectLine: `Incident alert: leaked secrets found in Github repository ${repository.fullName}`,
|
||||
recipients: adminEmails,
|
||||
recipients: adminEmails.filter((email) => email).map((email) => email),
|
||||
substitutions: {
|
||||
numberOfSecrets: Object.keys(allFindingsByFingerprint).length,
|
||||
pusher_email: pusher.email,
|
||||
@ -221,7 +221,7 @@ export const secretScanningQueueFactory = ({
|
||||
await smtpService.sendMail({
|
||||
template: SmtpTemplates.SecretLeakIncident,
|
||||
subjectLine: `Incident alert: leaked secrets found in Github repository ${repository.fullName}`,
|
||||
recipients: adminEmails,
|
||||
recipients: adminEmails.filter((email) => email).map((email) => email),
|
||||
substitutions: {
|
||||
numberOfSecrets: findings.length
|
||||
}
|
||||
|
@@ -5,7 +5,7 @@ import { ActorType } from "@app/services/auth/auth-type";
// this is a unique id for sending posthog event
export const getTelemetryDistinctId = (req: FastifyRequest) => {
  if (req.auth.actor === ActorType.USER) {
    return req.auth.user.email;
    return req.auth.user.username;
  }
  if (req.auth.actor === ActorType.IDENTITY) {
    return `identity-${req.auth.identityId}`;
@ -44,6 +44,7 @@ export const injectAuditLogInfo = fp(async (server: FastifyZodProvider) => {
|
||||
type: ActorType.USER,
|
||||
metadata: {
|
||||
email: req.auth.user.email,
|
||||
username: req.auth.user.username,
|
||||
userId: req.permission.id
|
||||
}
|
||||
};
|
||||
|
@ -5,6 +5,8 @@ import { registerV1EERoutes } from "@app/ee/routes/v1";
|
||||
import { auditLogDALFactory } from "@app/ee/services/audit-log/audit-log-dal";
|
||||
import { auditLogQueueServiceFactory } from "@app/ee/services/audit-log/audit-log-queue";
|
||||
import { auditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
|
||||
import { ldapConfigDALFactory } from "@app/ee/services/ldap-config/ldap-config-dal";
|
||||
import { ldapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
|
||||
import { licenseDALFactory } from "@app/ee/services/license/license-dal";
|
||||
import { licenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { permissionDALFactory } from "@app/ee/services/permission/permission-dal";
|
||||
@ -102,6 +104,7 @@ import { telemetryQueueServiceFactory } from "@app/services/telemetry/telemetry-
|
||||
import { telemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
|
||||
import { userDALFactory } from "@app/services/user/user-dal";
|
||||
import { userServiceFactory } from "@app/services/user/user-service";
|
||||
import { userAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
|
||||
import { webhookDALFactory } from "@app/services/webhook/webhook-dal";
|
||||
import { webhookServiceFactory } from "@app/services/webhook/webhook-service";
|
||||
|
||||
@ -126,6 +129,7 @@ export const registerRoutes = async (
|
||||
|
||||
// db layers
|
||||
const userDAL = userDALFactory(db);
|
||||
const userAliasDAL = userAliasDALFactory(db);
|
||||
const authDAL = authDALFactory(db);
|
||||
const authTokenDAL = tokenDALFactory(db);
|
||||
const orgDAL = orgDALFactory(db);
|
||||
@ -166,12 +170,13 @@ export const registerRoutes = async (
|
||||
|
||||
const auditLogDAL = auditLogDALFactory(db);
|
||||
const trustedIpDAL = trustedIpDALFactory(db);
|
||||
const scimDAL = scimDALFactory(db);
|
||||
const telemetryDAL = telemetryDALFactory(db);
|
||||
|
||||
// ee db layer ops
|
||||
const permissionDAL = permissionDALFactory(db);
|
||||
const samlConfigDAL = samlConfigDALFactory(db);
|
||||
const scimDAL = scimDALFactory(db);
|
||||
const ldapConfigDAL = ldapConfigDALFactory(db);
|
||||
const sapApproverDAL = secretApprovalPolicyApproverDALFactory(db);
|
||||
const secretApprovalPolicyDAL = secretApprovalPolicyDALFactory(db);
|
||||
const secretApprovalRequestDAL = secretApprovalRequestDALFactory(db);
|
||||
@ -194,7 +199,7 @@ export const registerRoutes = async (
|
||||
projectRoleDAL,
|
||||
serviceTokenDAL
|
||||
});
|
||||
const licenseService = licenseServiceFactory({ permissionService, orgDAL, licenseDAL });
|
||||
const licenseService = licenseServiceFactory({ permissionService, orgDAL, licenseDAL, keyStore });
|
||||
const trustedIpService = trustedIpServiceFactory({
|
||||
licenseService,
|
||||
projectDAL,
|
||||
@ -235,6 +240,16 @@ export const registerRoutes = async (
|
||||
smtpService
|
||||
});
|
||||
|
||||
const ldapService = ldapConfigServiceFactory({
|
||||
ldapConfigDAL,
|
||||
orgDAL,
|
||||
orgBotDAL,
|
||||
userDAL,
|
||||
userAliasDAL,
|
||||
permissionService,
|
||||
licenseService
|
||||
});
|
||||
|
||||
const telemetryService = telemetryServiceFactory({
|
||||
keyStore,
|
||||
licenseService
|
||||
@ -263,6 +278,8 @@ export const registerRoutes = async (
|
||||
incidentContactDAL,
|
||||
tokenService,
|
||||
projectDAL,
|
||||
projectMembershipDAL,
|
||||
projectKeyDAL,
|
||||
smtpService,
|
||||
userDAL,
|
||||
orgBotDAL
|
||||
@ -419,7 +436,12 @@ export const registerRoutes = async (
|
||||
orgDAL,
|
||||
projectMembershipDAL,
|
||||
smtpService,
|
||||
projectDAL
|
||||
projectDAL,
|
||||
projectBotDAL,
|
||||
secretVersionDAL,
|
||||
secretBlindIndexDAL,
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL
|
||||
});
|
||||
const secretBlindIndexService = secretBlindIndexServiceFactory({
|
||||
permissionService,
|
||||
@ -443,6 +465,7 @@ export const registerRoutes = async (
|
||||
const sarService = secretApprovalRequestServiceFactory({
|
||||
permissionService,
|
||||
folderDAL,
|
||||
secretDAL,
|
||||
secretTagDAL,
|
||||
secretApprovalRequestSecretDAL: sarSecretDAL,
|
||||
secretApprovalRequestReviewerDAL: sarReviewerDAL,
|
||||
@ -452,6 +475,7 @@ export const registerRoutes = async (
|
||||
secretApprovalRequestDAL,
|
||||
secretService,
|
||||
snapshotService,
|
||||
secretVersionTagDAL,
|
||||
secretQueueService
|
||||
});
|
||||
const secretRotationQueue = secretRotationQueueFactory({
|
||||
@ -552,6 +576,7 @@ export const registerRoutes = async (
|
||||
secretRotation: secretRotationService,
|
||||
snapshot: snapshotService,
|
||||
saml: samlService,
|
||||
ldap: ldapService,
|
||||
auditLog: auditLogService,
|
||||
secretScanning: secretScanningService,
|
||||
license: licenseService,
|
||||
|
@ -92,9 +92,10 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
|
||||
|
||||
await server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.AdminInit,
|
||||
distinctId: user.user.email,
|
||||
distinctId: user.user.username ?? "",
|
||||
properties: {
|
||||
email: user.user.email,
|
||||
username: user.user.username,
|
||||
email: user.user.email ?? "",
|
||||
lastName: user.user.lastName || "",
|
||||
firstName: user.user.firstName || ""
|
||||
}
|
||||
|
@ -39,11 +39,12 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { identityUa, accessToken, identityAccessToken, validClientSecretInfo } =
|
||||
const { identityUa, accessToken, identityAccessToken, validClientSecretInfo, identityMembershipOrg } =
|
||||
await server.services.identityUa.login(req.body.clientId, req.body.clientSecret, req.realIp);
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
orgId: identityMembershipOrg?.orgId,
|
||||
event: {
|
||||
type: EventType.LOGIN_IDENTITY_UNIVERSAL_AUTH,
|
||||
metadata: {
|
||||
|
@ -513,6 +513,37 @@ export const registerIntegrationAuthRouter = async (server: FastifyZodProvider)
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/:integrationAuthId/heroku/pipelines",
|
||||
method: "GET",
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
params: z.object({
|
||||
integrationAuthId: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
pipelines: z
|
||||
.object({
|
||||
app: z.object({ appId: z.string() }),
|
||||
stage: z.string(),
|
||||
pipeline: z.object({ name: z.string(), pipelineId: z.string() })
|
||||
})
|
||||
.array()
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const pipelines = await server.services.integrationAuth.getHerokuPipelines({
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
actorOrgId: req.permission.orgId,
|
||||
id: req.params.integrationAuthId
|
||||
});
|
||||
return { pipelines };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/:integrationAuthId/railway/environments",
|
||||
method: "GET",
|
||||
|
@ -32,6 +32,7 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
|
||||
.object({
|
||||
secretPrefix: z.string().optional(),
|
||||
secretSuffix: z.string().optional(),
|
||||
initialSyncBehavior: z.string().optional(),
|
||||
secretGCPLabel: z
|
||||
.object({
|
||||
labelName: z.string(),
|
||||
|
@ -58,6 +58,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
|
||||
users: OrgMembershipsSchema.merge(
|
||||
z.object({
|
||||
user: UsersSchema.pick({
|
||||
username: true,
|
||||
email: true,
|
||||
firstName: true,
|
||||
lastName: true,
|
||||
@ -87,11 +88,12 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
|
||||
schema: {
|
||||
params: z.object({ organizationId: z.string().trim() }),
|
||||
body: z.object({
|
||||
name: z.string().trim().optional(),
|
||||
name: z.string().trim().max(64, { message: "Name must be 64 or fewer characters" }).optional(),
|
||||
slug: z
|
||||
.string()
|
||||
.trim()
|
||||
.regex(/^[a-zA-Z0-9-]+$/, "Name must only contain alphanumeric characters or hyphens")
|
||||
.max(64, { message: "Slug must be 64 or fewer characters" })
|
||||
.regex(/^[a-zA-Z0-9-]+$/, "Slug must only contain alphanumeric characters or hyphens")
|
||||
.optional(),
|
||||
authEnforced: z.boolean().optional(),
|
||||
scimEnabled: z.boolean().optional()
|
||||
|
@ -63,6 +63,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
users: ProjectMembershipsSchema.merge(
|
||||
z.object({
|
||||
user: UsersSchema.pick({
|
||||
username: true,
|
||||
email: true,
|
||||
firstName: true,
|
||||
lastName: true,
|
||||
@ -222,7 +223,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
workspaceId: z.string().trim()
|
||||
}),
|
||||
body: z.object({
|
||||
name: z.string().trim().optional(),
|
||||
name: z.string().trim().max(64, { message: "Name must be 64 or fewer characters" }).optional(),
|
||||
autoCapitalization: z.boolean().optional()
|
||||
}),
|
||||
response: {
|
||||
|
@ -120,7 +120,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/:folderId",
|
||||
url: "/:folderIdOrName",
|
||||
method: "DELETE",
|
||||
schema: {
|
||||
description: "Delete a folder",
|
||||
@ -131,7 +131,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
folderId: z.string()
|
||||
folderIdOrName: z.string()
|
||||
}),
|
||||
body: z.object({
|
||||
workspaceId: z.string().trim(),
|
||||
@ -155,7 +155,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body,
|
||||
projectId: req.body.workspaceId,
|
||||
id: req.params.folderId,
|
||||
idOrName: req.params.folderIdOrName,
|
||||
path
|
||||
});
|
||||
await server.services.auditLog.createAuditLog({
|
||||
|
@ -24,6 +24,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
|
||||
users: OrgMembershipsSchema.merge(
|
||||
z.object({
|
||||
user: UsersSchema.pick({
|
||||
username: true,
|
||||
email: true,
|
||||
firstName: true,
|
||||
lastName: true,
|
||||
@ -179,11 +180,12 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
|
||||
handler: async (req) => {
|
||||
if (req.auth.actor !== ActorType.USER) return;
|
||||
|
||||
const organization = await server.services.org.createOrganization(
|
||||
req.permission.id,
|
||||
req.auth.user.email,
|
||||
req.body.name
|
||||
);
|
||||
const organization = await server.services.org.createOrganization({
|
||||
userId: req.permission.id,
|
||||
userEmail: req.auth.user.email,
|
||||
orgName: req.body.name
|
||||
});
|
||||
|
||||
return { organization };
|
||||
}
|
||||
});
|
||||
|
@ -14,7 +14,8 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
|
||||
projectId: z.string().describe("The ID of the project.")
|
||||
}),
|
||||
body: z.object({
|
||||
emails: z.string().email().array().describe("Emails of the users to add to the project.")
|
||||
emails: z.string().email().array().default([]).describe("Emails of the users to add to the project."),
|
||||
usernames: z.string().array().default([]).describe("Usernames of the users to add to the project.")
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@ -28,7 +29,8 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
|
||||
projectId: req.params.projectId,
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
emails: req.body.emails
|
||||
emails: req.body.emails,
|
||||
usernames: req.body.usernames
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
@ -57,7 +59,8 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
|
||||
}),
|
||||
|
||||
body: z.object({
|
||||
emails: z.string().email().array().describe("Emails of the users to remove from the project.")
|
||||
emails: z.string().email().array().default([]).describe("Emails of the users to remove from the project."),
|
||||
usernames: z.string().array().default([]).describe("Usernames of the users to remove from the project.")
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@ -72,7 +75,8 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
|
||||
actor: req.permission.type,
|
||||
actorOrgId: req.permission.orgId,
|
||||
projectId: req.params.projectId,
|
||||
emails: req.body.emails
|
||||
emails: req.body.emails,
|
||||
usernames: req.body.usernames
|
||||
});
|
||||
|
||||
for (const membership of memberships) {
|
||||
|
@ -12,7 +12,7 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
schema: {
|
||||
body: z.object({
|
||||
email: z.string().email().trim(),
|
||||
email: z.string().trim(),
|
||||
providerAuthToken: z.string().trim().optional(),
|
||||
clientPublicKey: z.string().trim()
|
||||
}),
|
||||
@ -42,7 +42,7 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
schema: {
|
||||
body: z.object({
|
||||
email: z.string().email().trim(),
|
||||
email: z.string().trim(),
|
||||
providerAuthToken: z.string().trim().optional(),
|
||||
clientProof: z.string().trim()
|
||||
}),
|
||||
|
@ -88,7 +88,7 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
schema: {
|
||||
body: z.object({
|
||||
email: z.string().email().trim(),
|
||||
email: z.string().trim(),
|
||||
firstName: z.string().trim(),
|
||||
lastName: z.string().trim().optional(),
|
||||
protectedKey: z.string().trim(),
|
||||
@ -131,13 +131,16 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => {
|
||||
authorization: req.headers.authorization as string
|
||||
});
|
||||
|
||||
void server.services.telemetry.sendLoopsEvent(user.email, user.firstName || "", user.lastName || "");
|
||||
if (user.email) {
|
||||
void server.services.telemetry.sendLoopsEvent(user.email, user.firstName || "", user.lastName || "");
|
||||
}
|
||||
|
||||
void server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.UserSignedUp,
|
||||
distinctId: user.email,
|
||||
distinctId: user.username ?? "",
|
||||
properties: {
|
||||
email: user.email,
|
||||
username: user.username,
|
||||
email: user.email ?? "",
|
||||
attributionSource: req.body.attributionSource
|
||||
}
|
||||
});
|
||||
@ -194,13 +197,16 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => {
|
||||
authorization: req.headers.authorization as string
|
||||
});
|
||||
|
||||
void server.services.telemetry.sendLoopsEvent(user.email, user.firstName || "", user.lastName || "");
|
||||
if (user.email) {
|
||||
void server.services.telemetry.sendLoopsEvent(user.email, user.firstName || "", user.lastName || "");
|
||||
}
|
||||
|
||||
void server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.UserSignedUp,
|
||||
distinctId: user.email,
|
||||
distinctId: user.username ?? "",
|
||||
properties: {
|
||||
email: user.email,
|
||||
username: user.username,
|
||||
email: user.email ?? "",
|
||||
attributionSource: "Team Invite"
|
||||
}
|
||||
});
|
||||
|
@@ -5,13 +5,14 @@ import { BadRequestError, UnauthorizedError } from "@app/lib/errors";

import { AuthModeProviderJwtTokenPayload, AuthModeProviderSignUpTokenPayload, AuthTokenType } from "./auth-type";

export const validateProviderAuthToken = (providerToken: string, email: string) => {
export const validateProviderAuthToken = (providerToken: string, username?: string) => {
  if (!providerToken) throw new UnauthorizedError();
  const appCfg = getConfig();
  const decodedToken = jwt.verify(providerToken, appCfg.AUTH_SECRET) as AuthModeProviderJwtTokenPayload;

  if (decodedToken.authTokenType !== AuthTokenType.PROVIDER_TOKEN) throw new UnauthorizedError();
  if (decodedToken.email !== email) throw new Error("Invalid auth credentials");

  if (decodedToken.username !== username) throw new Error("Invalid auth credentials");

  if (decodedToken.organizationId) {
    return { orgId: decodedToken.organizationId };
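validateProviderAuthToken now matches on the username claim instead of email, which is what lets LDAP and SAML users without a usable email address complete login. A round-trip sketch, assuming the function is exported from the auth-fns module (that import path is an assumption) and that AUTH_SECRET and JWT_PROVIDER_AUTH_LIFETIME come from getConfig as in the LDAP service above:

import jwt from "jsonwebtoken";

import { getConfig } from "@app/lib/config/env";
import { validateProviderAuthToken } from "@app/services/auth/auth-fns";
import { AuthTokenType } from "@app/services/auth/auth-type";

// Sketch: sign a provider token the way ldapLogin does, then validate it for the same username.
const exampleRoundTrip = () => {
  const appCfg = getConfig();
  const providerAuthToken = jwt.sign(
    { authTokenType: AuthTokenType.PROVIDER_TOKEN, userId: "user-id", username: "jdoe", organizationId: "org-id" },
    appCfg.AUTH_SECRET,
    { expiresIn: appCfg.JWT_PROVIDER_AUTH_LIFETIME }
  );

  // Throws unless the token is a provider token whose username claim matches "jdoe".
  return validateProviderAuthToken(providerAuthToken, "jdoe");
};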
@ -39,17 +39,19 @@ export const authLoginServiceFactory = ({ userDAL, tokenService, smtpService }:
if (!isDeviceSeen) {
const newDeviceList = devices.concat([{ ip, userAgent }]);
await userDAL.updateById(user.id, { devices: JSON.stringify(newDeviceList) });
await smtpService.sendMail({
template: SmtpTemplates.NewDeviceJoin,
subjectLine: "Successful login from new device",
recipients: [user.email],
substitutions: {
email: user.email,
timestamp: new Date().toString(),
ip,
userAgent
}
});
if (user.email) {
await smtpService.sendMail({
template: SmtpTemplates.NewDeviceJoin,
subjectLine: "Successful login from new device",
recipients: [user.email],
substitutions: {
email: user.email,
timestamp: new Date().toString(),
ip,
userAgent
}
});
}
}
};

@ -131,7 +133,9 @@ export const authLoginServiceFactory = ({ userDAL, tokenService, smtpService }:
providerAuthToken,
clientPublicKey
}: TLoginGenServerPublicKeyDTO) => {
const userEnc = await userDAL.findUserEncKeyByEmail(email);
const userEnc = await userDAL.findUserEncKeyByUsername({
username: email
});
if (!userEnc || (userEnc && !userEnc.isAccepted)) {
throw new Error("Failed to find user");
}

@ -158,7 +162,9 @@ export const authLoginServiceFactory = ({ userDAL, tokenService, smtpService }:
ip,
userAgent
}: TLoginClientProofDTO) => {
const userEnc = await userDAL.findUserEncKeyByEmail(email);
const userEnc = await userDAL.findUserEncKeyByUsername({
username: email
});
if (!userEnc) throw new Error("Failed to find user");
const cfg = getConfig();

@ -187,7 +193,7 @@ export const authLoginServiceFactory = ({ userDAL, tokenService, smtpService }:
clientPublicKey: null
});
// send multi factor auth token if they it enabled
if (userEnc.isMfaEnabled) {
if (userEnc.isMfaEnabled && userEnc.email) {
const mfaToken = jwt.sign(
{
authTokenType: AuthTokenType.MFA_TOKEN,

@ -227,7 +233,7 @@ export const authLoginServiceFactory = ({ userDAL, tokenService, smtpService }:
*/
const resendMfaToken = async (userId: string) => {
const user = await userDAL.findById(userId);
if (!user) return;
if (!user || !user.email) return;
await sendUserMfaCode({
userId: user.id,
email: user.email

@ -263,7 +269,7 @@ export const authLoginServiceFactory = ({ userDAL, tokenService, smtpService }:
* OAuth2 login for google,github, and other oauth2 provider
* */
const oauth2Login = async ({ email, firstName, lastName, authMethod, callbackPort }: TOauthLoginDTO) => {
let user = await userDAL.findUserByEmail(email);
let user = await userDAL.findUserByUsername(email);
const serverCfg = await getServerCfg();

const appCfg = getConfig();

@ -282,7 +288,14 @@ export const authLoginServiceFactory = ({ userDAL, tokenService, smtpService }:
});
}

user = await userDAL.create({ email, firstName, lastName, authMethods: [authMethod], isGhost: false });
user = await userDAL.create({
username: email,
email,
firstName,
lastName,
authMethods: [authMethod],
isGhost: false
});
}
const isLinkingRequired = !user?.authMethods?.includes(authMethod);
const isUserCompleted = user.isAccepted;

@ -290,7 +303,7 @@ export const authLoginServiceFactory = ({ userDAL, tokenService, smtpService }:
{
authTokenType: AuthTokenType.PROVIDER_TOKEN,
userId: user.id,
email: user.email,
username: user.username,
firstName: user.firstName,
lastName: user.lastName,
authMethod,
@ -99,7 +99,7 @@ export const authPaswordServiceFactory = ({
* Email password reset flow via email. Step 1 send email
*/
const sendPasswordResetEmail = async (email: string) => {
const user = await userDAL.findUserByEmail(email);
const user = await userDAL.findUserByUsername(email);
// ignore as user is not found to avoid an outside entity to identify infisical registered accounts
if (!user || (user && !user.isAccepted)) return;

@ -126,7 +126,7 @@ export const authPaswordServiceFactory = ({
* */
const verifyPasswordResetEmail = async (email: string, code: string) => {
const cfg = getConfig();
const user = await userDAL.findUserByEmail(email);
const user = await userDAL.findUserByUsername(email);
// ignore as user is not found to avoid an outside entity to identify infisical registered accounts
if (!user || (user && !user.isAccepted)) {
throw new Error("Failed email verification for pass reset");
@ -44,13 +44,13 @@ export const authSignupServiceFactory = ({
throw new Error("Provided a disposable email");
}

let user = await userDAL.findUserByEmail(email);
let user = await userDAL.findUserByUsername(email);
if (user && user.isAccepted) {
// TODO(akhilmhdh-pg): copy as old one. this needs to be changed due to security issues
throw new Error("Failed to send verification code for complete account");
}
if (!user) {
user = await userDAL.create({ authMethods: [AuthMethod.EMAIL], email, isGhost: false });
user = await userDAL.create({ authMethods: [AuthMethod.EMAIL], username: email, email, isGhost: false });
}
if (!user) throw new Error("Failed to create user");

@ -70,7 +70,7 @@ export const authSignupServiceFactory = ({
};

const verifyEmailSignup = async (email: string, code: string) => {
const user = await userDAL.findUserByEmail(email);
const user = await userDAL.findUserByUsername(email);
if (!user || (user && user.isAccepted)) {
// TODO(akhilmhdh): copy as old one. this needs to be changed due to security issues
throw new Error("Failed to send verification code for complete account");

@ -115,14 +115,14 @@ export const authSignupServiceFactory = ({
userAgent,
authorization
}: TCompleteAccountSignupDTO) => {
const user = await userDAL.findUserByEmail(email);
const user = await userDAL.findOne({ username: email });
if (!user || (user && user.isAccepted)) {
throw new Error("Failed to complete account for complete user");
}

let organizationId;
if (providerAuthToken) {
const { orgId } = validateProviderAuthToken(providerAuthToken, user.email);
const { orgId } = validateProviderAuthToken(providerAuthToken, user.username);
organizationId = orgId;
} else {
validateSignUpAuthorization(authorization, user.id);

@ -150,7 +150,11 @@ export const authSignupServiceFactory = ({
});

if (!organizationId) {
await orgService.createOrganization(user.id, user.email, organizationName);
await orgService.createOrganization({
userId: user.id,
userEmail: user.email ?? user.username,
orgName: organizationName
});
}

const updatedMembersips = await orgDAL.updateMembership(

@ -215,7 +219,7 @@ export const authSignupServiceFactory = ({
encryptedPrivateKeyTag,
authorization
}: TCompleteAccountInviteDTO) => {
const user = await userDAL.findUserByEmail(email);
const user = await userDAL.findUserByUsername(email);
if (!user || (user && user.isAccepted)) {
throw new Error("Failed to complete account for complete user");
}
@ -5,7 +5,8 @@ export enum AuthMethod {
GITLAB = "gitlab",
OKTA_SAML = "okta-saml",
AZURE_SAML = "azure-saml",
JUMPCLOUD_SAML = "jumpcloud-saml"
JUMPCLOUD_SAML = "jumpcloud-saml",
LDAP = "ldap"
}

export enum AuthTokenType {

@ -61,7 +62,7 @@ export type AuthModeRefreshJwtTokenPayload = {

export type AuthModeProviderJwtTokenPayload = {
authTokenType: AuthTokenType.PROVIDER_TOKEN;
email: string;
username: string;
organizationId?: string;
};
@ -54,6 +54,8 @@ export const identityUaServiceFactory = ({
const identityUa = await identityUaDAL.findOne({ clientId });
if (!identityUa) throw new UnauthorizedError();

const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId: identityUa.identityId });

checkIPAgainstBlocklist({
ipAddress: ip,
trustedIps: identityUa.clientSecretTrustedIps as TIp[]

@ -131,7 +133,7 @@ export const identityUaServiceFactory = ({
}
);

return { accessToken, identityUa, validClientSecretInfo, identityAccessToken };
return { accessToken, identityUa, validClientSecretInfo, identityAccessToken, identityMembershipOrg };
};

const attachUa = async ({
@ -109,7 +109,7 @@ const getAppsGCPSecretManager = async ({ accessToken }: { accessToken: string })
*/
const getAppsHeroku = async ({ accessToken }: { accessToken: string }) => {
const res = (
await request.get<{ name: string }[]>(`${IntegrationUrls.HEROKU_API_URL}/apps`, {
await request.get<{ name: string; id: string }[]>(`${IntegrationUrls.HEROKU_API_URL}/apps`, {
headers: {
Accept: "application/vnd.heroku+json; version=3",
Authorization: `Bearer ${accessToken}`

@ -118,7 +118,8 @@ const getAppsHeroku = async ({ accessToken }: { accessToken: string }) => {
).data;

const apps = res.map((a) => ({
name: a.name
name: a.name,
appId: a.id
}));

return apps;
@ -20,9 +20,11 @@ import {
TDeleteIntegrationAuthsDTO,
TGetIntegrationAuthDTO,
TGetIntegrationAuthTeamCityBuildConfigDTO,
THerokuPipelineCoupling,
TIntegrationAuthAppsDTO,
TIntegrationAuthBitbucketWorkspaceDTO,
TIntegrationAuthChecklyGroupsDTO,
TIntegrationAuthHerokuPipelinesDTO,
TIntegrationAuthNorthflankSecretGroupDTO,
TIntegrationAuthQoveryEnvironmentsDTO,
TIntegrationAuthQoveryOrgsDTO,

@ -576,6 +578,38 @@ export const integrationAuthServiceFactory = ({
return [];
};

const getHerokuPipelines = async ({ id, actor, actorId, actorOrgId }: TIntegrationAuthHerokuPipelinesDTO) => {
const integrationAuth = await integrationAuthDAL.findById(id);
if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" });

const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
integrationAuth.projectId,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);
const botKey = await projectBotService.getBotKey(integrationAuth.projectId);
const { accessToken } = await getIntegrationAccessToken(integrationAuth, botKey);

const { data } = await request.get<THerokuPipelineCoupling[]>(
`${IntegrationUrls.HEROKU_API_URL}/pipeline-couplings`,
{
headers: {
Accept: "application/vnd.heroku+json; version=3",
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
}
}
);

return data.map(({ app: { id: appId }, stage, pipeline: { id: pipelineId, name } }) => ({
app: { appId },
stage,
pipeline: { pipelineId, name }
}));
};

const getRailwayEnvironments = async ({ id, actor, actorId, actorOrgId, appId }: TIntegrationAuthRailwayEnvDTO) => {
const integrationAuth = await integrationAuthDAL.findById(id);
if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" });

@ -649,33 +683,21 @@ export const integrationAuthServiceFactory = ({
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);
const botKey = await projectBotService.getBotKey(integrationAuth.projectId);
const { accessToken } = await getIntegrationAccessToken(integrationAuth, botKey);
if (appId) {

if (appId && appId !== "") {
const query = `
query project($id: String!) {
project(id: $id) {
createdAt
deletedAt
id
description
expiredAt
isPublic
isTempProject
isUpdatable
name
prDeploys
teamId
updatedAt
upstreamUrl
services {
edges {
node {
id
name
}
}
}
query project($id: String!) {
project(id: $id) {
services {
edges {
node {
id
name
}
}
}
}
}
`;

const variables = {

@ -711,6 +733,7 @@ export const integrationAuthServiceFactory = ({
);
return edges.map(({ node: { name, id: serviceId } }) => ({ name, serviceId }));
}

return [];
};

@ -915,6 +938,7 @@ export const integrationAuthServiceFactory = ({
getQoveryApps,
getQoveryEnvs,
getQoveryJobs,
getHerokuPipelines,
getQoveryOrgs,
getQoveryProjects,
getQoveryContainers,
@ -62,6 +62,10 @@ export type TIntegrationAuthQoveryScopesDTO = {
environmentId: string;
} & Omit<TProjectPermission, "projectId">;

export type TIntegrationAuthHerokuPipelinesDTO = {
id: string;
} & Omit<TProjectPermission, "projectId">;

export type TIntegrationAuthRailwayEnvDTO = {
id: string;
appId: string;

@ -129,6 +133,12 @@ export type TNorthflankSecretGroup = {
projectId: string;
};

export type THerokuPipelineCoupling = {
app: { id: string };
stage: string;
pipeline: { id: string; name: string };
};

export type TTeamCityBuildConfig = {
id: string;
name: string;
@ -37,6 +37,12 @@ export enum IntegrationType {
OAUTH2 = "oauth2"
}

export enum IntegrationInitialSyncBehavior {
OVERWRITE_TARGET = "overwrite-target",
PREFER_TARGET = "prefer-target",
PREFER_SOURCE = "prefer-source"
}

export enum IntegrationUrls {
// integration oauth endpoints
GCP_TOKEN_URL = "https://oauth2.googleapis.com/token",
@ -20,11 +20,13 @@ import sodium from "libsodium-wrappers";
import isEqual from "lodash.isequal";
import { z } from "zod";

import { TIntegrationAuths, TIntegrations } from "@app/db/schemas";
import { SecretType, TIntegrationAuths, TIntegrations, TSecrets } from "@app/db/schemas";
import { request } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { TCreateManySecretsRawFn, TUpdateManySecretsRawFn } from "@app/services/secret/secret-types";

import { Integrations, IntegrationUrls } from "./integration-list";
import { TIntegrationDALFactory } from "../integration/integration-dal";
import { IntegrationInitialSyncBehavior, Integrations, IntegrationUrls } from "./integration-list";

const getSecretKeyValuePair = (secrets: Record<string, { value: string | null; comment?: string } | null>) =>
Object.keys(secrets).reduce<Record<string, string | null | undefined>>((prev, key) => {

@ -441,16 +443,19 @@ const syncSecretsAWSParameterStore = async ({
}) => {
if (!accessId) return;

AWS.config.update({
const config = new AWS.Config({
region: integration.region as string,
accessKeyId: accessId,
secretAccessKey: accessToken
credentials: {
accessKeyId: accessId,
secretAccessKey: accessToken
}
});

const ssm = new AWS.SSM({
apiVersion: "2014-11-06",
region: integration.region as string
});
ssm.config.update(config);

const params = {
Path: integration.path as string,

@ -514,12 +519,6 @@ const syncSecretsAWSParameterStore = async ({
}
})
);

AWS.config.update({
region: undefined,
accessKeyId: undefined,
secretAccessKey: undefined
});
};

/**

@ -541,12 +540,6 @@ const syncSecretsAWSSecretManager = async ({
try {
if (!accessId) return;

AWS.config.update({
region: integration.region as string,
accessKeyId: accessId,
secretAccessKey: accessToken
});

secretsManager = new SecretsManagerClient({
region: integration.region as string,
credentials: {

@ -575,12 +568,6 @@ const syncSecretsAWSSecretManager = async ({
})
);
}

AWS.config.update({
region: undefined,
accessKeyId: undefined,
secretAccessKey: undefined
});
} catch (err) {
if (err instanceof ResourceNotFoundException && secretsManager) {
await secretsManager.send(

@ -590,11 +577,6 @@ const syncSecretsAWSSecretManager = async ({
})
);
}
AWS.config.update({
region: undefined,
accessKeyId: undefined,
secretAccessKey: undefined
});
}
};

@ -602,11 +584,25 @@ const syncSecretsAWSSecretManager = async ({
* Sync/push [secrets] to Heroku app named [integration.app]
*/
const syncSecretsHeroku = async ({
createManySecretsRawFn,
updateManySecretsRawFn,
integrationDAL,
integration,
secrets,
accessToken
}: {
integration: TIntegrations;
createManySecretsRawFn: (params: TCreateManySecretsRawFn) => Promise<Array<TSecrets & { _id: string }>>;
updateManySecretsRawFn: (params: TUpdateManySecretsRawFn) => Promise<Array<TSecrets & { _id: string }>>;
integrationDAL: Pick<TIntegrationDALFactory, "updateById">;
integration: TIntegrations & {
projectId: string;
environment: {
id: string;
name: string;
slug: string;
};
secretPath: string;
};
secrets: Record<string, { value: string; comment?: string } | null>;
accessToken: string;
}) => {

@ -620,12 +616,74 @@ const syncSecretsHeroku = async ({
})
).data;

const secretsToAdd: { [key: string]: string } = {};
const secretsToUpdate: { [key: string]: string } = {};

const metadata = z.record(z.any()).parse(integration.metadata);

Object.keys(herokuSecrets).forEach((key) => {
if (!(key in secrets)) {
secrets[key] = null;
}
if (!integration.lastUsed) {
// first time using integration
// -> apply initial sync behavior
switch (metadata.initialSyncBehavior) {
case IntegrationInitialSyncBehavior.OVERWRITE_TARGET: {
if (!(key in secrets)) secrets[key] = null;
break;
}
case IntegrationInitialSyncBehavior.PREFER_TARGET: {
if (!(key in secrets)) {
secretsToAdd[key] = herokuSecrets[key];
} else if (secrets[key]?.value !== herokuSecrets[key]) {
secretsToUpdate[key] = herokuSecrets[key];
}
secrets[key] = {
value: herokuSecrets[key]
};
break;
}
case IntegrationInitialSyncBehavior.PREFER_SOURCE: {
if (!(key in secrets)) {
secrets[key] = herokuSecrets[key];
secretsToAdd[key] = herokuSecrets[key];
}
break;
}
default: {
if (!(key in secrets)) secrets[key] = null;
break;
}
}
} else if (!(key in secrets)) secrets[key] = null;
});

if (Object.keys(secretsToAdd).length) {
await createManySecretsRawFn({
projectId: integration.projectId,
environment: integration.environment.slug,
path: integration.secretPath,
secrets: Object.keys(secretsToAdd).map((key) => ({
secretName: key,
secretValue: secretsToAdd[key],
type: SecretType.Shared,
secretComment: ""
}))
});
}

if (Object.keys(secretsToUpdate).length) {
await updateManySecretsRawFn({
projectId: integration.projectId,
environment: integration.environment.slug,
path: integration.secretPath,
secrets: Object.keys(secretsToUpdate).map((key) => ({
secretName: key,
secretValue: secretsToUpdate[key],
type: SecretType.Shared,
secretComment: ""
}))
});
}

await request.patch(
`${IntegrationUrls.HEROKU_API_URL}/apps/${integration.app}/config-vars`,
getSecretKeyValuePair(secrets),

@ -637,6 +695,10 @@ const syncSecretsHeroku = async ({
}
}
);

await integrationDAL.updateById(integration.id, {
lastUsed: new Date()
});
};

/**

@ -1224,21 +1286,21 @@ const syncSecretsRailway = async ({
}
`;

const input = {
projectId: integration.appId,
environmentId: integration.targetEnvironmentId,
...(integration.targetServiceId ? { serviceId: integration.targetServiceId } : {}),
replace: true,
variables: getSecretKeyValuePair(secrets)
const variables = {
input: {
projectId: integration.appId,
environmentId: integration.targetEnvironmentId,
...(integration.targetServiceId ? { serviceId: integration.targetServiceId } : {}),
replace: true,
variables: getSecretKeyValuePair(secrets)
}
};

await request.post(
IntegrationUrls.RAILWAY_API_URL,
{
query,
variables: {
input
}
variables
},
{
headers: {

@ -2950,8 +3012,14 @@ const syncSecretsHasuraCloud = async ({

/**
* Sync/push [secrets] to [app] in integration named [integration]
*
* Do this in terms of DAL
*
*/
export const syncIntegrationSecrets = async ({
createManySecretsRawFn,
updateManySecretsRawFn,
integrationDAL,
integration,
integrationAuth,
secrets,

@ -2959,7 +3027,18 @@ export const syncIntegrationSecrets = async ({
accessToken,
appendices
}: {
integration: TIntegrations;
createManySecretsRawFn: (params: TCreateManySecretsRawFn) => Promise<Array<TSecrets & { _id: string }>>;
updateManySecretsRawFn: (params: TUpdateManySecretsRawFn) => Promise<Array<TSecrets & { _id: string }>>;
integrationDAL: Pick<TIntegrationDALFactory, "updateById">;
integration: TIntegrations & {
projectId: string;
environment: {
id: string;
name: string;
slug: string;
};
secretPath: string;
};
integrationAuth: TIntegrationAuths;
secrets: Record<string, { value: string; comment?: string }>;
accessId: string | null;

@ -2999,6 +3078,9 @@ export const syncIntegrationSecrets = async ({
break;
case Integrations.HEROKU:
await syncSecretsHeroku({
createManySecretsRawFn,
updateManySecretsRawFn,
integrationDAL,
integration,
secrets,
accessToken
@ -57,7 +57,7 @@ export const orgDALFactory = (db: TDbClient) => {
const findAllOrgMembers = async (orgId: string) => {
try {
const members = await db(TableName.OrgMembership)
.where({ orgId })
.where(`${TableName.OrgMembership}.orgId`, orgId)
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
.leftJoin<TUserEncryptionKeys>(
TableName.UserEncryptionKey,

@ -72,25 +72,27 @@ export const orgDALFactory = (db: TDbClient) => {
db.ref("roleId").withSchema(TableName.OrgMembership),
db.ref("status").withSchema(TableName.OrgMembership),
db.ref("email").withSchema(TableName.Users),
db.ref("username").withSchema(TableName.Users),
db.ref("firstName").withSchema(TableName.Users),
db.ref("lastName").withSchema(TableName.Users),
db.ref("id").withSchema(TableName.Users).as("userId"),
db.ref("publicKey").withSchema(TableName.UserEncryptionKey)
)
.where({ isGhost: false }); // MAKE SURE USER IS NOT A GHOST USER
return members.map(({ email, firstName, lastName, userId, publicKey, ...data }) => ({

return members.map(({ email, username, firstName, lastName, userId, publicKey, ...data }) => ({
...data,
user: { email, firstName, lastName, id: userId, publicKey }
user: { email, username, firstName, lastName, id: userId, publicKey }
}));
} catch (error) {
throw new DatabaseError({ error, name: "Find all org members" });
}
};

const findOrgMembersByEmail = async (orgId: string, emails: string[]) => {
const findOrgMembersByUsername = async (orgId: string, usernames: string[]) => {
try {
const members = await db(TableName.OrgMembership)
.where({ orgId })
.where(`${TableName.OrgMembership}.orgId`, orgId)
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
.leftJoin<TUserEncryptionKeys>(
TableName.UserEncryptionKey,

@ -104,6 +106,7 @@ export const orgDALFactory = (db: TDbClient) => {
db.ref("role").withSchema(TableName.OrgMembership),
db.ref("roleId").withSchema(TableName.OrgMembership),
db.ref("status").withSchema(TableName.OrgMembership),
db.ref("username").withSchema(TableName.Users),
db.ref("email").withSchema(TableName.Users),
db.ref("firstName").withSchema(TableName.Users),
db.ref("lastName").withSchema(TableName.Users),

@ -111,7 +114,7 @@ export const orgDALFactory = (db: TDbClient) => {
db.ref("publicKey").withSchema(TableName.UserEncryptionKey)
)
.where({ isGhost: false })
.whereIn("email", emails);
.whereIn("username", usernames);
return members.map(({ email, firstName, lastName, userId, publicKey, ...data }) => ({
...data,
user: { email, firstName, lastName, id: userId, publicKey }

@ -243,10 +246,13 @@ export const orgDALFactory = (db: TDbClient) => {
.select(
selectAllTableCols(TableName.OrgMembership),
db.ref("email").withSchema(TableName.Users),
db.ref("username").withSchema(TableName.Users),
db.ref("firstName").withSchema(TableName.Users),
db.ref("lastName").withSchema(TableName.Users),
db.ref("scimEnabled").withSchema(TableName.Organization)
);
)
.where({ isGhost: false });

if (limit) void query.limit(limit);
if (offset) void query.offset(offset);
if (sort) {

@ -266,7 +272,7 @@ export const orgDALFactory = (db: TDbClient) => {
findOrgById,
findAllOrgsByUserId,
ghostUserExists,
findOrgMembersByEmail,
findOrgMembersByUsername,
findOrgGhostUser,
create,
updateById,
@ -22,6 +22,8 @@ import { ActorType, AuthMethod, AuthTokenType } from "../auth/auth-type";
import { TAuthTokenServiceFactory } from "../auth-token/auth-token-service";
import { TokenType } from "../auth-token/auth-token-types";
import { TProjectDALFactory } from "../project/project-dal";
import { TProjectKeyDALFactory } from "../project-key/project-key-dal";
import { TProjectMembershipDALFactory } from "../project-membership/project-membership-dal";
import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
import { TUserDALFactory } from "../user/user-dal";
import { TIncidentContactsDALFactory } from "./incident-contacts-dal";

@ -44,6 +46,8 @@ type TOrgServiceFactoryDep = {
orgRoleDAL: TOrgRoleDALFactory;
userDAL: TUserDALFactory;
projectDAL: TProjectDALFactory;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findProjectMembershipsByUserId" | "delete">;
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "delete">;
incidentContactDAL: TIncidentContactsDALFactory;
samlConfigDAL: Pick<TSamlConfigDALFactory, "findOne" | "findEnforceableSamlCfg">;
smtpService: TSmtpService;

@ -65,6 +69,8 @@ export const orgServiceFactory = ({
permissionService,
smtpService,
projectDAL,
projectMembershipDAL,
projectKeyDAL,
tokenService,
orgBotDAL,
licenseService,

@ -97,11 +103,11 @@ export const orgServiceFactory = ({
return members;
};

const findOrgMembersByEmail = async ({ actor, actorId, orgId, emails }: TFindOrgMembersByEmailDTO) => {
const findOrgMembersByUsername = async ({ actor, actorId, orgId, emails }: TFindOrgMembersByEmailDTO) => {
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Member);

const members = await orgDAL.findOrgMembersByEmail(orgId, emails);
const members = await orgDAL.findOrgMembersByUsername(orgId, emails);

return members;
};

@ -139,6 +145,7 @@ export const orgServiceFactory = ({
{
isGhost: true,
authMethods: [AuthMethod.EMAIL],
username: email,
email,
isAccepted: true
},

@ -233,7 +240,15 @@ export const orgServiceFactory = ({
/*
* Create organization
* */
const createOrganization = async (userId: string, userEmail: string, orgName: string) => {
const createOrganization = async ({
userId,
userEmail,
orgName
}: {
userId: string;
orgName: string;
userEmail?: string | null;
}) => {
const { privateKey, publicKey } = generateAsymmetricKeyPair();
const key = generateSymmetricKey();
const {

@ -361,7 +376,7 @@ export const orgServiceFactory = ({
});
}
const invitee = await orgDAL.transaction(async (tx) => {
const inviteeUser = await userDAL.findUserByEmail(inviteeEmail, tx);
const inviteeUser = await userDAL.findUserByUsername(inviteeEmail, tx);
if (inviteeUser) {
// if user already exist means its already part of infisical
// Thus the signup flow is not needed anymore

@ -397,6 +412,7 @@ export const orgServiceFactory = ({
// not invited before
const user = await userDAL.create(
{
username: inviteeEmail,
email: inviteeEmail,
isAccepted: false,
authMethods: [AuthMethod.EMAIL],

@ -431,7 +447,7 @@ export const orgServiceFactory = ({
recipients: [inviteeEmail],
substitutions: {
inviterFirstName: user.firstName,
inviterEmail: user.email,
inviterUsername: user.username,
organizationName: org?.name,
email: inviteeEmail,
organizationId: org?.id.toString(),

@ -451,7 +467,7 @@ export const orgServiceFactory = ({
* magic link and issue a temporary signup token for user to complete setting up their account
*/
const verifyUserToOrg = async ({ orgId, email, code }: TVerifyUserToOrgDTO) => {
const user = await userDAL.findUserByEmail(email);
const user = await userDAL.findUserByUsername(email);
if (!user) {
throw new BadRequestError({ message: "Invalid request", name: "Verify user to org" });
}

@ -503,10 +519,50 @@ export const orgServiceFactory = ({
const { permission } = await permissionService.getUserOrgPermission(userId, orgId, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Delete, OrgPermissionSubjects.Member);

const membership = await orgDAL.deleteMembershipById(membershipId, orgId);
const deletedMembership = await orgDAL.transaction(async (tx) => {
const orgMembership = await orgDAL.deleteMembershipById(membershipId, orgId, tx);

await licenseService.updateSubscriptionOrgMemberCount(orgId);
return membership;
if (!orgMembership.userId) {
await licenseService.updateSubscriptionOrgMemberCount(orgId);
return orgMembership;
}

// Get all the project memberships of the user in the organization
const projectMemberships = await projectMembershipDAL.findProjectMembershipsByUserId(orgId, orgMembership.userId);

// Delete all the project memberships of the user in the organization
await projectMembershipDAL.delete(
{
$in: {
id: projectMemberships.map((membership) => membership.id)
}
},
tx
);

// Get all the project keys of the user in the organization
const projectKeys = await projectKeyDAL.find({
$in: {
projectId: projectMemberships.map((membership) => membership.projectId)
},
receiverId: orgMembership.userId
});

// Delete all the project keys of the user in the organization
await projectKeyDAL.delete(
{
$in: {
id: projectKeys.map((key) => key.id)
}
},
tx
);

await licenseService.updateSubscriptionOrgMemberCount(orgId);
return orgMembership;
});

return deletedMembership;
};

/*

@ -549,7 +605,7 @@ export const orgServiceFactory = ({
inviteUserToOrganization,
verifyUserToOrg,
updateOrg,
findOrgMembersByEmail,
findOrgMembersByUsername,
createOrganization,
deleteOrganizationById,
deleteOrgMembership,
36 backend/src/services/project-bot/project-bot-fns.ts Normal file
@ -0,0 +1,36 @@
import { SecretKeyEncoding } from "@app/db/schemas";
import { decryptAsymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { BadRequestError } from "@app/lib/errors";
import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";

import { TGetPrivateKeyDTO } from "./project-bot-types";

export const getBotPrivateKey = ({ bot }: TGetPrivateKeyDTO) =>
infisicalSymmetricDecrypt({
keyEncoding: bot.keyEncoding as SecretKeyEncoding,
iv: bot.iv,
tag: bot.tag,
ciphertext: bot.encryptedPrivateKey
});

export const getBotKeyFnFactory = (projectBotDAL: TProjectBotDALFactory) => {
const getBotKeyFn = async (projectId: string) => {
const bot = await projectBotDAL.findOne({ projectId });

if (!bot) throw new BadRequestError({ message: "failed to find bot key" });
if (!bot.isActive) throw new BadRequestError({ message: "Bot is not active" });
if (!bot.encryptedProjectKeyNonce || !bot.encryptedProjectKey)
throw new BadRequestError({ message: "Encryption key missing" });

const botPrivateKey = getBotPrivateKey({ bot });

return decryptAsymmetric({
ciphertext: bot.encryptedProjectKey,
privateKey: botPrivateKey,
nonce: bot.encryptedProjectKeyNonce,
publicKey: bot.sender.publicKey
});
};

return getBotKeyFn;
};
@ -1,15 +1,16 @@
import { ForbiddenError } from "@casl/ability";

import { ProjectVersion, SecretKeyEncoding } from "@app/db/schemas";
import { ProjectVersion } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { decryptAsymmetric, generateAsymmetricKeyPair } from "@app/lib/crypto";
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { generateAsymmetricKeyPair } from "@app/lib/crypto";
import { infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { BadRequestError } from "@app/lib/errors";

import { TProjectDALFactory } from "../project/project-dal";
import { TProjectBotDALFactory } from "./project-bot-dal";
import { TFindBotByProjectIdDTO, TGetPrivateKeyDTO, TSetActiveStateDTO } from "./project-bot-types";
import { getBotKeyFnFactory, getBotPrivateKey } from "./project-bot-fns";
import { TFindBotByProjectIdDTO, TSetActiveStateDTO } from "./project-bot-types";

type TProjectBotServiceFactoryDep = {
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;

@ -24,29 +25,10 @@ export const projectBotServiceFactory = ({
projectDAL,
permissionService
}: TProjectBotServiceFactoryDep) => {
const getBotPrivateKey = ({ bot }: TGetPrivateKeyDTO) =>
infisicalSymmetricDecrypt({
keyEncoding: bot.keyEncoding as SecretKeyEncoding,
iv: bot.iv,
tag: bot.tag,
ciphertext: bot.encryptedPrivateKey
});
const getBotKeyFn = getBotKeyFnFactory(projectBotDAL);

const getBotKey = async (projectId: string) => {
const bot = await projectBotDAL.findOne({ projectId });
if (!bot) throw new BadRequestError({ message: "failed to find bot key" });
if (!bot.isActive) throw new BadRequestError({ message: "Bot is not active" });
if (!bot.encryptedProjectKeyNonce || !bot.encryptedProjectKey)
throw new BadRequestError({ message: "Encryption key missing" });

const botPrivateKey = getBotPrivateKey({ bot });

return decryptAsymmetric({
ciphertext: bot.encryptedProjectKey,
privateKey: botPrivateKey,
nonce: bot.encryptedProjectKeyNonce,
publicKey: bot.sender.publicKey
});
return getBotKeyFn(projectId);
};

const findBotByProjectId = async ({
@ -25,6 +25,7 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
db.ref("role").withSchema(TableName.ProjectMembership),
db.ref("roleId").withSchema(TableName.ProjectMembership),
db.ref("isGhost").withSchema(TableName.Users),
db.ref("username").withSchema(TableName.Users),
db.ref("email").withSchema(TableName.Users),
db.ref("publicKey").withSchema(TableName.UserEncryptionKey),
db.ref("firstName").withSchema(TableName.Users),

@ -32,9 +33,9 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
db.ref("id").withSchema(TableName.Users).as("userId")
)
.where({ isGhost: false });
return members.map(({ email, firstName, lastName, publicKey, isGhost, ...data }) => ({
return members.map(({ username, email, firstName, lastName, publicKey, isGhost, ...data }) => ({
...data,
user: { email, firstName, lastName, id: data.userId, publicKey, isGhost }
user: { username, email, firstName, lastName, id: data.userId, publicKey, isGhost }
}));
} catch (error) {
throw new DatabaseError({ error, name: "Find all project members" });

@ -56,7 +57,7 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
}
};

const findMembershipsByEmail = async (projectId: string, emails: string[]) => {
const findMembershipsByUsername = async (projectId: string, usernames: string[]) => {
try {
const members = await db(TableName.ProjectMembership)
.where({ projectId })

@ -69,18 +70,38 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
.select(
selectAllTableCols(TableName.ProjectMembership),
db.ref("id").withSchema(TableName.Users).as("userId"),
db.ref("email").withSchema(TableName.Users)
db.ref("username").withSchema(TableName.Users)
)
.whereIn("email", emails)
.whereIn("username", usernames)
.where({ isGhost: false });
return members.map(({ userId, email, ...data }) => ({
return members.map(({ userId, username, ...data }) => ({
...data,
user: { id: userId, email }
user: { id: userId, username }
}));
} catch (error) {
throw new DatabaseError({ error, name: "Find members by email" });
}
};

return { ...projectMemberOrm, findAllProjectMembers, findProjectGhostUser, findMembershipsByEmail };
const findProjectMembershipsByUserId = async (orgId: string, userId: string) => {
try {
const memberships = await db(TableName.ProjectMembership)
.where({ userId })
.join(TableName.Project, `${TableName.ProjectMembership}.projectId`, `${TableName.Project}.id`)
.where({ [`${TableName.Project}.orgId` as "orgId"]: orgId })
.select(selectAllTableCols(TableName.ProjectMembership));

return memberships;
} catch (error) {
throw new DatabaseError({ error, name: "Find project memberships by user id" });
}
};

return {
...projectMemberOrm,
findAllProjectMembers,
findProjectGhostUser,
findMembershipsByUsername,
findProjectMembershipsByUserId
};
};
@ -45,7 +45,7 @@ type TProjectMembershipServiceFactoryDep = {
projectMembershipDAL: TProjectMembershipDALFactory;
userDAL: Pick<TUserDALFactory, "findById" | "findOne" | "findUserByProjectMembershipId" | "find">;
projectRoleDAL: Pick<TProjectRoleDALFactory, "findOne">;
orgDAL: Pick<TOrgDALFactory, "findMembership" | "findOrgMembersByEmail">;
orgDAL: Pick<TOrgDALFactory, "findMembership" | "findOrgMembersByUsername">;
projectDAL: Pick<TProjectDALFactory, "findById" | "findProjectGhostUser" | "transaction">;
projectKeyDAL: Pick<TProjectKeyDALFactory, "findLatestProjectKey" | "delete" | "insertMany">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;

@ -134,8 +134,8 @@ export const projectMembershipServiceFactory = ({
const appCfg = getConfig();
await smtpService.sendMail({
template: SmtpTemplates.WorkspaceInvite,
subjectLine: "Infisical workspace invitation",
recipients: invitees.map((i) => i.email),
subjectLine: "Infisical project invitation",
recipients: invitees.filter((i) => i.email).map((i) => i.email as string),
substitutions: {
workspaceName: project.name,
callback_url: `${appCfg.SITE_URL}/login`

@ -206,8 +206,8 @@ export const projectMembershipServiceFactory = ({
const appCfg = getConfig();
await smtpService.sendMail({
template: SmtpTemplates.WorkspaceInvite,
subjectLine: "Infisical workspace invitation",
recipients: orgMembers.map(({ email }) => email).filter(Boolean),
subjectLine: "Infisical project invitation",
recipients: orgMembers.filter((i) => i.email).map((i) => i.email as string),
substitutions: {
workspaceName: project.name,
callback_url: `${appCfg.SITE_URL}/login`

@ -222,6 +222,7 @@ export const projectMembershipServiceFactory = ({
actorId,
actor,
emails,
usernames,
sendEmails = true
}: TAddUsersToWorkspaceNonE2EEDTO) => {
const project = await projectDAL.findById(projectId);

@ -234,9 +235,14 @@ export const projectMembershipServiceFactory = ({
const { permission } = await permissionService.getProjectPermission(actor, actorId, projectId);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.Member);

const orgMembers = await orgDAL.findOrgMembersByEmail(project.orgId, emails);
const usernamesAndEmails = [...emails, ...usernames];

if (orgMembers.length !== emails.length) throw new BadRequestError({ message: "Some users are not part of org" });
const orgMembers = await orgDAL.findOrgMembersByUsername(project.orgId, [
...new Set(usernamesAndEmails.map((element) => element.toLowerCase()))
]);

if (orgMembers.length !== usernamesAndEmails.length)
throw new BadRequestError({ message: "Some users are not part of org" });

if (!orgMembers.length) return [];

@ -315,16 +321,21 @@ export const projectMembershipServiceFactory = ({
});

if (sendEmails) {
const recipients = orgMembers.filter((i) => i.user.email).map((i) => i.user.email as string);

const appCfg = getConfig();
await smtpService.sendMail({
template: SmtpTemplates.WorkspaceInvite,
subjectLine: "Infisical workspace invitation",
recipients: orgMembers.map(({ user }) => user.email).filter(Boolean),
substitutions: {
workspaceName: project.name,
callback_url: `${appCfg.SITE_URL}/login`
}
});

if (recipients.length) {
await smtpService.sendMail({
template: SmtpTemplates.WorkspaceInvite,
subjectLine: "Infisical project invitation",
recipients: orgMembers.filter((i) => i.user.email).map((i) => i.user.email as string),
substitutions: {
workspaceName: project.name,
callback_url: `${appCfg.SITE_URL}/login`
}
});
}
}
return members;
};

@ -407,7 +418,8 @@ export const projectMembershipServiceFactory = ({
actor,
actorOrgId,
projectId,
emails
emails,
usernames
}: TDeleteProjectMembershipsDTO) => {
const { permission } = await permissionService.getProjectPermission(actor, actorId, projectId, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.Member);

@ -421,9 +433,13 @@ export const projectMembershipServiceFactory = ({
});
}

const projectMembers = await projectMembershipDAL.findMembershipsByEmail(projectId, emails);
const usernamesAndEmails = [...emails, ...usernames];

if (projectMembers.length !== emails.length) {
const projectMembers = await projectMembershipDAL.findMembershipsByUsername(projectId, [
...new Set(usernamesAndEmails.map((element) => element.toLowerCase()))
]);

if (projectMembers.length !== usernamesAndEmails.length) {
throw new BadRequestError({
message: "Some users are not part of project",
name: "Delete project membership"
@ -18,6 +18,7 @@ export type TDeleteProjectMembershipOldDTO = {

export type TDeleteProjectMembershipsDTO = {
emails: string[];
usernames: string[];
} & TProjectPermission;

export type TAddUsersToWorkspaceDTO = {

@ -33,4 +34,5 @@ export type TAddUsersToWorkspaceDTO = {
export type TAddUsersToWorkspaceNonE2EEDTO = {
sendEmails?: boolean;
emails: string[];
usernames: string[];
} & TProjectPermission;
@ -102,8 +102,11 @@ export const projectQueueFactory = ({

const oldProjectKey = await projectKeyDAL.findLatestProjectKey(data.startedByUserId, data.projectId);

if (!project || !oldProjectKey) {
throw new Error("Project or project key not found");
if (!project) {
throw new Error("Project not found");
}
if (!oldProjectKey) {
throw new Error("Old project key not found");
}

if (project.upgradeStatus !== ProjectUpgradeStatus.Failed && project.upgradeStatus !== null) {

@ -267,8 +270,19 @@ export const projectQueueFactory = ({
const user = await userDAL.findUserEncKeyByUserId(key.receiverId);
const [orgMembership] = await orgDAL.findMembership({ userId: key.receiverId, orgId: project.orgId });

if (!user || !orgMembership) {
throw new Error(`User with ID ${key.receiverId} was not found during upgrade, or user is not in org.`);
if (!user) {
throw new Error(`User with ID ${key.receiverId} was not found during upgrade.`);
}

if (!orgMembership) {
// This can happen. Since we don't remove project memberships and project keys when a user is removed from an org, this is a valid case.
logger.info("User is not in organization", {
userId: key.receiverId,
orgId: project.orgId,
projectId: project.id
});
// eslint-disable-next-line no-continue
continue;
}

const [newMember] = assignWorkspaceKeysToMembers({

@ -532,7 +546,12 @@ export const projectQueueFactory = ({
logger.error("Failed to upgrade project, because no project was found", data);
} else {
await projectDAL.setProjectUpgradeStatus(data.projectId, ProjectUpgradeStatus.Failed);
logger.error(err, "Failed to upgrade project");
logger.error("Failed to upgrade project", err, {
extra: {
project,
jobData: data
}
});
}

throw err;
@ -1,6 +1,6 @@
import { ForbiddenError, subject } from "@casl/ability";
import path from "path";
import { v4 as uuidv4 } from "uuid";
import { v4 as uuidv4, validate as uuidValidate } from "uuid";

import { TSecretFoldersInsert } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";

@ -164,7 +164,7 @@ export const secretFolderServiceFactory = ({
actorOrgId,
environment,
path: secretPath,
id
idOrName
}: TDeleteFolderDTO) => {
const { permission } = await permissionService.getProjectPermission(actor, actorId, projectId, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(

@ -179,7 +179,10 @@ export const secretFolderServiceFactory = ({
const parentFolder = await folderDAL.findBySecretPath(projectId, environment, secretPath, tx);
if (!parentFolder) throw new BadRequestError({ message: "Secret path not found" });

const [doc] = await folderDAL.delete({ envId: env.id, id, parentId: parentFolder.id }, tx);
const [doc] = await folderDAL.delete(
{ envId: env.id, [uuidValidate(idOrName) ? "id" : "name"]: idOrName, parentId: parentFolder.id },
tx
);
if (!doc) throw new BadRequestError({ message: "Folder not found", name: "Delete folder" });
return doc;
});
@ -16,7 +16,7 @@ export type TUpdateFolderDTO = {
export type TDeleteFolderDTO = {
environment: string;
path: string;
id: string;
idOrName: string;
} & TProjectPermission;

export type TGetFolderDTO = {
@ -1,12 +1,35 @@
|
||||
/* eslint-disable no-await-in-loop */
|
||||
import path from "path";
|
||||
|
||||
import { SecretKeyEncoding, TSecretBlindIndexes, TSecrets } from "@app/db/schemas";
|
||||
import {
|
||||
SecretEncryptionAlgo,
|
||||
SecretKeyEncoding,
|
||||
SecretType,
|
||||
TableName,
|
||||
TSecretBlindIndexes,
|
||||
TSecrets
|
||||
} from "@app/db/schemas";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { buildSecretBlindIndexFromName, decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
|
||||
import {
|
||||
buildSecretBlindIndexFromName,
|
||||
decryptSymmetric128BitHexKeyUTF8,
|
||||
encryptSymmetric128BitHexKeyUTF8
|
||||
} from "@app/lib/crypto";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { groupBy, unique } from "@app/lib/fn";
|
||||
|
||||
import { getBotKeyFnFactory } from "../project-bot/project-bot-fns";
|
||||
import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
|
||||
import { TSecretDALFactory } from "./secret-dal";
|
||||
import {
|
||||
TCreateManySecretsRawFn,
|
||||
TCreateManySecretsRawFnFactory,
|
||||
TFnSecretBlindIndexCheck,
|
||||
TFnSecretBulkInsert,
|
||||
TFnSecretBulkUpdate,
|
||||
TUpdateManySecretsRawFn,
|
||||
TUpdateManySecretsRawFnFactory
|
||||
} from "./secret-types";
|
||||
|
||||
export const generateSecretBlindIndexBySalt = async (secretName: string, secretBlindIndexDoc: TSecretBlindIndexes) => {
|
||||
const appCfg = getConfig();
|
||||
@ -228,3 +251,399 @@ export const decryptSecretRaw = (secret: TSecrets & { workspace: string; environ
|
||||
user: secret.userId
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Checks and handles secrets using a blind index method.
|
||||
* The function generates mappings between secret names and their blind indexes, validates user IDs for personal secrets, and retrieves secrets from the database based on their blind indexes.
|
||||
* For new secrets (isNew = true), it ensures they don't already exist in the database.
|
||||
* For existing secrets, it verifies their presence in the database.
|
||||
* If discrepancies are found, errors are thrown. The function returns mappings and the fetched secrets.
|
||||
*/
|
||||
export const fnSecretBlindIndexCheck = async ({
|
||||
inputSecrets,
|
||||
folderId,
|
||||
isNew,
|
||||
userId,
|
||||
blindIndexCfg,
|
||||
secretDAL
|
||||
}: TFnSecretBlindIndexCheck) => {
|
||||
const blindIndex2KeyName: Record<string, string> = {}; // used at audit log point
|
||||
const keyName2BlindIndex = await Promise.all(
|
||||
inputSecrets.map(({ secretName }) => generateSecretBlindIndexBySalt(secretName, blindIndexCfg))
|
||||
).then((blindIndexes) =>
|
||||
blindIndexes.reduce<Record<string, string>>((prev, curr, i) => {
|
||||
// eslint-disable-next-line
|
||||
prev[inputSecrets[i].secretName] = curr;
|
||||
blindIndex2KeyName[curr] = inputSecrets[i].secretName;
|
||||
return prev;
|
||||
}, {})
|
||||
);
|
||||
|
||||
if (inputSecrets.some(({ type }) => type === SecretType.Personal) && !userId) {
|
||||
throw new BadRequestError({ message: "Missing user id for personal secret" });
|
||||
}
|
||||
|
||||
const secrets = await secretDAL.findByBlindIndexes(
|
||||
folderId,
|
||||
inputSecrets.map(({ secretName, type }) => ({
|
||||
blindIndex: keyName2BlindIndex[secretName],
|
||||
type: type || SecretType.Shared
|
||||
})),
|
||||
userId
|
||||
);
|
||||
|
||||
if (isNew) {
|
||||
if (secrets.length) throw new BadRequestError({ message: "Secret already exist" });
|
||||
} else {
|
||||
const secretKeysInDB = unique(secrets, (el) => el.secretBlindIndex as string).map(
|
||||
(el) => blindIndex2KeyName[el.secretBlindIndex as string]
|
||||
);
|
||||
const hasUnknownSecretsProvided = secretKeysInDB.length !== inputSecrets.length;
|
||||
if (hasUnknownSecretsProvided) {
|
||||
const keysMissingInDB = Object.keys(keyName2BlindIndex).filter((key) => !secretKeysInDB.includes(key));
|
||||
throw new BadRequestError({
|
||||
message: `Secret not found: blind index ${keysMissingInDB.join(",")}`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return { blindIndex2KeyName, keyName2BlindIndex, secrets };
|
||||
};
|
||||
|
||||
// these functions are special functions shared by a couple of resources
|
||||
// used by secret approval, rotation or anywhere in which secret needs to modified
|
||||
export const fnSecretBulkInsert = async ({
|
||||
// TODO: Pick types here
|
||||
folderId,
|
||||
inputSecrets,
|
||||
secretDAL,
|
||||
secretVersionDAL,
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL,
|
||||
tx
|
||||
}: TFnSecretBulkInsert) => {
|
||||
const newSecrets = await secretDAL.insertMany(
|
||||
inputSecrets.map(({ tags, ...el }) => ({ ...el, folderId })),
|
||||
tx
|
||||
);
|
||||
const newSecretGroupByBlindIndex = groupBy(newSecrets, (item) => item.secretBlindIndex as string);
|
||||
const newSecretTags = inputSecrets.flatMap(({ tags: secretTags = [], secretBlindIndex }) =>
|
||||
secretTags.map((tag) => ({
|
||||
[`${TableName.SecretTag}Id` as const]: tag,
|
||||
[`${TableName.Secret}Id` as const]: newSecretGroupByBlindIndex[secretBlindIndex as string][0].id
|
||||
}))
|
||||
);
|
||||
const secretVersions = await secretVersionDAL.insertMany(
|
||||
inputSecrets.map(({ tags, ...el }) => ({
|
||||
...el,
|
||||
folderId,
|
||||
secretId: newSecretGroupByBlindIndex[el.secretBlindIndex as string][0].id
|
||||
})),
|
||||
tx
|
||||
);
|
||||
if (newSecretTags.length) {
|
||||
const secTags = await secretTagDAL.saveTagsToSecret(newSecretTags, tx);
|
||||
const secVersionsGroupBySecId = groupBy(secretVersions, (i) => i.secretId);
|
||||
const newSecretVersionTags = secTags.flatMap(({ secretsId, secret_tagsId }) => ({
|
||||
[`${TableName.SecretVersion}Id` as const]: secVersionsGroupBySecId[secretsId][0].id,
|
||||
[`${TableName.SecretTag}Id` as const]: secret_tagsId
|
||||
}));
|
||||
await secretVersionTagDAL.insertMany(newSecretVersionTags, tx);
|
||||
}
|
||||
|
||||
return newSecrets.map((secret) => ({ ...secret, _id: secret.id }));
|
||||
};

export const fnSecretBulkUpdate = async ({
  tx,
  inputSecrets,
  folderId,
  projectId,
  secretDAL,
  secretVersionDAL,
  secretTagDAL,
  secretVersionTagDAL
}: TFnSecretBulkUpdate) => {
  const newSecrets = await secretDAL.bulkUpdate(
    inputSecrets.map(({ filter, data: { tags, ...data } }) => ({
      filter: { ...filter, folderId },
      data
    })),
    tx
  );
  const secretVersions = await secretVersionDAL.insertMany(
    newSecrets.map(({ id, createdAt, updatedAt, ...el }) => ({
      ...el,
      secretId: id
    })),
    tx
  );
  const secsUpdatedTag = inputSecrets.flatMap(({ data: { tags } }, i) =>
    tags !== undefined ? { tags, secretId: newSecrets[i].id } : []
  );
  if (secsUpdatedTag.length) {
    await secretTagDAL.deleteTagsManySecret(
      projectId,
      secsUpdatedTag.map(({ secretId }) => secretId),
      tx
    );
    const newSecretTags = secsUpdatedTag.flatMap(({ tags: secretTags = [], secretId }) =>
      secretTags.map((tag) => ({
        [`${TableName.SecretTag}Id` as const]: tag,
        [`${TableName.Secret}Id` as const]: secretId
      }))
    );
    if (newSecretTags.length) {
      const secTags = await secretTagDAL.saveTagsToSecret(newSecretTags, tx);
      const secVersionsGroupBySecId = groupBy(secretVersions, (i) => i.secretId);
      const newSecretVersionTags = secTags.flatMap(({ secretsId, secret_tagsId }) => ({
        [`${TableName.SecretVersion}Id` as const]: secVersionsGroupBySecId[secretsId][0].id,
        [`${TableName.SecretTag}Id` as const]: secret_tagsId
      }));
      await secretVersionTagDAL.insertMany(newSecretVersionTags, tx);
    }
  }

  return newSecrets.map((secret) => ({ ...secret, _id: secret.id }));
};
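Likewise for bulk updates: each entry pairs a filter (resolved to the current blind index) with the new data, and a rename is expressed by writing a different secretBlindIndex. A hedged sketch, with the blind-index and ciphertext variables assumed to come from fnSecretBlindIndexCheck and the bot-key encryption step:

const updated = await secretDAL.transaction(async (tx) =>
  fnSecretBulkUpdate({
    folderId,
    projectId,
    inputSecrets: [
      {
        filter: { secretBlindIndex: currentBlindIndex, type: SecretType.Shared },
        data: {
          secretValueCiphertext,
          secretValueIV,
          secretValueTag,
          secretBlindIndex: renamedBlindIndex ?? currentBlindIndex,
          tags: [] // an empty array clears existing tag links; omit `tags` to leave them untouched
        }
      }
    ],
    secretDAL,
    secretVersionDAL,
    secretTagDAL,
    secretVersionTagDAL,
    tx
  })
);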
|
||||
|
||||
export const createManySecretsRawFnFactory = ({
|
||||
projectDAL,
|
||||
projectBotDAL,
|
||||
secretDAL,
|
||||
secretVersionDAL,
|
||||
secretBlindIndexDAL,
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL,
|
||||
folderDAL
|
||||
}: TCreateManySecretsRawFnFactory) => {
|
||||
const getBotKeyFn = getBotKeyFnFactory(projectBotDAL);
|
||||
const createManySecretsRawFn = async ({
|
||||
projectId,
|
||||
environment,
|
||||
path: secretPath,
|
||||
secrets,
|
||||
userId
|
||||
}: TCreateManySecretsRawFn) => {
|
||||
const botKey = await getBotKeyFn(projectId);
|
||||
if (!botKey) throw new BadRequestError({ message: "Project bot not found", name: "bot_not_found_error" });
|
||||
|
||||
await projectDAL.checkProjectUpgradeStatus(projectId);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Create secret" });
|
||||
const folderId = folder.id;
|
||||
|
||||
const blindIndexCfg = await secretBlindIndexDAL.findOne({ projectId });
|
||||
if (!blindIndexCfg) throw new BadRequestError({ message: "Blind index not found", name: "Create secret" });
|
||||
|
||||
// insert operation
|
||||
const { keyName2BlindIndex } = await fnSecretBlindIndexCheck({
|
||||
inputSecrets: secrets,
|
||||
folderId,
|
||||
isNew: true,
|
||||
blindIndexCfg,
|
||||
secretDAL
|
||||
});
|
||||
|
||||
const inputSecrets = await Promise.all(
|
||||
secrets.map(async (secret) => {
|
||||
const secretKeyEncrypted = encryptSymmetric128BitHexKeyUTF8(secret.secretName, botKey);
|
||||
const secretValueEncrypted = encryptSymmetric128BitHexKeyUTF8(secret.secretValue || "", botKey);
|
||||
const secretCommentEncrypted = encryptSymmetric128BitHexKeyUTF8(secret.secretComment || "", botKey);
|
||||
|
||||
if (secret.type === SecretType.Personal) {
|
||||
if (!userId) throw new BadRequestError({ message: "Missing user id for personal secret" });
|
||||
const sharedExist = await secretDAL.findOne({
|
||||
secretBlindIndex: keyName2BlindIndex[secret.secretName],
|
||||
folderId,
|
||||
type: SecretType.Shared
|
||||
});
|
||||
|
||||
if (!sharedExist)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to create personal secret override for no corresponding shared secret"
|
||||
});
|
||||
}
|
||||
|
||||
const tags = secret.tags ? await secretTagDAL.findManyTagsById(projectId, secret.tags) : [];
|
||||
if ((secret.tags || []).length !== tags.length) throw new BadRequestError({ message: "Tag not found" });
|
||||
|
||||
return {
|
||||
type: secret.type,
|
||||
userId: secret.type === SecretType.Personal ? userId : null,
|
||||
secretName: secret.secretName,
|
||||
secretKeyCiphertext: secretKeyEncrypted.ciphertext,
|
||||
secretKeyIV: secretKeyEncrypted.iv,
|
||||
secretKeyTag: secretKeyEncrypted.tag,
|
||||
secretValueCiphertext: secretValueEncrypted.ciphertext,
|
||||
secretValueIV: secretValueEncrypted.iv,
|
||||
secretValueTag: secretValueEncrypted.tag,
|
||||
secretCommentCiphertext: secretCommentEncrypted.ciphertext,
|
||||
secretCommentIV: secretCommentEncrypted.iv,
|
||||
secretCommentTag: secretCommentEncrypted.tag,
|
||||
skipMultilineEncoding: secret.skipMultilineEncoding,
|
||||
tags: secret.tags
|
||||
};
|
||||
})
|
||||
);
|
||||
|
||||
const newSecrets = await secretDAL.transaction(async (tx) =>
|
||||
fnSecretBulkInsert({
|
||||
inputSecrets: inputSecrets.map(({ secretName, ...el }) => ({
|
||||
...el,
|
||||
version: 0,
|
||||
secretBlindIndex: keyName2BlindIndex[secretName],
|
||||
algorithm: SecretEncryptionAlgo.AES_256_GCM,
|
||||
keyEncoding: SecretKeyEncoding.UTF8
|
||||
})),
|
||||
folderId,
|
||||
secretDAL,
|
||||
secretVersionDAL,
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL,
|
||||
tx
|
||||
})
|
||||
);
|
||||
|
||||
return newSecrets;
|
||||
};
|
||||
|
||||
return createManySecretsRawFn;
|
||||
};
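The wiring below mirrors how the secret queue consumes this factory later in this diff; only the example payload (environment, path, secret values) is made up.

const createManySecretsRawFn = createManySecretsRawFnFactory({
  projectDAL,
  projectBotDAL,
  secretDAL,
  secretVersionDAL,
  secretBlindIndexDAL,
  secretTagDAL,
  secretVersionTagDAL,
  folderDAL
});

await createManySecretsRawFn({
  projectId,
  environment: "dev",
  path: "/",
  secrets: [{ secretName: "SMTP_HOST", secretValue: "smtp.example.com", type: SecretType.Shared }]
});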
|
||||
|
||||
export const updateManySecretsRawFnFactory = ({
|
||||
projectDAL,
|
||||
projectBotDAL,
|
||||
secretDAL,
|
||||
secretVersionDAL,
|
||||
secretBlindIndexDAL,
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL,
|
||||
folderDAL
|
||||
}: TUpdateManySecretsRawFnFactory) => {
|
||||
const getBotKeyFn = getBotKeyFnFactory(projectBotDAL);
|
||||
const updateManySecretsRawFn = async ({
|
||||
projectId,
|
||||
environment,
|
||||
path: secretPath,
|
||||
secrets, // consider accepting instead ciphertext secrets
|
||||
userId
|
||||
}: TUpdateManySecretsRawFn): Promise<Array<TSecrets & { _id: string }>> => {
|
||||
const botKey = await getBotKeyFn(projectId);
|
||||
if (!botKey) throw new BadRequestError({ message: "Project bot not found", name: "bot_not_found_error" });
|
||||
|
||||
await projectDAL.checkProjectUpgradeStatus(projectId);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Update secret" });
|
||||
const folderId = folder.id;
|
||||
|
||||
const blindIndexCfg = await secretBlindIndexDAL.findOne({ projectId });
|
||||
if (!blindIndexCfg) throw new BadRequestError({ message: "Blind index not found", name: "Update secret" });
|
||||
|
||||
const { keyName2BlindIndex } = await fnSecretBlindIndexCheck({
|
||||
inputSecrets: secrets,
|
||||
folderId,
|
||||
isNew: false,
|
||||
blindIndexCfg,
|
||||
secretDAL,
|
||||
userId
|
||||
});
|
||||
|
||||
const inputSecrets = await Promise.all(
|
||||
secrets.map(async (secret) => {
|
||||
if (secret.newSecretName === "") {
|
||||
throw new BadRequestError({ message: "New secret name cannot be empty" });
|
||||
}
|
||||
|
||||
const secretKeyEncrypted = encryptSymmetric128BitHexKeyUTF8(secret.secretName, botKey);
|
||||
const secretValueEncrypted = encryptSymmetric128BitHexKeyUTF8(secret.secretValue || "", botKey);
|
||||
const secretCommentEncrypted = encryptSymmetric128BitHexKeyUTF8(secret.secretComment || "", botKey);
|
||||
|
||||
if (secret.type === SecretType.Personal) {
|
||||
if (!userId) throw new BadRequestError({ message: "Missing user id for personal secret" });
|
||||
|
||||
const sharedExist = await secretDAL.findOne({
|
||||
secretBlindIndex: keyName2BlindIndex[secret.secretName],
|
||||
folderId,
|
||||
type: SecretType.Shared
|
||||
});
|
||||
|
||||
if (!sharedExist)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to update personal secret override for no corresponding shared secret"
|
||||
});
|
||||
|
||||
if (secret.newSecretName)
|
||||
throw new BadRequestError({ message: "Personal secret cannot change the key name" });
|
||||
}
|
||||
|
||||
const tags = secret.tags ? await secretTagDAL.findManyTagsById(projectId, secret.tags) : [];
|
||||
if ((secret.tags || []).length !== tags.length) throw new BadRequestError({ message: "Tag not found" });
|
||||
|
||||
return {
|
||||
type: secret.type,
|
||||
userId: secret.type === SecretType.Personal ? userId : null,
|
||||
secretName: secret.secretName,
|
||||
newSecretName: secret.newSecretName,
|
||||
secretKeyCiphertext: secretKeyEncrypted.ciphertext,
|
||||
secretKeyIV: secretKeyEncrypted.iv,
|
||||
secretKeyTag: secretKeyEncrypted.tag,
|
||||
secretValueCiphertext: secretValueEncrypted.ciphertext,
|
||||
secretValueIV: secretValueEncrypted.iv,
|
||||
secretValueTag: secretValueEncrypted.tag,
|
||||
secretCommentCiphertext: secretCommentEncrypted.ciphertext,
|
||||
secretCommentIV: secretCommentEncrypted.iv,
|
||||
secretCommentTag: secretCommentEncrypted.tag,
|
||||
skipMultilineEncoding: secret.skipMultilineEncoding,
|
||||
tags: secret.tags
|
||||
};
|
||||
})
|
||||
);
|
||||
|
||||
const tagIds = inputSecrets.flatMap(({ tags = [] }) => tags);
|
||||
const tags = tagIds.length ? await secretTagDAL.findManyTagsById(projectId, tagIds) : [];
|
||||
if (tagIds.length !== tags.length) throw new BadRequestError({ message: "Tag not found" });
|
||||
|
||||
// now find any secret that needs to update its name
|
||||
// same process as above
|
||||
const nameUpdatedSecrets = inputSecrets.filter(({ newSecretName }) => Boolean(newSecretName));
|
||||
const { keyName2BlindIndex: newKeyName2BlindIndex } = await fnSecretBlindIndexCheck({
|
||||
inputSecrets: nameUpdatedSecrets,
|
||||
folderId,
|
||||
isNew: true,
|
||||
blindIndexCfg,
|
||||
secretDAL
|
||||
});
|
||||
|
||||
const updatedSecrets = await secretDAL.transaction(async (tx) =>
|
||||
fnSecretBulkUpdate({
|
||||
folderId,
|
||||
projectId,
|
||||
tx,
|
||||
inputSecrets: inputSecrets.map(({ secretName, newSecretName, ...el }) => ({
|
||||
filter: { secretBlindIndex: keyName2BlindIndex[secretName], type: SecretType.Shared },
|
||||
data: {
|
||||
...el,
|
||||
folderId,
|
||||
secretBlindIndex:
|
||||
newSecretName && newKeyName2BlindIndex[newSecretName]
|
||||
? newKeyName2BlindIndex[newSecretName]
|
||||
: keyName2BlindIndex[secretName],
|
||||
algorithm: SecretEncryptionAlgo.AES_256_GCM,
|
||||
keyEncoding: SecretKeyEncoding.UTF8
|
||||
}
|
||||
})),
|
||||
secretDAL,
|
||||
secretVersionDAL,
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL
|
||||
})
|
||||
);
|
||||
|
||||
return updatedSecrets;
|
||||
};
|
||||
|
||||
return updateManySecretsRawFn;
|
||||
};
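And a hedged call sketch for the update counterpart, rotating a value and renaming its key in one pass; the concrete names and values are illustrative.

await updateManySecretsRawFn({
  projectId,
  environment: "prod",
  path: "/api",
  secrets: [
    {
      secretName: "OLD_SERVICE_TOKEN",
      newSecretName: "SERVICE_TOKEN", // must not already exist; personal overrides cannot be renamed
      secretValue: "rotated-value",
      type: SecretType.Shared
    }
  ]
});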
|
||||
|
@ -6,6 +6,12 @@ import { BadRequestError } from "@app/lib/errors";
|
||||
import { isSamePath } from "@app/lib/fn";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
|
||||
import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
|
||||
import { createManySecretsRawFnFactory, updateManySecretsRawFnFactory } from "@app/services/secret/secret-fns";
|
||||
import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal";
|
||||
import { TSecretVersionTagDALFactory } from "@app/services/secret/secret-version-tag-dal";
|
||||
import { TSecretBlindIndexDALFactory } from "@app/services/secret-blind-index/secret-blind-index-dal";
|
||||
import { TSecretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal";
|
||||
|
||||
import { TIntegrationDALFactory } from "../integration/integration-dal";
|
||||
import { TIntegrationAuthServiceFactory } from "../integration-auth/integration-auth-service";
|
||||
@ -29,18 +35,23 @@ export type TSecretQueueFactory = ReturnType<typeof secretQueueFactory>;
|
||||
|
||||
type TSecretQueueFactoryDep = {
|
||||
queueService: TQueueServiceFactory;
|
||||
integrationDAL: Pick<TIntegrationDALFactory, "findByProjectIdV2">;
|
||||
integrationDAL: Pick<TIntegrationDALFactory, "findByProjectIdV2" | "updateById">;
|
||||
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
|
||||
integrationAuthService: Pick<TIntegrationAuthServiceFactory, "getIntegrationAccessToken">;
|
||||
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findByManySecretPath">;
|
||||
secretDAL: Pick<TSecretDALFactory, "findByFolderId" | "find">;
|
||||
folderDAL: TSecretFolderDALFactory;
|
||||
secretDAL: TSecretDALFactory;
|
||||
secretImportDAL: Pick<TSecretImportDALFactory, "find">;
|
||||
webhookDAL: Pick<TWebhookDALFactory, "findAllWebhooks" | "transaction" | "update" | "bulkUpdate">;
|
||||
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
|
||||
projectDAL: Pick<TProjectDALFactory, "findById">;
|
||||
projectDAL: TProjectDALFactory;
|
||||
projectBotDAL: TProjectBotDALFactory;
|
||||
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findAllProjectMembers">;
|
||||
smtpService: TSmtpService;
|
||||
orgDAL: Pick<TOrgDALFactory, "findOrgByProjectId">;
|
||||
secretVersionDAL: TSecretVersionDALFactory;
|
||||
secretBlindIndexDAL: TSecretBlindIndexDALFactory;
|
||||
secretTagDAL: TSecretTagDALFactory;
|
||||
secretVersionTagDAL: TSecretVersionTagDALFactory;
|
||||
};
|
||||
|
||||
export type TGetSecrets = {
|
||||
@ -62,8 +73,35 @@ export const secretQueueFactory = ({
|
||||
orgDAL,
|
||||
smtpService,
|
||||
projectDAL,
|
||||
projectMembershipDAL
|
||||
projectBotDAL,
|
||||
projectMembershipDAL,
|
||||
secretVersionDAL,
|
||||
secretBlindIndexDAL,
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL
|
||||
}: TSecretQueueFactoryDep) => {
|
||||
const createManySecretsRawFn = createManySecretsRawFnFactory({
|
||||
projectDAL,
|
||||
projectBotDAL,
|
||||
secretDAL,
|
||||
secretVersionDAL,
|
||||
secretBlindIndexDAL,
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL,
|
||||
folderDAL
|
||||
});
|
||||
|
||||
const updateManySecretsRawFn = updateManySecretsRawFnFactory({
|
||||
projectDAL,
|
||||
projectBotDAL,
|
||||
secretDAL,
|
||||
secretVersionDAL,
|
||||
secretBlindIndexDAL,
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL,
|
||||
folderDAL
|
||||
});
|
||||
|
||||
const syncIntegrations = async (dto: TGetSecrets) => {
|
||||
await queueService.queue(QueueName.IntegrationSync, QueueJobs.IntegrationSync, dto, {
|
||||
attempts: 5,
|
||||
@ -307,6 +345,9 @@ export const secretQueueFactory = ({
|
||||
}
|
||||
|
||||
await syncIntegrationSecrets({
|
||||
createManySecretsRawFn,
|
||||
updateManySecretsRawFn,
|
||||
integrationDAL,
|
||||
integration,
|
||||
integrationAuth,
|
||||
secrets: Object.keys(suffixedSecrets).length !== 0 ? suffixedSecrets : secrets,
|
||||
@ -350,7 +391,7 @@ export const secretQueueFactory = ({
|
||||
await smtpService.sendMail({
|
||||
template: SmtpTemplates.SecretReminder,
|
||||
subjectLine: "Infisical secret reminder",
|
||||
recipients: [...projectMembers.map((m) => m.user.email)],
|
||||
recipients: [...projectMembers.map((m) => m.user.email)].filter((email) => email).map((email) => email as string),
|
||||
substitutions: {
|
||||
reminderNote: data.note, // May not be present.
|
||||
projectName: project.name,
|
||||
|
@ -1,13 +1,13 @@
|
||||
import { ForbiddenError, subject } from "@casl/ability";
|
||||
|
||||
import { SecretEncryptionAlgo, SecretKeyEncoding, SecretsSchema, SecretType, TableName } from "@app/db/schemas";
|
||||
import { SecretEncryptionAlgo, SecretKeyEncoding, SecretsSchema, SecretType } from "@app/db/schemas";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { buildSecretBlindIndexFromName, encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { groupBy, pick, unique } from "@app/lib/fn";
|
||||
import { groupBy, pick } from "@app/lib/fn";
|
||||
import { logger } from "@app/lib/logger";
|
||||
|
||||
import { ActorType } from "../auth/auth-type";
|
||||
@ -19,7 +19,7 @@ import { TSecretImportDALFactory } from "../secret-import/secret-import-dal";
|
||||
import { fnSecretsFromImports } from "../secret-import/secret-import-fns";
|
||||
import { TSecretTagDALFactory } from "../secret-tag/secret-tag-dal";
|
||||
import { TSecretDALFactory } from "./secret-dal";
|
||||
import { decryptSecretRaw, generateSecretBlindIndexBySalt } from "./secret-fns";
|
||||
import { decryptSecretRaw, fnSecretBlindIndexCheck, fnSecretBulkInsert, fnSecretBulkUpdate } from "./secret-fns";
|
||||
import { TSecretQueueFactory } from "./secret-queue";
|
||||
import {
|
||||
TCreateBulkSecretDTO,
|
||||
@ -28,11 +28,8 @@ import {
|
||||
TDeleteBulkSecretDTO,
|
||||
TDeleteSecretDTO,
|
||||
TDeleteSecretRawDTO,
|
||||
TFnSecretBlindIndexCheck,
|
||||
TFnSecretBlindIndexCheckV2,
|
||||
TFnSecretBulkDelete,
|
||||
TFnSecretBulkInsert,
|
||||
TFnSecretBulkUpdate,
|
||||
TGetASecretDTO,
|
||||
TGetASecretRawDTO,
|
||||
TGetSecretsDTO,
|
||||
@ -95,85 +92,6 @@ export const secretServiceFactory = ({
|
||||
return secretBlindIndex;
|
||||
};
|
||||
|
||||
// these functions are special functions shared by a couple of resources
|
||||
// used by secret approval, rotation or anywhere in which secret needs to modified
|
||||
const fnSecretBulkInsert = async ({ folderId, inputSecrets, tx }: TFnSecretBulkInsert) => {
|
||||
const newSecrets = await secretDAL.insertMany(
|
||||
inputSecrets.map(({ tags, ...el }) => ({ ...el, folderId })),
|
||||
tx
|
||||
);
|
||||
const newSecretGroupByBlindIndex = groupBy(newSecrets, (item) => item.secretBlindIndex as string);
|
||||
const newSecretTags = inputSecrets.flatMap(({ tags: secretTags = [], secretBlindIndex }) =>
|
||||
secretTags.map((tag) => ({
|
||||
[`${TableName.SecretTag}Id` as const]: tag,
|
||||
[`${TableName.Secret}Id` as const]: newSecretGroupByBlindIndex[secretBlindIndex as string][0].id
|
||||
}))
|
||||
);
|
||||
const secretVersions = await secretVersionDAL.insertMany(
|
||||
inputSecrets.map(({ tags, ...el }) => ({
|
||||
...el,
|
||||
folderId,
|
||||
secretId: newSecretGroupByBlindIndex[el.secretBlindIndex as string][0].id
|
||||
})),
|
||||
tx
|
||||
);
|
||||
if (newSecretTags.length) {
|
||||
const secTags = await secretTagDAL.saveTagsToSecret(newSecretTags, tx);
|
||||
const secVersionsGroupBySecId = groupBy(secretVersions, (i) => i.secretId);
|
||||
const newSecretVersionTags = secTags.flatMap(({ secretsId, secret_tagsId }) => ({
|
||||
[`${TableName.SecretVersion}Id` as const]: secVersionsGroupBySecId[secretsId][0].id,
|
||||
[`${TableName.SecretTag}Id` as const]: secret_tagsId
|
||||
}));
|
||||
await secretVersionTagDAL.insertMany(newSecretVersionTags, tx);
|
||||
}
|
||||
|
||||
return newSecrets.map((secret) => ({ ...secret, _id: secret.id }));
|
||||
};
|
||||
|
||||
const fnSecretBulkUpdate = async ({ tx, inputSecrets, folderId, projectId }: TFnSecretBulkUpdate) => {
|
||||
const newSecrets = await secretDAL.bulkUpdate(
|
||||
inputSecrets.map(({ filter, data: { tags, ...data } }) => ({
|
||||
filter: { ...filter, folderId },
|
||||
data
|
||||
})),
|
||||
tx
|
||||
);
|
||||
const secretVersions = await secretVersionDAL.insertMany(
|
||||
newSecrets.map(({ id, createdAt, updatedAt, ...el }) => ({
|
||||
...el,
|
||||
secretId: id
|
||||
})),
|
||||
tx
|
||||
);
|
||||
const secsUpdatedTag = inputSecrets.flatMap(({ data: { tags } }, i) =>
|
||||
tags !== undefined ? { tags, secretId: newSecrets[i].id } : []
|
||||
);
|
||||
if (secsUpdatedTag.length) {
|
||||
await secretTagDAL.deleteTagsManySecret(
|
||||
projectId,
|
||||
secsUpdatedTag.map(({ secretId }) => secretId),
|
||||
tx
|
||||
);
|
||||
const newSecretTags = secsUpdatedTag.flatMap(({ tags: secretTags = [], secretId }) =>
|
||||
secretTags.map((tag) => ({
|
||||
[`${TableName.SecretTag}Id` as const]: tag,
|
||||
[`${TableName.Secret}Id` as const]: secretId
|
||||
}))
|
||||
);
|
||||
if (newSecretTags.length) {
|
||||
const secTags = await secretTagDAL.saveTagsToSecret(newSecretTags, tx);
|
||||
const secVersionsGroupBySecId = groupBy(secretVersions, (i) => i.secretId);
|
||||
const newSecretVersionTags = secTags.flatMap(({ secretsId, secret_tagsId }) => ({
|
||||
[`${TableName.SecretVersion}Id` as const]: secVersionsGroupBySecId[secretsId][0].id,
|
||||
[`${TableName.SecretTag}Id` as const]: secret_tagsId
|
||||
}));
|
||||
await secretVersionTagDAL.insertMany(newSecretVersionTags, tx);
|
||||
}
|
||||
}
|
||||
|
||||
return newSecrets.map((secret) => ({ ...secret, _id: secret.id }));
|
||||
};
|
||||
|
||||
const fnSecretBulkDelete = async ({ folderId, inputSecrets, tx, actorId }: TFnSecretBulkDelete) => {
|
||||
const deletedSecrets = await secretDAL.deleteMany(
|
||||
inputSecrets.map(({ type, secretBlindIndex }) => ({
|
||||
@ -202,63 +120,6 @@ export const secretServiceFactory = ({
|
||||
return deletedSecrets;
|
||||
};
|
||||
|
||||
/**
|
||||
* Checks and handles secrets using a blind index method.
|
||||
* The function generates mappings between secret names and their blind indexes, validates user IDs for personal secrets, and retrieves secrets from the database based on their blind indexes.
|
||||
* For new secrets (isNew = true), it ensures they don't already exist in the database.
|
||||
* For existing secrets, it verifies their presence in the database.
|
||||
* If discrepancies are found, errors are thrown. The function returns mappings and the fetched secrets.
|
||||
*/
|
||||
const fnSecretBlindIndexCheck = async ({
|
||||
inputSecrets,
|
||||
folderId,
|
||||
isNew,
|
||||
userId,
|
||||
blindIndexCfg
|
||||
}: TFnSecretBlindIndexCheck) => {
|
||||
const blindIndex2KeyName: Record<string, string> = {}; // used at audit log point
|
||||
const keyName2BlindIndex = await Promise.all(
|
||||
inputSecrets.map(({ secretName }) => generateSecretBlindIndexBySalt(secretName, blindIndexCfg))
|
||||
).then((blindIndexes) =>
|
||||
blindIndexes.reduce<Record<string, string>>((prev, curr, i) => {
|
||||
// eslint-disable-next-line
|
||||
prev[inputSecrets[i].secretName] = curr;
|
||||
blindIndex2KeyName[curr] = inputSecrets[i].secretName;
|
||||
return prev;
|
||||
}, {})
|
||||
);
|
||||
|
||||
if (inputSecrets.some(({ type }) => type === SecretType.Personal) && !userId) {
|
||||
throw new BadRequestError({ message: "Missing user id for personal secret" });
|
||||
}
|
||||
|
||||
const secrets = await secretDAL.findByBlindIndexes(
|
||||
folderId,
|
||||
inputSecrets.map(({ secretName, type }) => ({
|
||||
blindIndex: keyName2BlindIndex[secretName],
|
||||
type: type || SecretType.Shared
|
||||
})),
|
||||
userId
|
||||
);
|
||||
|
||||
if (isNew) {
|
||||
if (secrets.length) throw new BadRequestError({ message: "Secret already exist" });
|
||||
} else {
|
||||
const secretKeysInDB = unique(secrets, (el) => el.secretBlindIndex as string).map(
|
||||
(el) => blindIndex2KeyName[el.secretBlindIndex as string]
|
||||
);
|
||||
const hasUnknownSecretsProvided = secretKeysInDB.length !== inputSecrets.length;
|
||||
if (hasUnknownSecretsProvided) {
|
||||
const keysMissingInDB = Object.keys(keyName2BlindIndex).filter((key) => !secretKeysInDB.includes(key));
|
||||
throw new BadRequestError({
|
||||
message: `Secret not found: blind index ${keysMissingInDB.join(",")}`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return { blindIndex2KeyName, keyName2BlindIndex, secrets };
|
||||
};
|
||||
|
||||
// this is used when secret blind index already exist
|
||||
// mainly for secret approval
|
||||
const fnSecretBlindIndexCheckV2 = async ({ inputSecrets, folderId, userId }: TFnSecretBlindIndexCheckV2) => {
|
||||
@ -311,7 +172,8 @@ export const secretServiceFactory = ({
|
||||
folderId,
|
||||
isNew: true,
|
||||
userId: actorId,
|
||||
blindIndexCfg
|
||||
blindIndexCfg,
|
||||
secretDAL
|
||||
});
|
||||
|
||||
// if user creating personal check its shared also exist
|
||||
@ -348,6 +210,10 @@ export const secretServiceFactory = ({
|
||||
tags: inputSecret.tags
|
||||
}
|
||||
],
|
||||
secretDAL,
|
||||
secretVersionDAL,
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL,
|
||||
tx
|
||||
})
|
||||
);
|
||||
@ -375,6 +241,10 @@ export const secretServiceFactory = ({
|
||||
|
||||
await projectDAL.checkProjectUpgradeStatus(projectId);
|
||||
|
||||
if (inputSecret.newSecretName === "") {
|
||||
throw new BadRequestError({ message: "New secret name cannot be empty" });
|
||||
}
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Create secret" });
|
||||
const folderId = folder.id;
|
||||
@ -391,7 +261,8 @@ export const secretServiceFactory = ({
|
||||
folderId,
|
||||
isNew: false,
|
||||
blindIndexCfg,
|
||||
userId: actorId
|
||||
userId: actorId,
|
||||
secretDAL
|
||||
});
|
||||
if (inputSecret.newSecretName && inputSecret.type === SecretType.Personal) {
|
||||
throw new BadRequestError({ message: "Personal secret cannot change the key name" });
|
||||
@ -403,7 +274,8 @@ export const secretServiceFactory = ({
|
||||
inputSecrets: [{ secretName: inputSecret.newSecretName }],
|
||||
folderId,
|
||||
isNew: true,
|
||||
blindIndexCfg
|
||||
blindIndexCfg,
|
||||
secretDAL
|
||||
});
|
||||
newSecretNameBlindIndex = kN2NewBlindIndex[inputSecret.newSecretName];
|
||||
}
|
||||
@ -450,6 +322,10 @@ export const secretServiceFactory = ({
|
||||
}
|
||||
}
|
||||
],
|
||||
secretDAL,
|
||||
secretVersionDAL,
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL,
|
||||
tx
|
||||
})
|
||||
);
|
||||
@ -492,7 +368,8 @@ export const secretServiceFactory = ({
|
||||
inputSecrets: [{ secretName: inputSecret.secretName }],
|
||||
folderId,
|
||||
isNew: false,
|
||||
blindIndexCfg
|
||||
blindIndexCfg,
|
||||
secretDAL
|
||||
});
|
||||
|
||||
const deletedSecret = await secretDAL.transaction(async (tx) =>
|
||||
@ -675,13 +552,14 @@ export const secretServiceFactory = ({
|
||||
const folderId = folder.id;
|
||||
|
||||
const blindIndexCfg = await secretBlindIndexDAL.findOne({ projectId });
|
||||
if (!blindIndexCfg) throw new BadRequestError({ message: "Blind index not found", name: "Update secret" });
|
||||
if (!blindIndexCfg) throw new BadRequestError({ message: "Blind index not found", name: "Create secret" });
|
||||
|
||||
const { keyName2BlindIndex } = await fnSecretBlindIndexCheck({
|
||||
inputSecrets,
|
||||
folderId,
|
||||
isNew: true,
|
||||
blindIndexCfg
|
||||
blindIndexCfg,
|
||||
secretDAL
|
||||
});
|
||||
|
||||
// get all tags
|
||||
@ -700,6 +578,10 @@ export const secretServiceFactory = ({
|
||||
keyEncoding: SecretKeyEncoding.UTF8
|
||||
})),
|
||||
folderId,
|
||||
secretDAL,
|
||||
secretVersionDAL,
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL,
|
||||
tx
|
||||
})
|
||||
);
|
||||
@ -728,7 +610,7 @@ export const secretServiceFactory = ({
|
||||
await projectDAL.checkProjectUpgradeStatus(projectId);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Create secret" });
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Update secret" });
|
||||
const folderId = folder.id;
|
||||
|
||||
const blindIndexCfg = await secretBlindIndexDAL.findOne({ projectId });
|
||||
@ -738,7 +620,8 @@ export const secretServiceFactory = ({
|
||||
inputSecrets,
|
||||
folderId,
|
||||
isNew: false,
|
||||
blindIndexCfg
|
||||
blindIndexCfg,
|
||||
secretDAL
|
||||
});
|
||||
|
||||
// now find any secret that needs to update its name
|
||||
@ -748,7 +631,8 @@ export const secretServiceFactory = ({
|
||||
inputSecrets: nameUpdatedSecrets,
|
||||
folderId,
|
||||
isNew: true,
|
||||
blindIndexCfg
|
||||
blindIndexCfg,
|
||||
secretDAL
|
||||
});
|
||||
|
||||
// get all tags
|
||||
@ -773,7 +657,11 @@ export const secretServiceFactory = ({
|
||||
algorithm: SecretEncryptionAlgo.AES_256_GCM,
|
||||
keyEncoding: SecretKeyEncoding.UTF8
|
||||
}
|
||||
}))
|
||||
})),
|
||||
secretDAL,
|
||||
secretVersionDAL,
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL
|
||||
})
|
||||
);
|
||||
|
||||
@ -811,7 +699,8 @@ export const secretServiceFactory = ({
|
||||
inputSecrets,
|
||||
folderId,
|
||||
isNew: false,
|
||||
blindIndexCfg
|
||||
blindIndexCfg,
|
||||
secretDAL
|
||||
});
|
||||
|
||||
const secretsDeleted = await secretDAL.transaction(async (tx) =>
|
||||
|
@ -2,6 +2,14 @@ import { Knex } from "knex";
|
||||
|
||||
import { SecretType, TSecretBlindIndexes, TSecrets, TSecretsInsert, TSecretsUpdate } from "@app/db/schemas";
|
||||
import { TProjectPermission } from "@app/lib/types";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
|
||||
import { TSecretDALFactory } from "@app/services/secret/secret-dal";
|
||||
import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal";
|
||||
import { TSecretVersionTagDALFactory } from "@app/services/secret/secret-version-tag-dal";
|
||||
import { TSecretBlindIndexDALFactory } from "@app/services/secret-blind-index/secret-blind-index-dal";
|
||||
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
|
||||
import { TSecretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal";
|
||||
|
||||
type TPartialSecret = Pick<TSecrets, "id" | "secretReminderRepeatDays" | "secretReminderNote">;
|
||||
|
||||
@ -181,12 +189,20 @@ export type TFnSecretBulkInsert = {
|
||||
folderId: string;
|
||||
tx?: Knex;
|
||||
inputSecrets: Array<Omit<TSecretsInsert, "folderId"> & { tags?: string[] }>;
|
||||
secretDAL: Pick<TSecretDALFactory, "insertMany">;
|
||||
secretVersionDAL: Pick<TSecretVersionDALFactory, "insertMany">;
|
||||
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecret">;
|
||||
secretVersionTagDAL: Pick<TSecretVersionTagDALFactory, "insertMany">;
|
||||
};
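Because the dependency types above only demand the methods each helper actually calls, a narrow stub is enough to exercise the helpers without a database. A hedged test-style sketch (the cast papers over the many extra columns a real secret row carries):

const secretDALStub: Pick<TSecretDALFactory, "insertMany"> = {
  insertMany: async (rows) =>
    rows.map((row, i) => ({ ...row, id: `fake-${i}` })) as Awaited<ReturnType<TSecretDALFactory["insertMany"]>>
};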
|
||||
|
||||
export type TFnSecretBulkUpdate = {
|
||||
folderId: string;
|
||||
projectId: string;
|
||||
inputSecrets: { filter: Partial<TSecrets>; data: TSecretsUpdate & { tags?: string[] } }[];
|
||||
secretDAL: Pick<TSecretDALFactory, "bulkUpdate">;
|
||||
secretVersionDAL: Pick<TSecretVersionDALFactory, "insertMany">;
|
||||
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecret" | "deleteTagsManySecret">;
|
||||
secretVersionTagDAL: Pick<TSecretVersionTagDALFactory, "insertMany">;
|
||||
tx?: Knex;
|
||||
};
|
||||
|
||||
@ -204,6 +220,7 @@ export type TFnSecretBlindIndexCheck = {
|
||||
blindIndexCfg: TSecretBlindIndexes;
|
||||
inputSecrets: Array<{ secretName: string; type?: SecretType }>;
|
||||
isNew: boolean;
|
||||
secretDAL: Pick<TSecretDALFactory, "findByBlindIndexes">;
|
||||
};
|
||||
|
||||
// when blind index is already present
|
||||
@ -229,3 +246,66 @@ export type TRemoveSecretReminderDTO = {
|
||||
secretId: string;
|
||||
repeatDays: number;
|
||||
};
|
||||
|
||||
// ---
|
||||
|
||||
export type TCreateManySecretsRawFnFactory = {
|
||||
projectDAL: TProjectDALFactory;
|
||||
projectBotDAL: TProjectBotDALFactory;
|
||||
secretDAL: TSecretDALFactory;
|
||||
secretVersionDAL: TSecretVersionDALFactory;
|
||||
secretBlindIndexDAL: TSecretBlindIndexDALFactory;
|
||||
secretTagDAL: TSecretTagDALFactory;
|
||||
secretVersionTagDAL: TSecretVersionTagDALFactory;
|
||||
folderDAL: TSecretFolderDALFactory;
|
||||
};
|
||||
|
||||
export type TCreateManySecretsRawFn = {
|
||||
projectId: string;
|
||||
environment: string;
|
||||
path: string;
|
||||
secrets: {
|
||||
secretName: string;
|
||||
secretValue: string;
|
||||
type: SecretType;
|
||||
secretComment?: string;
|
||||
skipMultilineEncoding?: boolean;
|
||||
tags?: string[];
|
||||
metadata?: {
|
||||
source?: string;
|
||||
};
|
||||
}[];
|
||||
userId?: string; // only relevant for personal secret(s)
|
||||
};
|
||||
|
||||
export type TUpdateManySecretsRawFnFactory = {
|
||||
projectDAL: TProjectDALFactory;
|
||||
projectBotDAL: TProjectBotDALFactory;
|
||||
secretDAL: TSecretDALFactory;
|
||||
secretVersionDAL: TSecretVersionDALFactory;
|
||||
secretBlindIndexDAL: TSecretBlindIndexDALFactory;
|
||||
secretTagDAL: TSecretTagDALFactory;
|
||||
secretVersionTagDAL: TSecretVersionTagDALFactory;
|
||||
folderDAL: TSecretFolderDALFactory;
|
||||
};
|
||||
|
||||
export type TUpdateManySecretsRawFn = {
|
||||
projectId: string;
|
||||
environment: string;
|
||||
path: string;
|
||||
secrets: {
|
||||
secretName: string;
|
||||
newSecretName?: string;
|
||||
secretValue: string;
|
||||
type: SecretType;
|
||||
secretComment?: string;
|
||||
skipMultilineEncoding?: boolean;
|
||||
secretReminderRepeatDays?: number | null;
|
||||
secretReminderNote?: string | null;
|
||||
tags?: string[];
|
||||
metadata?: {
|
||||
source?: string;
|
||||
};
|
||||
}[];
|
||||
userId?: string;
|
||||
};
|
||||
|
@ -8,7 +8,7 @@
|
||||
</head>
|
||||
<body>
|
||||
<h2>Join your organization on Infisical</h2>
|
||||
<p>{{inviterFirstName}} ({{inviterEmail}}) has invited you to their Infisical organization — {{organizationName}}</p>
|
||||
<p>{{inviterFirstName}} ({{inviterUsername}}) has invited you to their Infisical organization — {{organizationName}}</p>
|
||||
<a href="{{callback_url}}?token={{token}}&to={{email}}&organization_id={{organizationId}}">Join now</a>
|
||||
<h3>What is Infisical?</h3>
|
||||
<p>Infisical is an easy-to-use end-to-end encrypted tool that enables developers to sync and manage their secrets and configs.</p>
|
||||
|
@ -97,6 +97,7 @@ export const superAdminServiceFactory = ({
|
||||
{
|
||||
firstName,
|
||||
lastName,
|
||||
username: email,
|
||||
email,
|
||||
superAdmin: true,
|
||||
isGhost: false,
|
||||
@ -126,11 +127,11 @@ export const superAdminServiceFactory = ({
|
||||
|
||||
const initialOrganizationName = appCfg.INITIAL_ORGANIZATION_NAME ?? "Admin Org";
|
||||
|
||||
const organization = await orgService.createOrganization(
|
||||
userInfo.user.id,
|
||||
userInfo.user.email,
|
||||
initialOrganizationName
|
||||
);
|
||||
const organization = await orgService.createOrganization({
|
||||
userId: userInfo.user.id,
|
||||
userEmail: userInfo.user.email,
|
||||
orgName: initialOrganizationName
|
||||
});
|
||||
|
||||
await updateServerCfg({ initialized: true });
|
||||
const token = await authService.generateUserTokens({
|
||||
|
@ -37,6 +37,7 @@ export type TSecretModifiedEvent = {
|
||||
export type TAdminInitEvent = {
|
||||
event: PostHogEventTypes.AdminInit;
|
||||
properties: {
|
||||
username: string;
|
||||
email: string;
|
||||
firstName: string;
|
||||
lastName: string;
|
||||
@ -46,6 +47,7 @@ export type TAdminInitEvent = {
|
||||
export type TUserSignedUpEvent = {
|
||||
event: PostHogEventTypes.UserSignedUp;
|
||||
properties: {
|
||||
username: string;
|
||||
email: string;
|
||||
attributionSource?: string;
|
||||
};
|
||||
|
backend/src/services/user-alias/user-alias-dal.ts (new file, 13 lines)
@ -0,0 +1,13 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TUserAliasDALFactory = ReturnType<typeof userAliasDALFactory>;

export const userAliasDALFactory = (db: TDbClient) => {
  const userAliasOrm = ormify(db, TableName.UserAliases);

  return {
    ...userAliasOrm
  };
};
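A brief usage sketch for the new alias DAL; the helper surface (findOne, create) is assumed from how the other ormify-based DALs in this diff are used, and the column names are illustrative rather than taken from the schema.

const userAliasDAL = userAliasDALFactory(db);

const existingAlias = await userAliasDAL.findOne({ userId });
if (!existingAlias) {
  await userAliasDAL.create({ userId /* plus whatever provider-specific columns the schema defines */ });
}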
|
backend/src/services/user-alias/user-alias-types.ts (new, empty file)
@ -16,14 +16,17 @@ export type TUserDALFactory = ReturnType<typeof userDALFactory>;
|
||||
|
||||
export const userDALFactory = (db: TDbClient) => {
|
||||
const userOrm = ormify(db, TableName.Users);
|
||||
const findUserByEmail = async (email: string, tx?: Knex) => userOrm.findOne({ email }, tx);
|
||||
const findUserByUsername = async (username: string, tx?: Knex) => userOrm.findOne({ username }, tx);
|
||||
|
||||
// USER ENCRYPTION FUNCTIONS
|
||||
// -------------------------
|
||||
const findUserEncKeyByEmail = async (email: string) => {
|
||||
const findUserEncKeyByUsername = async ({ username }: { username: string }) => {
|
||||
try {
|
||||
return await db(TableName.Users)
|
||||
.where({ email, isGhost: false })
|
||||
.where({
|
||||
username,
|
||||
isGhost: false
|
||||
})
|
||||
.join(TableName.UserEncryptionKey, `${TableName.Users}.id`, `${TableName.UserEncryptionKey}.userId`)
|
||||
.first();
|
||||
} catch (error) {
|
||||
@ -118,8 +121,8 @@ export const userDALFactory = (db: TDbClient) => {
|
||||
|
||||
return {
|
||||
...userOrm,
|
||||
findUserByEmail,
|
||||
findUserEncKeyByEmail,
|
||||
findUserByUsername,
|
||||
findUserEncKeyByUsername,
|
||||
findUserEncKeyByUserId,
|
||||
updateUserEncryptionByUserId,
|
||||
findUserByProjectMembershipId,
|
||||
|
backend/src/services/user/user-fns.ts (new file, 21 lines)
@ -0,0 +1,21 @@
import slugify from "@sindresorhus/slugify";

import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TUserDALFactory } from "@app/services/user/user-dal";

export const normalizeUsername = async (username: string, userDAL: Pick<TUserDALFactory, "findOne">) => {
  let attempt = slugify(username);

  let user = await userDAL.findOne({ username: attempt });
  if (!user) return attempt;

  while (true) {
    attempt = slugify(`${username}-${alphaNumericNanoId(4)}`);
    // eslint-disable-next-line no-await-in-loop
    user = await userDAL.findOne({ username: attempt });

    if (!user) {
      return attempt;
    }
  }
};
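A hedged example of the collision handling above, using an in-memory stand-in for userDAL.findOne; the exact suffix is random, so the result is shown only in shape.

const taken = new Set(["jane-doe"]);
const fakeUserDAL = {
  findOne: async ({ username }: { username: string }) => (taken.has(username) ? ({ username } as never) : undefined)
};

const result = await normalizeUsername("Jane Doe", fakeUserDAL as never);
// "jane-doe" is taken, so the loop returns something like "jane-doe-x7kq":
// the slugified name plus a 4-character alphanumeric suffix that is free.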
|
@ -11,6 +11,10 @@ export type TUserServiceFactory = ReturnType<typeof userServiceFactory>;
|
||||
|
||||
export const userServiceFactory = ({ userDAL }: TUserServiceFactoryDep) => {
|
||||
const toggleUserMfa = async (userId: string, isMfaEnabled: boolean) => {
|
||||
const user = await userDAL.findById(userId);
|
||||
|
||||
if (!user || !user.email) throw new BadRequestError({ name: "Failed to toggle MFA" });
|
||||
|
||||
const updatedUser = await userDAL.updateById(userId, {
|
||||
isMfaEnabled,
|
||||
mfaMethods: isMfaEnabled ? ["email"] : []
|
||||
@ -30,6 +34,12 @@ export const userServiceFactory = ({ userDAL }: TUserServiceFactoryDep) => {
|
||||
const user = await userDAL.findById(userId);
|
||||
if (!user) throw new BadRequestError({ name: "Update auth methods" });
|
||||
|
||||
if (user.authMethods?.includes(AuthMethod.LDAP))
|
||||
throw new BadRequestError({ message: "LDAP auth method cannot be updated", name: "Update auth methods" });
|
||||
|
||||
if (authMethods.includes(AuthMethod.LDAP))
|
||||
throw new BadRequestError({ message: "LDAP auth method cannot be updated", name: "Update auth methods" });
|
||||
|
||||
const updatedUser = await userDAL.updateById(userId, { authMethods });
|
||||
return updatedUser;
|
||||
};
|
||||
|
@ -1,5 +1,5 @@
|
||||
infisical:
|
||||
address: "http://localhost:8080"
|
||||
address: "https://app.infisical.com/"
|
||||
auth:
|
||||
type: "universal-auth"
|
||||
config:
|
||||
@ -13,3 +13,12 @@ sinks:
|
||||
templates:
|
||||
- source-path: my-dot-ev-secret-template
|
||||
destination-path: my-dot-env.env
|
||||
config:
|
||||
polling-interval: 60s
|
||||
execute:
|
||||
command: docker-compose -f docker-compose.prod.yml down && docker-compose -f docker-compose.prod.yml up -d
|
||||
- source-path: my-dot-ev-secret-template1
|
||||
destination-path: my-dot-env-1.env
|
||||
config:
|
||||
exec:
|
||||
command: mkdir hello-world1
|
||||
|
@ -145,6 +145,25 @@ func CallLogin2V2(httpClient *resty.Client, request GetLoginTwoV2Request) (GetLo
|
||||
return loginTwoV2Response, nil
|
||||
}
|
||||
|
||||
func CallGetAllOrganizations(httpClient *resty.Client) (GetOrganizationsResponse, error) {
|
||||
var orgResponse GetOrganizationsResponse
|
||||
response, err := httpClient.
|
||||
R().
|
||||
SetResult(&orgResponse).
|
||||
SetHeader("User-Agent", USER_AGENT).
|
||||
Get(fmt.Sprintf("%v/v1/organization", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
return GetOrganizationsResponse{}, err
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return GetOrganizationsResponse{}, fmt.Errorf("CallGetAllOrganizations: Unsuccessful response: [response=%v]", response)
|
||||
}
|
||||
|
||||
return orgResponse, nil
|
||||
}
|
||||
|
||||
func CallGetAllWorkSpacesUserBelongsTo(httpClient *resty.Client) (GetWorkSpacesResponse, error) {
|
||||
var workSpacesResponse GetWorkSpacesResponse
|
||||
response, err := httpClient.
|
||||
@ -490,5 +509,7 @@ func CallGetRawSecretsV3(httpClient *resty.Client, request GetRawSecretsV3Reques
|
||||
return GetRawSecretsV3Response{}, fmt.Errorf("CallGetRawSecretsV3: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
|
||||
}
|
||||
|
||||
getRawSecretsV3Response.ETag = response.Header().Get(("etag"))
|
||||
|
||||
return getRawSecretsV3Response, nil
|
||||
}
|
||||
|
@ -120,14 +120,21 @@ type PullSecretsByInfisicalTokenResponse struct {
|
||||
|
||||
type GetWorkSpacesResponse struct {
|
||||
Workspaces []struct {
|
||||
ID string `json:"_id"`
|
||||
Name string `json:"name"`
|
||||
Plan string `json:"plan,omitempty"`
|
||||
V int `json:"__v"`
|
||||
Organization string `json:"organization,omitempty"`
|
||||
ID string `json:"_id"`
|
||||
Name string `json:"name"`
|
||||
Plan string `json:"plan,omitempty"`
|
||||
V int `json:"__v"`
|
||||
OrganizationId string `json:"orgId"`
|
||||
} `json:"workspaces"`
|
||||
}
|
||||
|
||||
type GetOrganizationsResponse struct {
|
||||
Organizations []struct {
|
||||
ID string `json:"id"`
|
||||
Name string `json:"name"`
|
||||
} `json:"organizations"`
|
||||
}
|
||||
|
||||
type Secret struct {
|
||||
SecretKeyCiphertext string `json:"secretKeyCiphertext,omitempty"`
|
||||
SecretKeyIV string `json:"secretKeyIV,omitempty"`
|
||||
@ -292,10 +299,10 @@ type GetFoldersV1Response struct {
|
||||
}
|
||||
|
||||
type CreateFolderV1Request struct {
|
||||
FolderName string `json:"folderName"`
|
||||
FolderName string `json:"name"`
|
||||
WorkspaceId string `json:"workspaceId"`
|
||||
Environment string `json:"environment"`
|
||||
Directory string `json:"directory"`
|
||||
Path string `json:"path"`
|
||||
}
|
||||
|
||||
type CreateFolderV1Response struct {
|
||||
@ -505,4 +512,5 @@ type GetRawSecretsV3Response struct {
|
||||
SecretComment string `json:"secretComment"`
|
||||
} `json:"secrets"`
|
||||
Imports []any `json:"imports"`
|
||||
ETag string
|
||||
}
|
||||
|
@ -5,12 +5,15 @@ package cmd
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"os/exec"
|
||||
"os/signal"
|
||||
"path"
|
||||
"runtime"
|
||||
"strings"
|
||||
"sync"
|
||||
"syscall"
|
||||
@ -71,12 +74,56 @@ type Template struct {
|
||||
SourcePath string `yaml:"source-path"`
|
||||
Base64TemplateContent string `yaml:"base64-template-content"`
|
||||
DestinationPath string `yaml:"destination-path"`
|
||||
|
||||
Config struct { // Configurations for the template
|
||||
PollingInterval string `yaml:"polling-interval"` // How often to poll for changes in the secret
|
||||
Execute struct {
|
||||
Command string `yaml:"command"` // Command to execute once the template has been rendered
|
||||
Timeout int64 `yaml:"timeout"` // Timeout for the command
|
||||
} `yaml:"execute"` // Command to execute once the template has been rendered
|
||||
} `yaml:"config"`
|
||||
}
|
||||
|
||||
func ReadFile(filePath string) ([]byte, error) {
|
||||
return ioutil.ReadFile(filePath)
|
||||
}
|
||||
|
||||
func ExecuteCommandWithTimeout(command string, timeout int64) error {
|
||||
|
||||
shell := [2]string{"sh", "-c"}
|
||||
if runtime.GOOS == "windows" {
|
||||
shell = [2]string{"cmd", "/C"}
|
||||
} else {
|
||||
currentShell := os.Getenv("SHELL")
|
||||
if currentShell != "" {
|
||||
shell[0] = currentShell
|
||||
}
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
if timeout > 0 {
|
||||
var cancel context.CancelFunc
|
||||
ctx, cancel = context.WithTimeout(context.Background(), time.Duration(timeout)*time.Second)
|
||||
defer cancel()
|
||||
}
|
||||
|
||||
cmd := exec.CommandContext(ctx, shell[0], shell[1], command)
|
||||
cmd.Stdin = os.Stdin
|
||||
cmd.Stdout = os.Stdout
|
||||
cmd.Stderr = os.Stderr
|
||||
|
||||
if err := cmd.Run(); err != nil {
|
||||
if exitError, ok := err.(*exec.ExitError); ok { // type assertion
|
||||
if exitError.ProcessState.ExitCode() == -1 {
|
||||
return fmt.Errorf("command timed out")
|
||||
}
|
||||
}
|
||||
return err
|
||||
} else {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func FileExists(filepath string) bool {
|
||||
info, err := os.Stat(filepath)
|
||||
if os.IsNotExist(err) {
|
||||
@ -170,20 +217,26 @@ func ParseAgentConfig(configFile []byte) (*Config, error) {
|
||||
return config, nil
|
||||
}
|
||||
|
||||
func secretTemplateFunction(accessToken string) func(string, string, string) ([]models.SingleEnvironmentVariable, error) {
|
||||
func secretTemplateFunction(accessToken string, existingEtag string, currentEtag *string) func(string, string, string) ([]models.SingleEnvironmentVariable, error) {
|
||||
return func(projectID, envSlug, secretPath string) ([]models.SingleEnvironmentVariable, error) {
|
||||
secrets, err := util.GetPlainTextSecretsViaMachineIdentity(accessToken, projectID, envSlug, secretPath, false)
|
||||
res, err := util.GetPlainTextSecretsViaMachineIdentity(accessToken, projectID, envSlug, secretPath, false)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return secrets, nil
|
||||
if existingEtag != res.Etag {
|
||||
*currentEtag = res.Etag
|
||||
}
|
||||
|
||||
expandedSecrets := util.ExpandSecrets(res.Secrets, models.ExpandSecretsAuthentication{UniversalAuthAccessToken: accessToken}, "")
|
||||
|
||||
return expandedSecrets, nil
|
||||
}
|
||||
}
|
||||
|
||||
func ProcessTemplate(templatePath string, data interface{}, accessToken string) (*bytes.Buffer, error) {
|
||||
func ProcessTemplate(templatePath string, data interface{}, accessToken string, existingEtag string, currentEtag *string) (*bytes.Buffer, error) {
|
||||
// custom template function to fetch secrets from Infisical
|
||||
secretFunction := secretTemplateFunction(accessToken)
|
||||
secretFunction := secretTemplateFunction(accessToken, existingEtag, currentEtag)
|
||||
funcs := template.FuncMap{
|
||||
"secret": secretFunction,
|
||||
}
|
||||
@ -203,7 +256,7 @@ func ProcessTemplate(templatePath string, data interface{}, accessToken string)
|
||||
return &buf, nil
|
||||
}
|
||||
|
||||
func ProcessBase64Template(encodedTemplate string, data interface{}, accessToken string) (*bytes.Buffer, error) {
|
||||
func ProcessBase64Template(encodedTemplate string, data interface{}, accessToken string, existingEtag string, currentEtag *string) (*bytes.Buffer, error) {
|
||||
// custom template function to fetch secrets from Infisical
|
||||
decoded, err := base64.StdEncoding.DecodeString(encodedTemplate)
|
||||
if err != nil {
|
||||
@ -212,7 +265,7 @@ func ProcessBase64Template(encodedTemplate string, data interface{}, accessToken
|
||||
|
||||
templateString := string(decoded)
|
||||
|
||||
secretFunction := secretTemplateFunction(accessToken)
|
||||
secretFunction := secretTemplateFunction(accessToken, existingEtag, currentEtag) // TODO: Fix this
|
||||
funcs := template.FuncMap{
|
||||
"secret": secretFunction,
|
||||
}
|
||||
@ -250,7 +303,16 @@ type TokenManager struct {
|
||||
}
|
||||
|
||||
func NewTokenManager(fileDeposits []Sink, templates []Template, clientIdPath string, clientSecretPath string, newAccessTokenNotificationChan chan bool, removeClientSecretOnRead bool, exitAfterAuth bool) *TokenManager {
|
||||
return &TokenManager{filePaths: fileDeposits, templates: templates, clientIdPath: clientIdPath, clientSecretPath: clientSecretPath, newAccessTokenNotificationChan: newAccessTokenNotificationChan, removeClientSecretOnRead: removeClientSecretOnRead, exitAfterAuth: exitAfterAuth}
|
||||
return &TokenManager{
|
||||
filePaths: fileDeposits,
|
||||
templates: templates,
|
||||
clientIdPath: clientIdPath,
|
||||
clientSecretPath: clientSecretPath,
|
||||
newAccessTokenNotificationChan: newAccessTokenNotificationChan,
|
||||
removeClientSecretOnRead: removeClientSecretOnRead,
|
||||
exitAfterAuth: exitAfterAuth,
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func (tm *TokenManager) SetToken(token string, accessTokenTTL time.Duration, accessTokenMaxTTL time.Duration) {
|
||||
@ -428,38 +490,80 @@ func (tm *TokenManager) WriteTokenToFiles() {
|
||||
}
|
||||
}
|
||||
|
||||
func (tm *TokenManager) FetchSecrets() {
|
||||
log.Info().Msgf("template engine started...")
|
||||
func (tm *TokenManager) WriteTemplateToFile(bytes *bytes.Buffer, template *Template) {
|
||||
if err := WriteBytesToFile(bytes, template.DestinationPath); err != nil {
|
||||
log.Error().Msgf("template engine: unable to write secrets to path because %s. Will try again on next cycle", err)
|
||||
return
|
||||
}
|
||||
log.Info().Msgf("template engine: secret template at path %s has been rendered and saved to path %s", template.SourcePath, template.DestinationPath)
|
||||
}
|
||||
|
||||
func (tm *TokenManager) MonitorSecretChanges(secretTemplate Template, sigChan chan os.Signal) {
|
||||
|
||||
pollingInterval := time.Duration(5 * time.Minute)
|
||||
|
||||
if secretTemplate.Config.PollingInterval != "" {
|
||||
interval, err := util.ConvertPollingIntervalToTime(secretTemplate.Config.PollingInterval)
|
||||
|
||||
if err != nil {
|
||||
log.Error().Msgf("unable to convert polling interval to time because %v", err)
|
||||
sigChan <- syscall.SIGINT
|
||||
return
|
||||
|
||||
} else {
|
||||
pollingInterval = interval
|
||||
}
|
||||
}
|
||||
|
||||
var existingEtag string
|
||||
var currentEtag string
|
||||
var firstRun = true
|
||||
|
||||
execTimeout := secretTemplate.Config.Execute.Timeout
|
||||
execCommand := secretTemplate.Config.Execute.Command
|
||||
|
||||
for {
|
||||
token := tm.GetToken()
|
||||
|
||||
if token != "" {
|
||||
for _, secretTemplate := range tm.templates {
|
||||
var processedTemplate *bytes.Buffer
|
||||
var err error
|
||||
if secretTemplate.SourcePath != "" {
|
||||
processedTemplate, err = ProcessTemplate(secretTemplate.SourcePath, nil, token)
|
||||
} else {
|
||||
processedTemplate, err = ProcessBase64Template(secretTemplate.Base64TemplateContent, nil, token)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
log.Error().Msgf("template engine: unable to render secrets because %s. Will try again on next cycle", err)
|
||||
var processedTemplate *bytes.Buffer
|
||||
var err error
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
if err := WriteBytesToFile(processedTemplate, secretTemplate.DestinationPath); err != nil {
|
||||
log.Error().Msgf("template engine: unable to write secrets to path because %s. Will try again on next cycle", err)
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
log.Info().Msgf("template engine: secret template at path %s has been rendered and saved to path %s", secretTemplate.SourcePath, secretTemplate.DestinationPath)
|
||||
if secretTemplate.SourcePath != "" {
|
||||
processedTemplate, err = ProcessTemplate(secretTemplate.SourcePath, nil, token, existingEtag, ¤tEtag)
|
||||
} else {
|
||||
processedTemplate, err = ProcessBase64Template(secretTemplate.Base64TemplateContent, nil, token, existingEtag, ¤tEtag)
|
||||
}
|
||||
|
||||
// fetch new secrets every 5 minutes (TODO: add PubSub in the future )
|
||||
time.Sleep(5 * time.Minute)
|
||||
if err != nil {
|
||||
log.Error().Msgf("unable to process template because %v", err)
|
||||
} else {
|
||||
if (existingEtag != currentEtag) || firstRun {
|
||||
|
||||
tm.WriteTemplateToFile(processedTemplate, &secretTemplate)
|
||||
existingEtag = currentEtag
|
||||
|
||||
if !firstRun && execCommand != "" {
|
||||
log.Info().Msgf("executing command: %s", execCommand)
|
||||
err := ExecuteCommandWithTimeout(execCommand, execTimeout)
|
||||
|
||||
if err != nil {
|
||||
log.Error().Msgf("unable to execute command because %v", err)
|
||||
}
|
||||
|
||||
}
|
||||
if firstRun {
|
||||
firstRun = false
|
||||
}
|
||||
}
|
||||
}
|
||||
time.Sleep(pollingInterval)
|
||||
} else {
|
||||
// It fails to get the access token. So we will re-try in 3 seconds. We do this because if we don't, the user will have to wait for the next polling interval to get the first secret render.
|
||||
time.Sleep(3 * time.Second)
|
||||
}
|
||||
|
||||
}
|
||||
}
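To summarize the control flow of MonitorSecretChanges above: the destination file is rewritten, and the configured command re-executed, only when the ETag observed while fetching secrets changes, and the very first render always writes but never executes. A compact restatement in TypeScript, where every helper and option name is a hypothetical stand-in for the agent's Go internals:

type RenderResult = { rendered: string; etag: string };

// All helpers below are assumptions used purely to illustrate the loop's decisions.
const monitorSecretChanges = (opts: {
  renderTemplate: () => Promise<RenderResult>;
  writeDestination: (contents: string) => Promise<void>;
  runCommand?: (cmd: string, timeoutSeconds: number) => Promise<void>;
  executeCommand?: string;
  executeTimeoutSeconds: number;
  pollingIntervalMs: number;
}) => {
  let knownEtag = "";
  let firstRun = true;

  const tick = async () => {
    const { rendered, etag } = await opts.renderTemplate();
    // Only touch the file (and only re-run the command) when the upstream ETag moved.
    if (etag !== knownEtag || firstRun) {
      await opts.writeDestination(rendered);
      knownEtag = etag;
      if (!firstRun && opts.executeCommand && opts.runCommand) {
        await opts.runCommand(opts.executeCommand, opts.executeTimeoutSeconds);
      }
      firstRun = false;
    }
  };

  setInterval(() => { void tick(); }, opts.pollingIntervalMs);
};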
|
||||
|
||||
@ -520,7 +624,7 @@ var agentCmd = &cobra.Command{
|
||||
}
|
||||
|
||||
if !FileExists(configPath) && agentConfigInBase64 == "" {
|
||||
log.Error().Msgf("No agent config file provided. Please provide a agent config file", configPath)
|
||||
log.Error().Msgf("No agent config file provided at %v. Please provide a agent config file", configPath)
|
||||
return
|
||||
}
|
||||
|
||||
@ -544,7 +648,11 @@ var agentCmd = &cobra.Command{
|
||||
tm := NewTokenManager(filePaths, agentConfig.Templates, configUniversalAuthType.ClientIDPath, configUniversalAuthType.ClientSecretPath, tokenRefreshNotifier, configUniversalAuthType.RemoveClientSecretOnRead, agentConfig.Infisical.ExitAfterAuth)
|
||||
|
||||
go tm.ManageTokenLifecycle()
|
||||
go tm.FetchSecrets()
|
||||
|
||||
for i, template := range agentConfig.Templates {
|
||||
log.Info().Msgf("template engine started for template %v...", i+1)
|
||||
go tm.MonitorSecretChanges(template, sigChan)
|
||||
}
|
||||
|
||||
for {
|
||||
select {
|
||||
|
@ -59,7 +59,8 @@ var exportCmd = &cobra.Command{
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
infisicalToken, err := cmd.Flags().GetString("token")
|
||||
infisicalToken, err := util.GetInfisicalServiceToken(cmd)
|
||||
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
@ -87,7 +88,9 @@ var exportCmd = &cobra.Command{
|
||||
|
||||
var output string
|
||||
if shouldExpandSecrets {
|
||||
secrets = util.ExpandSecrets(secrets, infisicalToken, "")
|
||||
secrets = util.ExpandSecrets(secrets, models.ExpandSecretsAuthentication{
|
||||
InfisicalToken: infisicalToken,
|
||||
}, "")
|
||||
}
|
||||
secrets = util.FilterSecretsByTag(secrets, tagSlugs)
|
||||
output, err = formatEnvs(secrets, format)
|
||||
|
@ -36,7 +36,8 @@ var getCmd = &cobra.Command{
|
||||
}
|
||||
}
|
||||
|
||||
infisicalToken, err := cmd.Flags().GetString("token")
|
||||
infisicalToken, err := util.GetInfisicalServiceToken(cmd)
|
||||
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
@ -5,7 +5,6 @@ package cmd
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
"github.com/Infisical/infisical-merge/packages/api"
|
||||
"github.com/Infisical/infisical-merge/packages/models"
|
||||
@@ -52,25 +51,19 @@ var initCmd = &cobra.Command{

 		httpClient := resty.New()
 		httpClient.SetAuthToken(userCreds.UserCredentials.JTWToken)
-		workspaceResponse, err := api.CallGetAllWorkSpacesUserBelongsTo(httpClient)
+
+		organizationResponse, err := api.CallGetAllOrganizations(httpClient)
 		if err != nil {
-			util.HandleError(err, "Unable to pull projects that belong to you")
+			util.HandleError(err, "Unable to pull organizations that belong to you")
 		}

-		workspaces := workspaceResponse.Workspaces
-		if len(workspaces) == 0 {
-			message := fmt.Sprintf("You don't have any projects created in Infisical. You must first create a project at %s", util.INFISICAL_TOKEN_NAME)
-			util.PrintErrorMessageAndExit(message)
-		}
+		organizations := organizationResponse.Organizations

-		var workspaceNames []string
-		for _, workspace := range workspaces {
-			workspaceNames = append(workspaceNames, workspace.Name)
-		}
+		organizationNames := util.GetOrganizationsNameList(organizationResponse)

 		prompt := promptui.Select{
-			Label: "Which of your Infisical projects would you like to connect this project to?",
-			Items: workspaceNames,
+			Label: "Which Infisical organization would you like to select a project from?",
+			Items: organizationNames,
 			Size:  7,
 		}


@@ -79,7 +72,27 @@ var initCmd = &cobra.Command{
 			util.HandleError(err)
 		}

-		err = writeWorkspaceFile(workspaces[index])
+		selectedOrganization := organizations[index]
+
+		workspaceResponse, err := api.CallGetAllWorkSpacesUserBelongsTo(httpClient)
+		if err != nil {
+			util.HandleError(err, "Unable to pull projects that belong to you")
+		}
+
+		filteredWorkspaces, workspaceNames := util.GetWorkspacesInOrganization(workspaceResponse, selectedOrganization.ID)
+
+		prompt = promptui.Select{
+			Label: "Which of your Infisical projects would you like to connect this project to?",
+			Items: workspaceNames,
+			Size:  7,
+		}
+
+		index, _, err = prompt.Run()
+		if err != nil {
+			util.HandleError(err)
+		}
+
+		err = writeWorkspaceFile(filteredWorkspaces[index])
 		if err != nil {
 			util.HandleError(err)
 		}
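The init command now runs two prompts: it asks for an organization first, then offers only the projects inside it via util.GetWorkspacesInOrganization. That filter's body is not in this diff; the sketch below is a hypothetical version of it, and the api.GetWorkSpacesResponse and models.Workspace type names are assumptions based on the calls and fields visible above and in the models changes below.

```go
// Hypothetical sketch -- the real util.GetWorkspacesInOrganization is not part of this diff.
// Assumptions: the workspace response wraps a slice of models.Workspace values, and each
// workspace carries the OrganizationId field added in the models changes below.
package util

import (
	"github.com/Infisical/infisical-merge/packages/api"
	"github.com/Infisical/infisical-merge/packages/models"
)

func GetWorkspacesInOrganization(workspaceResponse api.GetWorkSpacesResponse, orgID string) ([]models.Workspace, []string) {
	var filteredWorkspaces []models.Workspace
	var workspaceNames []string
	for _, workspace := range workspaceResponse.Workspaces {
		// Keep only the projects that belong to the organization picked in the first prompt.
		if workspace.OrganizationId == orgID {
			filteredWorkspaces = append(filteredWorkspaces, workspace)
			workspaceNames = append(workspaceNames, workspace.Name)
		}
	}
	return filteredWorkspaces, workspaceNames
}
```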
@@ -62,7 +62,8 @@ var runCmd = &cobra.Command{
 			}
 		}

-		infisicalToken, err := cmd.Flags().GetString("token")
+		infisicalToken, err := util.GetInfisicalServiceToken(cmd)
+
 		if err != nil {
 			util.HandleError(err, "Unable to parse flag")
 		}

@@ -110,7 +111,9 @@ var runCmd = &cobra.Command{
 		}

 		if shouldExpandSecrets {
-			secrets = util.ExpandSecrets(secrets, infisicalToken, projectConfigDir)
+			secrets = util.ExpandSecrets(secrets, models.ExpandSecretsAuthentication{
+				InfisicalToken: infisicalToken,
+			}, projectConfigDir)
 		}

 		secretsByKey := getSecretsByKeys(secrets)
@@ -38,7 +38,12 @@ var secretsCmd = &cobra.Command{
 			}
 		}

-		infisicalToken, err := cmd.Flags().GetString("token")
+		infisicalToken, err := util.GetInfisicalServiceToken(cmd)
+
+		if err != nil {
+			util.HandleError(err, "Unable to parse flag")
+		}
+
 		if err != nil {
 			util.HandleError(err, "Unable to parse flag")
 		}

@@ -80,7 +85,9 @@ var secretsCmd = &cobra.Command{
 		}

 		if shouldExpandSecrets {
-			secrets = util.ExpandSecrets(secrets, infisicalToken, "")
+			secrets = util.ExpandSecrets(secrets, models.ExpandSecretsAuthentication{
+				InfisicalToken: infisicalToken,
+			}, "")
 		}

 		visualize.PrintAllSecretDetails(secrets)

@@ -391,7 +398,8 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
 		}
 	}

-	infisicalToken, err := cmd.Flags().GetString("token")
+	infisicalToken, err := util.GetInfisicalServiceToken(cmd)
+
 	if err != nil {
 		util.HandleError(err, "Unable to parse flag")
 	}

@@ -406,6 +414,11 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
 		util.HandleError(err, "Unable to parse path flag")
 	}

+	showOnlyValue, err := cmd.Flags().GetBool("raw-value")
+	if err != nil {
+		util.HandleError(err, "Unable to parse path flag")
+	}
+
 	secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs, SecretsPath: secretsPath}, "")
 	if err != nil {
 		util.HandleError(err, "To fetch all secrets")

@@ -427,7 +440,15 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
 		}
 	}

-	visualize.PrintAllSecretDetails(requestedSecrets)
+	if showOnlyValue && len(requestedSecrets) > 1 {
+		util.PrintErrorMessageAndExit("--raw-value only works with one secret.")
+	}
+
+	if showOnlyValue {
+		fmt.Printf(requestedSecrets[0].Value)
+	} else {
+		visualize.PrintAllSecretDetails(requestedSecrets)
+	}
 	Telemetry.CaptureEvent("cli-command:secrets get", posthog.NewProperties().Set("secretCount", len(secrets)).Set("version", util.CLI_VERSION))
 }


@@ -445,7 +466,8 @@ func generateExampleEnv(cmd *cobra.Command, args []string) {
 		util.HandleError(err, "Unable to parse flag")
 	}

-	infisicalToken, err := cmd.Flags().GetString("token")
+	infisicalToken, err := util.GetInfisicalServiceToken(cmd)
+
 	if err != nil {
 		util.HandleError(err, "Unable to parse flag")
 	}

@@ -661,6 +683,7 @@ func init() {
 	secretsGetCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
 	secretsCmd.AddCommand(secretsGetCmd)
 	secretsGetCmd.Flags().String("path", "/", "get secrets within a folder path")
+	secretsGetCmd.Flags().Bool("raw-value", false, "Returns only the value of secret, only works with one secret")

 	secretsCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
 	secretsCmd.AddCommand(secretsSetCmd)
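secrets get gains a --raw-value flag that prints only the secret's value and refuses to run when more than one secret is requested, which makes the command easy to consume from scripts. A small example of shelling out to the CLI; the infisical binary on PATH and the DB_PASSWORD secret name are assumptions for illustration.

```go
// Illustrative only: assumes an `infisical` binary on PATH and a secret named DB_PASSWORD.
package main

import (
	"fmt"
	"os/exec"
	"strings"
)

func main() {
	// With --raw-value the command prints just the value, so no table output needs parsing.
	out, err := exec.Command("infisical", "secrets", "get", "DB_PASSWORD", "--raw-value").Output()
	if err != nil {
		fmt.Println("failed to read secret:", err)
		return
	}
	value := strings.TrimSpace(string(out))
	fmt.Println("fetched a secret of length", len(value))
}
```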
@@ -21,11 +21,12 @@ type LoggedInUser struct {
 }

 type SingleEnvironmentVariable struct {
-	Key   string `json:"key"`
-	Value string `json:"value"`
-	Type  string `json:"type"`
-	ID    string `json:"_id"`
-	Tags  []struct {
+	Key         string `json:"key"`
+	WorkspaceId string `json:"workspace"`
+	Value       string `json:"value"`
+	Type        string `json:"type"`
+	ID          string `json:"_id"`
+	Tags        []struct {
 		ID   string `json:"_id"`
 		Name string `json:"name"`
 		Slug string `json:"slug"`

@@ -34,17 +35,22 @@ type SingleEnvironmentVariable struct {
 	Comment string `json:"comment"`
 }

+type PlaintextSecretResult struct {
+	Secrets []SingleEnvironmentVariable
+	Etag    string
+}
+
 type SingleFolder struct {
 	ID   string `json:"_id"`
 	Name string `json:"name"`
 }

 type Workspace struct {
-	ID           string `json:"_id"`
-	Name         string `json:"name"`
-	Plan         string `json:"plan,omitempty"`
-	V            int    `json:"__v"`
-	Organization string `json:"organization,omitempty"`
+	ID             string `json:"_id"`
+	Name           string `json:"name"`
+	Plan           string `json:"plan,omitempty"`
+	V              int    `json:"__v"`
+	OrganizationId string `json:"orgId"`
 }

 type WorkspaceConfigFile struct {

@@ -63,6 +69,7 @@ type GetAllSecretsParameters struct {
 	Environment              string
 	EnvironmentPassedViaFlag bool
 	InfisicalToken           string
+	UniversalAuthAccessToken string
 	TagSlugs                 string
 	WorkspaceId              string
 	SecretsPath              string

@@ -91,3 +98,8 @@ type DeleteFolderParameters struct {
 	FolderPath     string
 	InfisicalToken string
 }
+
+type ExpandSecretsAuthentication struct {
+	InfisicalToken           string
+	UniversalAuthAccessToken string
+}
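The model changes pair the existing InfisicalToken field with a UniversalAuthAccessToken option, both in GetAllSecretsParameters and in the new ExpandSecretsAuthentication struct. A minimal sketch of populating one or the other; the environment and path values are placeholders.

```go
// Sketch of filling the new auth fields; "dev" and "/" are placeholder values.
package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/packages/models"
)

func buildParams(serviceToken, universalAuthAccessToken string) models.GetAllSecretsParameters {
	params := models.GetAllSecretsParameters{
		Environment: "dev",
		SecretsPath: "/",
	}
	// Callers are expected to supply one credential type; the struct now carries both options.
	if serviceToken != "" {
		params.InfisicalToken = serviceToken
	} else {
		params.UniversalAuthAccessToken = universalAuthAccessToken
	}
	return params
}

func main() {
	fmt.Printf("%+v\n", buildParams("", "example-universal-auth-access-token"))
}
```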
Some files were not shown because too many files have changed in this diff.