Mirror of https://github.com/Infisical/infisical.git (synced 2025-03-22 05:34:17 +00:00)

Compare commits: 208 commits
v0.4.2 ... infisical/
SHA1
7f7d120c2f
899d46514c
658df21189
8341faddc5
8e3a23e6d8
1c89474159
2f765600b1
d9057216b5
6aab90590f
f7466d4855
ea2565ed35
4586656b85
e4953398df
7722231656
845a476974
fc19a17f4b
0890b1912f
82ecc2d7dc
460bdbb91c
446a63a917
d67cb7b507
353ff63298
9f40266f5c
8af8a1d3d5
631423fbc8
4383779377
8249043826
20294ee233
c5a924e935
429bfd27b2
c99c873d78
092a6911ce
a9b642e618
919ddf5de2
89a89af4e6
b3e68cf3fb
960063e61a
abf4eaf6db
739f97f5c9
faed5c1821
c95598aaa6
e791684f4d
d32c5fb869
abbf1918dc
876d0119d3
6d70dc437e
174e22a2bc
f4815641d8
5b95c255ec
3123f6fc1f
a913cd97a4
781e0b24c8
28de8cddd7
ed3e53f9a3
9cb4d5abb7
efdd1e64c4
5b3be6063f
12c399d4a9
ecd17e1d6d
fb4c811414
3561c589b1
420d71d923
3db5c040c3
b4f336a5bb
43e61c94f0
69fa4a80c5
cf9e8b8a6b
c6d5498a42
7aa5ef844c
ad7972e7e1
c6d8f24968
d8ff0bef0d
29b96246b9
8503c9355b
ddf0a272f6
e3980f8666
d52534b185
db07a033e1
3c71bcaa8d
476d0be101
2eff7b6128
d8a781af1f
8b42f4f998
da127a3c0a
d4aa75a182
d097003e9b
b615a5084e
379f086828
f11a7d0f87
f5aeb85c62
2966aa6eda
b1f2515731
c5094ec37d
6c745f617d
82995fbd02
8d09a45454
38f578c4ae
65b12eee5e
9043db4727
0eceeb6aa9
2d2bbbd0ad
c9b4e11539
fd4ea97e18
49d2ecc460
ca31a70032
3334338eaa
6d5e281811
87d36ac47a
b72e1198df
837ea2ef40
b462ca3e89
f639f682c9
365fcb3044
01d9695153
21eb1815c4
85f3ae95b6
e888eed1bf
addac63700
efd13e6b19
4ac74e6e9a
1d422fa82c
8ba3f8d1f7
6b83393952
da07d71e15
82d3971d9e
3dd21374e7
c5fe41ae57
9f0313f50b
a6e670e93a
ec97e1a930
55ca6938db
1401c7f6bc
bb6d0fd7c6
689a20dca2
e4b4126971
04b04cba5c
89e5f644a4
c5619d27d7
12a1d8e822
a85a7d1b00
fc2846534f
2b605856a3
191582ef26
213b5d465b
75f550caf2
daabf5ab70
7b11976a60
39be52c6b2
bced5d0151
939d7eb433
6de25174aa
fd9387a25e
b17a40d83e
2aa79d4ad6
44b4de754a
db0f0d0d9c
3471e387ae
aadd964409
102e45891c
b9ae224aef
e5cb0cbca3
330968c7af
68e8e727cd
3b94ee42e9
09286b4421
04a9604ba9
d86f88db92
fc53c094b7
6726ca1882
ddbe4d7040
3f6b0a9e66
c3a47597b6
a696a99232
8b1e64f75e
f137087ef1
2157fab181
d2acab57e0
811929987b
4ac13f61e0
3d2b0fa3fc
242809ce26
492bf39243
dbfa4f5277
3fd2e22cbd
150eb1f5ee
6314a949f8
660c5806e3
c6d2828262
8dedfad22d
7a3456ca1d
a946031d6f
f0075e8d09
3b00df6662
a263d7481b
6f91331549
13ecc22159
a5c5ec1f4d
cbb28dc373
e00aad4159
fb8aaa9d9f
e5c5e4cca2
803a97fdfc
9e42a7a33e
34c79b08bc
aacdaf4556
a7484f8be5
e1bf31b371
3817831577
15 .env.example

@@ -1,5 +1,6 @@
# Keys
# Required key for platform encryption/decryption ops
# THIS IS A SAMPLE ENCRYPTION KEY AND SHOULD NOT BE USED FOR PRODUCTION
ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218

# JWT
@@ -30,14 +31,12 @@ MONGO_PASSWORD=example
# Required
SITE_URL=http://localhost:8080

# Mail/SMTP
SMTP_HOST=
SMTP_USERNAME=
SMTP_PASSWORD=
SMTP_PORT=587
SMTP_SECURE=false
SMTP_FROM_ADDRESS=
SMTP_FROM_NAME=Infisical
# Mail/SMTP
SMTP_HOST=
SMTP_PORT=
SMTP_NAME=
SMTP_USERNAME=
SMTP_PASSWORD=

# Integration
# Optional only if integration is used
21 .github/workflows/docker-image.yml (vendored)

@@ -1,12 +1,17 @@
name: Build, Publish and Deploy to Gamma
on: [workflow_dispatch]
on:
push:
tags:
- "infisical/v*.*.*"

jobs:
backend-image:
name: Build backend image
runs-on: ubuntu-latest

steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 📦 Install dependencies to test all dependencies
@@ -51,15 +56,19 @@ jobs:
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: backend
tags: infisical/backend:${{ steps.commit.outputs.short }},
tags: |
infisical/backend:${{ steps.commit.outputs.short }}
infisical/backend:latest
infisical/backend:${{ steps.extract_version.outputs.version }}
platforms: linux/amd64,linux/arm64

frontend-image:
name: Build frontend image
runs-on: ubuntu-latest

steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: Save commit hashes for tag
@@ -100,8 +109,10 @@ jobs:
push: true
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
context: frontend
tags: infisical/frontend:${{ steps.commit.outputs.short }},
tags: |
infisical/frontend:${{ steps.commit.outputs.short }}
infisical/frontend:latest
infisical/frontend:${{ steps.extract_version.outputs.version }}
platforms: linux/amd64,linux/arm64
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
68 .github/workflows/release-standalone-docker-img.yml (vendored, Normal file)

@@ -0,0 +1,68 @@
name: Release standalone docker image
on: [workflow_dispatch]

jobs:
infisical-standalone:
name: Build infisical standalone image
runs-on: ubuntu-latest
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
- uses: paulhatch/semantic-version@v5.0.2
id: version
with:
# The prefix to use to identify tags
tag_prefix: "infisical-standalone/v"
# A string which, if present in a git commit, indicates that a change represents a
# major (breaking) change, supports regular expressions wrapped with '/'
major_pattern: "(MAJOR)"
# Same as above except indicating a minor change, supports regular expressions wrapped with '/'
minor_pattern: "(MINOR)"
# A string to determine the format of the version output
version_format: "${major}.${minor}.${patch}-prerelease${increment}"
# Optional path to check for changes. If any changes are detected in the path the
# 'changed' output will true. Enter multiple paths separated by spaces.
change_path: "backend,frontend"
# Prevents pre-v1.0.0 version from automatically incrementing the major version.
# If enabled, when the major version is 0, major releases will be treated as minor and minor as patch. Note that the version_type output is unchanged.
enable_prerelease_mode: true
# - name: 🧪 Run tests
# run: npm run test:ci
# working-directory: backend
- name: version output
run: |
echo "Output Value: ${{ steps.version.outputs.major }}"
echo "Output Value: ${{ steps.version.outputs.minor }}"
echo "Output Value: ${{ steps.version.outputs.patch }}"
echo "Output Value: ${{ steps.version.outputs.version }}"
echo "Output Value: ${{ steps.version.outputs.version_type }}"
echo "Output Value: ${{ steps.version.outputs.increment }}"
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 📦 Build backend and export to Docker
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
tags: |
infisical/infisical:latest
infisical/infisical:${{ steps.commit.outputs.short }}
platforms: linux/amd64,linux/arm64
file: Dockerfile.standalone-infisical
6 .github/workflows/release_build.yml (vendored)

@@ -4,7 +4,7 @@ on:
push:
# run only against tags
tags:
- "v*"
- "infisical-cli/v*.*.*"

permissions:
contents: write
@@ -41,13 +41,15 @@ jobs:
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser
distribution: goreleaser-pro
version: latest
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- name: Publish to CloudSmith
@@ -11,10 +11,16 @@ before:
- ./cli/scripts/completions.sh
- ./cli/scripts/manpages.sh

monorepo:
tag_prefix: infisical-cli/
dir: cli

builds:
- id: darwin-build
binary: infisical
ldflags: -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
ldflags:
- -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
- -X github.com/Infisical/infisical-merge/packages/telemetry.POSTHOG_API_KEY_FOR_CLI={{ .Env.POSTHOG_API_KEY_FOR_CLI }}
flags:
- -trimpath
env:
@@ -32,7 +38,9 @@ builds:
env:
- CGO_ENABLED=0
binary: infisical
ldflags: -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
ldflags:
- -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
- -X github.com/Infisical/infisical-merge/packages/telemetry.POSTHOG_API_KEY_FOR_CLI={{ .Env.POSTHOG_API_KEY_FOR_CLI }}
flags:
- -trimpath
goos:
@@ -61,10 +69,10 @@ archives:
- goos: windows
format: zip
files:
- README*
- LICENSE*
- manpages/*
- completions/*
- ../README*
- ../LICENSE*
- ../manpages/*
- ../completions/*

release:
replace_existing_draft: true
@@ -74,14 +82,7 @@ checksum:
name_template: "checksums.txt"

snapshot:
name_template: "{{ incpatch .Version }}-devel"

changelog:
sort: asc
filters:
exclude:
- "^docs:"
- "^test:"
name_template: "{{ .Version }}-devel"

# publishers:
# - name: fury.io
@@ -164,7 +165,7 @@ aurs:
mkdir -p "${pkgdir}/usr/share/zsh/site-functions/"
mkdir -p "${pkgdir}/usr/share/fish/vendor_completions.d/"
install -Dm644 "./completions/infisical.bash" "${pkgdir}/usr/share/bash-completion/completions/infisical"
install -Dm644 "./completions/infisical.zsh" "${pkgdir}/usr/share/zsh/site-functions/infisical"
install -Dm644 "./completions/infisical.zsh" "${pkgdir}/usr/share/zsh/site-functions/_infisical"
install -Dm644 "./completions/infisical.fish" "${pkgdir}/usr/share/fish/vendor_completions.d/infisical.fish"
# man pages
install -Dm644 "./manpages/infisical.1.gz" "${pkgdir}/usr/share/man/man1/infisical.1.gz"
5 .pre-commit-config.yaml (Normal file)

@@ -0,0 +1,5 @@
repos:
- repo: https://github.com/gitleaks/gitleaks
rev: v8.16.3
hooks:
- id: gitleaks
6 .pre-commit-hooks.yaml (Normal file)

@@ -0,0 +1,6 @@
- id: infisical-scan
name: Scan for hardcoded secrets
description: Will scan for hardcoded secrets using Infisical CLI
entry: infisical scan git-changes --verbose --redact --staged
language: golang
pass_filenames: false
102 Dockerfile.standalone-infisical (Normal file)

@@ -0,0 +1,102 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key

FROM node:16-alpine AS frontend-dependencies

WORKDIR /app

COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./

# Install dependencies
RUN npm ci --only-production --ignore-scripts

# Rebuild the source code only when needed
FROM node:16-alpine AS frontend-builder
WORKDIR /app

# Copy dependencies
COPY --from=frontend-dependencies /app/node_modules ./node_modules
# Copy all files
COPY /frontend .

ENV NODE_ENV production
ENV NEXT_PUBLIC_ENV production
ARG POSTHOG_HOST
ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY

# Build
RUN npm run build

# Production image
FROM node:16-alpine AS frontend-runner
WORKDIR /app

RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 nextjs

RUN mkdir -p /app/.next/cache/images && chown nextjs:nodejs /app/.next/cache/images
VOLUME /app/.next/cache/images

ARG POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY

COPY --chown=nextjs:nodejs --chmod=555 frontend/scripts ./scripts
COPY --from=frontend-builder /app/public ./public
RUN chown nextjs:nodejs ./public/data
COPY --from=frontend-builder --chown=nextjs:nodejs /app/.next/standalone ./
COPY --from=frontend-builder --chown=nextjs:nodejs /app/.next/static ./.next/static

USER nextjs

ENV NEXT_TELEMETRY_DISABLED 1

##
## BACKEND
##
FROM node:16-alpine AS backend-build

WORKDIR /app

COPY backend/package*.json ./
RUN npm ci --only-production

COPY /backend .
RUN npm run build

# Production stage
FROM node:16-alpine AS backend-runner

WORKDIR /app

COPY backend/package*.json ./
RUN npm ci --only-production

COPY --from=backend-build /app .

# Production stage
FROM node:16-alpine AS production

WORKDIR /

# Install PM2
RUN npm install -g pm2
# Copy ecosystem.config.js
COPY ecosystem.config.js .

RUN apk add --no-cache nginx

COPY nginx/default-stand-alone-docker.conf /etc/nginx/nginx.conf

COPY --from=backend-runner /app /backend

COPY --from=frontend-runner /app/ /app/

EXPOSE 80
ENV HTTPS_ENABLED false

CMD ["pm2-runtime", "start", "ecosystem.config.js"]
10 SECURITY.md

@@ -1,9 +1,13 @@
# Security Policy

## Supported Versions
## Supported versions

We always recommend using the latest version of Infisical to ensure you get all security updates.

## Reporting a Vulnerability
## Reporting vulnerabilities

Please report security vulnerabilities or concerns to team@infisical.com.
Please do not file GitHub issues or post on our public forum for security vulnerabilities, as they are public!

Infisical takes security issues very seriously. If you have any concerns about Infisical or believe you have uncovered a vulnerability, please get in touch via the e-mail address security@infisical.com. In the message, try to provide a description of the issue and ideally a way of reproducing it. The security team will get back to you as soon as possible.

Note that this security address should be used only for undisclosed vulnerabilities. Please report any security problems to us before disclosing it publicly.
2033 backend/package-lock.json (generated)
File diff suppressed because it is too large

@@ -1,20 +1,19 @@
{
"dependencies": {
"@aws-sdk/client-secrets-manager": "^3.303.0",
"@godaddy/terminus": "^4.11.2",
"@aws-sdk/client-secrets-manager": "^3.319.0",
"@godaddy/terminus": "^4.12.0",
"@octokit/rest": "^19.0.5",
"@sentry/node": "^7.45.0",
"@sentry/tracing": "^7.46.0",
"@sentry/node": "^7.41.0",
"@sentry/node": "^7.49.0",
"@sentry/tracing": "^7.48.0",
"@types/crypto-js": "^4.1.1",
"@types/libsodium-wrappers": "^0.7.10",
"argon2": "^0.30.3",
"await-to-js": "^3.0.0",
"aws-sdk": "^2.1338.0",
"axios": "^1.1.3",
"aws-sdk": "^2.1364.0",
"axios": "^1.3.5",
"axios-retry": "^3.4.0",
"bcrypt": "^5.1.0",
"bigint-conversion": "^2.2.2",
"bigint-conversion": "^2.4.0",
"builder-pattern": "^2.2.0",
"cookie-parser": "^1.4.6",
"cors": "^2.8.5",
@@ -25,13 +24,14 @@
"express-validator": "^6.14.2",
"handlebars": "^4.7.7",
"helmet": "^5.1.1",
"infisical-node": "^1.0.37",
"infisical-node": "^1.1.3",
"js-yaml": "^4.1.0",
"jsonwebtoken": "^9.0.0",
"jsrp": "^0.2.4",
"libsodium-wrappers": "^0.7.10",
"lodash": "^4.17.21",
"mongoose": "^6.10.4",
"mongoose": "^6.10.5",
"node-cache": "^5.1.2",
"nodemailer": "^6.8.0",
"posthog-node": "^2.6.0",
"query-string": "^7.1.3",
@@ -1,64 +1,76 @@
import infisical from 'infisical-node';
export const getPort = () => infisical.get('PORT')! || 4000;
export const getInviteOnlySignup = () => infisical.get('INVITE_ONLY_SIGNUP')! == undefined ? false : infisical.get('INVITE_ONLY_SIGNUP');
export const getEncryptionKey = () => infisical.get('ENCRYPTION_KEY')!;
export const getSaltRounds = () => parseInt(infisical.get('SALT_ROUNDS')!) || 10;
export const getJwtAuthLifetime = () => infisical.get('JWT_AUTH_LIFETIME')! || '10d';
export const getJwtAuthSecret = () => infisical.get('JWT_AUTH_SECRET')!;
export const getJwtMfaLifetime = () => infisical.get('JWT_MFA_LIFETIME')! || '5m';
export const getJwtMfaSecret = () => infisical.get('JWT_MFA_LIFETIME')! || '5m';
export const getJwtRefreshLifetime = () => infisical.get('JWT_REFRESH_LIFETIME')! || '90d';
export const getJwtRefreshSecret = () => infisical.get('JWT_REFRESH_SECRET')!;
export const getJwtServiceSecret = () => infisical.get('JWT_SERVICE_SECRET')!;
export const getJwtSignupLifetime = () => infisical.get('JWT_SIGNUP_LIFETIME')! || '15m';
export const getJwtSignupSecret = () => infisical.get('JWT_SIGNUP_SECRET')!;
export const getMongoURL = () => infisical.get('MONGO_URL')!;
export const getNodeEnv = () => infisical.get('NODE_ENV')! || 'production';
export const getVerboseErrorOutput = () => infisical.get('VERBOSE_ERROR_OUTPUT')! === 'true' && true;
export const getLokiHost = () => infisical.get('LOKI_HOST')!;
export const getClientIdAzure = () => infisical.get('CLIENT_ID_AZURE')!;
export const getClientIdHeroku = () => infisical.get('CLIENT_ID_HEROKU')!;
export const getClientIdVercel = () => infisical.get('CLIENT_ID_VERCEL')!;
export const getClientIdNetlify = () => infisical.get('CLIENT_ID_NETLIFY')!;
export const getClientIdGitHub = () => infisical.get('CLIENT_ID_GITHUB')!;
export const getClientIdGitLab = () => infisical.get('CLIENT_ID_GITLAB')!;
export const getClientSecretAzure = () => infisical.get('CLIENT_SECRET_AZURE')!;
export const getClientSecretHeroku = () => infisical.get('CLIENT_SECRET_HEROKU')!;
export const getClientSecretVercel = () => infisical.get('CLIENT_SECRET_VERCEL')!;
export const getClientSecretNetlify = () => infisical.get('CLIENT_SECRET_NETLIFY')!;
export const getClientSecretGitHub = () => infisical.get('CLIENT_SECRET_GITHUB')!;
export const getClientSecretGitLab = () => infisical.get('CLIENT_SECRET_GITLAB')!;
export const getClientSlugVercel = () => infisical.get('CLIENT_SLUG_VERCEL')!;
export const getPostHogHost = () => infisical.get('POSTHOG_HOST')! || 'https://app.posthog.com';
export const getPostHogProjectApiKey = () => infisical.get('POSTHOG_PROJECT_API_KEY')! || 'phc_nSin8j5q2zdhpFDI1ETmFNUIuTG4DwKVyIigrY10XiE';
export const getSentryDSN = () => infisical.get('SENTRY_DSN')!;
export const getSiteURL = () => infisical.get('SITE_URL')!;
export const getSmtpHost = () => infisical.get('SMTP_HOST')!;
export const getSmtpSecure = () => infisical.get('SMTP_SECURE')! === 'true' || false;
export const getSmtpPort = () => parseInt(infisical.get('SMTP_PORT')!) || 587;
export const getSmtpUsername = () => infisical.get('SMTP_USERNAME')!;
export const getSmtpPassword = () => infisical.get('SMTP_PASSWORD')!;
export const getSmtpFromAddress = () => infisical.get('SMTP_FROM_ADDRESS')!;
export const getSmtpFromName = () => infisical.get('SMTP_FROM_NAME')! || 'Infisical';
export const getStripeProductStarter = () => infisical.get('STRIPE_PRODUCT_STARTER')!;
export const getStripeProductPro = () => infisical.get('STRIPE_PRODUCT_PRO')!;
export const getStripeProductTeam = () => infisical.get('STRIPE_PRODUCT_TEAM')!;
export const getStripePublishableKey = () => infisical.get('STRIPE_PUBLISHABLE_KEY')!;
export const getStripeSecretKey = () => infisical.get('STRIPE_SECRET_KEY')!;
export const getStripeWebhookSecret = () => infisical.get('STRIPE_WEBHOOK_SECRET')!;
export const getTelemetryEnabled = () => infisical.get('TELEMETRY_ENABLED')! !== 'false' && true;
export const getLoopsApiKey = () => infisical.get('LOOPS_API_KEY')!;
export const getSmtpConfigured = () => infisical.get('SMTP_HOST') == '' || infisical.get('SMTP_HOST') == undefined ? false : true
export const getHttpsEnabled = () => {
if (getNodeEnv() != "production") {
import InfisicalClient from 'infisical-node';

const client = new InfisicalClient({
token: process.env.INFISICAL_TOKEN!
});

export const getPort = async () => (await client.getSecret('PORT')).secretValue || 4000;
export const getInviteOnlySignup = async () => (await client.getSecret('INVITE_ONLY_SIGNUP')).secretValue == undefined ? false : (await client.getSecret('INVITE_ONLY_SIGNUP')).secretValue;
export const getEncryptionKey = async () => (await client.getSecret('ENCRYPTION_KEY')).secretValue;
export const getSaltRounds = async () => parseInt((await client.getSecret('SALT_ROUNDS')).secretValue) || 10;
export const getJwtAuthLifetime = async () => (await client.getSecret('JWT_AUTH_LIFETIME')).secretValue || '10d';
export const getJwtAuthSecret = async () => (await client.getSecret('JWT_AUTH_SECRET')).secretValue;
export const getJwtMfaLifetime = async () => (await client.getSecret('JWT_MFA_LIFETIME')).secretValue || '5m';
export const getJwtMfaSecret = async () => (await client.getSecret('JWT_MFA_LIFETIME')).secretValue || '5m';
export const getJwtRefreshLifetime = async () => (await client.getSecret('JWT_REFRESH_LIFETIME')).secretValue || '90d';
export const getJwtRefreshSecret = async () => (await client.getSecret('JWT_REFRESH_SECRET')).secretValue;
export const getJwtServiceSecret = async () => (await client.getSecret('JWT_SERVICE_SECRET')).secretValue;
export const getJwtSignupLifetime = async () => (await client.getSecret('JWT_SIGNUP_LIFETIME')).secretValue || '15m';
export const getJwtSignupSecret = async () => (await client.getSecret('JWT_SIGNUP_SECRET')).secretValue;
export const getMongoURL = async () => (await client.getSecret('MONGO_URL')).secretValue;
export const getNodeEnv = async () => (await client.getSecret('NODE_ENV')).secretValue || 'production';
export const getVerboseErrorOutput = async () => (await client.getSecret('VERBOSE_ERROR_OUTPUT')).secretValue === 'true' && true;
export const getLokiHost = async () => (await client.getSecret('LOKI_HOST')).secretValue;
export const getClientIdAzure = async () => (await client.getSecret('CLIENT_ID_AZURE')).secretValue;
export const getClientIdHeroku = async () => (await client.getSecret('CLIENT_ID_HEROKU')).secretValue;
export const getClientIdVercel = async () => (await client.getSecret('CLIENT_ID_VERCEL')).secretValue;
export const getClientIdNetlify = async () => (await client.getSecret('CLIENT_ID_NETLIFY')).secretValue;
export const getClientIdGitHub = async () => (await client.getSecret('CLIENT_ID_GITHUB')).secretValue;
export const getClientIdGitLab = async () => (await client.getSecret('CLIENT_ID_GITLAB')).secretValue;
export const getClientSecretAzure = async () => (await client.getSecret('CLIENT_SECRET_AZURE')).secretValue;
export const getClientSecretHeroku = async () => (await client.getSecret('CLIENT_SECRET_HEROKU')).secretValue;
export const getClientSecretVercel = async () => (await client.getSecret('CLIENT_SECRET_VERCEL')).secretValue;
export const getClientSecretNetlify = async () => (await client.getSecret('CLIENT_SECRET_NETLIFY')).secretValue;
export const getClientSecretGitHub = async () => (await client.getSecret('CLIENT_SECRET_GITHUB')).secretValue;
export const getClientSecretGitLab = async () => (await client.getSecret('CLIENT_SECRET_GITLAB')).secretValue;
export const getClientSlugVercel = async () => (await client.getSecret('CLIENT_SLUG_VERCEL')).secretValue;
export const getPostHogHost = async () => (await client.getSecret('POSTHOG_HOST')).secretValue || 'https://app.posthog.com';
export const getPostHogProjectApiKey = async () => (await client.getSecret('POSTHOG_PROJECT_API_KEY')).secretValue || 'phc_nSin8j5q2zdhpFDI1ETmFNUIuTG4DwKVyIigrY10XiE';
export const getSentryDSN = async () => (await client.getSecret('SENTRY_DSN')).secretValue;
export const getSiteURL = async () => (await client.getSecret('SITE_URL')).secretValue;
export const getSmtpHost = async () => (await client.getSecret('SMTP_HOST')).secretValue;
export const getSmtpSecure = async () => (await client.getSecret('SMTP_SECURE')).secretValue === 'true' || false;
export const getSmtpPort = async () => parseInt((await client.getSecret('SMTP_PORT')).secretValue) || 587;
export const getSmtpUsername = async () => (await client.getSecret('SMTP_USERNAME')).secretValue;
export const getSmtpPassword = async () => (await client.getSecret('SMTP_PASSWORD')).secretValue;
export const getSmtpFromAddress = async () => (await client.getSecret('SMTP_FROM_ADDRESS')).secretValue;
export const getSmtpFromName = async () => (await client.getSecret('SMTP_FROM_NAME')).secretValue || 'Infisical';

export const getLicenseKey = async () => (await client.getSecret('LICENSE_KEY')).secretValue;
export const getLicenseServerKey = async () => (await client.getSecret('LICENSE_SERVER_KEY')).secretValue;
export const getLicenseServerUrl = async () => (await client.getSecret('LICENSE_SERVER_URL')).secretValue || 'https://portal.infisical.com';

// TODO: deprecate from here
export const getStripeProductStarter = async () => (await client.getSecret('STRIPE_PRODUCT_STARTER')).secretValue;
export const getStripeProductPro = async () => (await client.getSecret('STRIPE_PRODUCT_PRO')).secretValue;
export const getStripeProductTeam = async () => (await client.getSecret('STRIPE_PRODUCT_TEAM')).secretValue;
export const getStripePublishableKey = async () => (await client.getSecret('STRIPE_PUBLISHABLE_KEY')).secretValue;
export const getStripeSecretKey = async () => (await client.getSecret('STRIPE_SECRET_KEY')).secretValue;
export const getStripeWebhookSecret = async () => (await client.getSecret('STRIPE_WEBHOOK_SECRET')).secretValue;

export const getTelemetryEnabled = async () => (await client.getSecret('TELEMETRY_ENABLED')).secretValue !== 'false' && true;
export const getLoopsApiKey = async () => (await client.getSecret('LOOPS_API_KEY')).secretValue;
export const getSmtpConfigured = async () => (await client.getSecret('SMTP_HOST')).secretValue == '' || (await client.getSecret('SMTP_HOST')).secretValue == undefined ? false : true
export const getHttpsEnabled = async () => {
if ((await getNodeEnv()) != "production") {
// no https for anything other than prod
return false
}

if (infisical.get('HTTPS_ENABLED') == undefined || infisical.get('HTTPS_ENABLED') == "") {
if ((await client.getSecret('HTTPS_ENABLED')).secretValue == undefined || (await client.getSecret('HTTPS_ENABLED')).secretValue == "") {
// default when no value present
return true
}

return infisical.get('HTTPS_ENABLED') === 'true' && true
return (await client.getSecret('HTTPS_ENABLED')).secretValue === 'true' && true
}
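The rewritten config module swaps the synchronous `infisical.get(...)` lookups for the async `client.getSecret(...)` API, which is why every getter above now returns a promise and every call site in the later hunks gains an `await`. A minimal calling sketch under that assumption; the Express handler and the `./config` import path are illustrative, not taken from the diff:

```typescript
import express from 'express';
import { getSiteURL, getHttpsEnabled } from './config';

const app = express();

// Call sites now await the getters instead of reading them synchronously.
app.get('/healthcheck', async (_req, res) => {
  const siteUrl = await getSiteURL();           // was: getSiteURL()
  const httpsEnabled = await getHttpsEnabled(); // was: getHttpsEnabled()
  res.json({ siteUrl, httpsEnabled });
});

app.listen(4000);
```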
@@ -1,10 +1,24 @@
import axios from 'axios';
import axiosRetry from 'axios-retry';
import {
getLicenseServerKeyAuthToken,
setLicenseServerKeyAuthToken,
getLicenseKeyAuthToken,
setLicenseKeyAuthToken
} from './storage';
import {
getLicenseKey,
getLicenseServerKey,
getLicenseServerUrl
} from './index';

const axiosInstance = axios.create();
// should have JWT to interact with the license server
export const licenseServerKeyRequest = axios.create();
export const licenseKeyRequest = axios.create();
export const standardRequest = axios.create();

// add retry functionality to the axios instance
axiosRetry(axiosInstance, {
axiosRetry(standardRequest, {
retries: 3,
retryDelay: axiosRetry.exponentialDelay, // exponential back-off delay between retries
retryCondition: (error) => {
@@ -13,4 +27,98 @@ axiosRetry(axiosInstance, {
},
});

export default axiosInstance;
export const refreshLicenseServerKeyToken = async () => {
const licenseServerKey = await getLicenseServerKey();
const licenseServerUrl = await getLicenseServerUrl();

const { data: { token } } = await standardRequest.post(
`${licenseServerUrl}/api/auth/v1/license-server-login`, {},
{
headers: {
'X-API-KEY': licenseServerKey
}
}
);

setLicenseServerKeyAuthToken(token);

return token;
}

export const refreshLicenseKeyToken = async () => {
const licenseKey = await getLicenseKey();
const licenseServerUrl = await getLicenseServerUrl();

const { data: { token } } = await standardRequest.post(
`${licenseServerUrl}/api/auth/v1/license-login`, {},
{
headers: {
'X-API-KEY': licenseKey
}
}
);

setLicenseKeyAuthToken(token);

return token;
}

licenseServerKeyRequest.interceptors.request.use((config) => {
const token = getLicenseServerKeyAuthToken();

if (token && config.headers) {
// eslint-disable-next-line no-param-reassign
config.headers.Authorization = `Bearer ${token}`;
}
return config;
}, (err) => {
return Promise.reject(err);
});

licenseServerKeyRequest.interceptors.response.use((response) => {
return response
}, async function (err) {
const originalRequest = err.config;

if (err.response.status === 401 && !originalRequest._retry) {
originalRequest._retry = true;

// refresh
const token = await refreshLicenseServerKeyToken();

axios.defaults.headers.common['Authorization'] = 'Bearer ' + token;
return licenseServerKeyRequest(originalRequest);
}

return Promise.reject(err);
});

licenseKeyRequest.interceptors.request.use((config) => {
const token = getLicenseKeyAuthToken();

if (token && config.headers) {
// eslint-disable-next-line no-param-reassign
config.headers.Authorization = `Bearer ${token}`;
}
return config;
}, (err) => {
return Promise.reject(err);
});

licenseKeyRequest.interceptors.response.use((response) => {
return response
}, async function (err) {
const originalRequest = err.config;

if (err.response.status === 401 && !originalRequest._retry) {
originalRequest._retry = true;

// refresh
const token = await refreshLicenseKeyToken();

axios.defaults.headers.common['Authorization'] = 'Bearer ' + token;
return licenseKeyRequest(originalRequest);
}

return Promise.reject(err);
});
30 backend/src/config/storage.ts (Normal file)

@@ -0,0 +1,30 @@
const MemoryLicenseServerKeyTokenStorage = () => {
let authToken: string;

return {
setToken: (token: string) => {
authToken = token;
},
getToken: () => authToken
};
};

const MemoryLicenseKeyTokenStorage = () => {
let authToken: string;

return {
setToken: (token: string) => {
authToken = token;
},
getToken: () => authToken
};
};

const licenseServerTokenStorage = MemoryLicenseServerKeyTokenStorage();
const licenseTokenStorage = MemoryLicenseKeyTokenStorage();

export const getLicenseServerKeyAuthToken = licenseServerTokenStorage.getToken;
export const setLicenseServerKeyAuthToken = licenseServerTokenStorage.setToken;

export const getLicenseKeyAuthToken = licenseTokenStorage.getToken;
export const setLicenseKeyAuthToken = licenseTokenStorage.setToken;
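Each token lives in a closure created by the factory above, so it can only be read or written through the exported accessors; nothing else in the process can mutate it directly. A tiny usage sketch (the token string is a placeholder):

```typescript
import {
  setLicenseServerKeyAuthToken,
  getLicenseServerKeyAuthToken
} from './storage';

// Cache the JWT returned by the license-server login...
setLicenseServerKeyAuthToken('example.jwt.token');

// ...and read the same in-memory value anywhere else in the process.
console.log(getLicenseServerKeyAuthToken()); // 'example.jwt.token'
```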
@@ -126,7 +126,7 @@ export const login2 = async (req: Request, res: Response) => {
httpOnly: true,
path: '/',
sameSite: 'strict',
secure: getHttpsEnabled()
secure: await getHttpsEnabled()
});

const loginAction = await EELogService.createAction({
@@ -182,7 +182,7 @@ export const logout = async (req: Request, res: Response) => {
httpOnly: true,
path: '/',
sameSite: 'strict',
secure: getHttpsEnabled() as boolean
secure: (await getHttpsEnabled()) as boolean
});

const logoutAction = await EELogService.createAction({
@@ -237,7 +237,7 @@ export const getNewToken = async (req: Request, res: Response) => {
}

const decodedToken = <jwt.UserIDJwtPayload>(
jwt.verify(refreshToken, getJwtRefreshSecret())
jwt.verify(refreshToken, await getJwtRefreshSecret())
);

const user = await User.findOne({
@@ -252,8 +252,8 @@ export const getNewToken = async (req: Request, res: Response) => {
payload: {
userId: decodedToken.userId
},
expiresIn: getJwtAuthLifetime(),
secret: getJwtAuthSecret()
expiresIn: await getJwtAuthLifetime(),
secret: await getJwtAuthSecret()
});

return res.status(200).send({
@@ -16,7 +16,7 @@ import {
INTEGRATION_VERCEL_API_URL,
INTEGRATION_RAILWAY_API_URL
} from '../../variables';
import request from '../../config/request';
import { standardRequest } from '../../config/request';

/***
* Return integration authorization with id [integrationAuthId]
@@ -44,7 +44,7 @@ export const getIntegrationAuth = async (req: Request, res: Response) => {
}

export const getIntegrationOptions = async (req: Request, res: Response) => {
const INTEGRATION_OPTIONS = getIntegrationOptionsFunc();
const INTEGRATION_OPTIONS = await getIntegrationOptionsFunc();

return res.status(200).send({
integrationOptions: INTEGRATION_OPTIONS,
@@ -229,7 +229,7 @@ export const getIntegrationAuthVercelBranches = async (req: Request, res: Respon
let branches: string[] = [];

if (appId && appId !== '') {
const { data }: { data: VercelBranch[] } = await request.get(
const { data }: { data: VercelBranch[] } = await standardRequest.get(
`${INTEGRATION_VERCEL_API_URL}/v1/integrations/git-branches`,
{
params,
@@ -292,7 +292,7 @@ export const getIntegrationAuthRailwayEnvironments = async (req: Request, res: R
projectId: appId
}

const { data: { data: { environments: { edges } } } } = await request.post(INTEGRATION_RAILWAY_API_URL, {
const { data: { data: { environments: { edges } } } } = await standardRequest.post(INTEGRATION_RAILWAY_API_URL, {
query,
variables,
}, {
@@ -372,7 +372,7 @@ export const getIntegrationAuthRailwayServices = async (req: Request, res: Respo
id: appId
}

const { data: { data: { project: { services: { edges } } } } } = await request.post(INTEGRATION_RAILWAY_API_URL, {
const { data: { data: { project: { services: { edges } } } } } = await standardRequest.post(INTEGRATION_RAILWAY_API_URL, {
query,
variables
}, {
@@ -215,7 +215,7 @@ export const inviteUserToWorkspace = async (req: Request, res: Response) => {
inviterFirstName: req.user.firstName,
inviterEmail: req.user.email,
workspaceName: req.membership.workspace.name,
callback_url: getSiteURL() + '/login'
callback_url: (await getSiteURL()) + '/login'
}
});
} catch (err) {
@@ -1,3 +1,4 @@
import { Types } from 'mongoose';
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import { MembershipOrg, Organization, User } from '../../models';
@@ -134,12 +135,13 @@ export const inviteUserToOrganization = async (req: Request, res: Response) => {
}

if (!inviteeMembershipOrg) {

await new MembershipOrg({
user: invitee,
inviteEmail: inviteeEmail,
organization: organizationId,
role: MEMBER,
status: invitee?.publicKey ? ACCEPTED : INVITED
status: INVITED
}).save();
}
} else {
@@ -164,6 +166,7 @@ export const inviteUserToOrganization = async (req: Request, res: Response) => {
const organization = await Organization.findOne({ _id: organizationId });

if (organization) {

const token = await TokenService.createToken({
type: TOKEN_EMAIL_ORG_INVITATION,
email: inviteeEmail,
@@ -179,13 +182,14 @@ export const inviteUserToOrganization = async (req: Request, res: Response) => {
inviterEmail: req.user.email,
organizationName: organization.name,
email: inviteeEmail,
organizationId: organization._id.toString(),
token,
callback_url: getSiteURL() + '/signupinvite'
callback_url: (await getSiteURL()) + '/signupinvite'
}
});

if (!getSmtpConfigured()) {
completeInviteLink = `${siteUrl + '/signupinvite'}?token=${token}&to=${inviteeEmail}`
if (!(await getSmtpConfigured())) {
completeInviteLink = `${siteUrl + '/signupinvite'}?token=${token}&to=${inviteeEmail}&organization_id=${organization._id}`
}
}

@@ -214,13 +218,18 @@ export const inviteUserToOrganization = async (req: Request, res: Response) => {
export const verifyUserToOrganization = async (req: Request, res: Response) => {
let user, token;
try {
const { email, code } = req.body;
const {
email,
organizationId,
code
} = req.body;

user = await User.findOne({ email }).select('+publicKey');

const membershipOrg = await MembershipOrg.findOne({
inviteEmail: email,
status: INVITED
status: INVITED,
organization: new Types.ObjectId(organizationId)
});

if (!membershipOrg)
@@ -238,6 +247,10 @@ export const verifyUserToOrganization = async (req: Request, res: Response) => {
// membership can be approved and redirected to login/dashboard
membershipOrg.status = ACCEPTED;
await membershipOrg.save();

await updateSubscriptionOrgQuantity({
organizationId
});

return res.status(200).send({
message: 'Successfully verified email',
@@ -257,8 +270,8 @@ export const verifyUserToOrganization = async (req: Request, res: Response) => {
payload: {
userId: user._id.toString()
},
expiresIn: getJwtSignupLifetime(),
secret: getJwtSignupSecret()
expiresIn: await getJwtSignupLifetime(),
secret: await getJwtSignupSecret()
});
} catch (err) {
Sentry.setUser(null);
@@ -19,7 +19,8 @@ export const getOrganizations = async (req: Request, res: Response) => {
try {
organizations = (
await MembershipOrg.find({
user: req.user._id
user: req.user._id,
status: ACCEPTED
}).populate('organization')
).map((m) => m.organization);
} catch (err) {
@@ -85,7 +86,7 @@ export const createOrganization = async (req: Request, res: Response) => {
export const getOrganization = async (req: Request, res: Response) => {
let organization;
try {
organization = req.membershipOrg.organization;
organization = req.organization
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
@@ -317,29 +318,29 @@ export const createOrganizationPortalSession = async (
) => {
let session;
try {
const stripe = new Stripe(getStripeSecretKey(), {
const stripe = new Stripe(await getStripeSecretKey(), {
apiVersion: '2022-08-01'
});

// check if there is a payment method on file
const paymentMethods = await stripe.paymentMethods.list({
customer: req.membershipOrg.organization.customerId,
customer: req.organization.customerId,
type: 'card'
});

if (paymentMethods.data.length < 1) {
// case: no payment method on file
session = await stripe.checkout.sessions.create({
customer: req.membershipOrg.organization.customerId,
customer: req.organization.customerId,
mode: 'setup',
payment_method_types: ['card'],
success_url: getSiteURL() + '/dashboard',
cancel_url: getSiteURL() + '/dashboard'
success_url: (await getSiteURL()) + '/dashboard',
cancel_url: (await getSiteURL()) + '/dashboard'
});
} else {
session = await stripe.billingPortal.sessions.create({
customer: req.membershipOrg.organization.customerId,
return_url: getSiteURL() + '/dashboard'
customer: req.organization.customerId,
return_url: (await getSiteURL()) + '/dashboard'
});
}

@@ -365,12 +366,12 @@ export const getOrganizationSubscriptions = async (
) => {
let subscriptions;
try {
const stripe = new Stripe(getStripeSecretKey(), {
const stripe = new Stripe(await getStripeSecretKey(), {
apiVersion: '2022-08-01'
});

subscriptions = await stripe.subscriptions.list({
customer: req.membershipOrg.organization.customerId
customer: req.organization.customerId
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
@@ -44,7 +44,7 @@ export const emailPasswordReset = async (req: Request, res: Response) => {
substitutions: {
email,
token,
callback_url: getSiteURL() + '/password-reset'
callback_url: (await getSiteURL()) + '/password-reset'
}
});
} catch (err) {
@@ -91,8 +91,8 @@ export const emailPasswordResetVerify = async (req: Request, res: Response) => {
payload: {
userId: user._id.toString()
},
expiresIn: getJwtSignupLifetime(),
secret: getJwtSignupSecret()
expiresIn: await getJwtSignupLifetime(),
secret: await getJwtSignupSecret()
});
} catch (err) {
Sentry.setUser(null);
@@ -39,7 +39,7 @@ export const pushSecrets = async (req: Request, res: Response) => {
// upload (encrypted) secrets to workspace with id [workspaceId]

try {
const postHogClient = TelemetryService.getPostHogClient();
const postHogClient = await TelemetryService.getPostHogClient();
let { secrets }: { secrets: PushSecret[] } = req.body;
const { keys, environment, channel } = req.body;
const { workspaceId } = req.params;
@@ -114,7 +114,7 @@ export const pullSecrets = async (req: Request, res: Response) => {
let secrets;
let key;
try {
const postHogClient = TelemetryService.getPostHogClient();
const postHogClient = await TelemetryService.getPostHogClient();
const environment: string = req.query.environment as string;
const channel: string = req.query.channel as string;
const { workspaceId } = req.params;
@@ -183,7 +183,7 @@ export const pullSecretsServiceToken = async (req: Request, res: Response) => {
let secrets;
let key;
try {
const postHogClient = TelemetryService.getPostHogClient();
const postHogClient = await TelemetryService.getPostHogClient();
const environment: string = req.query.environment as string;
const channel: string = req.query.channel as string;
const { workspaceId } = req.params;
107 backend/src/controllers/v1/secretsFolderController.ts (Normal file)

@@ -0,0 +1,107 @@
import { Request, Response } from 'express';
import { Secret } from '../../models';
import Folder from '../../models/folder';
import { BadRequestError } from '../../utils/errors';
import { ROOT_FOLDER_PATH, getFolderPath, getParentPath, normalizePath, validateFolderName } from '../../utils/folder';
import { ADMIN, MEMBER } from '../../variables';
import { validateMembership } from '../../helpers/membership';

// TODO
// verify workspace id/environment
export const createFolder = async (req: Request, res: Response) => {
const { workspaceId, environment, folderName, parentFolderId } = req.body
if (!validateFolderName(folderName)) {
throw BadRequestError({ message: "Folder name cannot contain spaces. Only underscore and dashes" })
}

if (parentFolderId) {
const parentFolder = await Folder.find({ environment: environment, workspace: workspaceId, id: parentFolderId });
if (!parentFolder) {
throw BadRequestError({ message: "The parent folder doesn't exist" })
}
}

let completePath = await getFolderPath(parentFolderId)
if (completePath == ROOT_FOLDER_PATH) {
completePath = ""
}

const currentFolderPath = completePath + "/" + folderName // construct new path with current folder to be created
const normalizedCurrentPath = normalizePath(currentFolderPath)
const normalizedParentPath = getParentPath(normalizedCurrentPath)

const existingFolder = await Folder.findOne({
name: folderName,
workspace: workspaceId,
environment: environment,
parent: parentFolderId,
path: normalizedCurrentPath
});

if (existingFolder) {
return res.json(existingFolder)
}

const newFolder = new Folder({
name: folderName,
workspace: workspaceId,
environment: environment,
parent: parentFolderId,
path: normalizedCurrentPath,
parentPath: normalizedParentPath
});

await newFolder.save();

return res.json(newFolder)
}

export const deleteFolder = async (req: Request, res: Response) => {
const { folderId } = req.params
const queue: any[] = [folderId];

const folder = await Folder.findById(folderId);
if (!folder) {
throw BadRequestError({ message: "The folder doesn't exist" })
}

// check that user is a member of the workspace
await validateMembership({
userId: req.user._id.toString(),
workspaceId: folder.workspace as any,
acceptedRoles: [ADMIN, MEMBER]
});

while (queue.length > 0) {
const currentFolderId = queue.shift();

const childFolders = await Folder.find({ parent: currentFolderId });
for (const childFolder of childFolders) {
queue.push(childFolder._id);
}

await Secret.deleteMany({ folder: currentFolderId });

await Folder.deleteOne({ _id: currentFolderId });
}

res.send()
}

// TODO: validate workspace
export const getFolderById = async (req: Request, res: Response) => {
const { folderId } = req.params

const folder = await Folder.findById(folderId);
if (!folder) {
throw BadRequestError({ message: "The folder doesn't exist" })
}
// check that user is a member of the workspace
await validateMembership({
userId: req.user._id.toString(),
workspaceId: folder.workspace as any,
acceptedRoles: [ADMIN, MEMBER]
});

res.send({ folder })
}
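deleteFolder walks the folder tree iteratively: it keeps a queue of folder ids, pops one at a time, enqueues that folder's children, then deletes the folder's secrets and the folder itself, so arbitrarily deep nesting is handled without recursion. A self-contained sketch of the same traversal over plain in-memory arrays (stand-ins for the Mongoose models):

```typescript
type FolderNode = { id: string; parent?: string };
type SecretDoc = { id: string; folder: string };

// Breadth-first deletion of a folder and everything beneath it.
const deleteFolderTree = (
  rootId: string,
  folders: FolderNode[],
  secrets: SecretDoc[]
) => {
  const queue: string[] = [rootId];

  while (queue.length > 0) {
    const currentId = queue.shift()!;

    // Children of the current folder go on the queue (like Folder.find({ parent }))
    folders
      .filter((f) => f.parent === currentId)
      .forEach((f) => queue.push(f.id));

    // Delete this folder's secrets, then the folder itself
    for (let i = secrets.length - 1; i >= 0; i--) {
      if (secrets[i].folder === currentId) secrets.splice(i, 1);
    }
    const idx = folders.findIndex((f) => f.id === currentId);
    if (idx !== -1) folders.splice(idx, 1);
  }
};

// Example: root -> child; both folders and the child's secret are removed.
const folders: FolderNode[] = [{ id: 'root' }, { id: 'child', parent: 'root' }];
const secrets: SecretDoc[] = [{ id: 's1', folder: 'child' }];
deleteFolderTree('root', folders, secrets);
console.log(folders.length, secrets.length); // 0 0
```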
@@ -61,7 +61,7 @@ export const createServiceToken = async (req: Request, res: Response) => {
workspaceId
},
expiresIn: expiresIn,
secret: getJwtServiceSecret()
secret: await getJwtServiceSecret()
});
} catch (err) {
return res.status(400).send({
@@ -21,7 +21,7 @@ export const beginEmailSignup = async (req: Request, res: Response) => {
try {
email = req.body.email;

if (getInviteOnlySignup()) {
if (await getInviteOnlySignup()) {
// Only one user can create an account without being invited. The rest need to be invited in order to make an account
const userCount = await User.countDocuments({})
if (userCount != 0) {
@@ -75,7 +75,7 @@ export const verifyEmailSignup = async (req: Request, res: Response) => {
}

// verify email
if (getSmtpConfigured()) {
if (await getSmtpConfigured()) {
await checkEmailVerification({
email,
code
@@ -93,8 +93,8 @@ export const verifyEmailSignup = async (req: Request, res: Response) => {
payload: {
userId: user._id.toString()
},
expiresIn: getJwtSignupLifetime(),
secret: getJwtSignupSecret()
expiresIn: await getJwtSignupLifetime(),
secret: await getJwtSignupSecret()
});
} catch (err) {
Sentry.setUser(null);
@@ -13,7 +13,7 @@ export const handleWebhook = async (req: Request, res: Response) => {
let event;
try {
// check request for valid stripe signature
const stripe = new Stripe(getStripeSecretKey(), {
const stripe = new Stripe(await getStripeSecretKey(), {
apiVersion: '2022-08-01'
});

@@ -21,7 +21,7 @@ export const handleWebhook = async (req: Request, res: Response) => {
event = stripe.webhooks.constructEvent(
req.body,
sig,
getStripeWebhookSecret()
await getStripeWebhookSecret()
);
} catch (err) {
Sentry.setUser({ email: req.user.email });
@@ -43,7 +43,7 @@ export const createAPIKeyData = async (req: Request, res: Response) => {
const { name, expiresIn } = req.body;

const secret = crypto.randomBytes(16).toString('hex');
const secretHash = await bcrypt.hash(secret, getSaltRounds());
const secretHash = await bcrypt.hash(secret, await getSaltRounds());

const expiresAt = new Date();
expiresAt.setSeconds(expiresAt.getSeconds() + expiresIn);
@@ -124,8 +124,8 @@ export const login2 = async (req: Request, res: Response) => {
payload: {
userId: user._id.toString()
},
expiresIn: getJwtMfaLifetime(),
secret: getJwtMfaSecret()
expiresIn: await getJwtMfaLifetime(),
secret: await getJwtMfaSecret()
});

const code = await TokenService.createToken({
@@ -163,7 +163,7 @@ export const login2 = async (req: Request, res: Response) => {
httpOnly: true,
path: '/',
sameSite: 'strict',
secure: getHttpsEnabled()
secure: await getHttpsEnabled()
});

// case: user does not have MFA enablgged
@@ -302,7 +302,7 @@ export const verifyMfaToken = async (req: Request, res: Response) => {
httpOnly: true,
path: '/',
sameSite: 'strict',
secure: getHttpsEnabled()
secure: await getHttpsEnabled()
});

interface VerifyMfaTokenRes {
@@ -17,7 +17,7 @@ import { AccountNotFoundError } from '../../utils/errors';
 * @param res
 */
export const createSecret = async (req: Request, res: Response) => {
- const postHogClient = TelemetryService.getPostHogClient();
+ const postHogClient = await TelemetryService.getPostHogClient();
  const secretToCreate: CreateSecretRequestBody = req.body.secret;
  const { workspaceId, environment } = req.params
  const sanitizedSecret: SanitizedSecretForCreate = {
@@ -70,7 +70,7 @@ export const createSecret = async (req: Request, res: Response) => {
 * @param res
 */
export const createSecrets = async (req: Request, res: Response) => {
- const postHogClient = TelemetryService.getPostHogClient();
+ const postHogClient = await TelemetryService.getPostHogClient();
  const secretsToCreate: CreateSecretRequestBody[] = req.body.secrets;
  const { workspaceId, environment } = req.params
  const sanitizedSecretesToCreate: SanitizedSecretForCreate[] = []
@@ -132,7 +132,7 @@ export const createSecrets = async (req: Request, res: Response) => {
 * @param res
 */
export const deleteSecrets = async (req: Request, res: Response) => {
- const postHogClient = TelemetryService.getPostHogClient();
+ const postHogClient = await TelemetryService.getPostHogClient();
  const { workspaceId, environmentName } = req.params
  const secretIdsToDelete: string[] = req.body.secretIds

@@ -186,7 +186,7 @@ export const deleteSecrets = async (req: Request, res: Response) => {
 * @param res
 */
export const deleteSecret = async (req: Request, res: Response) => {
- const postHogClient = TelemetryService.getPostHogClient();
+ const postHogClient = await TelemetryService.getPostHogClient();
  await Secret.findByIdAndDelete(req._secret._id)

  if (postHogClient) {
@@ -215,7 +215,7 @@ export const deleteSecret = async (req: Request, res: Response) => {
 * @returns
 */
export const updateSecrets = async (req: Request, res: Response) => {
- const postHogClient = TelemetryService.getPostHogClient();
+ const postHogClient = await TelemetryService.getPostHogClient();
  const { workspaceId, environmentName } = req.params
  const secretsModificationsRequested: ModifySecretRequestBody[] = req.body.secrets;
  const [secretIdsUserCanModifyError, secretIdsUserCanModify] = await to(Secret.find({ workspace: workspaceId, environment: environmentName }, { _id: 1 }).then())
@@ -283,7 +283,7 @@ export const updateSecrets = async (req: Request, res: Response) => {
 * @returns
 */
export const updateSecret = async (req: Request, res: Response) => {
- const postHogClient = TelemetryService.getPostHogClient();
+ const postHogClient = await TelemetryService.getPostHogClient();
  const { workspaceId, environmentName } = req.params
  const secretModificationsRequested: ModifySecretRequestBody = req.body.secret;

@@ -337,7 +337,7 @@ export const updateSecret = async (req: Request, res: Response) => {
 * @returns
 */
export const getSecrets = async (req: Request, res: Response) => {
- const postHogClient = TelemetryService.getPostHogClient();
+ const postHogClient = await TelemetryService.getPostHogClient();
  const { environment } = req.query;
  const { workspaceId } = req.params;

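Every handler in this file now awaits TelemetryService.getPostHogClient() and then null-checks the result before capturing events. A plausible shape for such a getter, assuming posthog-node v2+ and hypothetical async getters for the API key and telemetry flag; this is a sketch, not the project's implementation:

import { PostHog } from 'posthog-node';

const getPostHogApiKey = async () => process.env.POSTHOG_PROJECT_API_KEY ?? '';
const getTelemetryEnabled = async () => process.env.TELEMETRY_ENABLED !== 'false';

let client: PostHog | undefined;

export const getPostHogClient = async (): Promise<PostHog | undefined> => {
  // callers are expected to guard with `if (postHogClient) { ... }`
  if (!(await getTelemetryEnabled())) return undefined;

  const apiKey = await getPostHogApiKey();
  if (!apiKey) return undefined;

  if (!client) client = new PostHog(apiKey, { host: 'https://app.posthog.com' });
  return client;
};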
@@ -25,6 +25,8 @@ import {
    BatchSecretRequest,
    BatchSecret
  } from '../../types/secret';
+ import { getFolderPath, getFoldersInDirectory, normalizePath } from '../../utils/folder';
+ import { ROOT_FOLDER_PATH } from '../../utils/folder';

  /**
   * Perform a batch of any specified CUD secret operations
@@ -35,7 +37,7 @@ import {
  export const batchSecrets = async (req: Request, res: Response) => {

    const channel = getChannelFromUserAgent(req.headers['user-agent']);
-   const postHogClient = TelemetryService.getPostHogClient();
+   const postHogClient = await TelemetryService.getPostHogClient();

    const {
      workspaceId,
@@ -51,13 +53,18 @@ export const batchSecrets = async (req: Request, res: Response) => {
    const updateSecrets: BatchSecret[] = [];
    const deleteSecrets: Types.ObjectId[] = [];
    const actions: IAction[] = [];

+   // get secret blind index salt
+   const salt = await SecretService.getSecretBlindIndexSalt({
+     workspaceId: new Types.ObjectId(workspaceId)
+   });

    for await (const request of requests) {
+     const folderId = request.secret.folderId

+     // TODO: need to auth folder
+     const fullFolderPath = await getFolderPath(folderId)

      let secretBlindIndex = '';
      switch (request.method) {
        case 'POST':
@@ -72,19 +79,23 @@ export const batchSecrets = async (req: Request, res: Response) => {
            user: request.secret.type === SECRET_PERSONAL ? req.user : undefined,
            environment,
            workspace: new Types.ObjectId(workspaceId),
+           path: fullFolderPath,
+           folder: folderId,
            secretBlindIndex
          });
          break;
        case 'PATCH':
          secretBlindIndex = await SecretService.generateSecretBlindIndexWithSalt({
            secretName: request.secret.secretName,
-           salt
+           salt,
          });

          updateSecrets.push({
            ...request.secret,
            _id: new Types.ObjectId(request.secret._id),
-           secretBlindIndex
+           secretBlindIndex,
+           folder: folderId,
+           path: fullFolderPath,
          });
          break;
        case 'DELETE':
@@ -437,9 +448,9 @@ export const createSecrets = async (req: Request, res: Response) => {
        });
      })
    );

    const newlyCreatedSecrets: ISecret[] = (await Secret.insertMany(secretsToInsert)).map((insertedSecret) => insertedSecret.toObject());

    setTimeout(async () => {
      // trigger event - push secrets
      await EventService.handleEvent({
@@ -508,7 +519,7 @@ export const createSecrets = async (req: Request, res: Response) => {
      workspaceId: new Types.ObjectId(workspaceId)
    });

-   const postHogClient = TelemetryService.getPostHogClient();
+   const postHogClient = await TelemetryService.getPostHogClient();
    if (postHogClient) {
      postHogClient.capture({
        event: 'secrets added',
@@ -578,9 +589,11 @@ export const getSecrets = async (req: Request, res: Response) => {
    }
    */

-   const { tagSlugs } = req.query;
+   const { tagSlugs, secretsPath } = req.query;
    const workspaceId = req.query.workspaceId as string;
    const environment = req.query.environment as string;
+   const normalizedPath = normalizePath(secretsPath as string)
+   const folders = await getFoldersInDirectory(workspaceId as string, environment as string, normalizedPath)

    // secrets to return
    let secrets: ISecret[] = [];
@@ -613,6 +626,12 @@ export const getSecrets = async (req: Request, res: Response) => {
      ]
    }

+   if (normalizedPath == ROOT_FOLDER_PATH) {
+     secretQuery.path = { $in: [ROOT_FOLDER_PATH, null, undefined] }
+   } else if (normalizedPath) {
+     secretQuery.path = normalizedPath
+   }

    if (tagIds.length > 0) {
      secretQuery.tags = { $in: tagIds };
    }
@@ -638,6 +657,13 @@ export const getSecrets = async (req: Request, res: Response) => {
      ]
    }

+   // TODO: check if user can query for given path
+   if (normalizedPath == ROOT_FOLDER_PATH) {
+     secretQuery.path = { $in: [ROOT_FOLDER_PATH, null, undefined] }
+   } else if (normalizedPath) {
+     secretQuery.path = normalizedPath
+   }

    if (tagIds.length > 0) {
      secretQuery.tags = { $in: tagIds };
    }
@@ -655,6 +681,12 @@ export const getSecrets = async (req: Request, res: Response) => {
      user: { $exists: false } // shared secrets only from workspace
    }

+   if (normalizedPath == ROOT_FOLDER_PATH) {
+     secretQuery.path = { $in: [ROOT_FOLDER_PATH, null, undefined] }
+   } else if (normalizedPath) {
+     secretQuery.path = normalizedPath
+   }

    if (tagIds.length > 0) {
      secretQuery.tags = { $in: tagIds };
    }
@@ -683,7 +715,7 @@ export const getSecrets = async (req: Request, res: Response) => {
      ipAddress: req.ip
    });

-   const postHogClient = TelemetryService.getPostHogClient();
+   const postHogClient = await TelemetryService.getPostHogClient();
    if (postHogClient) {
      postHogClient.capture({
        event: 'secrets pulled',
@@ -701,7 +733,8 @@ export const getSecrets = async (req: Request, res: Response) => {
    }

    return res.status(200).send({
-     secrets
+     secrets,
+     folders
    });
  }

@@ -905,7 +938,7 @@ export const updateSecrets = async (req: Request, res: Response) => {
      workspaceId: new Types.ObjectId(key)
    })

-   const postHogClient = TelemetryService.getPostHogClient();
+   const postHogClient = await TelemetryService.getPostHogClient();
    if (postHogClient) {
      postHogClient.capture({
        event: 'secrets modified',
@@ -1039,7 +1072,7 @@ export const deleteSecrets = async (req: Request, res: Response) => {
      workspaceId: new Types.ObjectId(key)
    });

-   const postHogClient = TelemetryService.getPostHogClient();
+   const postHogClient = await TelemetryService.getPostHogClient();
    if (postHogClient) {
      postHogClient.capture({
        event: 'secrets deleted',

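The getSecrets hunks above repeat the same folder-path filter three times: the root path must also match secrets created before folders existed (documents with no path at all), while any other normalized path matches exactly. A hedged sketch of that filter factored into a helper; ROOT_FOLDER_PATH is assumed to be the root marker (for example '/'), since the real constant lives in utils/folder and is not shown in the diff:

const ROOT_FOLDER_PATH = '/'; // assumption; the real constant is defined in utils/folder

// Secrets created before folders existed carry no path, so the root query
// also matches documents where path is missing or null.
export const withPathFilter = (
  query: Record<string, unknown>,
  normalizedPath: string
): Record<string, unknown> => {
  if (normalizedPath === ROOT_FOLDER_PATH) {
    return { ...query, path: { $in: [ROOT_FOLDER_PATH, null, undefined] } };
  }
  return normalizedPath ? { ...query, path: normalizedPath } : query;
};

// usage: secretQuery = withPathFilter(secretQuery, normalizedPath);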
@@ -72,7 +72,7 @@ export const createServiceAccount = async (req: Request, res: Response) => {
  }

  const secret = crypto.randomBytes(16).toString('base64');
- const secretHash = await bcrypt.hash(secret, getSaltRounds());
+ const secretHash = await bcrypt.hash(secret, await getSaltRounds());

  // create service account
  const serviceAccount = await new ServiceAccount({

@@ -84,7 +84,7 @@ export const createServiceTokenData = async (req: Request, res: Response) => {
  } = req.body;

  const secret = crypto.randomBytes(16).toString('hex');
- const secretHash = await bcrypt.hash(secret, getSaltRounds());
+ const secretHash = await bcrypt.hash(secret, await getSaltRounds());

  let expiresAt;
  if (expiresIn) {

@@ -7,8 +7,9 @@ import {
  } from '../../helpers/signup';
  import { issueAuthTokens } from '../../helpers/auth';
  import { INVITED, ACCEPTED } from '../../variables';
- import request from '../../config/request';
+ import { standardRequest } from '../../config/request';
  import { getLoopsApiKey, getHttpsEnabled } from '../../config';
+ import { updateSubscriptionOrgQuantity } from '../../helpers/organization';

  /**
   * Complete setting up user by adding their personal and auth information as part of the
@@ -87,6 +88,19 @@ export const completeAccountSignup = async (req: Request, res: Response) => {
      user
    });

+   // update organization membership statuses that are
+   // invited to completed with user attached
+   const membershipsToUpdate = await MembershipOrg.find({
+     inviteEmail: email,
+     status: INVITED
+   });
+
+   membershipsToUpdate.forEach(async (membership) => {
+     await updateSubscriptionOrgQuantity({
+       organizationId: membership.organization.toString()
+     });
+   });
+
    // update organization membership statuses that are
    // invited to completed with user attached
    await MembershipOrg.updateMany(
@@ -108,8 +122,8 @@ export const completeAccountSignup = async (req: Request, res: Response) => {
    token = tokens.token;

    // sending a welcome email to new users
-   if (getLoopsApiKey()) {
-     await request.post("https://app.loops.so/api/v1/events/send", {
+   if (await getLoopsApiKey()) {
+     await standardRequest.post("https://app.loops.so/api/v1/events/send", {
        "email": email,
        "eventName": "Sign Up",
        "firstName": firstName,
@@ -117,7 +131,7 @@ export const completeAccountSignup = async (req: Request, res: Response) => {
      }, {
        headers: {
          "Accept": "application/json",
-         "Authorization": "Bearer " + getLoopsApiKey()
+         "Authorization": "Bearer " + (await getLoopsApiKey())
        },
      });
    }
@@ -127,7 +141,7 @@ export const completeAccountSignup = async (req: Request, res: Response) => {
      httpOnly: true,
      path: '/',
      sameSite: 'strict',
-     secure: getHttpsEnabled()
+     secure: await getHttpsEnabled()
    });
  } catch (err) {
    Sentry.setUser(null);
@@ -206,9 +220,20 @@ export const completeAccountInvite = async (req: Request, res: Response) => {

    if (!user)
      throw new Error('Failed to complete account for non-existent user');

+   // update organization membership statuses that are
+   // invited to completed with user attached
+   const membershipsToUpdate = await MembershipOrg.find({
+     inviteEmail: email,
+     status: INVITED
+   });
+
+   membershipsToUpdate.forEach(async (membership) => {
+     await updateSubscriptionOrgQuantity({
+       organizationId: membership.organization.toString()
+     });
+   });
+
    await MembershipOrg.updateMany(
      {
        inviteEmail: email,
@@ -232,7 +257,7 @@ export const completeAccountInvite = async (req: Request, res: Response) => {
      httpOnly: true,
      path: '/',
      sameSite: 'strict',
-     secure: getHttpsEnabled()
+     secure: await getHttpsEnabled()
    });
  } catch (err) {
    Sentry.setUser(null);

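The new membership blocks call updateSubscriptionOrgQuantity inside forEach(async ...), which fires the updates without the handler awaiting them or seeing their errors. If the intent is to wait for every update and propagate failures, Promise.all over map is the usual alternative; a small self-contained sketch under that assumption, with hypothetical parameter types:

type OrgMembership = { organization: { toString(): string } };

const syncOrgQuantities = async (
  memberships: OrgMembership[],
  updateSubscriptionOrgQuantity: (args: { organizationId: string }) => Promise<unknown>
) => {
  // run the subscription-quantity updates concurrently and surface any failure
  await Promise.all(
    memberships.map((m) =>
      updateSubscriptionOrgQuantity({ organizationId: m.organization.toString() })
    )
  );
};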
@@ -48,7 +48,7 @@ interface V2PushSecret {
export const pushWorkspaceSecrets = async (req: Request, res: Response) => {
  // upload (encrypted) secrets to workspace with id [workspaceId]
  try {
-   const postHogClient = TelemetryService.getPostHogClient();
+   const postHogClient = await TelemetryService.getPostHogClient();
    let { secrets }: { secrets: V2PushSecret[] } = req.body;
    const { keys, environment, channel } = req.body;
    const { workspaceId } = req.params;
@@ -123,7 +123,7 @@ export const pushWorkspaceSecrets = async (req: Request, res: Response) => {
export const pullSecrets = async (req: Request, res: Response) => {
  let secrets;
  try {
-   const postHogClient = TelemetryService.getPostHogClient();
+   const postHogClient = await TelemetryService.getPostHogClient();
    const environment: string = req.query.environment as string;
    const channel: string = req.query.channel as string;
    const { workspaceId } = req.params;

@@ -1,6 +1,7 @@
  import * as stripeController from './stripeController';
  import * as secretController from './secretController';
  import * as secretSnapshotController from './secretSnapshotController';
+ import * as organizationsController from './organizationsController';
  import * as workspaceController from './workspaceController';
  import * as actionController from './actionController';
  import * as membershipController from './membershipController';
@@ -9,6 +10,7 @@ export {
  stripeController,
  secretController,
  secretSnapshotController,
+ organizationsController,
  workspaceController,
  actionController,
  membershipController

backend/src/ee/controllers/v1/organizationsController.ts (new file, 15 lines)
@@ -0,0 +1,15 @@
import { Types } from 'mongoose';
import { Request, Response } from 'express';
import { getOrganizationPlanHelper } from '../../helpers/organizations';

export const getOrganizationPlan = async (req: Request, res: Response) => {
  const { organizationId } = req.params;

  const plan = await getOrganizationPlanHelper({
    organizationId: new Types.ObjectId(organizationId)
  });

  return res.status(200).send({
    plan
  });
}

@@ -12,7 +12,7 @@ import { getStripeSecretKey, getStripeWebhookSecret } from '../../../config';
export const handleWebhook = async (req: Request, res: Response) => {
  let event;
  try {
-   const stripe = new Stripe(getStripeSecretKey(), {
+   const stripe = new Stripe(await getStripeSecretKey(), {
      apiVersion: '2022-08-01'
    });

@@ -21,7 +21,7 @@ export const handleWebhook = async (req: Request, res: Response) => {
    event = stripe.webhooks.constructEvent(
      req.body,
      sig,
-     getStripeWebhookSecret()
+     await getStripeWebhookSecret()
    );
  } catch (err) {
    Sentry.setUser({ email: req.user.email });

@ -1,4 +1,3 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Types } from 'mongoose';
|
||||
import { Action } from '../models';
|
||||
import {
|
||||
@ -36,33 +35,25 @@ const createActionUpdateSecret = async ({
|
||||
workspaceId: Types.ObjectId;
|
||||
secretIds: Types.ObjectId[];
|
||||
}) => {
|
||||
let action;
|
||||
try {
|
||||
const latestSecretVersions = (await getLatestNSecretSecretVersionIds({
|
||||
secretIds,
|
||||
n: 2
|
||||
}))
|
||||
.map((s) => ({
|
||||
oldSecretVersion: s.versions[0]._id,
|
||||
newSecretVersion: s.versions[1]._id
|
||||
}));
|
||||
|
||||
action = await new Action({
|
||||
name,
|
||||
user: userId,
|
||||
serviceAccount: serviceAccountId,
|
||||
serviceTokenData: serviceTokenDataId,
|
||||
workspace: workspaceId,
|
||||
payload: {
|
||||
secretVersions: latestSecretVersions
|
||||
}
|
||||
}).save();
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to create update secret action');
|
||||
}
|
||||
const latestSecretVersions = (await getLatestNSecretSecretVersionIds({
|
||||
secretIds,
|
||||
n: 2
|
||||
}))
|
||||
.map((s) => ({
|
||||
oldSecretVersion: s.versions[0]._id,
|
||||
newSecretVersion: s.versions[1]._id
|
||||
}));
|
||||
|
||||
const action = await new Action({
|
||||
name,
|
||||
user: userId,
|
||||
serviceAccount: serviceAccountId,
|
||||
serviceTokenData: serviceTokenDataId,
|
||||
workspace: workspaceId,
|
||||
payload: {
|
||||
secretVersions: latestSecretVersions
|
||||
}
|
||||
}).save();
|
||||
|
||||
return action;
|
||||
}
|
||||
@ -90,33 +81,25 @@ const createActionSecret = async ({
|
||||
workspaceId: Types.ObjectId;
|
||||
secretIds: Types.ObjectId[];
|
||||
}) => {
|
||||
let action;
|
||||
try {
|
||||
// case: action is adding, deleting, or reading secrets
|
||||
// -> add new secret versions
|
||||
const latestSecretVersions = (await getLatestSecretVersionIds({
|
||||
secretIds
|
||||
}))
|
||||
.map((s) => ({
|
||||
newSecretVersion: s.versionId
|
||||
}));
|
||||
|
||||
action = await new Action({
|
||||
name,
|
||||
user: userId,
|
||||
serviceAccount: serviceAccountId,
|
||||
serviceTokenData: serviceTokenDataId,
|
||||
workspace: workspaceId,
|
||||
payload: {
|
||||
secretVersions: latestSecretVersions
|
||||
}
|
||||
}).save();
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to create action create/read/delete secret action');
|
||||
}
|
||||
// case: action is adding, deleting, or reading secrets
|
||||
// -> add new secret versions
|
||||
const latestSecretVersions = (await getLatestSecretVersionIds({
|
||||
secretIds
|
||||
}))
|
||||
.map((s) => ({
|
||||
newSecretVersion: s.versionId
|
||||
}));
|
||||
|
||||
const action = await new Action({
|
||||
name,
|
||||
user: userId,
|
||||
serviceAccount: serviceAccountId,
|
||||
serviceTokenData: serviceTokenDataId,
|
||||
workspace: workspaceId,
|
||||
payload: {
|
||||
secretVersions: latestSecretVersions
|
||||
}
|
||||
}).save();
|
||||
|
||||
return action;
|
||||
}
|
||||
@ -140,19 +123,12 @@ const createActionClient = ({
|
||||
serviceAccountId?: Types.ObjectId;
|
||||
serviceTokenDataId?: Types.ObjectId;
|
||||
}) => {
|
||||
let action;
|
||||
try {
|
||||
action = new Action({
|
||||
name,
|
||||
user: userId,
|
||||
serviceAccount: serviceAccountId,
|
||||
serviceTokenData: serviceTokenDataId
|
||||
}).save();
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to create client action');
|
||||
}
|
||||
const action = new Action({
|
||||
name,
|
||||
user: userId,
|
||||
serviceAccount: serviceAccountId,
|
||||
serviceTokenData: serviceTokenDataId
|
||||
}).save();
|
||||
|
||||
return action;
|
||||
}
|
||||
@ -181,40 +157,34 @@ const createActionHelper = async ({
|
||||
secretIds?: Types.ObjectId[];
|
||||
}) => {
|
||||
let action;
|
||||
try {
|
||||
switch (name) {
|
||||
case ACTION_LOGIN:
|
||||
case ACTION_LOGOUT:
|
||||
action = await createActionClient({
|
||||
name,
|
||||
userId
|
||||
});
|
||||
break;
|
||||
case ACTION_ADD_SECRETS:
|
||||
case ACTION_READ_SECRETS:
|
||||
case ACTION_DELETE_SECRETS:
|
||||
if (!workspaceId || !secretIds) throw new Error('Missing required params workspace id or secret ids to create action secret');
|
||||
action = await createActionSecret({
|
||||
name,
|
||||
userId,
|
||||
workspaceId,
|
||||
secretIds
|
||||
});
|
||||
break;
|
||||
case ACTION_UPDATE_SECRETS:
|
||||
if (!workspaceId || !secretIds) throw new Error('Missing required params workspace id or secret ids to create action secret');
|
||||
action = await createActionUpdateSecret({
|
||||
name,
|
||||
userId,
|
||||
workspaceId,
|
||||
secretIds
|
||||
});
|
||||
break;
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to create action');
|
||||
switch (name) {
|
||||
case ACTION_LOGIN:
|
||||
case ACTION_LOGOUT:
|
||||
action = await createActionClient({
|
||||
name,
|
||||
userId
|
||||
});
|
||||
break;
|
||||
case ACTION_ADD_SECRETS:
|
||||
case ACTION_READ_SECRETS:
|
||||
case ACTION_DELETE_SECRETS:
|
||||
if (!workspaceId || !secretIds) throw new Error('Missing required params workspace id or secret ids to create action secret');
|
||||
action = await createActionSecret({
|
||||
name,
|
||||
userId,
|
||||
workspaceId,
|
||||
secretIds
|
||||
});
|
||||
break;
|
||||
case ACTION_UPDATE_SECRETS:
|
||||
if (!workspaceId || !secretIds) throw new Error('Missing required params workspace id or secret ids to create action secret');
|
||||
action = await createActionUpdateSecret({
|
||||
name,
|
||||
userId,
|
||||
workspaceId,
|
||||
secretIds
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
return action;
|
||||
@ -222,4 +192,4 @@ const createActionHelper = async ({
|
||||
|
||||
export {
|
||||
createActionHelper
|
||||
};
|
||||
};
|
||||
|
@ -1,4 +1,3 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Types } from 'mongoose';
|
||||
import {
|
||||
Log,
|
||||
@ -32,27 +31,20 @@ const createLogHelper = async ({
|
||||
channel: string;
|
||||
ipAddress: string;
|
||||
}) => {
|
||||
let log;
|
||||
try {
|
||||
log = await new Log({
|
||||
user: userId,
|
||||
serviceAccount: serviceAccountId,
|
||||
serviceTokenData: serviceTokenDataId,
|
||||
workspace: workspaceId ?? undefined,
|
||||
actionNames: actions.map((a) => a.name),
|
||||
actions,
|
||||
channel,
|
||||
ipAddress
|
||||
}).save();
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to create log');
|
||||
}
|
||||
const log = await new Log({
|
||||
user: userId,
|
||||
serviceAccount: serviceAccountId,
|
||||
serviceTokenData: serviceTokenDataId,
|
||||
workspace: workspaceId ?? undefined,
|
||||
actionNames: actions.map((a) => a.name),
|
||||
actions,
|
||||
channel,
|
||||
ipAddress
|
||||
}).save();
|
||||
|
||||
return log;
|
||||
}
|
||||
|
||||
export {
|
||||
createLogHelper
|
||||
}
|
||||
}
|
||||
|
backend/src/ee/helpers/organizations.ts (new file, 39 lines)
@@ -0,0 +1,39 @@
import { Types } from 'mongoose';
import * as Sentry from '@sentry/node';
import { Organization } from '../../models';
import { EELicenseService } from '../services';
import { getLicenseServerUrl } from '../../config';
import { licenseServerKeyRequest } from '../../config/request';
import { OrganizationNotFoundError } from '../../utils/errors';

export const getOrganizationPlanHelper = async ({
  organizationId
}: {
  organizationId: Types.ObjectId;
}) => {
  try {
    if (EELicenseService.instanceType === 'cloud') {
      // instance of Infisical is a cloud instance

      const organization = await Organization.findById(organizationId);
      if (!organization) throw OrganizationNotFoundError();

      const cachedPlan = EELicenseService.localFeatureSet.get(organizationId.toString());
      if (cachedPlan) return cachedPlan;

      const { data: { currentPlan } } = await licenseServerKeyRequest.get(
        `${await getLicenseServerUrl()}/api/license-server/v1/customers/${organization.customerId}/cloud-plan`
      );

      // cache fetched plan for organization
      EELicenseService.localFeatureSet.set(organizationId.toString(), currentPlan);
      return currentPlan;
    }

    return EELicenseService.globalFeatureSet;
  } catch (err) {
    Sentry.setUser(null);
    Sentry.captureException(err);
    return EELicenseService.globalFeatureSet;
  }
}

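getOrganizationPlanHelper leans on EELicenseService.localFeatureSet, a NodeCache, as a short-lived per-organization plan cache: return the cached plan if present, otherwise fetch and store it. A stripped-down sketch of that get-or-fetch pattern, with a stand-in fetch function and a simplified Plan type in place of the real license-server request and feature set:

import NodeCache from 'node-cache';

type Plan = { slug: string; tier: number };

const planCache = new NodeCache({ stdTTL: 300 }); // entries expire after 5 minutes

// stand-in for the license-server call made in the helper above
const fetchPlanFromLicenseServer = async (organizationId: string): Promise<Plan> => {
  return { slug: 'starter', tier: 0 };
};

export const getPlanCached = async (organizationId: string): Promise<Plan> => {
  const cached = planCache.get<Plan>(organizationId);
  if (cached) return cached;

  const plan = await fetchPlanFromLicenseServer(organizationId);
  planCache.set(organizationId, plan);
  return plan;
};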
@ -1,14 +1,6 @@
|
||||
import { Types } from 'mongoose';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import {
|
||||
Secret,
|
||||
ISecret,
|
||||
} from '../../models';
|
||||
import {
|
||||
SecretSnapshot,
|
||||
SecretVersion,
|
||||
ISecretVersion
|
||||
} from '../models';
|
||||
import { Types } from "mongoose";
|
||||
import { Secret, ISecret } from "../../models";
|
||||
import { SecretSnapshot, SecretVersion, ISecretVersion } from "../models";
|
||||
|
||||
/**
|
||||
* Save a secret snapshot that is a copy of the current state of secrets in workspace with id
|
||||
@ -19,56 +11,53 @@ import {
|
||||
* @returns {SecretSnapshot} secretSnapshot - new secret snapshot
|
||||
*/
|
||||
const takeSecretSnapshotHelper = async ({
|
||||
workspaceId
|
||||
workspaceId,
|
||||
}: {
|
||||
workspaceId: Types.ObjectId;
|
||||
workspaceId: Types.ObjectId;
|
||||
}) => {
|
||||
const secretIds = (
|
||||
await Secret.find(
|
||||
{
|
||||
workspace: workspaceId,
|
||||
},
|
||||
"_id"
|
||||
)
|
||||
).map((s) => s._id);
|
||||
|
||||
let secretSnapshot;
|
||||
try {
|
||||
const secretIds = (await Secret.find({
|
||||
workspace: workspaceId
|
||||
}, '_id')).map((s) => s._id);
|
||||
const latestSecretVersions = (
|
||||
await SecretVersion.aggregate([
|
||||
{
|
||||
$match: {
|
||||
secret: {
|
||||
$in: secretIds,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: "$secret",
|
||||
version: { $max: "$version" },
|
||||
versionId: { $max: "$_id" }, // secret version id
|
||||
},
|
||||
},
|
||||
{
|
||||
$sort: { version: -1 },
|
||||
},
|
||||
]).exec()
|
||||
).map((s) => s.versionId);
|
||||
|
||||
const latestSecretVersions = (await SecretVersion.aggregate([
|
||||
{
|
||||
$match: {
|
||||
secret: {
|
||||
$in: secretIds
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: '$secret',
|
||||
version: { $max: '$version' },
|
||||
versionId: { $max: '$_id' } // secret version id
|
||||
}
|
||||
},
|
||||
{
|
||||
$sort: { version: -1 }
|
||||
}
|
||||
])
|
||||
.exec())
|
||||
.map((s) => s.versionId);
|
||||
const latestSecretSnapshot = await SecretSnapshot.findOne({
|
||||
workspace: workspaceId,
|
||||
}).sort({ version: -1 });
|
||||
|
||||
const latestSecretSnapshot = await SecretSnapshot.findOne({
|
||||
workspace: workspaceId
|
||||
}).sort({ version: -1 });
|
||||
const secretSnapshot = await new SecretSnapshot({
|
||||
workspace: workspaceId,
|
||||
version: latestSecretSnapshot ? latestSecretSnapshot.version + 1 : 1,
|
||||
secretVersions: latestSecretVersions,
|
||||
}).save();
|
||||
|
||||
secretSnapshot = await new SecretSnapshot({
|
||||
workspace: workspaceId,
|
||||
version: latestSecretSnapshot ? latestSecretSnapshot.version + 1 : 1,
|
||||
secretVersions: latestSecretVersions
|
||||
}).save();
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to take a secret snapshot');
|
||||
}
|
||||
|
||||
return secretSnapshot;
|
||||
}
|
||||
return secretSnapshot;
|
||||
};
|
||||
|
||||
/**
|
||||
* Add secret versions [secretVersions] to the SecretVersion collection.
|
||||
@ -77,93 +66,79 @@ const takeSecretSnapshotHelper = async ({
|
||||
* @returns {SecretVersion[]} newSecretVersions - new secret versions
|
||||
*/
|
||||
const addSecretVersionsHelper = async ({
|
||||
secretVersions
|
||||
secretVersions,
|
||||
}: {
|
||||
secretVersions: ISecretVersion[]
|
||||
secretVersions: ISecretVersion[];
|
||||
}) => {
|
||||
let newSecretVersions;
|
||||
try {
|
||||
newSecretVersions = await SecretVersion.insertMany(secretVersions);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error(`Failed to add secret versions [err=${err}]`);
|
||||
}
|
||||
const newSecretVersions = await SecretVersion.insertMany(secretVersions);
|
||||
|
||||
return newSecretVersions;
|
||||
}
|
||||
return newSecretVersions;
|
||||
};
|
||||
|
||||
const markDeletedSecretVersionsHelper = async ({
|
||||
secretIds
|
||||
secretIds,
|
||||
}: {
|
||||
secretIds: Types.ObjectId[];
|
||||
secretIds: Types.ObjectId[];
|
||||
}) => {
|
||||
try {
|
||||
await SecretVersion.updateMany({
|
||||
secret: { $in: secretIds }
|
||||
}, {
|
||||
isDeleted: true
|
||||
}, {
|
||||
new: true
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to mark secret versions as deleted');
|
||||
}
|
||||
}
|
||||
await SecretVersion.updateMany(
|
||||
{
|
||||
secret: { $in: secretIds },
|
||||
},
|
||||
{
|
||||
isDeleted: true,
|
||||
},
|
||||
{
|
||||
new: true,
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* Initialize secret versioning by setting previously unversioned
|
||||
* secrets to version 1 and begin populating secret versions.
|
||||
*/
|
||||
const initSecretVersioningHelper = async () => {
|
||||
try {
|
||||
await Secret.updateMany(
|
||||
{ version: { $exists: false } },
|
||||
{ $set: { version: 1 } }
|
||||
);
|
||||
|
||||
await Secret.updateMany(
|
||||
{ version: { $exists: false } },
|
||||
{ $set: { version: 1 } }
|
||||
);
|
||||
const unversionedSecrets: ISecret[] = await Secret.aggregate([
|
||||
{
|
||||
$lookup: {
|
||||
from: "secretversions",
|
||||
localField: "_id",
|
||||
foreignField: "secret",
|
||||
as: "versions",
|
||||
},
|
||||
},
|
||||
{
|
||||
$match: {
|
||||
versions: { $size: 0 },
|
||||
},
|
||||
},
|
||||
]);
|
||||
|
||||
const unversionedSecrets: ISecret[] = await Secret.aggregate([
|
||||
{
|
||||
$lookup: {
|
||||
from: 'secretversions',
|
||||
localField: '_id',
|
||||
foreignField: 'secret',
|
||||
as: 'versions',
|
||||
},
|
||||
},
|
||||
{
|
||||
$match: {
|
||||
versions: { $size: 0 },
|
||||
},
|
||||
},
|
||||
]);
|
||||
|
||||
if (unversionedSecrets.length > 0) {
|
||||
await addSecretVersionsHelper({
|
||||
secretVersions: unversionedSecrets.map((s, idx) => new SecretVersion({
|
||||
...s,
|
||||
secret: s._id,
|
||||
version: s.version ? s.version : 1,
|
||||
isDeleted: false,
|
||||
workspace: s.workspace,
|
||||
environment: s.environment
|
||||
}))
|
||||
});
|
||||
}
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to ensure that secrets are versioned');
|
||||
}
|
||||
}
|
||||
if (unversionedSecrets.length > 0) {
|
||||
await addSecretVersionsHelper({
|
||||
secretVersions: unversionedSecrets.map(
|
||||
(s, idx) =>
|
||||
new SecretVersion({
|
||||
...s,
|
||||
secret: s._id,
|
||||
version: s.version ? s.version : 1,
|
||||
isDeleted: false,
|
||||
workspace: s.workspace,
|
||||
environment: s.environment,
|
||||
})
|
||||
),
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export {
|
||||
takeSecretSnapshotHelper,
|
||||
addSecretVersionsHelper,
|
||||
markDeletedSecretVersionsHelper,
|
||||
initSecretVersioningHelper
|
||||
}
|
||||
takeSecretSnapshotHelper,
|
||||
addSecretVersionsHelper,
|
||||
markDeletedSecretVersionsHelper,
|
||||
initSecretVersioningHelper,
|
||||
};
|
||||
|
@ -1,4 +1,3 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Types } from 'mongoose';
|
||||
import { SecretVersion } from '../models';
|
||||
|
||||
@ -13,41 +12,32 @@ const getLatestSecretVersionIds = async ({
|
||||
}: {
|
||||
secretIds: Types.ObjectId[];
|
||||
}) => {
|
||||
|
||||
interface LatestSecretVersionId {
|
||||
_id: Types.ObjectId;
|
||||
version: number;
|
||||
versionId: Types.ObjectId;
|
||||
}
|
||||
|
||||
let latestSecretVersionIds: LatestSecretVersionId[];
|
||||
try {
|
||||
latestSecretVersionIds = (await SecretVersion.aggregate([
|
||||
{
|
||||
$match: {
|
||||
secret: {
|
||||
$in: secretIds
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: '$secret',
|
||||
version: { $max: '$version' },
|
||||
versionId: { $max: '$_id' } // id of latest secret version
|
||||
}
|
||||
},
|
||||
{
|
||||
$sort: { version: -1 }
|
||||
const latestSecretVersionIds = (await SecretVersion.aggregate([
|
||||
{
|
||||
$match: {
|
||||
secret: {
|
||||
$in: secretIds
|
||||
}
|
||||
}
|
||||
])
|
||||
.exec());
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get latest secret versions');
|
||||
}
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: '$secret',
|
||||
version: { $max: '$version' },
|
||||
versionId: { $max: '$_id' } // id of latest secret version
|
||||
}
|
||||
},
|
||||
{
|
||||
$sort: { version: -1 }
|
||||
}
|
||||
])
|
||||
.exec());
|
||||
|
||||
return latestSecretVersionIds;
|
||||
}
|
||||
@ -66,40 +56,32 @@ const getLatestNSecretSecretVersionIds = async ({
|
||||
secretIds: Types.ObjectId[];
|
||||
n: number;
|
||||
}) => {
|
||||
|
||||
// TODO: optimize query
|
||||
let latestNSecretVersions;
|
||||
try {
|
||||
latestNSecretVersions = (await SecretVersion.aggregate([
|
||||
{
|
||||
$match: {
|
||||
secret: {
|
||||
$in: secretIds,
|
||||
},
|
||||
},
|
||||
const latestNSecretVersions = (await SecretVersion.aggregate([
|
||||
{
|
||||
$match: {
|
||||
secret: {
|
||||
$in: secretIds,
|
||||
},
|
||||
{
|
||||
$sort: { version: -1 },
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: "$secret",
|
||||
versions: { $push: "$$ROOT" },
|
||||
},
|
||||
},
|
||||
{
|
||||
$sort: { version: -1 },
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: "$secret",
|
||||
versions: { $push: "$$ROOT" },
|
||||
},
|
||||
{
|
||||
$project: {
|
||||
_id: 0,
|
||||
secret: "$_id",
|
||||
versions: { $slice: ["$versions", n] },
|
||||
},
|
||||
}
|
||||
]));
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get latest n secret versions');
|
||||
}
|
||||
},
|
||||
{
|
||||
$project: {
|
||||
_id: 0,
|
||||
secret: "$_id",
|
||||
versions: { $slice: ["$versions", n] },
|
||||
},
|
||||
}
|
||||
]));
|
||||
|
||||
return latestNSecretVersions;
|
||||
}
|
||||
|
@@ -1,11 +1,13 @@
  import secret from './secret';
  import secretSnapshot from './secretSnapshot';
+ import organizations from './organizations';
  import workspace from './workspace';
  import action from './action';

  export {
    secret,
    secretSnapshot,
+   organizations,
    workspace,
    action
  }

backend/src/ee/routes/v1/organizations.ts (new file, 28 lines)
@@ -0,0 +1,28 @@
import express from 'express';
const router = express.Router();
import {
  requireAuth,
  requireOrganizationAuth,
  validateRequest
} from '../../../middleware';
import { param } from 'express-validator';
import { organizationsController } from '../../controllers/v1';
import {
  OWNER, ADMIN, MEMBER, ACCEPTED
} from '../../../variables';

router.get(
  '/:organizationId/plan',
  requireAuth({
    acceptedAuthModes: ['jwt', 'apiKey']
  }),
  requireOrganizationAuth({
    acceptedRoles: [OWNER, ADMIN, MEMBER],
    acceptedStatuses: [ACCEPTED]
  }),
  param('organizationId').exists().trim(),
  validateRequest,
  organizationsController.getOrganizationPlan
);

export default router;

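With this router in place, a client can read an organization's plan from GET /:organizationId/plan under whatever prefix the router is mounted at. A hedged fetch example; the /api/v1/organizations prefix and the bearer-token header are assumptions for illustration, not taken from the diff:

const getOrganizationPlan = async (organizationId: string, token: string) => {
  const res = await fetch(`/api/v1/organizations/${organizationId}/plan`, {
    headers: { Authorization: `Bearer ${token}` }
  });
  if (!res.ok) throw new Error(`Failed to fetch plan: ${res.status}`);

  const { plan } = await res.json();
  return plan;
};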
@@ -7,7 +7,7 @@ import {
  requireAuth,
  validateRequest
} from '../../../middleware';
- import { param, body } from 'express-validator';
+ import { param } from 'express-validator';
import { ADMIN, MEMBER } from '../../../variables';
import { secretSnapshotController } from '../../controllers/v1';

@ -1,12 +1,99 @@
|
||||
import NodeCache from 'node-cache';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import {
|
||||
getLicenseKey,
|
||||
getLicenseServerKey,
|
||||
getLicenseServerUrl
|
||||
} from '../../config';
|
||||
import {
|
||||
licenseKeyRequest,
|
||||
refreshLicenseServerKeyToken,
|
||||
refreshLicenseKeyToken
|
||||
} from '../../config/request';
|
||||
|
||||
interface FeatureSet {
|
||||
_id: string | null;
|
||||
slug: 'starter' | 'team' | 'pro' | 'enterprise' | null;
|
||||
tier: number | null;
|
||||
projectLimit: number | null;
|
||||
memberLimit: number | null;
|
||||
secretVersioning: boolean;
|
||||
pitRecovery: boolean;
|
||||
rbac: boolean;
|
||||
customRateLimits: boolean;
|
||||
customAlerts: boolean;
|
||||
auditLogs: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Class to handle Enterprise Edition license actions
|
||||
* Class to handle license/plan configurations:
|
||||
* - Infisical Cloud: Fetch and cache customer plans in [localFeatureSet]
|
||||
* - Self-hosted regular: Use default global feature set
|
||||
* - Self-hosted enterprise: Fetch and update global feature set
|
||||
*/
|
||||
class EELicenseService {
|
||||
|
||||
private readonly _isLicenseValid: boolean;
|
||||
private readonly _isLicenseValid: boolean; // TODO: deprecate
|
||||
|
||||
public instanceType: 'self-hosted' | 'enterprise-self-hosted' | 'cloud' = 'self-hosted';
|
||||
|
||||
public globalFeatureSet: FeatureSet = {
|
||||
_id: null,
|
||||
slug: null,
|
||||
tier: null,
|
||||
projectLimit: null,
|
||||
memberLimit: null,
|
||||
secretVersioning: true,
|
||||
pitRecovery: true,
|
||||
rbac: true,
|
||||
customRateLimits: true,
|
||||
customAlerts: true,
|
||||
auditLogs: false
|
||||
}
|
||||
|
||||
public localFeatureSet: NodeCache;
|
||||
|
||||
constructor(licenseKey: string) {
|
||||
constructor() {
|
||||
this._isLicenseValid = true;
|
||||
this.localFeatureSet = new NodeCache({
|
||||
stdTTL: 300
|
||||
});
|
||||
}
|
||||
|
||||
public async initGlobalFeatureSet() {
|
||||
const licenseServerKey = await getLicenseServerKey();
|
||||
const licenseKey = await getLicenseKey();
|
||||
|
||||
try {
|
||||
if (licenseServerKey) {
|
||||
// license server key is present -> validate it
|
||||
const token = await refreshLicenseServerKeyToken()
|
||||
|
||||
if (token) {
|
||||
this.instanceType = 'cloud';
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
if (licenseKey) {
|
||||
// license key is present -> validate it
|
||||
const token = await refreshLicenseKeyToken();
|
||||
|
||||
if (token) {
|
||||
const { data: { currentPlan } } = await licenseKeyRequest.get(
|
||||
`${await getLicenseServerUrl()}/api/license/v1/plan`
|
||||
);
|
||||
|
||||
this.globalFeatureSet = currentPlan;
|
||||
this.instanceType = 'enterprise-self-hosted';
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
// case: self-hosted free
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
}
|
||||
}
|
||||
|
||||
public get isLicenseValid(): boolean {
|
||||
@ -14,4 +101,4 @@ class EELicenseService {
|
||||
}
|
||||
}
|
||||
|
||||
export default new EELicenseService('N/A');
|
||||
export default new EELicenseService();
|
@ -1,4 +1,3 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Types } from 'mongoose';
|
||||
import jwt from 'jsonwebtoken';
|
||||
import bcrypt from 'bcrypt';
|
||||
@ -104,7 +103,7 @@ const getAuthUserPayload = async ({
|
||||
authTokenValue: string;
|
||||
}) => {
|
||||
const decodedToken = <jwt.UserIDJwtPayload>(
|
||||
jwt.verify(authTokenValue, getJwtAuthSecret())
|
||||
jwt.verify(authTokenValue, await getJwtAuthSecret())
|
||||
);
|
||||
|
||||
const user = await User.findOne({
|
||||
@ -263,16 +262,16 @@ const issueAuthTokens = async ({ userId }: { userId: string }) => {
|
||||
payload: {
|
||||
userId
|
||||
},
|
||||
expiresIn: getJwtAuthLifetime(),
|
||||
secret: getJwtAuthSecret()
|
||||
expiresIn: await getJwtAuthLifetime(),
|
||||
secret: await getJwtAuthSecret()
|
||||
});
|
||||
|
||||
const refreshToken = createToken({
|
||||
payload: {
|
||||
userId
|
||||
},
|
||||
expiresIn: getJwtRefreshLifetime(),
|
||||
secret: getJwtRefreshSecret()
|
||||
expiresIn: await getJwtRefreshLifetime(),
|
||||
secret: await getJwtRefreshSecret()
|
||||
});
|
||||
|
||||
return {
|
||||
|
@ -1,41 +1,34 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Types } from 'mongoose';
|
||||
import { Types } from "mongoose";
|
||||
import {
|
||||
Bot,
|
||||
BotKey,
|
||||
Secret,
|
||||
ISecret,
|
||||
IUser,
|
||||
User,
|
||||
IServiceAccount,
|
||||
ServiceAccount,
|
||||
IServiceTokenData,
|
||||
ServiceTokenData
|
||||
} from '../models';
|
||||
import {
|
||||
generateKeyPair,
|
||||
encryptSymmetric,
|
||||
decryptSymmetric,
|
||||
decryptAsymmetric
|
||||
} from '../utils/crypto';
|
||||
Bot,
|
||||
BotKey,
|
||||
Secret,
|
||||
ISecret,
|
||||
IUser,
|
||||
User,
|
||||
IServiceAccount,
|
||||
ServiceAccount,
|
||||
IServiceTokenData,
|
||||
ServiceTokenData,
|
||||
} from "../models";
|
||||
import {
|
||||
SECRET_SHARED,
|
||||
AUTH_MODE_JWT,
|
||||
AUTH_MODE_SERVICE_ACCOUNT,
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
AUTH_MODE_API_KEY
|
||||
} from '../variables';
|
||||
import { getEncryptionKey } from '../config';
|
||||
import { BotNotFoundError, UnauthorizedRequestError } from '../utils/errors';
|
||||
generateKeyPair,
|
||||
encryptSymmetric,
|
||||
decryptSymmetric,
|
||||
decryptAsymmetric,
|
||||
} from "../utils/crypto";
|
||||
import {
|
||||
validateMembership
|
||||
} from '../helpers/membership';
|
||||
import {
|
||||
validateUserClientForWorkspace
|
||||
} from '../helpers/user';
|
||||
import {
|
||||
validateServiceAccountClientForWorkspace
|
||||
} from '../helpers/serviceAccount';
|
||||
SECRET_SHARED,
|
||||
AUTH_MODE_JWT,
|
||||
AUTH_MODE_SERVICE_ACCOUNT,
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
AUTH_MODE_API_KEY,
|
||||
} from "../variables";
|
||||
import { getEncryptionKey } from "../config";
|
||||
import { BotNotFoundError, UnauthorizedRequestError } from "../utils/errors";
|
||||
import { validateMembership } from "../helpers/membership";
|
||||
import { validateUserClientForWorkspace } from "../helpers/user";
|
||||
import { validateServiceAccountClientForWorkspace } from "../helpers/serviceAccount";
|
||||
|
||||
/**
|
||||
* Validate authenticated clients for bot with id [botId] based
|
||||
@ -46,99 +39,104 @@ import {
|
||||
* @param {Array<'admin' | 'member'>} obj.acceptedRoles - accepted workspace roles
|
||||
*/
|
||||
const validateClientForBot = async ({
|
||||
authData,
|
||||
botId,
|
||||
acceptedRoles
|
||||
authData,
|
||||
botId,
|
||||
acceptedRoles,
|
||||
}: {
|
||||
authData: {
|
||||
authMode: string;
|
||||
authPayload: IUser | IServiceAccount | IServiceTokenData;
|
||||
};
|
||||
botId: Types.ObjectId;
|
||||
acceptedRoles: Array<'admin' | 'member'>;
|
||||
authData: {
|
||||
authMode: string;
|
||||
authPayload: IUser | IServiceAccount | IServiceTokenData;
|
||||
};
|
||||
botId: Types.ObjectId;
|
||||
acceptedRoles: Array<"admin" | "member">;
|
||||
}) => {
|
||||
const bot = await Bot.findById(botId);
|
||||
|
||||
if (!bot) throw BotNotFoundError();
|
||||
|
||||
if (authData.authMode === AUTH_MODE_JWT && authData.authPayload instanceof User) {
|
||||
await validateUserClientForWorkspace({
|
||||
user: authData.authPayload,
|
||||
workspaceId: bot.workspace,
|
||||
acceptedRoles
|
||||
});
|
||||
|
||||
return bot;
|
||||
}
|
||||
const bot = await Bot.findById(botId);
|
||||
|
||||
if (authData.authMode === AUTH_MODE_SERVICE_ACCOUNT && authData.authPayload instanceof ServiceAccount) {
|
||||
await validateServiceAccountClientForWorkspace({
|
||||
serviceAccount: authData.authPayload,
|
||||
workspaceId: bot.workspace
|
||||
});
|
||||
if (!bot) throw BotNotFoundError();
|
||||
|
||||
return bot;
|
||||
}
|
||||
|
||||
if (authData.authMode === AUTH_MODE_SERVICE_TOKEN && authData.authPayload instanceof ServiceTokenData) {
|
||||
throw UnauthorizedRequestError({
|
||||
message: 'Failed service token authorization for bot'
|
||||
});
|
||||
}
|
||||
|
||||
if (authData.authMode === AUTH_MODE_API_KEY && authData.authPayload instanceof User) {
|
||||
await validateUserClientForWorkspace({
|
||||
user: authData.authPayload,
|
||||
workspaceId: bot.workspace,
|
||||
acceptedRoles
|
||||
});
|
||||
|
||||
return bot;
|
||||
}
|
||||
|
||||
throw BotNotFoundError({
|
||||
message: 'Failed client authorization for bot'
|
||||
if (
|
||||
authData.authMode === AUTH_MODE_JWT &&
|
||||
authData.authPayload instanceof User
|
||||
) {
|
||||
await validateUserClientForWorkspace({
|
||||
user: authData.authPayload,
|
||||
workspaceId: bot.workspace,
|
||||
acceptedRoles,
|
||||
});
|
||||
}
|
||||
|
||||
return bot;
|
||||
}
|
||||
|
||||
if (
|
||||
authData.authMode === AUTH_MODE_SERVICE_ACCOUNT &&
|
||||
authData.authPayload instanceof ServiceAccount
|
||||
) {
|
||||
await validateServiceAccountClientForWorkspace({
|
||||
serviceAccount: authData.authPayload,
|
||||
workspaceId: bot.workspace,
|
||||
});
|
||||
|
||||
return bot;
|
||||
}
|
||||
|
||||
if (
|
||||
authData.authMode === AUTH_MODE_SERVICE_TOKEN &&
|
||||
authData.authPayload instanceof ServiceTokenData
|
||||
) {
|
||||
throw UnauthorizedRequestError({
|
||||
message: "Failed service token authorization for bot",
|
||||
});
|
||||
}
|
||||
|
||||
if (
|
||||
authData.authMode === AUTH_MODE_API_KEY &&
|
||||
authData.authPayload instanceof User
|
||||
) {
|
||||
await validateUserClientForWorkspace({
|
||||
user: authData.authPayload,
|
||||
workspaceId: bot.workspace,
|
||||
acceptedRoles,
|
||||
});
|
||||
|
||||
return bot;
|
||||
}
|
||||
|
||||
throw BotNotFoundError({
|
||||
message: "Failed client authorization for bot",
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Create an inactive bot with name [name] for workspace with id [workspaceId]
|
||||
* @param {Object} obj
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.name - name of bot
|
||||
* @param {String} obj.workspaceId - id of workspace that bot belongs to
|
||||
*/
|
||||
const createBot = async ({
|
||||
name,
|
||||
workspaceId,
|
||||
name,
|
||||
workspaceId,
|
||||
}: {
|
||||
name: string;
|
||||
workspaceId: Types.ObjectId;
|
||||
name: string;
|
||||
workspaceId: Types.ObjectId;
|
||||
}) => {
|
||||
let bot;
|
||||
try {
|
||||
const { publicKey, privateKey } = generateKeyPair();
|
||||
const { ciphertext, iv, tag } = encryptSymmetric({
|
||||
plaintext: privateKey,
|
||||
key: getEncryptionKey()
|
||||
});
|
||||
const { publicKey, privateKey } = generateKeyPair();
|
||||
const { ciphertext, iv, tag } = encryptSymmetric({
|
||||
plaintext: privateKey,
|
||||
key: await getEncryptionKey(),
|
||||
});
|
||||
|
||||
bot = await new Bot({
|
||||
name,
|
||||
workspace: workspaceId,
|
||||
isActive: false,
|
||||
publicKey,
|
||||
encryptedPrivateKey: ciphertext,
|
||||
iv,
|
||||
tag
|
||||
}).save();
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to create bot');
|
||||
}
|
||||
|
||||
return bot;
|
||||
}
|
||||
const bot = await new Bot({
|
||||
name,
|
||||
workspace: workspaceId,
|
||||
isActive: false,
|
||||
publicKey,
|
||||
encryptedPrivateKey: ciphertext,
|
||||
iv,
|
||||
tag,
|
||||
}).save();
|
||||
|
||||
return bot;
|
||||
};
|
||||
|
||||
/**
|
||||
* Return decrypted secrets for workspace with id [workspaceId]
|
||||
@ -148,125 +146,105 @@ const createBot = async ({
|
||||
* @param {String} obj.environment - environment
|
||||
*/
|
||||
const getSecretsHelper = async ({
|
||||
workspaceId,
|
||||
environment
|
||||
workspaceId,
|
||||
environment,
|
||||
}: {
|
||||
workspaceId: Types.ObjectId;
|
||||
environment: string;
|
||||
workspaceId: Types.ObjectId;
|
||||
environment: string;
|
||||
}) => {
|
||||
const content = {} as any;
|
||||
try {
|
||||
const key = await getKey({ workspaceId });
|
||||
const secrets = await Secret.find({
|
||||
workspace: workspaceId,
|
||||
environment,
|
||||
type: SECRET_SHARED
|
||||
});
|
||||
|
||||
secrets.forEach((secret: ISecret) => {
|
||||
const secretKey = decryptSymmetric({
|
||||
ciphertext: secret.secretKeyCiphertext,
|
||||
iv: secret.secretKeyIV,
|
||||
tag: secret.secretKeyTag,
|
||||
key
|
||||
});
|
||||
const content = {} as any;
|
||||
const key = await getKey({ workspaceId: workspaceId.toString() });
|
||||
const secrets = await Secret.find({
|
||||
workspace: workspaceId,
|
||||
environment,
|
||||
type: SECRET_SHARED,
|
||||
});
|
||||
|
||||
const secretValue = decryptSymmetric({
|
||||
ciphertext: secret.secretValueCiphertext,
|
||||
iv: secret.secretValueIV,
|
||||
tag: secret.secretValueTag,
|
||||
key
|
||||
});
|
||||
secrets.forEach((secret: ISecret) => {
|
||||
const secretKey = decryptSymmetric({
|
||||
ciphertext: secret.secretKeyCiphertext,
|
||||
iv: secret.secretKeyIV,
|
||||
tag: secret.secretKeyTag,
|
||||
key,
|
||||
});
|
||||
|
||||
content[secretKey] = secretValue;
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get secrets');
|
||||
}
|
||||
const secretValue = decryptSymmetric({
|
||||
ciphertext: secret.secretValueCiphertext,
|
||||
iv: secret.secretValueIV,
|
||||
tag: secret.secretValueTag,
|
||||
key,
|
||||
});
|
||||
|
||||
return content;
|
||||
}
|
||||
content[secretKey] = secretValue;
|
||||
});
|
||||
|
||||
return content;
|
||||
};
|
||||
|
||||
/**
|
||||
* Return bot's copy of the workspace key for workspace
|
||||
* Return bot's copy of the workspace key for workspace
|
||||
* with id [workspaceId]
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.workspaceId - id of workspace
|
||||
* @returns {String} key - decrypted workspace key
|
||||
*/
|
||||
const getKey = async ({ workspaceId }: { workspaceId: Types.ObjectId }) => {
|
||||
let key;
|
||||
try {
|
||||
const botKey = await BotKey.findOne({
|
||||
workspace: workspaceId
|
||||
}).populate<{ sender: IUser }>('sender', 'publicKey');
|
||||
|
||||
if (!botKey) throw new Error('Failed to find bot key');
|
||||
|
||||
const bot = await Bot.findOne({
|
||||
workspace: workspaceId
|
||||
}).select('+encryptedPrivateKey +iv +tag');
|
||||
|
||||
if (!bot) throw new Error('Failed to find bot');
|
||||
if (!bot.isActive) throw new Error('Bot is not active');
|
||||
|
||||
const privateKeyBot = decryptSymmetric({
|
||||
ciphertext: bot.encryptedPrivateKey,
|
||||
iv: bot.iv,
|
||||
tag: bot.tag,
|
||||
key: getEncryptionKey()
|
||||
});
|
||||
|
||||
key = decryptAsymmetric({
|
||||
ciphertext: botKey.encryptedKey,
|
||||
nonce: botKey.nonce,
|
||||
publicKey: botKey.sender.publicKey as string,
|
||||
privateKey: privateKeyBot
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get workspace key');
|
||||
}
|
||||
|
||||
return key;
|
||||
}
|
||||
const getKey = async ({ workspaceId }: { workspaceId: string }) => {
|
||||
const botKey = await BotKey.findOne({
|
||||
workspace: workspaceId,
|
||||
}).populate<{ sender: IUser }>("sender", "publicKey");
|
||||
|
||||
if (!botKey) throw new Error("Failed to find bot key");
|
||||
|
||||
const bot = await Bot.findOne({
|
||||
workspace: workspaceId,
|
||||
}).select("+encryptedPrivateKey +iv +tag");
|
||||
|
||||
if (!bot) throw new Error("Failed to find bot");
|
||||
if (!bot.isActive) throw new Error("Bot is not active");
|
||||
|
||||
const privateKeyBot = decryptSymmetric({
|
||||
ciphertext: bot.encryptedPrivateKey,
|
||||
iv: bot.iv,
|
||||
tag: bot.tag,
|
||||
key: await getEncryptionKey(),
|
||||
});
|
||||
|
||||
const key = decryptAsymmetric({
|
||||
ciphertext: botKey.encryptedKey,
|
||||
nonce: botKey.nonce,
|
||||
publicKey: botKey.sender.publicKey as string,
|
||||
privateKey: privateKeyBot,
|
||||
});
|
||||
|
||||
return key;
|
||||
};
|
||||
|
||||
/**
|
||||
* Return symmetrically encrypted [plaintext] using the
|
||||
* key for workspace with id [workspaceId]
|
||||
* key for workspace with id [workspaceId]
|
||||
* @param {Object} obj1
|
||||
* @param {String} obj1.workspaceId - id of workspace
|
||||
* @param {String} obj1.plaintext - plaintext to encrypt
|
||||
*/
|
||||
const encryptSymmetricHelper = async ({
|
||||
workspaceId,
|
||||
plaintext
|
||||
workspaceId,
|
||||
plaintext,
|
||||
}: {
|
||||
workspaceId: Types.ObjectId;
|
||||
plaintext: string;
|
||||
workspaceId: Types.ObjectId;
|
||||
plaintext: string;
|
||||
}) => {
|
||||
|
||||
try {
|
||||
const key = await getKey({ workspaceId });
|
||||
const { ciphertext, iv, tag } = encryptSymmetric({
|
||||
plaintext,
|
||||
key
|
||||
});
|
||||
|
||||
return ({
|
||||
ciphertext,
|
||||
iv,
|
||||
tag
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to perform symmetric encryption with bot');
|
||||
}
|
||||
}
|
||||
const key = await getKey({ workspaceId: workspaceId.toString() });
|
||||
const { ciphertext, iv, tag } = encryptSymmetric({
|
||||
plaintext,
|
||||
key,
|
||||
});
|
||||
|
||||
return {
|
||||
ciphertext,
|
||||
iv,
|
||||
tag,
|
||||
};
|
||||
};
|
||||
/**
|
||||
* Return symmetrically decrypted [ciphertext] using the
|
||||
* key for workspace with id [workspaceId]
|
||||
@ -277,40 +255,31 @@ const encryptSymmetricHelper = async ({
|
||||
* @param {String} obj.tag - tag
|
||||
*/
|
||||
const decryptSymmetricHelper = async ({
|
||||
workspaceId,
|
||||
workspaceId,
|
||||
ciphertext,
|
||||
iv,
|
||||
tag,
|
||||
}: {
|
||||
workspaceId: Types.ObjectId;
|
||||
ciphertext: string;
|
||||
iv: string;
|
||||
tag: string;
|
||||
}) => {
|
||||
const key = await getKey({ workspaceId: workspaceId.toString() });
|
||||
const plaintext = decryptSymmetric({
|
||||
ciphertext,
|
||||
iv,
|
||||
tag
|
||||
}: {
|
||||
workspaceId: Types.ObjectId;
|
||||
ciphertext: string;
|
||||
iv: string;
|
||||
tag: string;
|
||||
}) => {
|
||||
let plaintext;
|
||||
try {
|
||||
const key = await getKey({ workspaceId });
|
||||
const plaintext = decryptSymmetric({
|
||||
ciphertext,
|
||||
iv,
|
||||
tag,
|
||||
key
|
||||
});
|
||||
|
||||
return plaintext;
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to perform symmetric decryption with bot');
|
||||
}
|
||||
|
||||
return plaintext;
|
||||
}
|
||||
tag,
|
||||
key,
|
||||
});
|
||||
|
||||
return plaintext;
|
||||
};
|
||||
|
||||
export {
|
||||
validateClientForBot,
|
||||
createBot,
|
||||
getSecretsHelper,
|
||||
encryptSymmetricHelper,
|
||||
decryptSymmetricHelper
|
||||
}
|
||||
validateClientForBot,
|
||||
createBot,
|
||||
getSecretsHelper,
|
||||
encryptSymmetricHelper,
|
||||
decryptSymmetricHelper,
|
||||
};
|
||||
|
@ -20,12 +20,12 @@ const initDatabaseHelper = async ({
|
||||
// allow empty strings to pass the required validator
|
||||
mongoose.Schema.Types.String.checkRequired(v => typeof v === 'string');
|
||||
|
||||
getLogger("database").info("Database connection established");
|
||||
(await getLogger("database")).info("Database connection established");
|
||||
|
||||
await EESecretService.initSecretVersioning();
|
||||
await SecretService.initSecretBlindIndexDataHelper();
|
||||
} catch (err) {
|
||||
getLogger("database").error(`Unable to establish Database connection due to the error.\n${err}`);
|
||||
(await getLogger("database")).error(`Unable to establish Database connection due to the error.\n${err}`);
|
||||
}
|
||||
|
||||
return mongoose.connection;
|
||||
|
@@ -1,14 +1,13 @@
import { Types } from 'mongoose';
import * as Sentry from '@sentry/node';
import { Bot, IBot } from '../models';
import { EVENT_PUSH_SECRETS } from '../variables';
import { IntegrationService } from '../services';
import { Types } from "mongoose";
import { Bot, IBot } from "../models";
import { EVENT_PUSH_SECRETS } from "../variables";
import { IntegrationService } from "../services";

interface Event {
name: string;
workspaceId: Types.ObjectId;
environment?: string;
payload: any;
name: string;
workspaceId: Types.ObjectId;
environment?: string;
payload: any;
}

/**
@@ -19,39 +18,25 @@ interface Event {
* @param {String} obj.event.workspaceId - id of workspace that event is part of
* @param {Object} obj.event.payload - payload of event (depends on event)
*/
const handleEventHelper = async ({
event
}: {
event: Event;
}) => {
const {
workspaceId,
environment
} = event;

// TODO: modularize bot check into separate function
const bot = await Bot.findOne({
workspace: workspaceId,
isActive: true
});

if (!bot) return;

try {
switch (event.name) {
case EVENT_PUSH_SECRETS:
IntegrationService.syncIntegrations({
workspaceId,
environment
});
break;
}
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
}
}
const handleEventHelper = async ({ event }: { event: Event }) => {
const { workspaceId, environment } = event;

export {
handleEventHelper
}
// TODO: modularize bot check into separate function
const bot = await Bot.findOne({
workspace: workspaceId,
isActive: true,
});

if (!bot) return;

switch (event.name) {
case EVENT_PUSH_SECRETS:
IntegrationService.syncIntegrations({
workspaceId,
environment,
});
break;
}
};

export { handleEventHelper };
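A brief usage sketch for the event handler above (not part of the diff); the import paths are assumptions, and the payload is left empty for illustration.

import { Types } from "mongoose";
// assumed paths to the modules referenced in this hunk
import { EVENT_PUSH_SECRETS } from "../variables";
import { handleEventHelper } from "../helpers/event";

// trigger integration syncing for a workspace/environment after secrets are pushed
const onSecretsPushed = async (workspaceId: Types.ObjectId, environment: string) =>
  handleEventHelper({
    event: {
      name: EVENT_PUSH_SECRETS,
      workspaceId,
      environment,
      payload: {}, // payload shape depends on the event
    },
  });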
@@ -256,7 +256,7 @@ const syncIntegrationsHelper = async ({
integration,
integrationAuth,
secrets,
accessId: access.accessId,
accessId: access.accessId === undefined ? null : access.accessId,
accessToken: access.accessToken
});
}
@@ -482,4 +482,4 @@ export {
getIntegrationAuthAccessHelper,
setIntegrationAuthRefreshHelper,
setIntegrationAuthAccessHelper
}
}
@@ -1,4 +1,3 @@
import * as Sentry from '@sentry/node';
import { Key, IKey } from '../models';

interface Key {
@@ -27,36 +26,30 @@ const pushKeys = async ({
workspaceId: string;
keys: Key[];
}): Promise<void> => {
try {
// filter out already-inserted keys
const keysSet = new Set(
(
await Key.find(
{
workspace: workspaceId
},
'receiver'
)
).map((k: IKey) => k.receiver.toString())
);
// filter out already-inserted keys
const keysSet = new Set(
(
await Key.find(
{
workspace: workspaceId
},
'receiver'
)
).map((k: IKey) => k.receiver.toString())
);

keys = keys.filter((key) => !keysSet.has(key.userId));
keys = keys.filter((key) => !keysSet.has(key.userId));

// add new shared keys only
await Key.insertMany(
keys.map((k) => ({
encryptedKey: k.encryptedKey,
nonce: k.nonce,
sender: userId,
receiver: k.userId,
workspace: workspaceId
}))
);
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed to push access keys');
}
// add new shared keys only
await Key.insertMany(
keys.map((k) => ({
encryptedKey: k.encryptedKey,
nonce: k.nonce,
sender: userId,
receiver: k.userId,
workspace: workspaceId
}))
);
};

export { pushKeys };
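A short usage sketch for pushKeys (not part of the diff). The field names mirror the insertMany mapping above; the userId parameter and the import path are assumptions, and the key material is a placeholder.

// assumed path to the key helper module shown in this hunk
import { pushKeys } from "../helpers/key";

// share an encrypted workspace key from a sender to a single receiver
const shareWorkspaceKey = async (
  senderUserId: string,
  receiverUserId: string,
  workspaceId: string
) =>
  pushKeys({
    userId: senderUserId, // stored as the key's sender
    workspaceId,
    keys: [
      {
        userId: receiverUserId, // stored as the key's receiver
        encryptedKey: "<base64-encrypted-workspace-key>", // placeholder
        nonce: "<base64-nonce>", // placeholder
      },
    ],
  });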
@@ -1,4 +1,3 @@
import * as Sentry from '@sentry/node';
import { Types } from 'mongoose';
import {
MembershipOrg,
@@ -144,15 +143,7 @@ const validateMembershipOrg = async ({
* @return {Object} membershipOrg - membership
*/
const findMembershipOrg = (queryObj: any) => {
let membershipOrg;
try {
membershipOrg = MembershipOrg.findOne(queryObj);
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed to find organization membership');
}

const membershipOrg = MembershipOrg.findOne(queryObj);
return membershipOrg;
};

@ -175,33 +166,27 @@ const addMembershipsOrg = async ({
|
||||
roles: string[];
|
||||
statuses: string[];
|
||||
}) => {
|
||||
try {
|
||||
const operations = userIds.map((userId, idx) => {
|
||||
return {
|
||||
updateOne: {
|
||||
filter: {
|
||||
user: userId,
|
||||
organization: organizationId,
|
||||
role: roles[idx],
|
||||
status: statuses[idx]
|
||||
},
|
||||
update: {
|
||||
user: userId,
|
||||
organization: organizationId,
|
||||
role: roles[idx],
|
||||
status: statuses[idx]
|
||||
},
|
||||
upsert: true
|
||||
}
|
||||
};
|
||||
});
|
||||
const operations = userIds.map((userId, idx) => {
|
||||
return {
|
||||
updateOne: {
|
||||
filter: {
|
||||
user: userId,
|
||||
organization: organizationId,
|
||||
role: roles[idx],
|
||||
status: statuses[idx]
|
||||
},
|
||||
update: {
|
||||
user: userId,
|
||||
organization: organizationId,
|
||||
role: roles[idx],
|
||||
status: statuses[idx]
|
||||
},
|
||||
upsert: true
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
await MembershipOrg.bulkWrite(operations as any);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to add users to organization');
|
||||
}
|
||||
await MembershipOrg.bulkWrite(operations as any);
|
||||
};
|
||||
|
||||
/**
|
||||
@ -214,43 +199,36 @@ const deleteMembershipOrg = async ({
|
||||
}: {
|
||||
membershipOrgId: string;
|
||||
}) => {
|
||||
let deletedMembershipOrg;
|
||||
try {
|
||||
deletedMembershipOrg = await MembershipOrg.findOneAndDelete({
|
||||
_id: membershipOrgId
|
||||
});
|
||||
const deletedMembershipOrg = await MembershipOrg.findOneAndDelete({
|
||||
_id: membershipOrgId
|
||||
});
|
||||
|
||||
if (!deletedMembershipOrg) throw new Error('Failed to delete organization membership');
|
||||
if (!deletedMembershipOrg) throw new Error('Failed to delete organization membership');
|
||||
|
||||
// delete keys associated with organization membership
|
||||
if (deletedMembershipOrg?.user) {
|
||||
// case: organization membership had a registered user
|
||||
// delete keys associated with organization membership
|
||||
if (deletedMembershipOrg?.user) {
|
||||
// case: organization membership had a registered user
|
||||
|
||||
const workspaces = (
|
||||
await Workspace.find({
|
||||
organization: deletedMembershipOrg.organization
|
||||
})
|
||||
).map((w) => w._id.toString());
|
||||
const workspaces = (
|
||||
await Workspace.find({
|
||||
organization: deletedMembershipOrg.organization
|
||||
})
|
||||
).map((w) => w._id.toString());
|
||||
|
||||
await Membership.deleteMany({
|
||||
user: deletedMembershipOrg.user,
|
||||
workspace: {
|
||||
$in: workspaces
|
||||
}
|
||||
});
|
||||
await Membership.deleteMany({
|
||||
user: deletedMembershipOrg.user,
|
||||
workspace: {
|
||||
$in: workspaces
|
||||
}
|
||||
});
|
||||
|
||||
await Key.deleteMany({
|
||||
receiver: deletedMembershipOrg.user,
|
||||
workspace: {
|
||||
$in: workspaces
|
||||
}
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to delete organization membership');
|
||||
}
|
||||
await Key.deleteMany({
|
||||
receiver: deletedMembershipOrg.user,
|
||||
workspace: {
|
||||
$in: workspaces
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return deletedMembershipOrg;
|
||||
};
|
||||
|
@@ -25,7 +25,7 @@ const sendMail = async ({
recipients: string[];
substitutions: any;
}) => {
if (getSmtpConfigured()) {
if (await getSmtpConfigured()) {
try {
const html = fs.readFileSync(
path.resolve(__dirname, '../templates/' + template),
@@ -35,7 +35,7 @@ const sendMail = async ({
const htmlToSend = temp(substitutions);

await smtpTransporter.sendMail({
from: `"${getSmtpFromName()}" <${getSmtpFromAddress()}>`,
from: `"${await getSmtpFromName()}" <${await getSmtpFromAddress()}>`,
to: recipients.join(', '),
subject: subjectLine,
html: htmlToSend
@ -1,39 +1,44 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import Stripe from 'stripe';
|
||||
import { Types } from 'mongoose';
|
||||
import Stripe from "stripe";
|
||||
import { Types } from "mongoose";
|
||||
import {
|
||||
IUser,
|
||||
User,
|
||||
IServiceAccount,
|
||||
ServiceAccount,
|
||||
IServiceTokenData,
|
||||
ServiceTokenData
|
||||
} from '../models';
|
||||
import { Organization, MembershipOrg } from '../models';
|
||||
import {
|
||||
ACCEPTED,
|
||||
AUTH_MODE_JWT,
|
||||
AUTH_MODE_SERVICE_ACCOUNT,
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
AUTH_MODE_API_KEY,
|
||||
OWNER
|
||||
} from '../variables';
|
||||
import {
|
||||
getStripeSecretKey,
|
||||
getStripeProductPro,
|
||||
getStripeProductTeam,
|
||||
getStripeProductStarter
|
||||
IUser,
|
||||
User,
|
||||
IServiceAccount,
|
||||
ServiceAccount,
|
||||
IServiceTokenData,
|
||||
ServiceTokenData,
|
||||
} from "../models";
|
||||
import { Organization, MembershipOrg } from "../models";
|
||||
import {
|
||||
ACCEPTED,
|
||||
AUTH_MODE_JWT,
|
||||
AUTH_MODE_SERVICE_ACCOUNT,
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
AUTH_MODE_API_KEY,
|
||||
OWNER,
|
||||
} from "../variables";
|
||||
import {
|
||||
getStripeSecretKey,
|
||||
getStripeProductPro,
|
||||
getStripeProductTeam,
|
||||
getStripeProductStarter,
|
||||
} from "../config";
|
||||
import {
|
||||
UnauthorizedRequestError,
|
||||
OrganizationNotFoundError,
|
||||
} from "../utils/errors";
|
||||
import { validateUserClientForOrganization } from "../helpers/user";
|
||||
import { validateServiceAccountClientForOrganization } from "../helpers/serviceAccount";
|
||||
import {
|
||||
EELicenseService
|
||||
} from '../ee/services';
|
||||
import {
|
||||
getLicenseServerUrl
|
||||
} from '../config';
|
||||
import {
|
||||
UnauthorizedRequestError,
|
||||
OrganizationNotFoundError
|
||||
} from '../utils/errors';
|
||||
import {
|
||||
validateUserClientForOrganization
|
||||
} from '../helpers/user';
|
||||
import {
|
||||
validateServiceAccountClientForOrganization
|
||||
} from '../helpers/serviceAccount';
|
||||
licenseServerKeyRequest,
|
||||
licenseKeyRequest
|
||||
} from '../config/request';
|
||||
|
||||
/**
|
||||
* Validate accepted clients for organization with id [organizationId]
|
||||
@ -42,69 +47,80 @@ import {
|
||||
* @param {Types.ObjectId} obj.organizationId - id of organization to validate against
|
||||
*/
|
||||
const validateClientForOrganization = async ({
|
||||
authData,
|
||||
organizationId,
|
||||
acceptedRoles,
|
||||
acceptedStatuses
|
||||
authData,
|
||||
organizationId,
|
||||
acceptedRoles,
|
||||
acceptedStatuses,
|
||||
}: {
|
||||
authData: {
|
||||
authMode: string;
|
||||
authPayload: IUser | IServiceAccount | IServiceTokenData;
|
||||
},
|
||||
organizationId: Types.ObjectId;
|
||||
acceptedRoles: Array<'owner' | 'admin' | 'member'>;
|
||||
acceptedStatuses: Array<'invited' | 'accepted'>;
|
||||
authData: {
|
||||
authMode: string;
|
||||
authPayload: IUser | IServiceAccount | IServiceTokenData;
|
||||
};
|
||||
organizationId: Types.ObjectId;
|
||||
acceptedRoles: Array<"owner" | "admin" | "member">;
|
||||
acceptedStatuses: Array<"invited" | "accepted">;
|
||||
}) => {
|
||||
|
||||
const organization = await Organization.findById(organizationId);
|
||||
|
||||
if (!organization) {
|
||||
throw OrganizationNotFoundError({
|
||||
message: 'Failed to find organization'
|
||||
});
|
||||
}
|
||||
|
||||
if (authData.authMode === AUTH_MODE_JWT && authData.authPayload instanceof User) {
|
||||
const membershipOrg = await validateUserClientForOrganization({
|
||||
user: authData.authPayload,
|
||||
organization,
|
||||
acceptedRoles,
|
||||
acceptedStatuses
|
||||
});
|
||||
|
||||
return ({ organization, membershipOrg });
|
||||
}
|
||||
const organization = await Organization.findById(organizationId);
|
||||
|
||||
if (authData.authMode === AUTH_MODE_SERVICE_ACCOUNT && authData.authPayload instanceof ServiceAccount) {
|
||||
await validateServiceAccountClientForOrganization({
|
||||
serviceAccount: authData.authPayload,
|
||||
organization
|
||||
});
|
||||
|
||||
return ({ organization });
|
||||
}
|
||||
if (!organization) {
|
||||
throw OrganizationNotFoundError({
|
||||
message: "Failed to find organization",
|
||||
});
|
||||
}
|
||||
|
||||
if (authData.authMode === AUTH_MODE_SERVICE_TOKEN && authData.authPayload instanceof ServiceTokenData) {
|
||||
throw UnauthorizedRequestError({
|
||||
message: 'Failed service token authorization for organization'
|
||||
});
|
||||
}
|
||||
if (
|
||||
authData.authMode === AUTH_MODE_JWT &&
|
||||
authData.authPayload instanceof User
|
||||
) {
|
||||
const membershipOrg = await validateUserClientForOrganization({
|
||||
user: authData.authPayload,
|
||||
organization,
|
||||
acceptedRoles,
|
||||
acceptedStatuses,
|
||||
});
|
||||
|
||||
if (authData.authMode === AUTH_MODE_API_KEY && authData.authPayload instanceof User) {
|
||||
const membershipOrg = await validateUserClientForOrganization({
|
||||
user: authData.authPayload,
|
||||
organization,
|
||||
acceptedRoles,
|
||||
acceptedStatuses
|
||||
});
|
||||
|
||||
return ({ organization, membershipOrg });
|
||||
}
|
||||
|
||||
throw UnauthorizedRequestError({
|
||||
message: 'Failed client authorization for organization'
|
||||
});
|
||||
}
|
||||
return { organization, membershipOrg };
|
||||
}
|
||||
|
||||
if (
|
||||
authData.authMode === AUTH_MODE_SERVICE_ACCOUNT &&
|
||||
authData.authPayload instanceof ServiceAccount
|
||||
) {
|
||||
await validateServiceAccountClientForOrganization({
|
||||
serviceAccount: authData.authPayload,
|
||||
organization,
|
||||
});
|
||||
|
||||
return { organization };
|
||||
}
|
||||
|
||||
if (
|
||||
authData.authMode === AUTH_MODE_SERVICE_TOKEN &&
|
||||
authData.authPayload instanceof ServiceTokenData
|
||||
) {
|
||||
throw UnauthorizedRequestError({
|
||||
message: "Failed service token authorization for organization",
|
||||
});
|
||||
}
|
||||
|
||||
if (
|
||||
authData.authMode === AUTH_MODE_API_KEY &&
|
||||
authData.authPayload instanceof User
|
||||
) {
|
||||
const membershipOrg = await validateUserClientForOrganization({
|
||||
user: authData.authPayload,
|
||||
organization,
|
||||
acceptedRoles,
|
||||
acceptedStatuses,
|
||||
});
|
||||
|
||||
return { organization, membershipOrg };
|
||||
}
|
||||
|
||||
throw UnauthorizedRequestError({
|
||||
message: "Failed client authorization for organization",
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Create an organization with name [name]
|
||||
@ -114,43 +130,37 @@ const validateClientForOrganization = async ({
|
||||
* @param {Object} organization - new organization
|
||||
*/
|
||||
const createOrganization = async ({
|
||||
name,
|
||||
email
|
||||
name,
|
||||
email,
|
||||
}: {
|
||||
name: string;
|
||||
email: string;
|
||||
name: string;
|
||||
email: string;
|
||||
}) => {
|
||||
let organization;
|
||||
try {
|
||||
// register stripe account
|
||||
const stripe = new Stripe(getStripeSecretKey(), {
|
||||
apiVersion: '2022-08-01'
|
||||
});
|
||||
let organization;
|
||||
// register stripe account
|
||||
const stripe = new Stripe(await getStripeSecretKey(), {
|
||||
apiVersion: "2022-08-01",
|
||||
});
|
||||
|
||||
if (getStripeSecretKey()) {
|
||||
const customer = await stripe.customers.create({
|
||||
email,
|
||||
description: name
|
||||
});
|
||||
if (await getStripeSecretKey()) {
|
||||
const customer = await stripe.customers.create({
|
||||
email,
|
||||
description: name,
|
||||
});
|
||||
|
||||
organization = await new Organization({
|
||||
name,
|
||||
customerId: customer.id
|
||||
}).save();
|
||||
} else {
|
||||
organization = await new Organization({
|
||||
name
|
||||
}).save();
|
||||
}
|
||||
organization = await new Organization({
|
||||
name,
|
||||
customerId: customer.id,
|
||||
}).save();
|
||||
} else {
|
||||
organization = await new Organization({
|
||||
name,
|
||||
}).save();
|
||||
}
|
||||
|
||||
await initSubscriptionOrg({ organizationId: organization._id });
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email });
|
||||
Sentry.captureException(err);
|
||||
throw new Error(`Failed to create organization [err=${err}]`);
|
||||
}
|
||||
await initSubscriptionOrg({ organizationId: organization._id });
|
||||
|
||||
return organization;
|
||||
return organization;
|
||||
};
|
||||
|
||||
/**
|
||||
@ -162,57 +172,52 @@ const createOrganization = async ({
|
||||
* @return {Subscription} obj.subscription - new subscription
|
||||
*/
|
||||
const initSubscriptionOrg = async ({
|
||||
organizationId
|
||||
organizationId,
|
||||
}: {
|
||||
organizationId: Types.ObjectId;
|
||||
organizationId: Types.ObjectId;
|
||||
}) => {
|
||||
let stripeSubscription;
|
||||
let subscription;
|
||||
try {
|
||||
// find organization
|
||||
const organization = await Organization.findOne({
|
||||
_id: organizationId
|
||||
});
|
||||
let stripeSubscription;
|
||||
let subscription;
|
||||
|
||||
if (organization) {
|
||||
if (organization.customerId) {
|
||||
// initialize starter subscription with quantity of 0
|
||||
const stripe = new Stripe(getStripeSecretKey(), {
|
||||
apiVersion: '2022-08-01'
|
||||
});
|
||||
// find organization
|
||||
const organization = await Organization.findOne({
|
||||
_id: organizationId,
|
||||
});
|
||||
|
||||
const productToPriceMap = {
|
||||
starter: getStripeProductStarter(),
|
||||
team: getStripeProductTeam(),
|
||||
pro: getStripeProductPro()
|
||||
};
|
||||
if (organization) {
|
||||
if (organization.customerId) {
|
||||
// initialize starter subscription with quantity of 0
|
||||
const stripe = new Stripe(await getStripeSecretKey(), {
|
||||
apiVersion: "2022-08-01",
|
||||
});
|
||||
|
||||
stripeSubscription = await stripe.subscriptions.create({
|
||||
customer: organization.customerId,
|
||||
items: [
|
||||
{
|
||||
price: productToPriceMap['starter'],
|
||||
quantity: 1
|
||||
}
|
||||
],
|
||||
payment_behavior: 'default_incomplete',
|
||||
proration_behavior: 'none',
|
||||
expand: ['latest_invoice.payment_intent']
|
||||
});
|
||||
}
|
||||
} else {
|
||||
throw new Error('Failed to initialize free organization subscription');
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to initialize free organization subscription');
|
||||
}
|
||||
const productToPriceMap = {
|
||||
starter: await getStripeProductStarter(),
|
||||
team: await getStripeProductTeam(),
|
||||
pro: await getStripeProductPro(),
|
||||
};
|
||||
|
||||
return {
|
||||
stripeSubscription,
|
||||
subscription
|
||||
};
|
||||
stripeSubscription = await stripe.subscriptions.create({
|
||||
customer: organization.customerId,
|
||||
items: [
|
||||
{
|
||||
price: productToPriceMap["starter"],
|
||||
quantity: 1,
|
||||
},
|
||||
],
|
||||
payment_behavior: "default_incomplete",
|
||||
proration_behavior: "none",
|
||||
expand: ["latest_invoice.payment_intent"],
|
||||
});
|
||||
}
|
||||
} else {
|
||||
throw new Error("Failed to initialize free organization subscription");
|
||||
}
|
||||
|
||||
return {
|
||||
stripeSubscription,
|
||||
subscription,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
@ -222,54 +227,54 @@ const initSubscriptionOrg = async ({
|
||||
* @param {Number} obj.organizationId - id of subscription's organization
|
||||
*/
|
||||
const updateSubscriptionOrgQuantity = async ({
|
||||
organizationId
|
||||
organizationId,
|
||||
}: {
|
||||
organizationId: string;
|
||||
organizationId: string;
|
||||
}) => {
|
||||
let stripeSubscription;
|
||||
try {
|
||||
// find organization
|
||||
const organization = await Organization.findOne({
|
||||
_id: organizationId
|
||||
});
|
||||
let stripeSubscription;
|
||||
// find organization
|
||||
const organization = await Organization.findOne({
|
||||
_id: organizationId,
|
||||
});
|
||||
|
||||
if (organization && organization.customerId) {
|
||||
const quantity = await MembershipOrg.countDocuments({
|
||||
organization: organizationId,
|
||||
status: ACCEPTED
|
||||
});
|
||||
if (organization && organization.customerId) {
|
||||
if (EELicenseService.instanceType === 'cloud') {
|
||||
// instance of Infisical is a cloud instance
|
||||
const quantity = await MembershipOrg.countDocuments({
|
||||
organization: new Types.ObjectId(organizationId),
|
||||
status: ACCEPTED,
|
||||
});
|
||||
|
||||
await licenseServerKeyRequest.patch(
|
||||
`${await getLicenseServerUrl()}/api/license-server/v1/customers/${organization.customerId}/cloud-plan`,
|
||||
{
|
||||
quantity
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
if (EELicenseService.instanceType === 'enterprise-self-hosted') {
|
||||
// instance of Infisical is an enterprise self-hosted instance
|
||||
|
||||
const usedSeats = await MembershipOrg.countDocuments({
|
||||
status: ACCEPTED
|
||||
});
|
||||
|
||||
const stripe = new Stripe(getStripeSecretKey(), {
|
||||
apiVersion: '2022-08-01'
|
||||
});
|
||||
await licenseKeyRequest.patch(
|
||||
`${await getLicenseServerUrl()}/api/license/v1/license`,
|
||||
{
|
||||
usedSeats
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const subscription = (
|
||||
await stripe.subscriptions.list({
|
||||
customer: organization.customerId
|
||||
})
|
||||
).data[0];
|
||||
|
||||
stripeSubscription = await stripe.subscriptions.update(subscription.id, {
|
||||
items: [
|
||||
{
|
||||
id: subscription.items.data[0].id,
|
||||
price: subscription.items.data[0].price.id,
|
||||
quantity
|
||||
}
|
||||
]
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
}
|
||||
|
||||
return stripeSubscription;
|
||||
return stripeSubscription;
|
||||
};
|
||||
|
||||
export {
|
||||
validateClientForOrganization,
|
||||
createOrganization,
|
||||
initSubscriptionOrg,
|
||||
updateSubscriptionOrgQuantity
|
||||
};
|
||||
validateClientForOrganization,
|
||||
createOrganization,
|
||||
initSubscriptionOrg,
|
||||
updateSubscriptionOrgQuantity,
|
||||
};
|
||||
|
File diff suppressed because it is too large
@ -233,27 +233,29 @@ const initSecretBlindIndexDataHelper = async () => {
|
||||
}
|
||||
});
|
||||
|
||||
const secretBlindIndexDataToInsert = workspaceIdsToBlindIndex.map((workspaceToBlindIndex) => {
|
||||
const salt = crypto.randomBytes(16).toString('base64');
|
||||
const secretBlindIndexDataToInsert = await Promise.all(
|
||||
workspaceIdsToBlindIndex.map(async (workspaceToBlindIndex) => {
|
||||
const salt = crypto.randomBytes(16).toString('base64');
|
||||
|
||||
const {
|
||||
ciphertext: encryptedSaltCiphertext,
|
||||
iv: saltIV,
|
||||
tag: saltTag
|
||||
} = encryptSymmetric({
|
||||
plaintext: salt,
|
||||
key: getEncryptionKey()
|
||||
});
|
||||
const {
|
||||
ciphertext: encryptedSaltCiphertext,
|
||||
iv: saltIV,
|
||||
tag: saltTag
|
||||
} = encryptSymmetric({
|
||||
plaintext: salt,
|
||||
key: await getEncryptionKey()
|
||||
});
|
||||
|
||||
const secretBlindIndexData = new SecretBlindIndexData({
|
||||
workspace: workspaceToBlindIndex,
|
||||
encryptedSaltCiphertext,
|
||||
saltIV,
|
||||
saltTag
|
||||
const secretBlindIndexData = new SecretBlindIndexData({
|
||||
workspace: workspaceToBlindIndex,
|
||||
encryptedSaltCiphertext,
|
||||
saltIV,
|
||||
saltTag
|
||||
})
|
||||
|
||||
return secretBlindIndexData;
|
||||
})
|
||||
|
||||
return secretBlindIndexData;
|
||||
});
|
||||
);
|
||||
|
||||
if (secretBlindIndexDataToInsert.length > 0) {
|
||||
await SecretBlindIndexData.insertMany(secretBlindIndexDataToInsert);
|
||||
@ -280,7 +282,7 @@ const createSecretBlindIndexDataHelper = async ({
|
||||
tag: saltTag
|
||||
} = encryptSymmetric({
|
||||
plaintext: salt,
|
||||
key: getEncryptionKey()
|
||||
key: await getEncryptionKey()
|
||||
});
|
||||
|
||||
const secretBlindIndexData = await new SecretBlindIndexData({
|
||||
@ -316,7 +318,7 @@ const getSecretBlindIndexSaltHelper = async ({
|
||||
ciphertext: secretBlindIndexData.encryptedSaltCiphertext,
|
||||
iv: secretBlindIndexData.saltIV,
|
||||
tag: secretBlindIndexData.saltTag,
|
||||
key: getEncryptionKey()
|
||||
key: await getEncryptionKey()
|
||||
});
|
||||
|
||||
return salt;
|
||||
@ -378,7 +380,7 @@ const generateSecretBlindIndexHelper = async ({
|
||||
ciphertext: secretBlindIndexData.encryptedSaltCiphertext,
|
||||
iv: secretBlindIndexData.saltIV,
|
||||
tag: secretBlindIndexData.saltTag,
|
||||
key: getEncryptionKey()
|
||||
key: await getEncryptionKey()
|
||||
});
|
||||
|
||||
const secretBlindIndex = await generateSecretBlindIndexWithSaltHelper({
|
||||
@ -508,7 +510,7 @@ const createSecretHelper = async ({
|
||||
workspaceId
|
||||
});
|
||||
|
||||
const postHogClient = TelemetryService.getPostHogClient();
|
||||
const postHogClient = await TelemetryService.getPostHogClient();
|
||||
|
||||
if (postHogClient) {
|
||||
postHogClient.capture({
|
||||
@ -578,7 +580,7 @@ const getSecretsHelper = async ({
|
||||
ipAddress: authData.authIP
|
||||
});
|
||||
|
||||
const postHogClient = TelemetryService.getPostHogClient();
|
||||
const postHogClient = await TelemetryService.getPostHogClient();
|
||||
|
||||
if (postHogClient) {
|
||||
postHogClient.capture({
|
||||
@ -660,7 +662,7 @@ const getSecretHelper = async ({
|
||||
ipAddress: authData.authIP
|
||||
});
|
||||
|
||||
const postHogClient = TelemetryService.getPostHogClient();
|
||||
const postHogClient = await TelemetryService.getPostHogClient();
|
||||
|
||||
if (postHogClient) {
|
||||
postHogClient.capture({
|
||||
@ -798,7 +800,7 @@ const updateSecretHelper = async ({
|
||||
workspaceId
|
||||
});
|
||||
|
||||
const postHogClient = TelemetryService.getPostHogClient();
|
||||
const postHogClient = await TelemetryService.getPostHogClient();
|
||||
|
||||
if (postHogClient) {
|
||||
postHogClient.capture({
|
||||
@ -905,7 +907,7 @@ const deleteSecretHelper = async ({
|
||||
workspaceId
|
||||
});
|
||||
|
||||
const postHogClient = TelemetryService.getPostHogClient();
|
||||
const postHogClient = await TelemetryService.getPostHogClient();
|
||||
|
||||
if (postHogClient) {
|
||||
postHogClient.capture({
|
||||
|
@ -1,16 +1,15 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Types } from 'mongoose';
|
||||
import { TokenData } from '../models';
|
||||
import crypto from 'crypto';
|
||||
import bcrypt from 'bcrypt';
|
||||
import { Types } from "mongoose";
|
||||
import { TokenData } from "../models";
|
||||
import crypto from "crypto";
|
||||
import bcrypt from "bcrypt";
|
||||
import {
|
||||
TOKEN_EMAIL_CONFIRMATION,
|
||||
TOKEN_EMAIL_MFA,
|
||||
TOKEN_EMAIL_ORG_INVITATION,
|
||||
TOKEN_EMAIL_PASSWORD_RESET
|
||||
} from '../variables';
|
||||
import { UnauthorizedRequestError } from '../utils/errors';
|
||||
import { getSaltRounds } from '../config';
|
||||
TOKEN_EMAIL_CONFIRMATION,
|
||||
TOKEN_EMAIL_MFA,
|
||||
TOKEN_EMAIL_ORG_INVITATION,
|
||||
TOKEN_EMAIL_PASSWORD_RESET,
|
||||
} from "../variables";
|
||||
import { UnauthorizedRequestError } from "../utils/errors";
|
||||
import { getSaltRounds } from "../config";
|
||||
|
||||
/**
|
||||
* Create and store a token in the database for purpose [type]
|
||||
@ -22,194 +21,197 @@ import { getSaltRounds } from '../config';
|
||||
* @returns {String} token - the created token
|
||||
*/
|
||||
const createTokenHelper = async ({
|
||||
type,
|
||||
email,
|
||||
phoneNumber,
|
||||
organizationId
|
||||
type,
|
||||
email,
|
||||
phoneNumber,
|
||||
organizationId,
|
||||
}: {
|
||||
type: 'emailConfirmation' | 'emailMfa' | 'organizationInvitation' | 'passwordReset';
|
||||
type:
|
||||
| "emailConfirmation"
|
||||
| "emailMfa"
|
||||
| "organizationInvitation"
|
||||
| "passwordReset";
|
||||
email?: string;
|
||||
phoneNumber?: string;
|
||||
organizationId?: Types.ObjectId;
|
||||
}) => {
|
||||
let token, expiresAt, triesLeft;
|
||||
// generate random token based on specified token use-case
|
||||
// type [type]
|
||||
switch (type) {
|
||||
case TOKEN_EMAIL_CONFIRMATION:
|
||||
// generate random 6-digit code
|
||||
token = String(crypto.randomInt(Math.pow(10, 5), Math.pow(10, 6) - 1));
|
||||
expiresAt = new Date(new Date().getTime() + 86400000);
|
||||
break;
|
||||
case TOKEN_EMAIL_MFA:
|
||||
// generate random 6-digit code
|
||||
token = String(crypto.randomInt(Math.pow(10, 5), Math.pow(10, 6) - 1));
|
||||
triesLeft = 5;
|
||||
expiresAt = new Date(new Date().getTime() + 300000);
|
||||
break;
|
||||
case TOKEN_EMAIL_ORG_INVITATION:
|
||||
// generate random hex
|
||||
token = crypto.randomBytes(16).toString("hex");
|
||||
expiresAt = new Date(new Date().getTime() + 259200000);
|
||||
break;
|
||||
case TOKEN_EMAIL_PASSWORD_RESET:
|
||||
// generate random hex
|
||||
token = crypto.randomBytes(16).toString("hex");
|
||||
expiresAt = new Date(new Date().getTime() + 86400000);
|
||||
break;
|
||||
default:
|
||||
token = crypto.randomBytes(16).toString("hex");
|
||||
expiresAt = new Date();
|
||||
break;
|
||||
}
|
||||
|
||||
interface TokenDataQuery {
|
||||
type: string;
|
||||
email?: string;
|
||||
phoneNumber?: string;
|
||||
organizationId?: Types.ObjectId
|
||||
}) => {
|
||||
let token, expiresAt, triesLeft;
|
||||
try {
|
||||
// generate random token based on specified token use-case
|
||||
// type [type]
|
||||
switch (type) {
|
||||
case TOKEN_EMAIL_CONFIRMATION:
|
||||
// generate random 6-digit code
|
||||
token = String(crypto.randomInt(Math.pow(10, 5), Math.pow(10, 6) - 1));
|
||||
expiresAt = new Date((new Date()).getTime() + 86400000);
|
||||
break;
|
||||
case TOKEN_EMAIL_MFA:
|
||||
// generate random 6-digit code
|
||||
token = String(crypto.randomInt(Math.pow(10, 5), Math.pow(10, 6) - 1));
|
||||
triesLeft = 5;
|
||||
expiresAt = new Date((new Date()).getTime() + 300000);
|
||||
break;
|
||||
case TOKEN_EMAIL_ORG_INVITATION:
|
||||
// generate random hex
|
||||
token = crypto.randomBytes(16).toString('hex');
|
||||
expiresAt = new Date((new Date()).getTime() + 259200000);
|
||||
break;
|
||||
case TOKEN_EMAIL_PASSWORD_RESET:
|
||||
// generate random hex
|
||||
token = crypto.randomBytes(16).toString('hex');
|
||||
expiresAt = new Date((new Date()).getTime() + 86400000);
|
||||
break;
|
||||
default:
|
||||
token = crypto.randomBytes(16).toString('hex');
|
||||
expiresAt = new Date();
|
||||
break;
|
||||
}
|
||||
|
||||
interface TokenDataQuery {
|
||||
type: string;
|
||||
email?: string;
|
||||
phoneNumber?: string;
|
||||
organization?: Types.ObjectId;
|
||||
}
|
||||
|
||||
interface TokenDataUpdate {
|
||||
type: string;
|
||||
email?: string;
|
||||
phoneNumber?: string;
|
||||
organization?: Types.ObjectId;
|
||||
tokenHash: string;
|
||||
triesLeft?: number;
|
||||
expiresAt: Date;
|
||||
}
|
||||
organization?: Types.ObjectId;
|
||||
}
|
||||
|
||||
const query: TokenDataQuery = { type };
|
||||
const update: TokenDataUpdate = {
|
||||
type,
|
||||
tokenHash: await bcrypt.hash(token, getSaltRounds()),
|
||||
expiresAt
|
||||
}
|
||||
interface TokenDataUpdate {
|
||||
type: string;
|
||||
email?: string;
|
||||
phoneNumber?: string;
|
||||
organization?: Types.ObjectId;
|
||||
tokenHash: string;
|
||||
triesLeft?: number;
|
||||
expiresAt: Date;
|
||||
}
|
||||
|
||||
if (email) {
|
||||
query.email = email;
|
||||
update.email = email;
|
||||
}
|
||||
if (phoneNumber) {
|
||||
query.phoneNumber = phoneNumber;
|
||||
update.phoneNumber = phoneNumber;
|
||||
}
|
||||
if (organizationId) {
|
||||
query.organization = organizationId
|
||||
update.organization = organizationId
|
||||
}
|
||||
|
||||
if (triesLeft) {
|
||||
update.triesLeft = triesLeft;
|
||||
}
|
||||
|
||||
await TokenData.findOneAndUpdate(
|
||||
query,
|
||||
update,
|
||||
{
|
||||
new: true,
|
||||
upsert: true
|
||||
}
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error(
|
||||
"Failed to create token"
|
||||
);
|
||||
}
|
||||
|
||||
return token;
|
||||
}
|
||||
const query: TokenDataQuery = { type };
|
||||
const update: TokenDataUpdate = {
|
||||
type,
|
||||
tokenHash: await bcrypt.hash(token, await getSaltRounds()),
|
||||
expiresAt,
|
||||
};
|
||||
|
||||
if (email) {
|
||||
query.email = email;
|
||||
update.email = email;
|
||||
}
|
||||
if (phoneNumber) {
|
||||
query.phoneNumber = phoneNumber;
|
||||
update.phoneNumber = phoneNumber;
|
||||
}
|
||||
if (organizationId) {
|
||||
query.organization = organizationId;
|
||||
update.organization = organizationId;
|
||||
}
|
||||
|
||||
if (triesLeft) {
|
||||
update.triesLeft = triesLeft;
|
||||
}
|
||||
|
||||
await TokenData.findOneAndUpdate(query, update, {
|
||||
new: true,
|
||||
upsert: true,
|
||||
});
|
||||
|
||||
return token;
|
||||
};
|
||||
|
||||
/**
|
||||
*
|
||||
*
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.email - email associated with the token
|
||||
* @param {String} obj.token - value of the token
|
||||
*/
|
||||
const validateTokenHelper = async ({
|
||||
type,
|
||||
email,
|
||||
phoneNumber,
|
||||
organizationId,
|
||||
token
|
||||
type,
|
||||
email,
|
||||
phoneNumber,
|
||||
organizationId,
|
||||
token,
|
||||
}: {
|
||||
type: 'emailConfirmation' | 'emailMfa' | 'organizationInvitation' | 'passwordReset';
|
||||
type:
|
||||
| "emailConfirmation"
|
||||
| "emailMfa"
|
||||
| "organizationInvitation"
|
||||
| "passwordReset";
|
||||
email?: string;
|
||||
phoneNumber?: string;
|
||||
organizationId?: Types.ObjectId;
|
||||
token: string;
|
||||
}) => {
|
||||
interface Query {
|
||||
type: string;
|
||||
email?: string;
|
||||
phoneNumber?: string;
|
||||
organizationId?: Types.ObjectId;
|
||||
token: string;
|
||||
}) => {
|
||||
interface Query {
|
||||
type: string;
|
||||
email?: string;
|
||||
phoneNumber?: string;
|
||||
organization?: Types.ObjectId;
|
||||
}
|
||||
organization?: Types.ObjectId;
|
||||
}
|
||||
|
||||
const query: Query = { type };
|
||||
const query: Query = { type };
|
||||
|
||||
if (email) { query.email = email; }
|
||||
if (phoneNumber) { query.phoneNumber = phoneNumber; }
|
||||
if (organizationId) { query.organization = organizationId; }
|
||||
if (email) {
|
||||
query.email = email;
|
||||
}
|
||||
if (phoneNumber) {
|
||||
query.phoneNumber = phoneNumber;
|
||||
}
|
||||
if (organizationId) {
|
||||
query.organization = organizationId;
|
||||
}
|
||||
|
||||
const tokenData = await TokenData.findOne(query).select('+tokenHash');
|
||||
|
||||
if (!tokenData) throw new Error('Failed to find token to validate');
|
||||
|
||||
if (tokenData.expiresAt < new Date()) {
|
||||
// case: token expired
|
||||
await TokenData.findByIdAndDelete(tokenData._id);
|
||||
throw UnauthorizedRequestError({
|
||||
message: 'MFA session expired. Please log in again',
|
||||
context: {
|
||||
code: 'mfa_expired'
|
||||
}
|
||||
});
|
||||
}
|
||||
const tokenData = await TokenData.findOne(query).select("+tokenHash");
|
||||
|
||||
const isValid = await bcrypt.compare(token, tokenData.tokenHash);
|
||||
if (!isValid) {
|
||||
// case: token is not valid
|
||||
if (tokenData?.triesLeft !== undefined) {
|
||||
// case: token has a try-limit
|
||||
if (tokenData.triesLeft === 1) {
|
||||
// case: token is out of tries
|
||||
await TokenData.findByIdAndDelete(tokenData._id);
|
||||
} else {
|
||||
// case: token has more than 1 try left
|
||||
await TokenData.findByIdAndUpdate(tokenData._id, {
|
||||
triesLeft: tokenData.triesLeft - 1
|
||||
}, {
|
||||
new: true
|
||||
});
|
||||
}
|
||||
if (!tokenData) throw new Error("Failed to find token to validate");
|
||||
|
||||
throw UnauthorizedRequestError({
|
||||
message: 'MFA code is invalid',
|
||||
context: {
|
||||
code: 'mfa_invalid',
|
||||
triesLeft: tokenData.triesLeft - 1
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
throw UnauthorizedRequestError({
|
||||
message: 'MFA code is invalid',
|
||||
context: {
|
||||
code: 'mfa_invalid'
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// case: token is valid
|
||||
if (tokenData.expiresAt < new Date()) {
|
||||
// case: token expired
|
||||
await TokenData.findByIdAndDelete(tokenData._id);
|
||||
}
|
||||
throw UnauthorizedRequestError({
|
||||
message: "MFA session expired. Please log in again",
|
||||
context: {
|
||||
code: "mfa_expired",
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
export {
|
||||
createTokenHelper,
|
||||
validateTokenHelper
|
||||
}
|
||||
const isValid = await bcrypt.compare(token, tokenData.tokenHash);
|
||||
if (!isValid) {
|
||||
// case: token is not valid
|
||||
if (tokenData?.triesLeft !== undefined) {
|
||||
// case: token has a try-limit
|
||||
if (tokenData.triesLeft === 1) {
|
||||
// case: token is out of tries
|
||||
await TokenData.findByIdAndDelete(tokenData._id);
|
||||
} else {
|
||||
// case: token has more than 1 try left
|
||||
await TokenData.findByIdAndUpdate(
|
||||
tokenData._id,
|
||||
{
|
||||
triesLeft: tokenData.triesLeft - 1,
|
||||
},
|
||||
{
|
||||
new: true,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
throw UnauthorizedRequestError({
|
||||
message: "MFA code is invalid",
|
||||
context: {
|
||||
code: "mfa_invalid",
|
||||
triesLeft: tokenData.triesLeft - 1,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
throw UnauthorizedRequestError({
|
||||
message: "MFA code is invalid",
|
||||
context: {
|
||||
code: "mfa_invalid",
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// case: token is valid
|
||||
await TokenData.findByIdAndDelete(tokenData._id);
|
||||
};
|
||||
|
||||
export { createTokenHelper, validateTokenHelper };
|
||||
|
@@ -1,4 +1,3 @@
import * as Sentry from '@sentry/node';
import { Types } from 'mongoose';
import {
IUser,
@@ -28,16 +27,9 @@ import {
* @returns {Object} user - the initialized user
*/
const setupAccount = async ({ email }: { email: string }) => {
let user;
try {
user = await new User({
email
}).save();
} catch (err) {
Sentry.setUser({ email });
Sentry.captureException(err);
throw new Error('Failed to set up account');
}
const user = await new User({
email
}).save();

return user;
};
@ -89,34 +81,27 @@ const completeAccount = async ({
|
||||
salt: string;
|
||||
verifier: string;
|
||||
}) => {
|
||||
let user;
|
||||
try {
|
||||
const options = {
|
||||
new: true
|
||||
};
|
||||
user = await User.findByIdAndUpdate(
|
||||
userId,
|
||||
{
|
||||
firstName,
|
||||
lastName,
|
||||
encryptionVersion,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
publicKey,
|
||||
encryptedPrivateKey,
|
||||
iv: encryptedPrivateKeyIV,
|
||||
tag: encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier
|
||||
},
|
||||
options
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to complete account set up');
|
||||
}
|
||||
const options = {
|
||||
new: true
|
||||
};
|
||||
const user = await User.findByIdAndUpdate(
|
||||
userId,
|
||||
{
|
||||
firstName,
|
||||
lastName,
|
||||
encryptionVersion,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
publicKey,
|
||||
encryptedPrivateKey,
|
||||
iv: encryptedPrivateKeyIV,
|
||||
tag: encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier
|
||||
},
|
||||
options
|
||||
);
|
||||
|
||||
return user;
|
||||
};
|
||||
|
@@ -28,7 +28,6 @@ import {
AUTH_MODE_SERVICE_TOKEN,
AUTH_MODE_API_KEY
} from '../variables';
import { getEncryptionKey } from '../config';
import { encryptSymmetric } from '../utils/crypto';
import { SecretService } from '../services';

@@ -90,7 +89,7 @@ const validateClientForWorkspace = async ({
requiredPermissions
});

return ({ membership });
return ({ membership, workspace });
}

if (authData.authMode === AUTH_MODE_SERVICE_ACCOUNT && authData.authPayload instanceof ServiceAccount) {
@@ -124,7 +123,7 @@ const validateClientForWorkspace = async ({
requiredPermissions
});

return ({ membership });
return ({ membership, workspace });
}

throw UnauthorizedRequestError({
@ -1,12 +1,11 @@
|
||||
import mongoose from 'mongoose';
|
||||
import dotenv from 'dotenv';
|
||||
dotenv.config();
|
||||
import infisical from 'infisical-node';
|
||||
import express from 'express';
|
||||
import helmet from 'helmet';
|
||||
import cors from 'cors';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { DatabaseService } from './services';
|
||||
import { EELicenseService } from './ee/services';
|
||||
import { setUpHealthEndpoint } from './services/health';
|
||||
import { initSmtp } from './services/smtp';
|
||||
import { TelemetryService } from './services';
|
||||
@ -26,7 +25,8 @@ import {
|
||||
workspace as eeWorkspaceRouter,
|
||||
secret as eeSecretRouter,
|
||||
secretSnapshot as eeSecretSnapshotRouter,
|
||||
action as eeActionRouter
|
||||
action as eeActionRouter,
|
||||
organizations as eeOrganizationsRouter
|
||||
} from './ee/routes/v1';
|
||||
import {
|
||||
signup as v1SignupRouter,
|
||||
@ -45,7 +45,8 @@ import {
|
||||
password as v1PasswordRouter,
|
||||
stripe as v1StripeRouter,
|
||||
integration as v1IntegrationRouter,
|
||||
integrationAuth as v1IntegrationAuthRouter
|
||||
integrationAuth as v1IntegrationAuthRouter,
|
||||
secretsFolder as v1SecretsFolder
|
||||
} from './routes/v1';
|
||||
import {
|
||||
signup as v2SignupRouter,
|
||||
@ -74,27 +75,22 @@ import {
|
||||
getNodeEnv,
|
||||
getPort,
|
||||
getSentryDSN,
|
||||
getSiteURL,
|
||||
getSmtpHost
|
||||
getSiteURL
|
||||
} from './config';
|
||||
|
||||
const main = async () => {
|
||||
if (process.env.INFISICAL_TOKEN != "" || process.env.INFISICAL_TOKEN != undefined) {
|
||||
await infisical.connect({
|
||||
token: process.env.INFISICAL_TOKEN!
|
||||
});
|
||||
}
|
||||
|
||||
TelemetryService.logTelemetryMessage();
|
||||
setTransporter(initSmtp());
|
||||
setTransporter(await initSmtp());
|
||||
|
||||
await DatabaseService.initDatabase(getMongoURL());
|
||||
if (getNodeEnv() !== 'test') {
|
||||
await EELicenseService.initGlobalFeatureSet();
|
||||
|
||||
await DatabaseService.initDatabase(await getMongoURL());
|
||||
if ((await getNodeEnv()) !== 'test') {
|
||||
Sentry.init({
|
||||
dsn: getSentryDSN(),
|
||||
dsn: await getSentryDSN(),
|
||||
tracesSampleRate: 1.0,
|
||||
debug: getNodeEnv() === 'production' ? false : true,
|
||||
environment: getNodeEnv()
|
||||
debug: await getNodeEnv() === 'production' ? false : true,
|
||||
environment: await getNodeEnv()
|
||||
});
|
||||
}
|
||||
|
||||
@ -106,13 +102,13 @@ const main = async () => {
|
||||
app.use(
|
||||
cors({
|
||||
credentials: true,
|
||||
origin: getSiteURL()
|
||||
origin: await getSiteURL()
|
||||
})
|
||||
);
|
||||
|
||||
app.use(requestIp.mw());
|
||||
|
||||
if (getNodeEnv() === 'production') {
|
||||
if ((await getNodeEnv()) === 'production') {
|
||||
// enable app-wide rate-limiting + helmet security
|
||||
// in production
|
||||
app.disable('x-powered-by');
|
||||
@ -125,6 +121,7 @@ const main = async () => {
|
||||
app.use('/api/v1/secret-snapshot', eeSecretSnapshotRouter);
|
||||
app.use('/api/v1/workspace', eeWorkspaceRouter);
|
||||
app.use('/api/v1/action', eeActionRouter);
|
||||
app.use('/api/v1/organizations', eeOrganizationsRouter);
|
||||
|
||||
// v1 routes (default)
|
||||
app.use('/api/v1/signup', v1SignupRouter);
|
||||
@ -144,6 +141,7 @@ const main = async () => {
|
||||
app.use('/api/v1/stripe', v1StripeRouter);
|
||||
app.use('/api/v1/integration', v1IntegrationRouter);
|
||||
app.use('/api/v1/integration-auth', v1IntegrationAuthRouter);
|
||||
app.use('/api/v1/folder', v1SecretsFolder)
|
||||
|
||||
// v2 routes (improvements)
|
||||
app.use('/api/v2/signup', v2SignupRouter);
|
||||
@ -158,7 +156,7 @@ const main = async () => {
|
||||
app.use('/api/v2/service-token', v2ServiceTokenDataRouter); // TODO: turn into plural route
|
||||
app.use('/api/v2/service-accounts', v2ServiceAccountsRouter); // new
|
||||
app.use('/api/v2/api-key', v2APIKeyDataRouter);
|
||||
|
||||
|
||||
// v3 routes (experimental)
|
||||
app.use('/api/v3/secrets', v3SecretsRouter);
|
||||
app.use('/api/v3/workspaces', v3WorkspacesRouter);
|
||||
@ -177,8 +175,8 @@ const main = async () => {
|
||||
|
||||
app.use(requestErrorHandler)
|
||||
|
||||
const server = app.listen(getPort(), () => {
|
||||
getLogger("backend-main").info(`Server started listening at port ${getPort()}`)
|
||||
const server = app.listen(await getPort(), async () => {
|
||||
(await getLogger("backend-main")).info(`Server started listening at port ${await getPort()}`)
|
||||
});
|
||||
|
||||
await createTestUserForDevelopment();
|
||||
|
@ -1,7 +1,6 @@
|
||||
import * as Sentry from "@sentry/node";
|
||||
import { Octokit } from "@octokit/rest";
|
||||
import { IIntegrationAuth } from "../models";
|
||||
import request from '../config/request';
|
||||
import { standardRequest } from "../config/request";
|
||||
import {
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
@ -26,7 +25,7 @@ import {
|
||||
INTEGRATION_FLYIO_API_URL,
|
||||
INTEGRATION_CIRCLECI_API_URL,
|
||||
INTEGRATION_TRAVISCI_API_URL,
|
||||
INTEGRATION_SUPABASE_API_URL
|
||||
INTEGRATION_SUPABASE_API_URL,
|
||||
} from "../variables";
|
||||
|
||||
interface App {
|
||||
@ -47,87 +46,80 @@ interface App {
|
||||
const getApps = async ({
|
||||
integrationAuth,
|
||||
accessToken,
|
||||
teamId
|
||||
teamId,
|
||||
}: {
|
||||
integrationAuth: IIntegrationAuth;
|
||||
accessToken: string;
|
||||
teamId?: string;
|
||||
}) => {
|
||||
|
||||
let apps: App[] = [];
|
||||
try {
|
||||
switch (integrationAuth.integration) {
|
||||
case INTEGRATION_AZURE_KEY_VAULT:
|
||||
apps = [];
|
||||
break;
|
||||
case INTEGRATION_AWS_PARAMETER_STORE:
|
||||
apps = [];
|
||||
break;
|
||||
case INTEGRATION_AWS_SECRET_MANAGER:
|
||||
apps = [];
|
||||
break;
|
||||
case INTEGRATION_HEROKU:
|
||||
apps = await getAppsHeroku({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_VERCEL:
|
||||
apps = await getAppsVercel({
|
||||
integrationAuth,
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_NETLIFY:
|
||||
apps = await getAppsNetlify({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_GITHUB:
|
||||
apps = await getAppsGithub({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_GITLAB:
|
||||
apps = await getAppsGitlab({
|
||||
accessToken,
|
||||
teamId
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_RENDER:
|
||||
apps = await getAppsRender({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_RAILWAY:
|
||||
apps = await getAppsRailway({
|
||||
accessToken
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_FLYIO:
|
||||
apps = await getAppsFlyio({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_CIRCLECI:
|
||||
apps = await getAppsCircleCI({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_TRAVISCI:
|
||||
apps = await getAppsTravisCI({
|
||||
accessToken,
|
||||
})
|
||||
break;
|
||||
case INTEGRATION_SUPABASE:
|
||||
apps = await getAppsSupabase({
|
||||
accessToken
|
||||
});
|
||||
break;
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error("Failed to get integration apps");
|
||||
switch (integrationAuth.integration) {
|
||||
case INTEGRATION_AZURE_KEY_VAULT:
|
||||
apps = [];
|
||||
break;
|
||||
case INTEGRATION_AWS_PARAMETER_STORE:
|
||||
apps = [];
|
||||
break;
|
||||
case INTEGRATION_AWS_SECRET_MANAGER:
|
||||
apps = [];
|
||||
break;
|
||||
case INTEGRATION_HEROKU:
|
||||
apps = await getAppsHeroku({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_VERCEL:
|
||||
apps = await getAppsVercel({
|
||||
integrationAuth,
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_NETLIFY:
|
||||
apps = await getAppsNetlify({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_GITHUB:
|
||||
apps = await getAppsGithub({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_GITLAB:
|
||||
apps = await getAppsGitlab({
|
||||
accessToken,
|
||||
teamId,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_RENDER:
|
||||
apps = await getAppsRender({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_RAILWAY:
|
||||
apps = await getAppsRailway({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_FLYIO:
|
||||
apps = await getAppsFlyio({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_CIRCLECI:
|
||||
apps = await getAppsCircleCI({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_TRAVISCI:
|
||||
apps = await getAppsTravisCI({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_SUPABASE:
|
||||
apps = await getAppsSupabase({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
return apps;
|
||||
@ -141,25 +133,18 @@ const getApps = async ({
|
||||
* @returns {String} apps.name - name of Heroku app
|
||||
*/
|
||||
const getAppsHeroku = async ({ accessToken }: { accessToken: string }) => {
|
||||
let apps;
|
||||
try {
|
||||
const res = (
|
||||
await request.get(`${INTEGRATION_HEROKU_API_URL}/apps`, {
|
||||
headers: {
|
||||
Accept: "application/vnd.heroku+json; version=3",
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
).data;
|
||||
const res = (
|
||||
await standardRequest.get(`${INTEGRATION_HEROKU_API_URL}/apps`, {
|
||||
headers: {
|
||||
Accept: "application/vnd.heroku+json; version=3",
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
).data;
|
||||
|
||||
apps = res.map((a: any) => ({
|
||||
name: a.name,
|
||||
}));
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error("Failed to get Heroku integration apps");
|
||||
}
|
||||
const apps = res.map((a: any) => ({
|
||||
name: a.name,
|
||||
}));
|
||||
|
||||
return apps;
|
||||
};
|
||||
@ -178,33 +163,26 @@ const getAppsVercel = async ({
|
||||
integrationAuth: IIntegrationAuth;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
let apps;
|
||||
try {
|
||||
const res = (
|
||||
await request.get(`${INTEGRATION_VERCEL_API_URL}/v9/projects`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
'Accept-Encoding': 'application/json'
|
||||
},
|
||||
...(integrationAuth?.teamId
|
||||
? {
|
||||
params: {
|
||||
teamId: integrationAuth.teamId,
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
})
|
||||
).data;
|
||||
const res = (
|
||||
await standardRequest.get(`${INTEGRATION_VERCEL_API_URL}/v9/projects`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept-Encoding": "application/json",
|
||||
},
|
||||
...(integrationAuth?.teamId
|
||||
? {
|
||||
params: {
|
||||
teamId: integrationAuth.teamId,
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
})
|
||||
).data;
|
||||
|
||||
apps = res.projects.map((a: any) => ({
|
||||
name: a.name,
|
||||
appId: a.id
|
||||
}));
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error("Failed to get Vercel integration apps");
|
||||
}
|
||||
const apps = res.projects.map((a: any) => ({
|
||||
name: a.name,
|
||||
appId: a.id,
|
||||
}));
|
||||
|
||||
return apps;
|
||||
};
|
||||
@ -218,43 +196,41 @@ const getAppsVercel = async ({
|
||||
*/
|
||||
const getAppsNetlify = async ({ accessToken }: { accessToken: string }) => {
|
||||
const apps: any = [];
|
||||
try {
|
||||
let page = 1;
|
||||
const perPage = 10;
|
||||
let hasMorePages = true;
|
||||
|
||||
// paginate through all sites
|
||||
while (hasMorePages) {
|
||||
const params = new URLSearchParams({
|
||||
page: String(page),
|
||||
per_page: String(perPage)
|
||||
});
|
||||
let page = 1;
|
||||
const perPage = 10;
|
||||
let hasMorePages = true;
|
||||
|
||||
const { data } = await request.get(`${INTEGRATION_NETLIFY_API_URL}/api/v1/sites`, {
|
||||
// paginate through all sites
|
||||
while (hasMorePages) {
|
||||
const params = new URLSearchParams({
|
||||
page: String(page),
|
||||
per_page: String(perPage),
|
||||
filter: 'all'
|
||||
});
|
||||
|
||||
const { data } = await standardRequest.get(
|
||||
`${INTEGRATION_NETLIFY_API_URL}/api/v1/sites`,
|
||||
{
|
||||
params,
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
'Accept-Encoding': 'application/json'
|
||||
}
|
||||
});
|
||||
|
||||
data.map((a: any) => {
|
||||
apps.push({
|
||||
name: a.name,
|
||||
appId: a.site_id
|
||||
});
|
||||
});
|
||||
|
||||
if (data.length < perPage) {
|
||||
hasMorePages = false;
|
||||
"Accept-Encoding": "application/json",
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
page++;
|
||||
data.map((a: any) => {
|
||||
apps.push({
|
||||
name: a.name,
|
||||
appId: a.site_id,
|
||||
});
|
||||
});
|
||||
|
||||
if (data.length < perPage) {
|
||||
hasMorePages = false;
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error("Failed to get Netlify integration apps");
|
||||
|
||||
page++;
|
||||
}
|
||||
|
||||
return apps;
|
||||
@ -268,35 +244,58 @@ const getAppsNetlify = async ({ accessToken }: { accessToken: string }) => {
|
||||
* @returns {String} apps.name - name of Github site
|
||||
*/
|
||||
const getAppsGithub = async ({ accessToken }: { accessToken: string }) => {
|
||||
let apps;
|
||||
try {
|
||||
const octokit = new Octokit({
|
||||
auth: accessToken,
|
||||
});
|
||||
interface GitHubApp {
|
||||
id: string;
|
||||
name: string;
|
||||
permissions: {
|
||||
admin: boolean;
|
||||
};
|
||||
owner: {
|
||||
login: string;
|
||||
};
|
||||
}
|
||||
|
||||
const repos = (
|
||||
await octokit.request(
|
||||
const octokit = new Octokit({
|
||||
auth: accessToken,
|
||||
});
|
||||
|
||||
const getAllRepos = async () => {
|
||||
let repos: GitHubApp[] = [];
|
||||
let page = 1;
|
||||
const per_page = 100;
|
||||
let hasMore = true;
|
||||
|
||||
while (hasMore) {
|
||||
const response = await octokit.request(
|
||||
"GET /user/repos{?visibility,affiliation,type,sort,direction,per_page,page,since,before}",
|
||||
{
|
||||
per_page: 100,
|
||||
per_page,
|
||||
page,
|
||||
}
|
||||
)
|
||||
).data;
|
||||
);
|
||||
|
||||
apps = repos
|
||||
.filter((a: any) => a.permissions.admin === true)
|
||||
.map((a: any) => {
|
||||
return ({
|
||||
appId: a.id,
|
||||
name: a.name,
|
||||
owner: a.owner.login,
|
||||
});
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error("Failed to get Github repos");
|
||||
}
|
||||
if (response.data.length > 0) {
|
||||
repos = repos.concat(response.data);
|
||||
page++;
|
||||
} else {
|
||||
hasMore = false;
|
||||
}
|
||||
}
|
||||
|
||||
return repos;
|
||||
};
|
||||
|
||||
const repos = await getAllRepos();
|
||||
|
||||
const apps = repos
|
||||
.filter((a: GitHubApp) => a.permissions.admin === true)
|
||||
.map((a: GitHubApp) => {
|
||||
return {
|
||||
appId: a.id,
|
||||
name: a.name,
|
||||
owner: a.owner.login,
|
||||
};
|
||||
});
|
||||
|
||||
return apps;
|
||||
};
|
||||
@ -310,29 +309,20 @@ const getAppsGithub = async ({ accessToken }: { accessToken: string }) => {
|
||||
* @returns {String} apps.appId - id of Render service
|
||||
*/
|
||||
const getAppsRender = async ({ accessToken }: { accessToken: string }) => {
|
||||
let apps: any;
|
||||
try {
|
||||
const res = (
|
||||
await request.get(`${INTEGRATION_RENDER_API_URL}/v1/services`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
Accept: 'application/json',
|
||||
'Accept-Encoding': 'application/json',
|
||||
},
|
||||
})
|
||||
).data;
|
||||
|
||||
apps = res
|
||||
.map((a: any) => ({
|
||||
name: a.service.name,
|
||||
appId: a.service.id
|
||||
}));
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error("Failed to get Render services");
|
||||
}
|
||||
const res = (
|
||||
await standardRequest.get(`${INTEGRATION_RENDER_API_URL}/v1/services`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
Accept: "application/json",
|
||||
"Accept-Encoding": "application/json",
|
||||
},
|
||||
})
|
||||
).data;
|
||||
|
||||
const apps = res.map((a: any) => ({
|
||||
name: a.service.name,
|
||||
appId: a.service.id,
|
||||
}));
|
||||
|
||||
return apps;
|
||||
};
|
||||
@ -345,49 +335,51 @@ const getAppsRender = async ({ accessToken }: { accessToken: string }) => {
 * @returns {String} apps.name - name of Railway project
 * @returns {String} apps.appId - id of Railway project
 *
 */
const getAppsRailway = async ({ accessToken }: { accessToken: string }) => {
  // (removed: try/catch with Sentry capture that threw "Failed to get Railway services")
  const query = `
    query GetProjects($userId: String, $teamId: String) {
      projects(userId: $userId, teamId: $teamId) {
        edges {
          node {
            id
            name
          }
        }
      }
    }
  `;

  const variables = {};

  const {
    data: {
      data: {
        projects: { edges },
      },
    },
  } = await standardRequest.post(
    INTEGRATION_RAILWAY_API_URL,
    {
      query,
      variables,
    },
    {
      headers: {
        Authorization: `Bearer ${accessToken}`,
        "Content-Type": "application/json",
        "Accept-Encoding": "application/json",
      },
    }
  );

  const apps = edges.map((e: any) => ({
    name: e.node.name,
    appId: e.node.id,
  }));

  return apps;
};

/**
 * Return list of apps for Fly.io integration
@ -397,41 +389,40 @@ const getAppsRailway = async ({ accessToken }: { accessToken: string }) => {
 * @returns {String} apps.name - name of Fly.io apps
 */
const getAppsFlyio = async ({ accessToken }: { accessToken: string }) => {
  // (removed: try/catch with Sentry capture that threw "Failed to get Fly.io apps")
  const query = `
    query($role: String) {
      apps(type: "container", first: 400, role: $role) {
        nodes {
          id
          name
          hostname
        }
      }
    }
  `;

  const res = (
    await standardRequest.post(
      INTEGRATION_FLYIO_API_URL,
      {
        query,
        variables: {
          role: null,
        },
      },
      {
        headers: {
          Authorization: "Bearer " + accessToken,
          Accept: "application/json",
          "Accept-Encoding": "application/json",
        },
      }
    )
  ).data.data.apps.nodes;

  const apps = res.map((a: any) => ({
    name: a.name,
  }));

  return apps;
};
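Both the Railway and Fly.io lookups are plain GraphQL POSTs made through an axios-style client: the query string and variables go in the JSON body, and the result sits under data.data. The following is a rough, self-contained sketch of that pattern with plain axios; the endpoint URL and response shape are assumptions for illustration, not the project's constants.

import axios from "axios";

// Hypothetical GraphQL endpoint and response shape, for illustration only.
const GRAPHQL_API_URL = "https://api.example.com/graphql";

const listProjects = async (accessToken: string) => {
  const query = `
    query {
      projects {
        edges {
          node {
            id
            name
          }
        }
      }
    }
  `;

  const res = await axios.post(
    GRAPHQL_API_URL,
    { query, variables: {} },
    {
      headers: {
        Authorization: `Bearer ${accessToken}`,
        "Content-Type": "application/json",
      },
    }
  );

  // GraphQL servers usually answer 200 even on failure, with an `errors` array.
  if (res.data.errors) throw new Error("GraphQL query failed");

  return res.data.data.projects.edges.map((e: any) => ({
    appId: e.node.id,
    name: e.node.name,
  }));
};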
@ -444,63 +435,43 @@ const getAppsFlyio = async ({ accessToken }: { accessToken: string }) => {
 * @returns {String} apps.name - name of CircleCI apps
 */
const getAppsCircleCI = async ({ accessToken }: { accessToken: string }) => {
  // (removed: try/catch with Sentry capture that threw "Failed to get CircleCI projects")
  const res = (
    await standardRequest.get(`${INTEGRATION_CIRCLECI_API_URL}/v1.1/projects`, {
      headers: {
        "Circle-Token": accessToken,
        "Accept-Encoding": "application/json",
      },
    })
  ).data;

  const apps = res?.map((a: any) => {
    return {
      name: a?.reponame,
    };
  });

  return apps;
};

const getAppsTravisCI = async ({ accessToken }: { accessToken: string }) => {
  // (removed: try/catch with Sentry capture that threw "Failed to get TravisCI projects")
  const res = (
    await standardRequest.get(`${INTEGRATION_TRAVISCI_API_URL}/repos`, {
      headers: {
        Authorization: `token ${accessToken}`,
        "Accept-Encoding": "application/json",
      },
    })
  ).data;

  const apps = res?.map((a: any) => {
    return {
      name: a?.slug?.split("/")[1],
      appId: a?.id,
    };
  });

  return apps;
};

/**
 * Return list of repositories for GitLab integration
@ -509,112 +480,98 @@ const getAppsTravisCI = async ({ accessToken }: { accessToken: string }) => {
 * @returns {Object[]} apps - names of GitLab sites
 * @returns {String} apps.name - name of GitLab site
 */
const getAppsGitlab = async ({
  accessToken,
  teamId,
}: {
  accessToken: string;
  teamId?: string;
}) => {
  const apps: App[] = [];

  let page = 1;
  const perPage = 10;
  let hasMorePages = true;

  // (removed: try/catch with Sentry capture that threw "Failed to get GitLab projects")
  if (teamId) {
    // case: fetch projects for group with id [teamId] in GitLab

    while (hasMorePages) {
      const params = new URLSearchParams({
        page: String(page),
        per_page: String(perPage),
      });

      const { data } = await standardRequest.get(
        `${INTEGRATION_GITLAB_API_URL}/v4/groups/${teamId}/projects`,
        {
          params,
          headers: {
            Authorization: `Bearer ${accessToken}`,
            "Accept-Encoding": "application/json",
          },
        }
      );

      data.map((a: any) => {
        apps.push({
          name: a.name,
          appId: a.id,
        });
      });

      if (data.length < perPage) {
        hasMorePages = false;
      }

      page++;
    }
  } else {
    // case: fetch projects for individual in GitLab

    const { id } = (
      await standardRequest.get(`${INTEGRATION_GITLAB_API_URL}/v4/user`, {
        headers: {
          Authorization: `Bearer ${accessToken}`,
          "Accept-Encoding": "application/json",
        },
      })
    ).data;

    while (hasMorePages) {
      const params = new URLSearchParams({
        page: String(page),
        per_page: String(perPage),
      });

      const { data } = await standardRequest.get(
        `${INTEGRATION_GITLAB_API_URL}/v4/users/${id}/projects`,
        {
          params,
          headers: {
            Authorization: `Bearer ${accessToken}`,
            "Accept-Encoding": "application/json",
          },
        }
      );

      data.map((a: any) => {
        apps.push({
          name: a.name,
          appId: a.id,
        });
      });

      if (data.length < perPage) {
        hasMorePages = false;
      }

      page++;
    }
  }

  return apps;
};
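getAppsGitlab repeats the same page/per_page loop twice, once for group projects and once for the user's own projects. A small helper can factor that loop out; the sketch below is a hedged illustration built on plain axios rather than code from this repository, and the function name and generic signature are assumptions.

import axios from "axios";

// Fetch every page of a GitLab-style paginated endpoint.
// The loop mirrors the perPage logic above: it stops when a page
// comes back shorter than perPage.
const fetchAllPages = async <T>(
  url: string,
  accessToken: string,
  perPage = 10
): Promise<T[]> => {
  const results: T[] = [];
  let page = 1;
  let hasMorePages = true;

  while (hasMorePages) {
    const { data } = await axios.get<T[]>(url, {
      params: { page, per_page: perPage },
      headers: { Authorization: `Bearer ${accessToken}` },
    });

    results.push(...data);
    hasMorePages = data.length >= perPage;
    page++;
  }

  return results;
};

// Example usage (URL is illustrative):
// const projects = await fetchAllPages<any>(
//   "https://gitlab.com/api/v4/users/123/projects",
//   accessToken
// );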
/**
 * Return list of projects for Supabase integration
@ -624,30 +581,23 @@ const getAppsGitlab = async ({
 * @returns {String} apps.name - name of Supabase app
 */
const getAppsSupabase = async ({ accessToken }: { accessToken: string }) => {
  // (removed: try/catch with Sentry capture that threw "Failed to get Supabase projects")
  const { data } = await standardRequest.get(
    `${INTEGRATION_SUPABASE_API_URL}/v1/projects`,
    {
      headers: {
        Authorization: `Bearer ${accessToken}`,
        "Accept-Encoding": "application/json",
      },
    }
  );

  const apps = data.map((a: any) => {
    return {
      name: a.name,
      appId: a.id,
    };
  });

  return apps;
};
@ -1,5 +1,4 @@
// (removed: `import * as Sentry from '@sentry/node';` and the default `import request from '../config/request';`)
import { standardRequest } from "../config/request";
import {
  INTEGRATION_AZURE_KEY_VAULT,
  INTEGRATION_HEROKU,
@ -12,8 +11,8 @@ import {
  INTEGRATION_VERCEL_TOKEN_URL,
  INTEGRATION_NETLIFY_TOKEN_URL,
  INTEGRATION_GITHUB_TOKEN_URL,
  INTEGRATION_GITLAB_TOKEN_URL,
} from "../variables";
import {
  getSiteURL,
  getClientIdAzure,
@ -26,8 +25,8 @@ import {
  getClientIdGitHub,
  getClientSecretGitHub,
  getClientIdGitLab,
  getClientSecretGitLab,
} from "../config";

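The import switch from a default `request` export to a named `standardRequest` export suggests that ../config/request now exposes a preconfigured shared axios instance. That file is not part of this diff, so the following is only a guess at what such an instance might look like; the timeout value and default headers are assumptions, not the project's actual configuration.

import axios from "axios";

// Possible shape of ../config/request: one shared axios instance with
// sane defaults that every integration helper reuses.
export const standardRequest = axios.create({
  timeout: 10_000, // assumed value
  headers: {
    "Content-Type": "application/json",
  },
});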
interface ExchangeCodeAzureResponse {
  token_type: string;
@ -93,49 +92,43 @@ interface ExchangeCodeGitlabResponse {
 */
const exchangeCode = async ({
  integration,
  code,
}: {
  integration: string;
  code: string;
}) => {
  let obj = {} as any;

  // (removed: try/catch with Sentry capture that threw "Failed OAuth2 code-token exchange")
  switch (integration) {
    case INTEGRATION_AZURE_KEY_VAULT:
      obj = await exchangeCodeAzure({
        code,
      });
      break;
    case INTEGRATION_HEROKU:
      obj = await exchangeCodeHeroku({
        code,
      });
      break;
    case INTEGRATION_VERCEL:
      obj = await exchangeCodeVercel({
        code,
      });
      break;
    case INTEGRATION_NETLIFY:
      obj = await exchangeCodeNetlify({
        code,
      });
      break;
    case INTEGRATION_GITHUB:
      obj = await exchangeCodeGithub({
        code,
      });
      break;
    case INTEGRATION_GITLAB:
      obj = await exchangeCodeGitlab({
        code,
      });
  }

  return obj;
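The switch above dispatches on the integration name. An equivalent, table-driven form keeps the mapping in data instead of control flow; the sketch below reuses the handlers and constants already defined or imported in this file, but the Record-based shape itself is an illustration and not the repository's implementation.

// Sketch of a table-driven alternative to the switch statement.
const codeExchangers: Record<string, (args: { code: string }) => Promise<any>> = {
  [INTEGRATION_AZURE_KEY_VAULT]: exchangeCodeAzure,
  [INTEGRATION_HEROKU]: exchangeCodeHeroku,
  [INTEGRATION_VERCEL]: exchangeCodeVercel,
  [INTEGRATION_NETLIFY]: exchangeCodeNetlify,
  [INTEGRATION_GITHUB]: exchangeCodeGithub,
  [INTEGRATION_GITLAB]: exchangeCodeGitlab,
};

const exchangeCodeViaMap = async ({
  integration,
  code,
}: {
  integration: string;
  code: string;
}) => {
  const exchange = codeExchangers[integration];
  if (!exchange) throw new Error(`Unsupported integration: ${integration}`);
  return exchange({ code });
};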
@ -143,43 +136,33 @@ const exchangeCode = async ({

/**
 * Return [accessToken] for Azure OAuth2 code-token exchange
 * @param param0
 */
const exchangeCodeAzure = async ({ code }: { code: string }) => {
  const accessExpiresAt = new Date();

  // (removed: try/catch with Sentry capture that threw "Failed OAuth2 code-token exchange with Azure")
  const res: ExchangeCodeAzureResponse = (
    await standardRequest.post(
      INTEGRATION_AZURE_TOKEN_URL,
      new URLSearchParams({
        grant_type: "authorization_code",
        code: code,
        scope: "https://vault.azure.net/.default openid offline_access",
        client_id: await getClientIdAzure(),
        client_secret: await getClientSecretAzure(),
        redirect_uri: `${await getSiteURL()}/integrations/azure-key-vault/oauth2/callback`,
      } as any)
    )
  ).data;

  accessExpiresAt.setSeconds(accessExpiresAt.getSeconds() + res.expires_in);

  return {
    accessToken: res.access_token,
    refreshToken: res.refresh_token,
    accessExpiresAt,
  };
};
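Azure returns expires_in as a token lifetime in seconds, and the code turns it into an absolute accessExpiresAt by adding it to the current time. A tiny standalone version of that arithmetic, with a worked example, for reference; the helper name is illustrative.

// expires_in = 3600 seconds -> expiry is one hour after `now`.
const toExpiryDate = (expiresInSeconds: number, now: Date = new Date()): Date => {
  const expiresAt = new Date(now);
  expiresAt.setSeconds(expiresAt.getSeconds() + expiresInSeconds);
  return expiresAt;
};

// Example: with now = 2023-01-01T00:00:00Z and expires_in = 3600,
// toExpiryDate returns 2023-01-01T01:00:00Z.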
/**
|
||||
* Return [accessToken], [accessExpiresAt], and [refreshToken] for Heroku
|
||||
@ -191,38 +174,28 @@ const exchangeCodeAzure = async ({
|
||||
* @returns {String} obj2.refreshToken - refresh token for Heroku API
|
||||
* @returns {Date} obj2.accessExpiresAt - date of expiration for access token
|
||||
*/
|
||||
const exchangeCodeHeroku = async ({
|
||||
code
|
||||
}: {
|
||||
code: string;
|
||||
}) => {
|
||||
let res: ExchangeCodeHerokuResponse;
|
||||
const exchangeCodeHeroku = async ({ code }: { code: string }) => {
|
||||
const accessExpiresAt = new Date();
|
||||
try {
|
||||
res = (await request.post(
|
||||
|
||||
const res: ExchangeCodeHerokuResponse = (
|
||||
await standardRequest.post(
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: 'authorization_code',
|
||||
grant_type: "authorization_code",
|
||||
code: code,
|
||||
client_secret: getClientSecretHeroku()
|
||||
client_secret: await getClientSecretHeroku(),
|
||||
} as any)
|
||||
)).data;
|
||||
)
|
||||
).data;
|
||||
|
||||
accessExpiresAt.setSeconds(
|
||||
accessExpiresAt.getSeconds() + res.expires_in
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed OAuth2 code-token exchange with Heroku');
|
||||
}
|
||||
accessExpiresAt.setSeconds(accessExpiresAt.getSeconds() + res.expires_in);
|
||||
|
||||
return ({
|
||||
return {
|
||||
accessToken: res.access_token,
|
||||
refreshToken: res.refresh_token,
|
||||
accessExpiresAt
|
||||
});
|
||||
}
|
||||
accessExpiresAt,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Return [accessToken], [accessExpiresAt], and [refreshToken] for Vercel
|
||||
@ -235,30 +208,23 @@ const exchangeCodeHeroku = async ({
|
||||
* @returns {Date} obj2.accessExpiresAt - date of expiration for access token
|
||||
*/
|
||||
const exchangeCodeVercel = async ({ code }: { code: string }) => {
|
||||
let res: ExchangeCodeVercelResponse;
|
||||
try {
|
||||
res = (
|
||||
await request.post(
|
||||
INTEGRATION_VERCEL_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
code: code,
|
||||
client_id: getClientIdVercel(),
|
||||
client_secret: getClientSecretVercel(),
|
||||
redirect_uri: `${getSiteURL()}/integrations/vercel/oauth2/callback`
|
||||
} as any)
|
||||
)
|
||||
).data;
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error(`Failed OAuth2 code-token exchange with Vercel [err=${err}]`);
|
||||
}
|
||||
const res: ExchangeCodeVercelResponse = (
|
||||
await standardRequest.post(
|
||||
INTEGRATION_VERCEL_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
code: code,
|
||||
client_id: await getClientIdVercel(),
|
||||
client_secret: await getClientSecretVercel(),
|
||||
redirect_uri: `${await getSiteURL()}/integrations/vercel/oauth2/callback`,
|
||||
} as any)
|
||||
)
|
||||
).data;
|
||||
|
||||
return {
|
||||
accessToken: res.access_token,
|
||||
refreshToken: null,
|
||||
accessExpiresAt: null,
|
||||
teamId: res.team_id
|
||||
teamId: res.team_id,
|
||||
};
|
||||
};
|
||||
|
||||
@ -273,47 +239,39 @@ const exchangeCodeVercel = async ({ code }: { code: string }) => {
|
||||
* @returns {Date} obj2.accessExpiresAt - date of expiration for access token
|
||||
*/
|
||||
const exchangeCodeNetlify = async ({ code }: { code: string }) => {
|
||||
let res: ExchangeCodeNetlifyResponse;
|
||||
let accountId;
|
||||
try {
|
||||
res = (
|
||||
await request.post(
|
||||
INTEGRATION_NETLIFY_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: 'authorization_code',
|
||||
code: code,
|
||||
client_id: getClientIdNetlify(),
|
||||
client_secret: getClientSecretNetlify(),
|
||||
redirect_uri: `${getSiteURL()}/integrations/netlify/oauth2/callback`
|
||||
} as any)
|
||||
)
|
||||
).data;
|
||||
const res: ExchangeCodeNetlifyResponse = (
|
||||
await standardRequest.post(
|
||||
INTEGRATION_NETLIFY_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: "authorization_code",
|
||||
code: code,
|
||||
client_id: await getClientIdNetlify(),
|
||||
client_secret: await getClientSecretNetlify(),
|
||||
redirect_uri: `${await getSiteURL()}/integrations/netlify/oauth2/callback`,
|
||||
} as any)
|
||||
)
|
||||
).data;
|
||||
|
||||
const res2 = await request.get('https://api.netlify.com/api/v1/sites', {
|
||||
const res2 = await standardRequest.get("https://api.netlify.com/api/v1/sites", {
|
||||
headers: {
|
||||
Authorization: `Bearer ${res.access_token}`,
|
||||
},
|
||||
});
|
||||
|
||||
const res3 = (
|
||||
await standardRequest.get("https://api.netlify.com/api/v1/accounts", {
|
||||
headers: {
|
||||
Authorization: `Bearer ${res.access_token}`
|
||||
}
|
||||
});
|
||||
Authorization: `Bearer ${res.access_token}`,
|
||||
},
|
||||
})
|
||||
).data;
|
||||
|
||||
const res3 = (
|
||||
await request.get('https://api.netlify.com/api/v1/accounts', {
|
||||
headers: {
|
||||
Authorization: `Bearer ${res.access_token}`
|
||||
}
|
||||
})
|
||||
).data;
|
||||
|
||||
accountId = res3[0].id;
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed OAuth2 code-token exchange with Netlify');
|
||||
}
|
||||
const accountId = res3[0].id;
|
||||
|
||||
return {
|
||||
accessToken: res.access_token,
|
||||
refreshToken: res.refresh_token,
|
||||
accountId
|
||||
accountId,
|
||||
};
|
||||
};
|
||||
|
||||
@ -328,33 +286,25 @@ const exchangeCodeNetlify = async ({ code }: { code: string }) => {
|
||||
* @returns {Date} obj2.accessExpiresAt - date of expiration for access token
|
||||
*/
|
||||
const exchangeCodeGithub = async ({ code }: { code: string }) => {
|
||||
let res: ExchangeCodeGithubResponse;
|
||||
try {
|
||||
res = (
|
||||
await request.get(INTEGRATION_GITHUB_TOKEN_URL, {
|
||||
params: {
|
||||
client_id: getClientIdGitHub(),
|
||||
client_secret: getClientSecretGitHub(),
|
||||
code: code,
|
||||
redirect_uri: `${getSiteURL()}/integrations/github/oauth2/callback`
|
||||
},
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
'Accept-Encoding': 'application/json'
|
||||
}
|
||||
})
|
||||
).data;
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed OAuth2 code-token exchange with Github');
|
||||
}
|
||||
const res: ExchangeCodeGithubResponse = (
|
||||
await standardRequest.get(INTEGRATION_GITHUB_TOKEN_URL, {
|
||||
params: {
|
||||
client_id: await getClientIdGitHub(),
|
||||
client_secret: await getClientSecretGitHub(),
|
||||
code: code,
|
||||
redirect_uri: `${await getSiteURL()}/integrations/github/oauth2/callback`,
|
||||
},
|
||||
headers: {
|
||||
Accept: "application/json",
|
||||
"Accept-Encoding": "application/json",
|
||||
},
|
||||
})
|
||||
).data;
|
||||
|
||||
return {
|
||||
accessToken: res.access_token,
|
||||
refreshToken: null,
|
||||
accessExpiresAt: null
|
||||
accessExpiresAt: null,
|
||||
};
|
||||
};
|
||||
|
||||
@ -369,42 +319,32 @@ const exchangeCodeGithub = async ({ code }: { code: string }) => {
|
||||
* @returns {Date} obj2.accessExpiresAt - date of expiration for access token
|
||||
*/
|
||||
const exchangeCodeGitlab = async ({ code }: { code: string }) => {
|
||||
let res: ExchangeCodeGitlabResponse;
|
||||
const accessExpiresAt = new Date();
|
||||
|
||||
try {
|
||||
res = (
|
||||
await request.post(
|
||||
INTEGRATION_GITLAB_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: 'authorization_code',
|
||||
code: code,
|
||||
client_id: getClientIdGitLab(),
|
||||
client_secret: getClientSecretGitLab(),
|
||||
redirect_uri: `${getSiteURL()}/integrations/gitlab/oauth2/callback`
|
||||
} as any),
|
||||
{
|
||||
headers: {
|
||||
"Accept-Encoding": "application/json",
|
||||
}
|
||||
}
|
||||
)
|
||||
).data;
|
||||
|
||||
accessExpiresAt.setSeconds(
|
||||
accessExpiresAt.getSeconds() + res.expires_in
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed OAuth2 code-token exchange with Gitlab');
|
||||
}
|
||||
const res: ExchangeCodeGitlabResponse = (
|
||||
await standardRequest.post(
|
||||
INTEGRATION_GITLAB_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: "authorization_code",
|
||||
code: code,
|
||||
client_id: await getClientIdGitLab(),
|
||||
client_secret: await getClientSecretGitLab(),
|
||||
redirect_uri: `${await getSiteURL()}/integrations/gitlab/oauth2/callback`,
|
||||
} as any),
|
||||
{
|
||||
headers: {
|
||||
"Accept-Encoding": "application/json",
|
||||
},
|
||||
}
|
||||
)
|
||||
).data;
|
||||
|
||||
accessExpiresAt.setSeconds(accessExpiresAt.getSeconds() + res.expires_in);
|
||||
|
||||
return {
|
||||
accessToken: res.access_token,
|
||||
refreshToken: res.refresh_token,
|
||||
accessExpiresAt
|
||||
accessExpiresAt,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
export { exchangeCode };
|
||||
|
@ -1,29 +1,24 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import request from '../config/request';
|
||||
import { standardRequest } from "../config/request";
|
||||
import { IIntegrationAuth } from "../models";
|
||||
import {
|
||||
IIntegrationAuth
|
||||
} from '../models';
|
||||
import {
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_GITLAB,
|
||||
} from '../variables';
|
||||
} from "../variables";
|
||||
import {
|
||||
INTEGRATION_AZURE_TOKEN_URL,
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
INTEGRATION_GITLAB_TOKEN_URL
|
||||
} from '../variables';
|
||||
import {
|
||||
IntegrationService
|
||||
} from '../services';
|
||||
INTEGRATION_GITLAB_TOKEN_URL,
|
||||
} from "../variables";
|
||||
import { IntegrationService } from "../services";
|
||||
import {
|
||||
getSiteURL,
|
||||
getClientIdAzure,
|
||||
getClientSecretAzure,
|
||||
getClientSecretHeroku,
|
||||
getClientIdGitLab,
|
||||
getClientSecretGitLab
|
||||
} from '../config';
|
||||
getClientSecretGitLab,
|
||||
} from "../config";
|
||||
|
||||
interface RefreshTokenAzureResponse {
  token_type: string;
@ -60,60 +55,57 @@ interface RefreshTokenGitLabResponse {
 */
const exchangeRefresh = async ({
  integrationAuth,
  refreshToken,
}: {
  integrationAuth: IIntegrationAuth;
  refreshToken: string;
}) => {
  interface TokenDetails {
    accessToken: string;
    refreshToken: string;
    accessExpiresAt: Date;
  }

  let tokenDetails: TokenDetails;

  // (removed: try/catch with Sentry capture that threw "Failed to get new OAuth2 access token")
  switch (integrationAuth.integration) {
    case INTEGRATION_AZURE_KEY_VAULT:
      tokenDetails = await exchangeRefreshAzure({
        refreshToken,
      });
      break;
    case INTEGRATION_HEROKU:
      tokenDetails = await exchangeRefreshHeroku({
        refreshToken,
      });
      break;
    case INTEGRATION_GITLAB:
      tokenDetails = await exchangeRefreshGitLab({
        refreshToken,
      });
      break;
    default:
      throw new Error("Failed to exchange token for incompatible integration");
  }

  if (
    tokenDetails?.accessToken &&
    tokenDetails?.refreshToken &&
    tokenDetails?.accessExpiresAt
  ) {
    await IntegrationService.setIntegrationAuthAccess({
      integrationAuthId: integrationAuth._id.toString(),
      accessId: null,
      accessToken: tokenDetails.accessToken,
      accessExpiresAt: tokenDetails.accessExpiresAt,
    });

    await IntegrationService.setIntegrationAuthRefresh({
      integrationAuthId: integrationAuth._id.toString(),
      refreshToken: tokenDetails.refreshToken,
    });
  }

  return tokenDetails.accessToken;
};
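exchangeRefresh both swaps the refresh token for a new access token and persists the rotated pair through IntegrationService. A typical caller would check the stored expiry before hitting the provider again; the sketch below is illustrative only, assuming the caller already holds an IIntegrationAuth document with an accessExpiresAt field plus a decrypted token pair, and the helper name and 60-second buffer are assumptions.

// Illustrative helper: refresh only when the stored access token is about to lapse.
const getFreshAccessToken = async (
  integrationAuth: IIntegrationAuth,
  storedAccessToken: string,
  storedRefreshToken: string
) => {
  const bufferMs = 60 * 1000; // refresh a minute early (assumed policy)
  const expiresAt = (integrationAuth as any).accessExpiresAt as Date | undefined;

  if (expiresAt && expiresAt.getTime() - bufferMs > Date.now()) {
    return storedAccessToken; // still valid, reuse it
  }

  // exchangeRefresh persists the rotated tokens and returns the new access token.
  return exchangeRefresh({ integrationAuth, refreshToken: storedRefreshToken });
};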
/**
|
||||
@ -124,38 +116,30 @@ const exchangeRefresh = async ({
|
||||
* @returns
|
||||
*/
|
||||
const exchangeRefreshAzure = async ({
|
||||
refreshToken
|
||||
refreshToken,
|
||||
}: {
|
||||
refreshToken: string;
|
||||
}) => {
|
||||
try {
|
||||
const accessExpiresAt = new Date();
|
||||
const { data }: { data: RefreshTokenAzureResponse } = await request.post(
|
||||
INTEGRATION_AZURE_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
client_id: getClientIdAzure(),
|
||||
scope: 'openid offline_access',
|
||||
refresh_token: refreshToken,
|
||||
grant_type: 'refresh_token',
|
||||
client_secret: getClientSecretAzure()
|
||||
} as any)
|
||||
);
|
||||
|
||||
accessExpiresAt.setSeconds(
|
||||
accessExpiresAt.getSeconds() + data.expires_in
|
||||
);
|
||||
const accessExpiresAt = new Date();
|
||||
const { data }: { data: RefreshTokenAzureResponse } = await standardRequest.post(
|
||||
INTEGRATION_AZURE_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
client_id: await getClientIdAzure(),
|
||||
scope: "openid offline_access",
|
||||
refresh_token: refreshToken,
|
||||
grant_type: "refresh_token",
|
||||
client_secret: await getClientSecretAzure(),
|
||||
} as any)
|
||||
);
|
||||
|
||||
return ({
|
||||
accessToken: data.access_token,
|
||||
refreshToken: data.refresh_token,
|
||||
accessExpiresAt
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get refresh OAuth2 access token for Azure');
|
||||
}
|
||||
}
|
||||
accessExpiresAt.setSeconds(accessExpiresAt.getSeconds() + data.expires_in);
|
||||
|
||||
return {
|
||||
accessToken: data.access_token,
|
||||
refreshToken: data.refresh_token,
|
||||
accessExpiresAt,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Return new access token by exchanging refresh token [refreshToken] for the
|
||||
@ -165,39 +149,31 @@ const exchangeRefreshAzure = async ({
|
||||
* @returns
|
||||
*/
|
||||
const exchangeRefreshHeroku = async ({
|
||||
refreshToken
|
||||
refreshToken,
|
||||
}: {
|
||||
refreshToken: string;
|
||||
}) => {
|
||||
try {
|
||||
const accessExpiresAt = new Date();
|
||||
const {
|
||||
data
|
||||
}: {
|
||||
data: RefreshTokenHerokuResponse
|
||||
} = await request.post(
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: 'refresh_token',
|
||||
refresh_token: refreshToken,
|
||||
client_secret: getClientSecretHeroku()
|
||||
} as any)
|
||||
);
|
||||
const accessExpiresAt = new Date();
|
||||
const {
|
||||
data,
|
||||
}: {
|
||||
data: RefreshTokenHerokuResponse;
|
||||
} = await standardRequest.post(
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: "refresh_token",
|
||||
refresh_token: refreshToken,
|
||||
client_secret: await getClientSecretHeroku(),
|
||||
} as any)
|
||||
);
|
||||
|
||||
accessExpiresAt.setSeconds(
|
||||
accessExpiresAt.getSeconds() + data.expires_in
|
||||
);
|
||||
accessExpiresAt.setSeconds(accessExpiresAt.getSeconds() + data.expires_in);
|
||||
|
||||
return ({
|
||||
accessToken: data.access_token,
|
||||
refreshToken: data.refresh_token,
|
||||
accessExpiresAt
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to refresh OAuth2 access token for Heroku');
|
||||
}
|
||||
return {
|
||||
accessToken: data.access_token,
|
||||
refreshToken: data.refresh_token,
|
||||
accessExpiresAt,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
@ -208,45 +184,38 @@ const exchangeRefreshHeroku = async ({
|
||||
* @returns
|
||||
*/
|
||||
const exchangeRefreshGitLab = async ({
|
||||
refreshToken
|
||||
refreshToken,
|
||||
}: {
|
||||
refreshToken: string;
|
||||
}) => {
|
||||
try {
|
||||
const accessExpiresAt = new Date();
|
||||
const {
|
||||
data
|
||||
}: {
|
||||
data: RefreshTokenGitLabResponse
|
||||
} = await request.post(
|
||||
INTEGRATION_GITLAB_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: 'refresh_token',
|
||||
refresh_token: refreshToken,
|
||||
client_id: getClientIdGitLab,
|
||||
client_secret: getClientSecretGitLab(),
|
||||
redirect_uri: `${getSiteURL()}/integrations/gitlab/oauth2/callback`
|
||||
} as any),
|
||||
{
|
||||
headers: {
|
||||
"Accept-Encoding": "application/json",
|
||||
}
|
||||
});
|
||||
const accessExpiresAt = new Date();
|
||||
const {
|
||||
data,
|
||||
}: {
|
||||
data: RefreshTokenGitLabResponse;
|
||||
} = await standardRequest.post(
|
||||
INTEGRATION_GITLAB_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: "refresh_token",
|
||||
refresh_token: refreshToken,
|
||||
client_id: await getClientIdGitLab(),
|
||||
client_secret: await getClientSecretGitLab(),
|
||||
redirect_uri: `${await getSiteURL()}/integrations/gitlab/oauth2/callback`,
|
||||
} as any),
|
||||
{
|
||||
headers: {
|
||||
"Accept-Encoding": "application/json",
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
accessExpiresAt.setSeconds(
|
||||
accessExpiresAt.getSeconds() + data.expires_in
|
||||
);
|
||||
accessExpiresAt.setSeconds(accessExpiresAt.getSeconds() + data.expires_in);
|
||||
|
||||
return ({
|
||||
accessToken: data.access_token,
|
||||
refreshToken: data.refresh_token,
|
||||
accessExpiresAt
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to refresh OAuth2 access token for GitLab');
|
||||
}
|
||||
return {
|
||||
accessToken: data.access_token,
|
||||
refreshToken: data.refresh_token,
|
||||
accessExpiresAt,
|
||||
};
|
||||
};
|
||||
|
||||
export { exchangeRefresh };
|
||||
|
@ -1,4 +1,3 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import {
|
||||
IIntegrationAuth,
|
||||
IntegrationAuth,
|
||||
@ -22,34 +21,28 @@ const revokeAccess = async ({
|
||||
accessToken: string;
|
||||
}) => {
|
||||
let deletedIntegrationAuth;
|
||||
try {
|
||||
// add any integration-specific revocation logic
|
||||
switch (integrationAuth.integration) {
|
||||
case INTEGRATION_HEROKU:
|
||||
break;
|
||||
case INTEGRATION_VERCEL:
|
||||
break;
|
||||
case INTEGRATION_NETLIFY:
|
||||
break;
|
||||
case INTEGRATION_GITHUB:
|
||||
break;
|
||||
case INTEGRATION_GITLAB:
|
||||
break;
|
||||
}
|
||||
// add any integration-specific revocation logic
|
||||
switch (integrationAuth.integration) {
|
||||
case INTEGRATION_HEROKU:
|
||||
break;
|
||||
case INTEGRATION_VERCEL:
|
||||
break;
|
||||
case INTEGRATION_NETLIFY:
|
||||
break;
|
||||
case INTEGRATION_GITHUB:
|
||||
break;
|
||||
case INTEGRATION_GITLAB:
|
||||
break;
|
||||
}
|
||||
|
||||
deletedIntegrationAuth = await IntegrationAuth.findOneAndDelete({
|
||||
_id: integrationAuth._id
|
||||
deletedIntegrationAuth = await IntegrationAuth.findOneAndDelete({
|
||||
_id: integrationAuth._id
|
||||
});
|
||||
|
||||
if (deletedIntegrationAuth) {
|
||||
await Integration.deleteMany({
|
||||
integrationAuth: deletedIntegrationAuth._id
|
||||
});
|
||||
|
||||
if (deletedIntegrationAuth) {
|
||||
await Integration.deleteMany({
|
||||
integrationAuth: deletedIntegrationAuth._id
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to delete integration authorization');
|
||||
}
|
||||
|
||||
return deletedIntegrationAuth;
|
||||
|
@ -37,8 +37,7 @@ import {
|
||||
INTEGRATION_TRAVISCI_API_URL,
|
||||
INTEGRATION_SUPABASE_API_URL
|
||||
} from "../variables";
|
||||
import request from '../config/request';
|
||||
import axios from "axios";
|
||||
import { standardRequest} from '../config/request';
|
||||
|
||||
/**
|
||||
* Sync/push [secrets] to [app] in integration named [integration]
|
||||
@ -215,7 +214,7 @@ const syncSecretsAzureKeyVault = async ({
|
||||
let result: GetAzureKeyVaultSecret[] = [];
|
||||
try {
|
||||
while (url) {
|
||||
const res = await request.get(url, {
|
||||
const res = await standardRequest.get(url, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`
|
||||
}
|
||||
@ -242,7 +241,7 @@ const syncSecretsAzureKeyVault = async ({
|
||||
lastSlashIndex = getAzureKeyVaultSecret.id.lastIndexOf('/');
|
||||
}
|
||||
|
||||
const azureKeyVaultSecret = await request.get(`${getAzureKeyVaultSecret.id}?api-version=7.3`, {
|
||||
const azureKeyVaultSecret = await standardRequest.get(`${getAzureKeyVaultSecret.id}?api-version=7.3`, {
|
||||
headers: {
|
||||
'Authorization': `Bearer ${accessToken}`
|
||||
}
|
||||
@ -308,7 +307,7 @@ const syncSecretsAzureKeyVault = async ({
|
||||
while (!isSecretSet && maxTries > 0) {
|
||||
// try to set secret
|
||||
try {
|
||||
await request.put(
|
||||
await standardRequest.put(
|
||||
`${integration.app}/secrets/${key}?api-version=7.3`,
|
||||
{
|
||||
value
|
||||
@ -325,7 +324,7 @@ const syncSecretsAzureKeyVault = async ({
|
||||
} catch (err) {
|
||||
const error: any = err;
|
||||
if (error?.response?.data?.error?.innererror?.code === 'ObjectIsDeletedButRecoverable') {
|
||||
await request.post(
|
||||
await standardRequest.post(
|
||||
`${integration.app}/deletedsecrets/${key}/recover?api-version=7.3`, {},
|
||||
{
|
||||
headers: {
|
||||
@ -355,7 +354,7 @@ const syncSecretsAzureKeyVault = async ({
|
||||
|
||||
for await (const deleteSecret of deleteSecrets) {
|
||||
const { key } = deleteSecret;
|
||||
await request.delete(`${integration.app}/secrets/${key}?api-version=7.3`, {
|
||||
await standardRequest.delete(`${integration.app}/secrets/${key}?api-version=7.3`, {
|
||||
headers: {
|
||||
'Authorization': `Bearer ${accessToken}`
|
||||
}
|
||||
@ -568,7 +567,7 @@ const syncSecretsHeroku = async ({
|
||||
}) => {
|
||||
try {
|
||||
const herokuSecrets = (
|
||||
await request.get(
|
||||
await standardRequest.get(
|
||||
`${INTEGRATION_HEROKU_API_URL}/apps/${integration.app}/config-vars`,
|
||||
{
|
||||
headers: {
|
||||
@ -586,7 +585,7 @@ const syncSecretsHeroku = async ({
|
||||
}
|
||||
});
|
||||
|
||||
await request.patch(
|
||||
await standardRequest.patch(
|
||||
`${INTEGRATION_HEROKU_API_URL}/apps/${integration.app}/config-vars`,
|
||||
secrets,
|
||||
{
|
||||
@ -642,7 +641,7 @@ const syncSecretsVercel = async ({
|
||||
: {}),
|
||||
};
|
||||
|
||||
const vercelSecrets: VercelSecret[] = (await request.get(
|
||||
const vercelSecrets: VercelSecret[] = (await standardRequest.get(
|
||||
`${INTEGRATION_VERCEL_API_URL}/v9/projects/${integration.app}/env`,
|
||||
{
|
||||
params,
|
||||
@ -675,7 +674,7 @@ const syncSecretsVercel = async ({
|
||||
for await (const vercelSecret of vercelSecrets) {
|
||||
if (vercelSecret.type === 'encrypted') {
|
||||
// case: secret is encrypted -> need to decrypt
|
||||
const decryptedSecret = (await request.get(
|
||||
const decryptedSecret = (await standardRequest.get(
|
||||
`${INTEGRATION_VERCEL_API_URL}/v9/projects/${integration.app}/env/${vercelSecret.id}`,
|
||||
{
|
||||
params,
|
||||
@ -747,7 +746,7 @@ const syncSecretsVercel = async ({
|
||||
|
||||
// Sync/push new secrets
|
||||
if (newSecrets.length > 0) {
|
||||
await request.post(
|
||||
await standardRequest.post(
|
||||
`${INTEGRATION_VERCEL_API_URL}/v10/projects/${integration.app}/env`,
|
||||
newSecrets,
|
||||
{
|
||||
@ -763,7 +762,7 @@ const syncSecretsVercel = async ({
|
||||
for await (const secret of updateSecrets) {
|
||||
if (secret.type !== 'sensitive') {
|
||||
const { id, ...updatedSecret } = secret;
|
||||
await request.patch(
|
||||
await standardRequest.patch(
|
||||
`${INTEGRATION_VERCEL_API_URL}/v9/projects/${integration.app}/env/${secret.id}`,
|
||||
updatedSecret,
|
||||
{
|
||||
@ -778,7 +777,7 @@ const syncSecretsVercel = async ({
|
||||
}
|
||||
|
||||
for await (const secret of deleteSecrets) {
|
||||
await request.delete(
|
||||
await standardRequest.delete(
|
||||
`${INTEGRATION_VERCEL_API_URL}/v9/projects/${integration.app}/env/${secret.id}`,
|
||||
{
|
||||
params,
|
||||
@ -837,7 +836,7 @@ const syncSecretsNetlify = async ({
|
||||
});
|
||||
|
||||
const res = (
|
||||
await request.get(
|
||||
await standardRequest.get(
|
||||
`${INTEGRATION_NETLIFY_API_URL}/api/v1/accounts/${integrationAuth.accountId}/env`,
|
||||
{
|
||||
params: getParams,
|
||||
@ -951,7 +950,7 @@ const syncSecretsNetlify = async ({
|
||||
});
|
||||
|
||||
if (newSecrets.length > 0) {
|
||||
await request.post(
|
||||
await standardRequest.post(
|
||||
`${INTEGRATION_NETLIFY_API_URL}/api/v1/accounts/${integrationAuth.accountId}/env`,
|
||||
newSecrets,
|
||||
{
|
||||
@ -966,7 +965,7 @@ const syncSecretsNetlify = async ({
|
||||
|
||||
if (updateSecrets.length > 0) {
|
||||
updateSecrets.forEach(async (secret: NetlifySecret) => {
|
||||
await request.patch(
|
||||
await standardRequest.patch(
|
||||
`${INTEGRATION_NETLIFY_API_URL}/api/v1/accounts/${integrationAuth.accountId}/env/${secret.key}`,
|
||||
{
|
||||
context: secret.values[0].context,
|
||||
@ -985,7 +984,7 @@ const syncSecretsNetlify = async ({
|
||||
|
||||
if (deleteSecrets.length > 0) {
|
||||
deleteSecrets.forEach(async (key: string) => {
|
||||
await request.delete(
|
||||
await standardRequest.delete(
|
||||
`${INTEGRATION_NETLIFY_API_URL}/api/v1/accounts/${integrationAuth.accountId}/env/${key}`,
|
||||
{
|
||||
params: syncParams,
|
||||
@ -1000,7 +999,7 @@ const syncSecretsNetlify = async ({
|
||||
|
||||
if (deleteSecretValues.length > 0) {
|
||||
deleteSecretValues.forEach(async (secret: NetlifySecret) => {
|
||||
await request.delete(
|
||||
await standardRequest.delete(
|
||||
`${INTEGRATION_NETLIFY_API_URL}/api/v1/accounts/${integrationAuth.accountId}/env/${secret.key}/value/${secret.values[0].id}`,
|
||||
{
|
||||
params: syncParams,
|
||||
@ -1151,7 +1150,7 @@ const syncSecretsRender = async ({
|
||||
accessToken: string;
|
||||
}) => {
|
||||
try {
|
||||
await request.put(
|
||||
await standardRequest.put(
|
||||
`${INTEGRATION_RENDER_API_URL}/v1/services/${integration.appId}/env-vars`,
|
||||
Object.keys(secrets).map((key) => ({
|
||||
key,
|
||||
@ -1203,7 +1202,7 @@ const syncSecretsRailway = async ({
|
||||
variables: secrets
|
||||
};
|
||||
|
||||
await request.post(INTEGRATION_RAILWAY_API_URL, {
|
||||
await standardRequest.post(INTEGRATION_RAILWAY_API_URL, {
|
||||
query,
|
||||
variables: {
|
||||
input,
|
||||
@ -1261,7 +1260,7 @@ const syncSecretsFlyio = async ({
|
||||
}
|
||||
`;
|
||||
|
||||
await request.post(INTEGRATION_FLYIO_API_URL, {
|
||||
await standardRequest.post(INTEGRATION_FLYIO_API_URL, {
|
||||
query: SetSecrets,
|
||||
variables: {
|
||||
input: {
|
||||
@ -1296,7 +1295,7 @@ const syncSecretsFlyio = async ({
|
||||
}
|
||||
}`;
|
||||
|
||||
const getSecretsRes = (await request.post(INTEGRATION_FLYIO_API_URL, {
|
||||
const getSecretsRes = (await standardRequest.post(INTEGRATION_FLYIO_API_URL, {
|
||||
query: GetSecrets,
|
||||
variables: {
|
||||
appName: integration.app,
|
||||
@ -1332,7 +1331,7 @@ const syncSecretsFlyio = async ({
|
||||
}
|
||||
}`;
|
||||
|
||||
await request.post(INTEGRATION_FLYIO_API_URL, {
|
||||
await standardRequest.post(INTEGRATION_FLYIO_API_URL, {
|
||||
query: DeleteSecrets,
|
||||
variables: {
|
||||
input: {
|
||||
@ -1373,7 +1372,7 @@ const syncSecretsCircleCI = async ({
|
||||
}) => {
|
||||
try {
|
||||
const circleciOrganizationDetail = (
|
||||
await request.get(`${INTEGRATION_CIRCLECI_API_URL}/v2/me/collaborations`, {
|
||||
await standardRequest.get(`${INTEGRATION_CIRCLECI_API_URL}/v2/me/collaborations`, {
|
||||
headers: {
|
||||
"Circle-Token": accessToken,
|
||||
"Accept-Encoding": "application/json",
|
||||
@ -1386,7 +1385,7 @@ const syncSecretsCircleCI = async ({
|
||||
// sync secrets to CircleCI
|
||||
Object.keys(secrets).forEach(
|
||||
async (key) =>
|
||||
await request.post(
|
||||
await standardRequest.post(
|
||||
`${INTEGRATION_CIRCLECI_API_URL}/v2/project/${slug}/${integration.app}/envvar`,
|
||||
{
|
||||
name: key,
|
||||
@ -1403,7 +1402,7 @@ const syncSecretsCircleCI = async ({
|
||||
|
||||
// get secrets from CircleCI
|
||||
const getSecretsRes = (
|
||||
await request.get(
|
||||
await standardRequest.get(
|
||||
`${INTEGRATION_CIRCLECI_API_URL}/v2/project/${slug}/${integration.app}/envvar`,
|
||||
{
|
||||
headers: {
|
||||
@ -1417,7 +1416,7 @@ const syncSecretsCircleCI = async ({
|
||||
// delete secrets from CircleCI
|
||||
getSecretsRes.forEach(async (sec: any) => {
|
||||
if (!(sec.name in secrets)) {
|
||||
await request.delete(
|
||||
await standardRequest.delete(
|
||||
`${INTEGRATION_CIRCLECI_API_URL}/v2/project/${slug}/${integration.app}/envvar/${sec.name}`,
|
||||
{
|
||||
headers: {
|
||||
@ -1454,7 +1453,7 @@ const syncSecretsTravisCI = async ({
|
||||
try {
|
||||
// get secrets from travis-ci
|
||||
const getSecretsRes = (
|
||||
await request.get(
|
||||
await standardRequest.get(
|
||||
`${INTEGRATION_TRAVISCI_API_URL}/settings/env_vars?repository_id=${integration.appId}`,
|
||||
{
|
||||
headers: {
|
||||
@ -1476,7 +1475,7 @@ const syncSecretsTravisCI = async ({
|
||||
if (!(key in getSecretsRes)) {
|
||||
// case: secret does not exist in travis ci
|
||||
// -> add secret
|
||||
await request.post(
|
||||
await standardRequest.post(
|
||||
`${INTEGRATION_TRAVISCI_API_URL}/settings/env_vars?repository_id=${integration.appId}`,
|
||||
{
|
||||
env_var: {
|
||||
@ -1495,7 +1494,7 @@ const syncSecretsTravisCI = async ({
|
||||
} else {
|
||||
// case: secret exists in travis ci
|
||||
// -> update/set secret
|
||||
await request.patch(
|
||||
await standardRequest.patch(
|
||||
`${INTEGRATION_TRAVISCI_API_URL}/settings/env_vars/${getSecretsRes[key].id}?repository_id=${getSecretsRes[key].repository_id}`,
|
||||
{
|
||||
env_var: {
|
||||
@ -1517,7 +1516,7 @@ const syncSecretsTravisCI = async ({
|
||||
for await (const key of Object.keys(getSecretsRes)) {
|
||||
if (!(key in secrets)){
|
||||
// delete secret
|
||||
await request.delete(
|
||||
await standardRequest.delete(
|
||||
`${INTEGRATION_TRAVISCI_API_URL}/settings/env_vars/${getSecretsRes[key].id}?repository_id=${getSecretsRes[key].repository_id}`,
|
||||
{
|
||||
headers: {
|
||||
@ -1554,9 +1553,15 @@ const syncSecretsGitLab = async ({
|
||||
accessToken: string;
|
||||
}) => {
|
||||
try {
|
||||
interface GitLabSecret {
|
||||
key: string;
|
||||
value: string;
|
||||
environment_scope: string;
|
||||
}
|
||||
|
||||
// get secrets from gitlab
|
||||
const getSecretsRes = (
|
||||
await request.get(
|
||||
const getSecretsRes: GitLabSecret[] = (
|
||||
await standardRequest.get(
|
||||
`${INTEGRATION_GITLAB_API_URL}/v4/projects/${integration?.appId}/variables`,
|
||||
{
|
||||
headers: {
|
||||
@ -1565,12 +1570,16 @@ const syncSecretsGitLab = async ({
|
||||
},
|
||||
}
|
||||
)
|
||||
).data;
|
||||
)
|
||||
.data
|
||||
.filter((secret: GitLabSecret) =>
|
||||
secret.environment_scope === integration.targetEnvironment
|
||||
);
|
||||
|
||||
for await (const key of Object.keys(secrets)) {
|
||||
const existingSecret = getSecretsRes.find((s: any) => s.key == key);
|
||||
if (!existingSecret) {
|
||||
await request.post(
|
||||
await standardRequest.post(
|
||||
`${INTEGRATION_GITLAB_API_URL}/v4/projects/${integration?.appId}/variables`,
|
||||
{
|
||||
key: key,
|
||||
@ -1578,7 +1587,7 @@ const syncSecretsGitLab = async ({
|
||||
protected: false,
|
||||
masked: false,
|
||||
raw: false,
|
||||
environment_scope:'*'
|
||||
environment_scope: integration.targetEnvironment
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
@ -1589,29 +1598,31 @@ const syncSecretsGitLab = async ({
|
||||
}
|
||||
)
|
||||
} else {
|
||||
// udpate secret
|
||||
await request.put(
|
||||
`${INTEGRATION_GITLAB_API_URL}/v4/projects/${integration?.appId}/variables/${existingSecret.key}`,
|
||||
{
|
||||
...existingSecret,
|
||||
value: secrets[existingSecret.key]
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
"Authorization": `Bearer ${accessToken}`,
|
||||
"Content-Type": "application/json",
|
||||
"Accept-Encoding": "application/json",
|
||||
// update secret
|
||||
if (secrets[key] !== existingSecret.value) {
|
||||
await standardRequest.put(
|
||||
`${INTEGRATION_GITLAB_API_URL}/v4/projects/${integration?.appId}/variables/${existingSecret.key}?filter[environment_scope]=${integration.targetEnvironment}`,
|
||||
{
|
||||
...existingSecret,
|
||||
value: secrets[existingSecret.key]
|
||||
},
|
||||
}
|
||||
)
|
||||
{
|
||||
headers: {
|
||||
"Authorization": `Bearer ${accessToken}`,
|
||||
"Content-Type": "application/json",
|
||||
"Accept-Encoding": "application/json",
|
||||
},
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// delete secrets
|
||||
for await (const sec of getSecretsRes) {
|
||||
if (!(sec.key in secrets)) {
|
||||
await request.delete(
|
||||
`${INTEGRATION_GITLAB_API_URL}/v4/projects/${integration?.appId}/variables/${sec.key}`,
|
||||
await standardRequest.delete(
|
||||
`${INTEGRATION_GITLAB_API_URL}/v4/projects/${integration?.appId}/variables/${sec.key}?filter[environment_scope]=${integration.targetEnvironment}`,
|
||||
{
|
||||
headers: {
|
||||
"Authorization": `Bearer ${accessToken}`,
|
||||
@ -1620,7 +1631,7 @@ const syncSecretsGitLab = async ({
|
||||
);
|
||||
}
|
||||
}
|
||||
}catch (err) {
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error("Failed to sync secrets to GitLab");
|
||||
@ -1645,7 +1656,7 @@ const syncSecretsSupabase = async ({
|
||||
accessToken: string;
|
||||
}) => {
|
||||
try {
|
||||
const { data: getSecretsRes } = await request.get(
|
||||
const { data: getSecretsRes } = await standardRequest.get(
|
||||
`${INTEGRATION_SUPABASE_API_URL}/v1/projects/${integration.appId}/secrets`,
|
||||
{
|
||||
headers: {
|
||||
@ -1665,7 +1676,7 @@ const syncSecretsSupabase = async ({
|
||||
}
|
||||
);
|
||||
|
||||
await request.post(
|
||||
await standardRequest.post(
|
||||
`${INTEGRATION_SUPABASE_API_URL}/v1/projects/${integration.appId}/secrets`,
|
||||
modifiedFormatForSecretInjection,
|
||||
{
|
||||
@ -1683,7 +1694,7 @@ const syncSecretsSupabase = async ({
|
||||
}
|
||||
});
|
||||
|
||||
await request.delete(
|
||||
await standardRequest.delete(
|
||||
`${INTEGRATION_SUPABASE_API_URL}/v1/projects/${integration.appId}/secrets`,
|
||||
{
|
||||
headers: {
|
||||
|
@ -1,4 +1,3 @@
|
||||
import * as Sentry from "@sentry/node";
|
||||
import {
|
||||
IIntegrationAuth
|
||||
} from '../models';
|
||||
@ -6,7 +5,7 @@ import {
|
||||
INTEGRATION_GITLAB,
|
||||
INTEGRATION_GITLAB_API_URL
|
||||
} from '../variables';
|
||||
import request from '../config/request';
|
||||
import { standardRequest } from '../config/request';
|
||||
|
||||
interface Team {
|
||||
name: string;
|
||||
@ -31,21 +30,15 @@ const getTeams = async ({
|
||||
}) => {
|
||||
|
||||
let teams: Team[] = [];
|
||||
try {
|
||||
switch (integrationAuth.integration) {
|
||||
case INTEGRATION_GITLAB:
|
||||
teams = await getTeamsGitLab({
|
||||
accessToken
|
||||
});
|
||||
break;
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get integration teams');
|
||||
|
||||
switch (integrationAuth.integration) {
|
||||
case INTEGRATION_GITLAB:
|
||||
teams = await getTeamsGitLab({
|
||||
accessToken
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
|
||||
return teams;
|
||||
}
|
||||
|
||||
@ -63,30 +56,24 @@ const getTeamsGitLab = async ({
|
||||
accessToken: string;
|
||||
}) => {
|
||||
let teams: Team[] = [];
|
||||
try {
|
||||
const res = (await request.get(
|
||||
`${INTEGRATION_GITLAB_API_URL}/v4/groups`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept-Encoding": "application/json"
|
||||
}
|
||||
const res = (await standardRequest.get(
|
||||
`${INTEGRATION_GITLAB_API_URL}/v4/groups`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept-Encoding": "application/json"
|
||||
}
|
||||
)).data;
|
||||
|
||||
teams = res.map((t: any) => ({
|
||||
name: t.name,
|
||||
teamId: t.id
|
||||
}));
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error("Failed to get GitLab integration teams");
|
||||
}
|
||||
}
|
||||
)).data;
|
||||
|
||||
teams = res.map((t: any) => ({
|
||||
name: t.name,
|
||||
teamId: t.id
|
||||
}));
|
||||
|
||||
return teams;
|
||||
}
|
||||
|
||||
export {
|
||||
getTeams
|
||||
}
|
||||
}
|
||||
|
@ -5,9 +5,9 @@ import { getLogger } from "../utils/logger";
import RequestError, { LogLevel } from "../utils/requestError";
import { getNodeEnv } from '../config';

export const requestErrorHandler: ErrorRequestHandler = async (error: RequestError | Error, req, res, next) => {
  if (res.headersSent) return next();
  if ((await getNodeEnv()) !== "production") {
    /* eslint-disable no-console */
    console.log(error)
    /* eslint-enable no-console */
@ -15,8 +15,8 @@ export const requestErrorHandler: ErrorRequestHandler = (error: RequestError | E

  //TODO: Find better way to type check for error. In current setting you need to cast type to get the functions and variables from RequestError
  if (!(error instanceof RequestError)) {
    error = InternalServerError({ context: { exception: error.message }, stack: error.stack });
    (await getLogger('backend-main')).log((<RequestError>error).levelName.toLowerCase(), (<RequestError>error).message)
  }

  //* Set Sentry user identification if req.user is populated
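The handler is now declared async so that the configuration getters (getNodeEnv, getLogger) can be awaited. Express 4 accepts an async error handler as long as it keeps the four-argument (error, req, res, next) signature. A minimal standalone sketch of the same shape, with a placeholder async config getter standing in for the project's own, purely for illustration:

import { ErrorRequestHandler } from "express";

// Placeholder async config getter, standing in for getNodeEnv().
const getNodeEnvExample = async () => process.env.NODE_ENV ?? "development";

export const exampleErrorHandler: ErrorRequestHandler = async (error, req, res, next) => {
  if (res.headersSent) return next(error);

  if ((await getNodeEnvExample()) !== "production") {
    console.error(error); // log the full error outside production
  }

  res.status(500).json({ message: "Internal server error" });
};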
@ -1,4 +1,3 @@
// (removed: `import * as Sentry from '@sentry/node';`)
import { Types } from 'mongoose';
import { Request, Response, NextFunction } from 'express';
import { IntegrationAuth, IWorkspace } from '../models';
@ -26,7 +26,7 @@ const requireMfaAuth = async (
  if(AUTH_TOKEN_VALUE === null) return next(BadRequestError({message: 'Missing Authorization Body in the request header'}))

  const decodedToken = <jwt.UserIDJwtPayload>(
    jwt.verify(AUTH_TOKEN_VALUE, await getJwtMfaSecret())
  );

  const user = await User.findOne({
@ -33,7 +33,7 @@ const requireServiceTokenAuth = async (
  if(AUTH_TOKEN_VALUE === null) return next(BadRequestError({message: 'Missing Authorization Body in the request header'}))

  const decodedToken = <jwt.UserIDJwtPayload>(
    jwt.verify(AUTH_TOKEN_VALUE, await getJwtServiceSecret())
  );

  const serviceToken = await ServiceToken.findOne({
@ -27,7 +27,7 @@ const requireSignupAuth = async (
  if(AUTH_TOKEN_VALUE === null) return next(BadRequestError({message: 'Missing Authorization Body in the request header'}))

  const decodedToken = <jwt.UserIDJwtPayload>(
    jwt.verify(AUTH_TOKEN_VALUE, await getJwtSignupSecret())
  );

  const user = await User.findOne({
@ -1,8 +1,6 @@
import { Request, Response, NextFunction } from 'express';
import { Types } from 'mongoose';
import { validateMembership } from '../helpers/membership';
import { validateClientForWorkspace } from '../helpers/workspace';
import { UnauthorizedRequestError } from '../utils/errors';

type req = 'params' | 'body' | 'query';

@ -31,7 +29,7 @@ const requireWorkspaceAuth = ({
  const environment = locationEnvironment ? req[locationEnvironment]?.environment : undefined;

  // validate clients
  const { membership, workspace } = await validateClientForWorkspace({
    authData: req.authData,
    workspaceId: new Types.ObjectId(workspaceId),
    environment,
@ -43,6 +41,10 @@ const requireWorkspaceAuth = ({
  if (membership) {
    req.membership = membership;
  }

  if (workspace) {
    req.workspace = workspace;
  }

  return next();
};
36  backend/src/models/folder.ts  Normal file
@ -0,0 +1,36 @@
import { Schema, Types, model } from 'mongoose';

const folderSchema = new Schema({
  name: {
    type: String,
    required: true,
  },
  workspace: {
    type: Schema.Types.ObjectId,
    ref: 'Workspace',
    required: true,
  },
  environment: {
    type: String,
    required: true,
  },
  parent: {
    type: Schema.Types.ObjectId,
    ref: 'Folder',
    required: false, // optional for root folders
  },
  path: {
    type: String,
    required: true
  },
  parentPath: {
    type: String,
    required: true,
  },
}, {
  timestamps: true
});

const Folder = model('Folder', folderSchema);

export default Folder;
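For orientation, here is a hedged sketch of how the new Folder model might be used to create a nested folder while keeping `path` and `parentPath` consistent with the helpers added in backend/src/utils/folder.ts further down. Only the field names come from the schema above; the values, the root-folder convention, and the helper name are illustrative assumptions, since the controller that actually does this is not included in this range.

```ts
import Folder from '../models/folder';

// Hypothetical example; values and root-folder convention are assumptions.
const createChildFolder = async (workspaceId: string, environment: string) => {
  const root = await Folder.create({
    name: 'root',
    workspace: workspaceId,
    environment,
    path: '/',        // ROOT_FOLDER_PATH
    parentPath: '/'
  });

  return Folder.create({
    name: 'billing',
    workspace: workspaceId,
    environment,
    parent: root._id,
    path: '/billing',
    parentPath: '/'
  });
};
```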
@ -21,6 +21,7 @@ export interface IIntegration {
|
||||
workspace: Types.ObjectId;
|
||||
environment: string;
|
||||
isActive: boolean;
|
||||
url: string;
|
||||
app: string;
|
||||
appId: string;
|
||||
owner: string;
|
||||
@ -63,6 +64,11 @@ const integrationSchema = new Schema<IIntegration>(
|
||||
type: Boolean,
|
||||
required: true,
|
||||
},
|
||||
url: {
|
||||
// for custom self-hosted integrations (e.g. self-hosted GitHub enterprise)
|
||||
type: String,
|
||||
default: null
|
||||
},
|
||||
app: {
|
||||
// name of app in provider
|
||||
type: String,
|
||||
|
@ -3,6 +3,7 @@ import {
  SECRET_SHARED,
  SECRET_PERSONAL,
} from '../variables';
import { ROOT_FOLDER_PATH } from '../utils/folder';

export interface ISecret {
  _id: Types.ObjectId;
@ -25,6 +26,8 @@ export interface ISecret {
  secretCommentTag?: string;
  secretCommentHash?: string;
  tags?: string[];
  path?: string;
  folder?: Types.ObjectId;
}

const secretSchema = new Schema<ISecret>(
@ -107,7 +110,18 @@ const secretSchema = new Schema<ISecret>(
    secretCommentHash: {
      type: String,
      required: false
    }
    },
    // the full path to the secret in relation to folders
    path: {
      type: String,
      required: false,
      default: ROOT_FOLDER_PATH
    },
    folder: {
      type: Schema.Types.ObjectId,
      ref: 'Folder',
      required: false,
    },
  },
  {
    timestamps: true
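The new `path` (defaulting to the root folder) and `folder` fields let secrets be scoped to a folder. A hedged query sketch follows; the import path and the sample path string are assumptions, and the accessor the controllers actually use is not shown in this diff.

```ts
import { Types } from 'mongoose';
import Secret from '../models/secret'; // assumed import path

// Fetch all secrets that live directly under a given folder path in one
// workspace/environment. '/billing' is an example path, not from the diff.
const getSecretsUnderPath = async (
  workspaceId: string,
  environment: string,
  path: string
) =>
  Secret.find({
    workspace: new Types.ObjectId(workspaceId),
    environment,
    path
  });
```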
@ -5,11 +5,11 @@ const router = express.Router();
|
||||
|
||||
router.get(
|
||||
'/status',
|
||||
(req: Request, res: Response) => {
|
||||
async (req: Request, res: Response) => {
|
||||
res.status(200).json({
|
||||
date: new Date(),
|
||||
message: 'Ok',
|
||||
emailConfigured: getSmtpConfigured()
|
||||
emailConfigured: await getSmtpConfigured()
|
||||
})
|
||||
}
|
||||
);
|
||||
|
@ -15,6 +15,7 @@ import password from './password';
|
||||
import stripe from './stripe';
|
||||
import integration from './integration';
|
||||
import integrationAuth from './integrationAuth';
|
||||
import secretsFolder from './secretsFolder'
|
||||
|
||||
export {
|
||||
signup,
|
||||
@ -33,5 +34,6 @@ export {
|
||||
password,
|
||||
stripe,
|
||||
integration,
|
||||
integrationAuth
|
||||
integrationAuth,
|
||||
secretsFolder
|
||||
};
|
||||
|
@ -19,6 +19,7 @@ router.post(
|
||||
router.post(
|
||||
'/verify',
|
||||
body('email').exists().trim().notEmpty(),
|
||||
body('organizationId').exists().trim().notEmpty(),
|
||||
body('code').exists().trim().notEmpty(),
|
||||
validateRequest,
|
||||
membershipOrgController.verifyUserToOrganization
|
||||
|
50  backend/src/routes/v1/secretsFolder.ts  Normal file
@ -0,0 +1,50 @@
import express, { Request, Response } from 'express';
const router = express.Router();
import {
  requireAuth,
  requireWorkspaceAuth,
  validateRequest
} from '../../middleware';
import { body, param } from 'express-validator';
import { createFolder, deleteFolder, getFolderById } from '../../controllers/v1/secretsFolderController';
import { ADMIN, MEMBER } from '../../variables';

router.post(
  '/',
  requireAuth({
    acceptedAuthModes: ['jwt']
  }),
  requireWorkspaceAuth({
    acceptedRoles: [ADMIN, MEMBER],
    locationWorkspaceId: 'body'
  }),
  body('workspaceId').exists(),
  body('environment').exists(),
  body('folderName').exists(),
  body('parentFolderId'),
  validateRequest,
  createFolder
);

router.delete(
  '/:folderId',
  requireAuth({
    acceptedAuthModes: ['jwt']
  }),
  param('folderId').exists(),
  validateRequest,
  deleteFolder
);

router.get(
  '/:folderId',
  requireAuth({
    acceptedAuthModes: ['jwt']
  }),
  param('folderId').exists(),
  validateRequest,
  getFolderById
);

export default router;
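The router above expects a JWT plus `workspaceId`, `environment` and `folderName` in the body. Below is a hedged request sketch; the base URL and the mount path (`/api/v1/secrets-folder`) are assumptions, since this diff only shows the router being exported as `secretsFolder` from the v1 routes index.

```ts
// Sketch only: base URL and mount path are NOT confirmed by this diff.
const createFolderExample = async (baseUrl: string, jwt: string) => {
  const res = await fetch(`${baseUrl}/api/v1/secrets-folder`, {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${jwt}`,
      'Content-Type': 'application/json'
    },
    body: JSON.stringify({
      workspaceId: '63cf48f0225e6955acec5eff', // example id
      environment: 'dev',
      folderName: 'billing'
      // parentFolderId omitted for a root-level folder
    })
  });
  return res.json();
};
```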
@ -1,5 +1,3 @@
|
||||
import mongoose from 'mongoose';
|
||||
import { getLogger } from '../utils/logger';
|
||||
import {
|
||||
initDatabaseHelper,
|
||||
closeDatabaseHelper
|
||||
|
@ -24,9 +24,9 @@ class Telemetry {
|
||||
/**
|
||||
* Logs telemetry enable/disable notice.
|
||||
*/
|
||||
static logTelemetryMessage = () => {
|
||||
if(!getTelemetryEnabled()){
|
||||
getLogger("backend-main").info([
|
||||
static logTelemetryMessage = async () => {
|
||||
if(!(await getTelemetryEnabled())){
|
||||
(await getLogger("backend-main")).info([
|
||||
"",
|
||||
"To improve, Infisical collects telemetry data about general usage.",
|
||||
"This helps us understand how the product is doing and guide our product development to create the best possible platform; it also helps us demonstrate growth as we support Infisical as open-source software.",
|
||||
@ -39,12 +39,12 @@ class Telemetry {
|
||||
* Return an instance of the PostHog client initialized.
|
||||
* @returns
|
||||
*/
|
||||
static getPostHogClient = () => {
|
||||
static getPostHogClient = async () => {
|
||||
let postHogClient: any;
|
||||
if (getNodeEnv() === 'production' && getTelemetryEnabled()) {
|
||||
if ((await getNodeEnv()) === 'production' && (await getTelemetryEnabled())) {
|
||||
// case: enable opt-out telemetry in production
|
||||
postHogClient = new PostHog(getPostHogProjectApiKey(), {
|
||||
host: getPostHogHost()
|
||||
postHogClient = new PostHog(await getPostHogProjectApiKey(), {
|
||||
host: await getPostHogHost()
|
||||
});
|
||||
}
|
||||
|
||||
|
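Since `getPostHogClient` is now async and only returns a client when telemetry is enabled in production, callers have to await it and guard the result. A minimal usage sketch follows; the import path, event name and properties are illustrative assumptions, not taken from this diff, while the `capture` call follows the standard posthog-node API.

```ts
import Telemetry from '../services/Telemetry'; // assumed import path

const trackSecretsPushed = async (distinctId: string, count: number) => {
  const postHogClient = await Telemetry.getPostHogClient();
  // undefined when telemetry is disabled or not running in production
  postHogClient?.capture({
    distinctId,
    event: 'secrets pushed',                  // illustrative event name
    properties: { numberOfSecrets: count }    // illustrative property
  });
};
```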
@ -3,8 +3,8 @@ import { createTerminus } from '@godaddy/terminus';
|
||||
import { getLogger } from '../utils/logger';
|
||||
|
||||
export const setUpHealthEndpoint = <T>(server: T) => {
|
||||
const onSignal = () => {
|
||||
getLogger('backend-main').info('Server is starting clean-up');
|
||||
const onSignal = async () => {
|
||||
(await getLogger('backend-main')).info('Server is starting clean-up');
|
||||
return Promise.all([
|
||||
new Promise((resolve) => {
|
||||
if (mongoose.connection && mongoose.connection.readyState == 1) {
|
||||
|
@ -3,7 +3,8 @@ import {
|
||||
SMTP_HOST_SENDGRID,
|
||||
SMTP_HOST_MAILGUN,
|
||||
SMTP_HOST_SOCKETLABS,
|
||||
SMTP_HOST_ZOHOMAIL
|
||||
SMTP_HOST_ZOHOMAIL,
|
||||
SMTP_HOST_GMAIL
|
||||
} from '../variables';
|
||||
import SMTPConnection from 'nodemailer/lib/smtp-connection';
|
||||
import * as Sentry from '@sentry/node';
|
||||
@ -15,21 +16,21 @@ import {
|
||||
getSmtpPort
|
||||
} from '../config';
|
||||
|
||||
export const initSmtp = () => {
|
||||
export const initSmtp = async () => {
|
||||
const mailOpts: SMTPConnection.Options = {
|
||||
host: getSmtpHost(),
|
||||
port: getSmtpPort()
|
||||
host: await getSmtpHost(),
|
||||
port: await getSmtpPort()
|
||||
};
|
||||
|
||||
if (getSmtpUsername() && getSmtpPassword()) {
|
||||
if ((await getSmtpUsername()) && (await getSmtpPassword())) {
|
||||
mailOpts.auth = {
|
||||
user: getSmtpUsername(),
|
||||
pass: getSmtpPassword()
|
||||
user: await getSmtpUsername(),
|
||||
pass: await getSmtpPassword()
|
||||
};
|
||||
}
|
||||
|
||||
if (getSmtpSecure() ? getSmtpSecure() : false) {
|
||||
switch (getSmtpHost()) {
|
||||
if ((await getSmtpSecure()) ? (await getSmtpSecure()) : false) {
|
||||
switch (await getSmtpHost()) {
|
||||
case SMTP_HOST_SENDGRID:
|
||||
mailOpts.requireTLS = true;
|
||||
break;
|
||||
@ -46,13 +47,19 @@ export const initSmtp = () => {
|
||||
}
|
||||
break;
|
||||
case SMTP_HOST_ZOHOMAIL:
|
||||
mailOpts.requireTLS = true;
|
||||
mailOpts.tls = {
|
||||
ciphers: 'TLSv1.2'
|
||||
}
|
||||
break;
|
||||
case SMTP_HOST_GMAIL:
|
||||
mailOpts.requireTLS = true;
|
||||
mailOpts.tls = {
|
||||
ciphers: 'TLSv1.2'
|
||||
}
|
||||
break;
|
||||
default:
|
||||
if (getSmtpHost().includes('amazonaws.com')) {
|
||||
if ((await getSmtpHost()).includes('amazonaws.com')) {
|
||||
mailOpts.tls = {
|
||||
ciphers: 'TLSv1.2'
|
||||
}
|
||||
@ -70,10 +77,10 @@ export const initSmtp = () => {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureMessage('SMTP - Successfully connected');
|
||||
})
|
||||
.catch((err) => {
|
||||
.catch(async (err) => {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(
|
||||
`SMTP - Failed to connect to ${getSmtpHost()}:${getSmtpPort()} \n\t${err}`
|
||||
`SMTP - Failed to connect to ${await getSmtpHost()}:${await getSmtpPort()} \n\t${err}`
|
||||
);
|
||||
});
|
||||
|
||||
|
@ -9,7 +9,7 @@
|
||||
<body>
|
||||
<h2>Join your organization on Infisical</h2>
|
||||
<p>{{inviterFirstName}} ({{inviterEmail}}) has invited you to their Infisical organization — {{organizationName}}</p>
|
||||
<a href="{{callback_url}}?token={{token}}&to={{email}}">Join now</a>
|
||||
<a href="{{callback_url}}?token={{token}}&to={{email}}&organization_id={{organizationId}}">Join now</a>
|
||||
<h3>What is Infisical?</h3>
|
||||
<p>Infisical is an easy-to-use end-to-end encrypted tool that enables developers to sync and manage their secrets and configs.</p>
|
||||
</body>
|
||||
|
@ -19,7 +19,7 @@ export const testWorkspaceKeyId = "63cf48f0225e6955acec5eff"
|
||||
export const plainTextWorkspaceKey = "543fef8224813a46230b0a50a46c5fb2"
|
||||
|
||||
export const createTestUserForDevelopment = async () => {
|
||||
if (getNodeEnv() === "development" || getNodeEnv() === "test") {
|
||||
if ((await getNodeEnv()) === "development" || (await getNodeEnv()) === "test") {
|
||||
const testUser = {
|
||||
_id: testUserId,
|
||||
email: testUserEmail,
|
||||
|
@ -1,7 +1,6 @@
|
||||
import nacl from 'tweetnacl';
|
||||
import util from 'tweetnacl-util';
|
||||
import AesGCM from './aes-gcm';
|
||||
import * as Sentry from '@sentry/node';
|
||||
|
||||
/**
|
||||
* Return new base64, NaCl, public-private key pair.
|
||||
@ -38,20 +37,13 @@ const encryptAsymmetric = ({
|
||||
publicKey: string;
|
||||
privateKey: string;
|
||||
}) => {
|
||||
let nonce, ciphertext;
|
||||
try {
|
||||
nonce = nacl.randomBytes(24);
|
||||
ciphertext = nacl.box(
|
||||
util.decodeUTF8(plaintext),
|
||||
nonce,
|
||||
util.decodeBase64(publicKey),
|
||||
util.decodeBase64(privateKey)
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to perform asymmetric encryption');
|
||||
}
|
||||
const nonce = nacl.randomBytes(24);
|
||||
const ciphertext = nacl.box(
|
||||
util.decodeUTF8(plaintext),
|
||||
nonce,
|
||||
util.decodeBase64(publicKey),
|
||||
util.decodeBase64(privateKey)
|
||||
);
|
||||
|
||||
return {
|
||||
ciphertext: util.encodeBase64(ciphertext),
|
||||
@ -80,19 +72,12 @@ const decryptAsymmetric = ({
|
||||
publicKey: string;
|
||||
privateKey: string;
|
||||
}): string => {
|
||||
let plaintext: any;
|
||||
try {
|
||||
plaintext = nacl.box.open(
|
||||
util.decodeBase64(ciphertext),
|
||||
util.decodeBase64(nonce),
|
||||
util.decodeBase64(publicKey),
|
||||
util.decodeBase64(privateKey)
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to perform asymmetric decryption');
|
||||
}
|
||||
const plaintext: any = nacl.box.open(
|
||||
util.decodeBase64(ciphertext),
|
||||
util.decodeBase64(nonce),
|
||||
util.decodeBase64(publicKey),
|
||||
util.decodeBase64(privateKey)
|
||||
);
|
||||
|
||||
return util.encodeUTF8(plaintext);
|
||||
};
|
||||
@ -110,17 +95,8 @@ const encryptSymmetric = ({
|
||||
plaintext: string;
|
||||
key: string;
|
||||
}) => {
|
||||
let ciphertext, iv, tag;
|
||||
try {
|
||||
const obj = AesGCM.encrypt(plaintext, key);
|
||||
ciphertext = obj.ciphertext;
|
||||
iv = obj.iv;
|
||||
tag = obj.tag;
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to perform symmetric encryption');
|
||||
}
|
||||
const obj = AesGCM.encrypt(plaintext, key);
|
||||
const { ciphertext, iv, tag } = obj;
|
||||
|
||||
return {
|
||||
ciphertext,
|
||||
@ -150,15 +126,7 @@ const decryptSymmetric = ({
|
||||
tag: string;
|
||||
key: string;
|
||||
}): string => {
|
||||
let plaintext;
|
||||
try {
|
||||
plaintext = AesGCM.decrypt(ciphertext, iv, tag, key);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to perform symmetric decryption');
|
||||
}
|
||||
|
||||
const plaintext = AesGCM.decrypt(ciphertext, iv, tag, key);
|
||||
return plaintext;
|
||||
};
|
||||
|
||||
|
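The asymmetric helpers above now let tweetnacl errors propagate instead of wrapping them with Sentry (the crypto test changes near the end of this diff update the expected messages accordingly). A round-trip usage sketch follows, assuming the module exports these helpers under the names shown and that `encryptAsymmetric` also returns the base64 nonce, which the hunk truncates before showing.

```ts
import nacl from 'tweetnacl';
import util from 'tweetnacl-util';
// assumed relative path; the diff does not show where this module lives
import { encryptAsymmetric, decryptAsymmetric } from '../utils/crypto';

// Generate a throwaway base64 key pair in the format the helpers expect.
const { publicKey, secretKey } = nacl.box.keyPair();
const pub = util.encodeBase64(publicKey);
const priv = util.encodeBase64(secretKey);

const { ciphertext, nonce } = encryptAsymmetric({
  plaintext: 'hello world',
  publicKey: pub,
  privateKey: priv
});

// With the try/catch wrappers gone, bad input now surfaces tweetnacl's own
// errors ('invalid encoding', 'bad public key size', ...).
const roundTripped = decryptAsymmetric({
  ciphertext,
  nonce,
  publicKey: pub,
  privateKey: priv
}); // 'hello world'
```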
87  backend/src/utils/folder.ts  Normal file
@ -0,0 +1,87 @@
import Folder from "../models/folder";

export const ROOT_FOLDER_PATH = "/"

export const getFolderPath = async (folderId: string) => {
  let currentFolder = await Folder.findById(folderId);
  const pathSegments = [];

  while (currentFolder) {
    pathSegments.unshift(currentFolder.name);
    currentFolder = currentFolder.parent ? await Folder.findById(currentFolder.parent) : null;
  }

  return '/' + pathSegments.join('/');
};

/**
  Returns the folder ID associated with the specified secret path in the given workspace and environment.
  @param workspaceId - The ID of the workspace to search in.
  @param environment - The environment to search in.
  @param secretPath - The secret path to search for.
  @returns The folder ID associated with the specified secret path, or undefined if the path is at the root folder level.
  @throws Error if the specified secret path is not found.
*/
export const getFolderIdFromPath = async (workspaceId: string, environment: string, secretPath: string) => {
  const secretPathParts = secretPath.split("/").filter(path => path != "")
  if (secretPathParts.length <= 1) {
    return undefined // root folder, so no folder id
  }

  const folderId = await Folder.find({ path: secretPath, workspace: workspaceId, environment: environment })
  if (!folderId) {
    throw Error("Secret path not found")
  }

  return folderId
}

/**
 * Cleans up a path by removing empty parts, duplicate slashes,
 * and ensuring it starts with ROOT_FOLDER_PATH.
 * @param path - The input path to clean up.
 * @returns The cleaned-up path string.
 */
export const normalizePath = (path: string) => {
  if (path == undefined || path == "" || path == ROOT_FOLDER_PATH) {
    return ROOT_FOLDER_PATH
  }

  const pathParts = path.split("/").filter(part => part != "")
  const cleanPathString = ROOT_FOLDER_PATH + pathParts.join("/")

  return cleanPathString
}

export const getFoldersInDirectory = async (workspaceId: string, environment: string, pathString: string) => {
  const normalizedPath = normalizePath(pathString)
  const foldersInDirectory = await Folder.find({
    workspace: workspaceId,
    environment: environment,
    parentPath: normalizedPath,
  });

  return foldersInDirectory;
}

/**
 * Returns the parent path of the given path.
 * @param path - The input path.
 * @returns The parent path string.
 */
export const getParentPath = (path: string) => {
  const normalizedPath = normalizePath(path);
  const folderParts = normalizedPath.split('/').filter(part => part !== '');

  let folderParent = ROOT_FOLDER_PATH;
  if (folderParts.length > 1) {
    folderParent = ROOT_FOLDER_PATH + folderParts.slice(0, folderParts.length - 1).join('/');
  }

  return folderParent;
}

export const validateFolderName = (folderName: string) => {
  const validNameRegex = /^[a-zA-Z0-9-_]+$/;
  return validNameRegex.test(folderName);
}
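The pure path helpers above are easy to sanity-check in isolation. The expected values below follow directly from the implementations shown; only the sample inputs are made up.

```ts
import { normalizePath, getParentPath, validateFolderName } from '../utils/folder';

// normalizePath collapses empty segments and guarantees a leading slash
normalizePath('');                    // "/"
normalizePath('billing//invoices/');  // "/billing/invoices"

// getParentPath walks one level up, bottoming out at the root
getParentPath('/billing/invoices');   // "/billing"
getParentPath('/billing');            // "/"

// folder names are restricted to alphanumerics, dashes and underscores
validateFolderName('prod_secrets');   // true
validateFolderName('prod secrets');   // false
```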
@ -12,7 +12,7 @@ const logFormat = (prefix: string) => combine(
  printf((info) => `${info.timestamp} ${info.label} ${info.level}: ${info.message}`)
);

const createLoggerWithLabel = (level: string, label: string) => {
const createLoggerWithLabel = async (level: string, label: string) => {
  const _level = level.toLowerCase() || 'info'
  //* Always add Console output to transports
  const _transports: any[] = [
@ -25,10 +25,10 @@ const createLoggerWithLabel = (level: string, label: string) => {
    })
  ]
  //* Add LokiTransport if it's enabled
  if(getLokiHost() !== undefined){
  if((await getLokiHost()) !== undefined){
    _transports.push(
      new LokiTransport({
        host: getLokiHost(),
        host: await getLokiHost(),
        handleExceptions: true,
        handleRejections: true,
        batching: true,
@ -40,7 +40,7 @@ const createLoggerWithLabel = (level: string, label: string) => {
        labels: {
          app: process.env.npm_package_name,
          version: process.env.npm_package_version,
          environment: getNodeEnv()
          environment: await getNodeEnv()
        },
        onConnectionError: (err: Error)=> console.error('Connection error while connecting to Loki Server.\n', err)
      })
@ -58,12 +58,10 @@ const createLoggerWithLabel = (level: string, label: string) => {
  });
}

const DEFAULT_LOGGERS = {
  "backend-main": createLoggerWithLabel('info', '[IFSC:backend-main]'),
  "database": createLoggerWithLabel('info', '[IFSC:database]'),
}
type LoggerNames = keyof typeof DEFAULT_LOGGERS

export const getLogger = (loggerName: LoggerNames) => {
  return DEFAULT_LOGGERS[loggerName]
export const getLogger = async (loggerName: 'backend-main' | 'database') => {
  const logger = {
    "backend-main": await createLoggerWithLabel('info', '[IFSC:backend-main]'),
    "database": await createLoggerWithLabel('info', '[IFSC:database]'),
  }
  return logger[loggerName]
}
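Because `getLogger` now builds its winston loggers asynchronously, every call site has to await it before logging, which is exactly what the other hunks in this range do. A minimal usage sketch (the message and port variable are illustrative):

```ts
import { getLogger } from '../utils/logger';

const logStartup = async (port: number) => {
  // getLogger resolves to a ready winston logger instance
  (await getLogger('backend-main')).info(`Server listening on port ${port}`);
};
```

One consequence of this hunk worth noting: the loggers are now rebuilt on every call instead of being cached once in DEFAULT_LOGGERS as before.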
@ -81,13 +81,13 @@ export default class RequestError extends Error{
|
||||
return obj
|
||||
}
|
||||
|
||||
public format(req: Request){
|
||||
public async format(req: Request){
|
||||
let _context = Object.assign({
|
||||
stacktrace: this.stacktrace
|
||||
}, this.context)
|
||||
|
||||
//* Omit sensitive information from context that can leak internal workings of this program if user is not developer
|
||||
if(!getVerboseErrorOutput()){
|
||||
if(!(await getVerboseErrorOutput())){
|
||||
_context = this._omit(_context, [
|
||||
'stacktrace',
|
||||
'exception',
|
||||
|
@ -55,7 +55,8 @@ import {
|
||||
SMTP_HOST_SENDGRID,
|
||||
SMTP_HOST_MAILGUN,
|
||||
SMTP_HOST_SOCKETLABS,
|
||||
SMTP_HOST_ZOHOMAIL
|
||||
SMTP_HOST_ZOHOMAIL,
|
||||
SMTP_HOST_GMAIL
|
||||
} from './smtp';
|
||||
import { PLAN_STARTER, PLAN_PRO } from './stripe';
|
||||
import {
|
||||
@ -138,6 +139,7 @@ export {
|
||||
SMTP_HOST_MAILGUN,
|
||||
SMTP_HOST_SOCKETLABS,
|
||||
SMTP_HOST_ZOHOMAIL,
|
||||
SMTP_HOST_GMAIL,
|
||||
PLAN_STARTER,
|
||||
PLAN_PRO,
|
||||
MFA_METHOD_EMAIL,
|
||||
|
@ -61,7 +61,7 @@ const INTEGRATION_CIRCLECI_API_URL = "https://circleci.com/api";
|
||||
const INTEGRATION_TRAVISCI_API_URL = "https://api.travis-ci.com";
|
||||
const INTEGRATION_SUPABASE_API_URL = 'https://api.supabase.com';
|
||||
|
||||
const getIntegrationOptions = () => {
|
||||
const getIntegrationOptions = async () => {
|
||||
const INTEGRATION_OPTIONS = [
|
||||
{
|
||||
name: 'Heroku',
|
||||
@ -69,7 +69,7 @@ const getIntegrationOptions = () => {
|
||||
image: 'Heroku.png',
|
||||
isAvailable: true,
|
||||
type: 'oauth',
|
||||
clientId: getClientIdHeroku(),
|
||||
clientId: await getClientIdHeroku(),
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
@ -79,7 +79,7 @@ const getIntegrationOptions = () => {
|
||||
isAvailable: true,
|
||||
type: 'oauth',
|
||||
clientId: '',
|
||||
clientSlug: getClientSlugVercel(),
|
||||
clientSlug: await getClientSlugVercel(),
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
@ -88,7 +88,7 @@ const getIntegrationOptions = () => {
|
||||
image: 'Netlify.png',
|
||||
isAvailable: true,
|
||||
type: 'oauth',
|
||||
clientId: getClientIdNetlify(),
|
||||
clientId: await getClientIdNetlify(),
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
@ -97,7 +97,7 @@ const getIntegrationOptions = () => {
|
||||
image: 'GitHub.png',
|
||||
isAvailable: true,
|
||||
type: 'oauth',
|
||||
clientId: getClientIdGitHub(),
|
||||
clientId: await getClientIdGitHub(),
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
@ -151,7 +151,7 @@ const getIntegrationOptions = () => {
|
||||
image: 'Microsoft Azure.png',
|
||||
isAvailable: true,
|
||||
type: 'oauth',
|
||||
clientId: getClientIdAzure(),
|
||||
clientId: await getClientIdAzure(),
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
@ -169,7 +169,7 @@ const getIntegrationOptions = () => {
|
||||
image: 'GitLab.png',
|
||||
isAvailable: true,
|
||||
type: 'custom',
|
||||
clientId: getClientIdGitLab(),
|
||||
clientId: await getClientIdGitLab(),
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
|
@ -2,10 +2,12 @@ const SMTP_HOST_SENDGRID = 'smtp.sendgrid.net';
const SMTP_HOST_MAILGUN = 'smtp.mailgun.org';
const SMTP_HOST_SOCKETLABS = 'smtp.socketlabs.com';
const SMTP_HOST_ZOHOMAIL = 'smtp.zoho.com';
const SMTP_HOST_GMAIL = 'smtp.gmail.com';

export {
  SMTP_HOST_SENDGRID,
  SMTP_HOST_MAILGUN,
  SMTP_HOST_SOCKETLABS,
  SMTP_HOST_ZOHOMAIL
  SMTP_HOST_ZOHOMAIL,
  SMTP_HOST_GMAIL
}
@ -1,408 +1,408 @@
|
||||
import request from 'supertest'
|
||||
import main from '../../../../src/index'
|
||||
import { testWorkspaceId } from '../../../../src/utils/addDevelopmentUser';
|
||||
import { deleteAllSecrets, getAllSecrets, getJWTFromTestUser, getServiceTokenFromTestUser } from '../../../helper/helper';
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const batchSecretRequestWithNoOverride = require('../../../data/batch-secrets-no-override.json');
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const batchSecretRequestWithOverrides = require('../../../data/batch-secrets-with-overrides.json');
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const batchSecretRequestWithBadRequest = require('../../../data/batch-create-secrets-with-some-missing-params.json');
|
||||
|
||||
let server: any;
|
||||
beforeAll(async () => {
|
||||
server = await main;
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
server.close();
|
||||
});
|
||||
|
||||
describe("GET /api/v2/secrets", () => {
|
||||
describe("Get secrets via JTW", () => {
|
||||
test("should create secrets and read secrets via jwt", async () => {
|
||||
try {
|
||||
// get login details
|
||||
const loginResponse = await getJWTFromTestUser()
|
||||
|
||||
// create creates
|
||||
const createSecretsResponse = await request(server)
|
||||
.post("/api/v2/secrets/batch")
|
||||
.set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
.send({
|
||||
workspaceId: testWorkspaceId,
|
||||
environment: "dev",
|
||||
requests: batchSecretRequestWithNoOverride
|
||||
})
|
||||
|
||||
expect(createSecretsResponse.statusCode).toBe(200)
|
||||
|
||||
|
||||
const getSecrets = await request(server)
|
||||
.get("/api/v2/secrets")
|
||||
.set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
.query({
|
||||
workspaceId: testWorkspaceId,
|
||||
environment: "dev"
|
||||
})
|
||||
|
||||
expect(getSecrets.statusCode).toBe(200)
|
||||
expect(getSecrets.body).toHaveProperty("secrets")
|
||||
expect(getSecrets.body.secrets).toHaveLength(3)
|
||||
expect(getSecrets.body.secrets).toBeInstanceOf(Array);
|
||||
|
||||
getSecrets.body.secrets.forEach((secret: any) => {
|
||||
expect(secret).toHaveProperty('_id');
|
||||
expect(secret._id).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('version');
|
||||
expect(secret.version).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('workspace');
|
||||
expect(secret.workspace).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('type');
|
||||
expect(secret.type).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('tags');
|
||||
expect(secret.tags).toHaveLength(0);
|
||||
|
||||
expect(secret).toHaveProperty('environment');
|
||||
expect(secret.environment).toEqual("dev");
|
||||
|
||||
expect(secret).toHaveProperty('secretKeyCiphertext');
|
||||
expect(secret.secretKeyCiphertext).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretKeyIV');
|
||||
expect(secret.secretKeyIV).toBeTruthy();
|
||||
// import request from 'supertest'
|
||||
// import main from '../../../../src/index'
|
||||
// import { testWorkspaceId } from '../../../../src/utils/addDevelopmentUser';
|
||||
// import { deleteAllSecrets, getAllSecrets, getJWTFromTestUser, getServiceTokenFromTestUser } from '../../../helper/helper';
|
||||
// // eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
// const batchSecretRequestWithNoOverride = require('../../../data/batch-secrets-no-override.json');
|
||||
// // eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
// const batchSecretRequestWithOverrides = require('../../../data/batch-secrets-with-overrides.json');
|
||||
|
||||
// // eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
// const batchSecretRequestWithBadRequest = require('../../../data/batch-create-secrets-with-some-missing-params.json');
|
||||
|
||||
// let server: any;
|
||||
// beforeAll(async () => {
|
||||
// server = await main;
|
||||
// });
|
||||
|
||||
// afterAll(async () => {
|
||||
// server.close();
|
||||
// });
|
||||
|
||||
// describe("GET /api/v2/secrets", () => {
|
||||
// describe("Get secrets via JTW", () => {
|
||||
// test("should create secrets and read secrets via jwt", async () => {
|
||||
// try {
|
||||
// // get login details
|
||||
// const loginResponse = await getJWTFromTestUser()
|
||||
|
||||
// // create creates
|
||||
// const createSecretsResponse = await request(server)
|
||||
// .post("/api/v2/secrets/batch")
|
||||
// .set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
// .send({
|
||||
// workspaceId: testWorkspaceId,
|
||||
// environment: "dev",
|
||||
// requests: batchSecretRequestWithNoOverride
|
||||
// })
|
||||
|
||||
// expect(createSecretsResponse.statusCode).toBe(200)
|
||||
|
||||
|
||||
// const getSecrets = await request(server)
|
||||
// .get("/api/v2/secrets")
|
||||
// .set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
// .query({
|
||||
// workspaceId: testWorkspaceId,
|
||||
// environment: "dev"
|
||||
// })
|
||||
|
||||
// expect(getSecrets.statusCode).toBe(200)
|
||||
// expect(getSecrets.body).toHaveProperty("secrets")
|
||||
// expect(getSecrets.body.secrets).toHaveLength(3)
|
||||
// expect(getSecrets.body.secrets).toBeInstanceOf(Array);
|
||||
|
||||
// getSecrets.body.secrets.forEach((secret: any) => {
|
||||
// expect(secret).toHaveProperty('_id');
|
||||
// expect(secret._id).toBeTruthy();
|
||||
|
||||
// expect(secret).toHaveProperty('version');
|
||||
// expect(secret.version).toBeTruthy();
|
||||
|
||||
// expect(secret).toHaveProperty('workspace');
|
||||
// expect(secret.workspace).toBeTruthy();
|
||||
|
||||
// expect(secret).toHaveProperty('type');
|
||||
// expect(secret.type).toBeTruthy();
|
||||
|
||||
// expect(secret).toHaveProperty('tags');
|
||||
// expect(secret.tags).toHaveLength(0);
|
||||
|
||||
// expect(secret).toHaveProperty('environment');
|
||||
// expect(secret.environment).toEqual("dev");
|
||||
|
||||
// expect(secret).toHaveProperty('secretKeyCiphertext');
|
||||
// expect(secret.secretKeyCiphertext).toBeTruthy();
|
||||
|
||||
// expect(secret).toHaveProperty('secretKeyIV');
|
||||
// expect(secret.secretKeyIV).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretKeyTag');
|
||||
expect(secret.secretKeyTag).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretKeyTag');
|
||||
// expect(secret.secretKeyTag).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretValueCiphertext');
|
||||
expect(secret.secretValueCiphertext).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretValueCiphertext');
|
||||
// expect(secret.secretValueCiphertext).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretValueIV');
|
||||
expect(secret.secretValueIV).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretValueIV');
|
||||
// expect(secret.secretValueIV).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretValueTag');
|
||||
expect(secret.secretValueTag).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretValueTag');
|
||||
// expect(secret.secretValueTag).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretCommentCiphertext');
|
||||
expect(secret.secretCommentCiphertext).toBeFalsy();
|
||||
// expect(secret).toHaveProperty('secretCommentCiphertext');
|
||||
// expect(secret.secretCommentCiphertext).toBeFalsy();
|
||||
|
||||
expect(secret).toHaveProperty('secretCommentIV');
|
||||
expect(secret.secretCommentIV).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretCommentTag');
|
||||
expect(secret.secretCommentTag).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('createdAt');
|
||||
expect(secret.createdAt).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('updatedAt');
|
||||
expect(secret.updatedAt).toBeTruthy();
|
||||
});
|
||||
} finally {
|
||||
// clean up
|
||||
await deleteAllSecrets()
|
||||
}
|
||||
})
|
||||
|
||||
test("Get secrets via jwt when personal overrides exist", async () => {
|
||||
try {
|
||||
// get login details
|
||||
const loginResponse = await getJWTFromTestUser()
|
||||
|
||||
// create creates
|
||||
const createSecretsResponse = await request(server)
|
||||
.post("/api/v2/secrets/batch")
|
||||
.set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
.send({
|
||||
workspaceId: testWorkspaceId,
|
||||
environment: "dev",
|
||||
requests: batchSecretRequestWithOverrides
|
||||
})
|
||||
|
||||
expect(createSecretsResponse.statusCode).toBe(200)
|
||||
|
||||
const getSecrets = await request(server)
|
||||
.get("/api/v2/secrets")
|
||||
.set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
.query({
|
||||
workspaceId: testWorkspaceId,
|
||||
environment: "dev"
|
||||
})
|
||||
// expect(secret).toHaveProperty('secretCommentIV');
|
||||
// expect(secret.secretCommentIV).toBeTruthy();
|
||||
|
||||
// expect(secret).toHaveProperty('secretCommentTag');
|
||||
// expect(secret.secretCommentTag).toBeTruthy();
|
||||
|
||||
// expect(secret).toHaveProperty('createdAt');
|
||||
// expect(secret.createdAt).toBeTruthy();
|
||||
|
||||
// expect(secret).toHaveProperty('updatedAt');
|
||||
// expect(secret.updatedAt).toBeTruthy();
|
||||
// });
|
||||
// } finally {
|
||||
// // clean up
|
||||
// await deleteAllSecrets()
|
||||
// }
|
||||
// })
|
||||
|
||||
// test("Get secrets via jwt when personal overrides exist", async () => {
|
||||
// try {
|
||||
// // get login details
|
||||
// const loginResponse = await getJWTFromTestUser()
|
||||
|
||||
// // create creates
|
||||
// const createSecretsResponse = await request(server)
|
||||
// .post("/api/v2/secrets/batch")
|
||||
// .set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
// .send({
|
||||
// workspaceId: testWorkspaceId,
|
||||
// environment: "dev",
|
||||
// requests: batchSecretRequestWithOverrides
|
||||
// })
|
||||
|
||||
// expect(createSecretsResponse.statusCode).toBe(200)
|
||||
|
||||
// const getSecrets = await request(server)
|
||||
// .get("/api/v2/secrets")
|
||||
// .set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
// .query({
|
||||
// workspaceId: testWorkspaceId,
|
||||
// environment: "dev"
|
||||
// })
|
||||
|
||||
expect(getSecrets.statusCode).toBe(200)
|
||||
expect(getSecrets.body).toHaveProperty("secrets")
|
||||
expect(getSecrets.body.secrets).toHaveLength(2)
|
||||
expect(getSecrets.body.secrets).toBeInstanceOf(Array);
|
||||
|
||||
getSecrets.body.secrets.forEach((secret: any) => {
|
||||
expect(secret).toHaveProperty('_id');
|
||||
expect(secret._id).toBeTruthy();
|
||||
// expect(getSecrets.statusCode).toBe(200)
|
||||
// expect(getSecrets.body).toHaveProperty("secrets")
|
||||
// expect(getSecrets.body.secrets).toHaveLength(2)
|
||||
// expect(getSecrets.body.secrets).toBeInstanceOf(Array);
|
||||
|
||||
// getSecrets.body.secrets.forEach((secret: any) => {
|
||||
// expect(secret).toHaveProperty('_id');
|
||||
// expect(secret._id).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('version');
|
||||
expect(secret.version).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('version');
|
||||
// expect(secret.version).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('workspace');
|
||||
expect(secret.workspace).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('workspace');
|
||||
// expect(secret.workspace).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('type');
|
||||
expect(secret.type).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('type');
|
||||
// expect(secret.type).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('tags');
|
||||
expect(secret.tags).toHaveLength(0);
|
||||
// expect(secret).toHaveProperty('tags');
|
||||
// expect(secret.tags).toHaveLength(0);
|
||||
|
||||
expect(secret).toHaveProperty('environment');
|
||||
expect(secret.environment).toEqual("dev");
|
||||
// expect(secret).toHaveProperty('environment');
|
||||
// expect(secret.environment).toEqual("dev");
|
||||
|
||||
expect(secret).toHaveProperty('secretKeyCiphertext');
|
||||
expect(secret.secretKeyCiphertext).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretKeyCiphertext');
|
||||
// expect(secret.secretKeyCiphertext).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretKeyIV');
|
||||
expect(secret.secretKeyIV).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretKeyIV');
|
||||
// expect(secret.secretKeyIV).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretKeyTag');
|
||||
expect(secret.secretKeyTag).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretKeyTag');
|
||||
// expect(secret.secretKeyTag).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretValueCiphertext');
|
||||
expect(secret.secretValueCiphertext).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretValueCiphertext');
|
||||
// expect(secret.secretValueCiphertext).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretValueIV');
|
||||
expect(secret.secretValueIV).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretValueIV');
|
||||
// expect(secret.secretValueIV).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretValueTag');
|
||||
expect(secret.secretValueTag).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretValueTag');
|
||||
// expect(secret.secretValueTag).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretCommentCiphertext');
|
||||
expect(secret.secretCommentCiphertext).toBeFalsy();
|
||||
// expect(secret).toHaveProperty('secretCommentCiphertext');
|
||||
// expect(secret.secretCommentCiphertext).toBeFalsy();
|
||||
|
||||
expect(secret).toHaveProperty('secretCommentIV');
|
||||
expect(secret.secretCommentIV).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretCommentIV');
|
||||
// expect(secret.secretCommentIV).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretCommentTag');
|
||||
expect(secret.secretCommentTag).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretCommentTag');
|
||||
// expect(secret.secretCommentTag).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('createdAt');
|
||||
expect(secret.createdAt).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('createdAt');
|
||||
// expect(secret.createdAt).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('updatedAt');
|
||||
expect(secret.updatedAt).toBeTruthy();
|
||||
});
|
||||
} finally {
|
||||
// clean up
|
||||
await deleteAllSecrets()
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe("fetch secrets via service token", () => {
|
||||
test("Get secrets via jwt when personal overrides exist", async () => {
|
||||
try {
|
||||
// get login details
|
||||
const loginResponse = await getJWTFromTestUser()
|
||||
|
||||
// create creates
|
||||
const createSecretsResponse = await request(server)
|
||||
.post("/api/v2/secrets/batch")
|
||||
.set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
.send({
|
||||
workspaceId: testWorkspaceId,
|
||||
environment: "dev",
|
||||
requests: batchSecretRequestWithOverrides
|
||||
})
|
||||
// expect(secret).toHaveProperty('updatedAt');
|
||||
// expect(secret.updatedAt).toBeTruthy();
|
||||
// });
|
||||
// } finally {
|
||||
// // clean up
|
||||
// await deleteAllSecrets()
|
||||
// }
|
||||
// })
|
||||
// })
|
||||
|
||||
// describe("fetch secrets via service token", () => {
|
||||
// test("Get secrets via jwt when personal overrides exist", async () => {
|
||||
// try {
|
||||
// // get login details
|
||||
// const loginResponse = await getJWTFromTestUser()
|
||||
|
||||
// // create creates
|
||||
// const createSecretsResponse = await request(server)
|
||||
// .post("/api/v2/secrets/batch")
|
||||
// .set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
// .send({
|
||||
// workspaceId: testWorkspaceId,
|
||||
// environment: "dev",
|
||||
// requests: batchSecretRequestWithOverrides
|
||||
// })
|
||||
|
||||
expect(createSecretsResponse.statusCode).toBe(200)
|
||||
|
||||
// now use the service token to fetch secrets
|
||||
const serviceToken = await getServiceTokenFromTestUser()
|
||||
// expect(createSecretsResponse.statusCode).toBe(200)
|
||||
|
||||
// // now use the service token to fetch secrets
|
||||
// const serviceToken = await getServiceTokenFromTestUser()
|
||||
|
||||
const getSecrets = await request(server)
|
||||
.get("/api/v2/secrets")
|
||||
.set('Authorization', `Bearer ${serviceToken}`)
|
||||
.query({
|
||||
workspaceId: testWorkspaceId,
|
||||
environment: "dev"
|
||||
})
|
||||
|
||||
expect(getSecrets.statusCode).toBe(200)
|
||||
expect(getSecrets.body).toHaveProperty("secrets")
|
||||
expect(getSecrets.body.secrets).toHaveLength(2)
|
||||
expect(getSecrets.body.secrets).toBeInstanceOf(Array);
|
||||
// const getSecrets = await request(server)
|
||||
// .get("/api/v2/secrets")
|
||||
// .set('Authorization', `Bearer ${serviceToken}`)
|
||||
// .query({
|
||||
// workspaceId: testWorkspaceId,
|
||||
// environment: "dev"
|
||||
// })
|
||||
|
||||
// expect(getSecrets.statusCode).toBe(200)
|
||||
// expect(getSecrets.body).toHaveProperty("secrets")
|
||||
// expect(getSecrets.body.secrets).toHaveLength(2)
|
||||
// expect(getSecrets.body.secrets).toBeInstanceOf(Array);
|
||||
|
||||
getSecrets.body.secrets.forEach((secret: any) => {
|
||||
expect(secret).toHaveProperty('_id');
|
||||
expect(secret._id).toBeTruthy();
|
||||
// getSecrets.body.secrets.forEach((secret: any) => {
|
||||
// expect(secret).toHaveProperty('_id');
|
||||
// expect(secret._id).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('version');
|
||||
expect(secret.version).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('version');
|
||||
// expect(secret.version).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('workspace');
|
||||
expect(secret.workspace).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('workspace');
|
||||
// expect(secret.workspace).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('type');
|
||||
expect(secret.type).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('type');
|
||||
// expect(secret.type).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('tags');
|
||||
expect(secret.tags).toHaveLength(0);
|
||||
// expect(secret).toHaveProperty('tags');
|
||||
// expect(secret.tags).toHaveLength(0);
|
||||
|
||||
expect(secret).toHaveProperty('environment');
|
||||
expect(secret.environment).toEqual("dev");
|
||||
// expect(secret).toHaveProperty('environment');
|
||||
// expect(secret.environment).toEqual("dev");
|
||||
|
||||
expect(secret).toHaveProperty('secretKeyCiphertext');
|
||||
expect(secret.secretKeyCiphertext).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretKeyCiphertext');
|
||||
// expect(secret.secretKeyCiphertext).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretKeyIV');
|
||||
expect(secret.secretKeyIV).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretKeyIV');
|
||||
// expect(secret.secretKeyIV).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretKeyTag');
|
||||
expect(secret.secretKeyTag).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretKeyTag');
|
||||
// expect(secret.secretKeyTag).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretValueCiphertext');
|
||||
expect(secret.secretValueCiphertext).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretValueCiphertext');
|
||||
// expect(secret.secretValueCiphertext).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretValueIV');
|
||||
expect(secret.secretValueIV).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretValueIV');
|
||||
// expect(secret.secretValueIV).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretValueTag');
|
||||
expect(secret.secretValueTag).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretValueTag');
|
||||
// expect(secret.secretValueTag).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretCommentCiphertext');
|
||||
expect(secret.secretCommentCiphertext).toBeFalsy();
|
||||
// expect(secret).toHaveProperty('secretCommentCiphertext');
|
||||
// expect(secret.secretCommentCiphertext).toBeFalsy();
|
||||
|
||||
expect(secret).toHaveProperty('secretCommentIV');
|
||||
expect(secret.secretCommentIV).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretCommentIV');
|
||||
// expect(secret.secretCommentIV).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretCommentTag');
|
||||
expect(secret.secretCommentTag).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretCommentTag');
|
||||
// expect(secret.secretCommentTag).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('createdAt');
|
||||
expect(secret.createdAt).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('createdAt');
|
||||
// expect(secret.createdAt).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('updatedAt');
|
||||
expect(secret.updatedAt).toBeTruthy();
|
||||
});
|
||||
} finally {
|
||||
// clean up
|
||||
await deleteAllSecrets()
|
||||
}
|
||||
})
|
||||
|
||||
test("should create secrets and read secrets via service token when no overrides", async () => {
|
||||
try {
|
||||
// get login details
|
||||
const loginResponse = await getJWTFromTestUser()
|
||||
|
||||
// create secrets
|
||||
const createSecretsResponse = await request(server)
|
||||
.post("/api/v2/secrets/batch")
|
||||
.set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
.send({
|
||||
workspaceId: testWorkspaceId,
|
||||
environment: "dev",
|
||||
requests: batchSecretRequestWithNoOverride
|
||||
})
|
||||
|
||||
expect(createSecretsResponse.statusCode).toBe(200)
|
||||
// expect(secret).toHaveProperty('updatedAt');
|
||||
// expect(secret.updatedAt).toBeTruthy();
|
||||
// });
|
||||
// } finally {
|
||||
// // clean up
|
||||
// await deleteAllSecrets()
|
||||
// }
|
||||
// })
|
||||
|
||||
// test("should create secrets and read secrets via service token when no overrides", async () => {
|
||||
// try {
|
||||
// // get login details
|
||||
// const loginResponse = await getJWTFromTestUser()
|
||||
|
||||
// // create secrets
|
||||
// const createSecretsResponse = await request(server)
|
||||
// .post("/api/v2/secrets/batch")
|
||||
// .set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
// .send({
|
||||
// workspaceId: testWorkspaceId,
|
||||
// environment: "dev",
|
||||
// requests: batchSecretRequestWithNoOverride
|
||||
// })
|
||||
|
||||
// expect(createSecretsResponse.statusCode).toBe(200)
|
||||
|
||||
|
||||
// now use the service token to fetch secrets
|
||||
const serviceToken = await getServiceTokenFromTestUser()
|
||||
// // now use the service token to fetch secrets
|
||||
// const serviceToken = await getServiceTokenFromTestUser()
|
||||
|
||||
const getSecrets = await request(server)
|
||||
.get("/api/v2/secrets")
|
||||
.set('Authorization', `Bearer ${serviceToken}`)
|
||||
.query({
|
||||
workspaceId: testWorkspaceId,
|
||||
environment: "dev"
|
||||
})
|
||||
// const getSecrets = await request(server)
|
||||
// .get("/api/v2/secrets")
|
||||
// .set('Authorization', `Bearer ${serviceToken}`)
|
||||
// .query({
|
||||
// workspaceId: testWorkspaceId,
|
||||
// environment: "dev"
|
||||
// })
|
||||
|
||||
expect(getSecrets.statusCode).toBe(200)
|
||||
expect(getSecrets.body).toHaveProperty("secrets")
|
||||
expect(getSecrets.body.secrets).toHaveLength(3)
|
||||
expect(getSecrets.body.secrets).toBeInstanceOf(Array);
|
||||
// expect(getSecrets.statusCode).toBe(200)
|
||||
// expect(getSecrets.body).toHaveProperty("secrets")
|
||||
// expect(getSecrets.body.secrets).toHaveLength(3)
|
||||
// expect(getSecrets.body.secrets).toBeInstanceOf(Array);
|
||||
|
||||
getSecrets.body.secrets.forEach((secret: any) => {
|
||||
expect(secret).toHaveProperty('_id');
|
||||
expect(secret._id).toBeTruthy();
|
||||
// getSecrets.body.secrets.forEach((secret: any) => {
|
||||
// expect(secret).toHaveProperty('_id');
|
||||
// expect(secret._id).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('version');
|
||||
expect(secret.version).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('version');
|
||||
// expect(secret.version).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('workspace');
|
||||
expect(secret.workspace).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('workspace');
|
||||
// expect(secret.workspace).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('type');
|
||||
expect(secret.type).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('type');
|
||||
// expect(secret.type).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('tags');
|
||||
expect(secret.tags).toHaveLength(0);
|
||||
// expect(secret).toHaveProperty('tags');
|
||||
// expect(secret.tags).toHaveLength(0);
|
||||
|
||||
expect(secret).toHaveProperty('environment');
|
||||
expect(secret.environment).toEqual("dev");
|
||||
// expect(secret).toHaveProperty('environment');
|
||||
// expect(secret.environment).toEqual("dev");
|
||||
|
||||
expect(secret).toHaveProperty('secretKeyCiphertext');
|
||||
expect(secret.secretKeyCiphertext).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretKeyCiphertext');
|
||||
// expect(secret.secretKeyCiphertext).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretKeyIV');
|
||||
expect(secret.secretKeyIV).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretKeyIV');
|
||||
// expect(secret.secretKeyIV).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretKeyTag');
|
||||
expect(secret.secretKeyTag).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretKeyTag');
|
||||
// expect(secret.secretKeyTag).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretValueCiphertext');
|
||||
expect(secret.secretValueCiphertext).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretValueCiphertext');
|
||||
// expect(secret.secretValueCiphertext).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretValueIV');
|
||||
expect(secret.secretValueIV).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretValueTag');
|
||||
expect(secret.secretValueTag).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretCommentCiphertext');
|
||||
expect(secret.secretCommentCiphertext).toBeFalsy();
|
||||
|
||||
expect(secret).toHaveProperty('secretCommentIV');
|
||||
expect(secret.secretCommentIV).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretCommentTag');
|
||||
expect(secret.secretCommentTag).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('createdAt');
|
||||
expect(secret.createdAt).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('updatedAt');
|
||||
expect(secret.updatedAt).toBeTruthy();
|
||||
});
|
||||
} finally {
|
||||
// clean up
|
||||
await deleteAllSecrets()
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe("create secrets via JWT", () => {
|
||||
test("Create secrets via jwt when some requests have missing required parameters", async () => {
|
||||
// get login details
|
||||
const loginResponse = await getJWTFromTestUser()
|
||||
|
||||
// create creates
|
||||
const createSecretsResponse = await request(server)
|
||||
.post("/api/v2/secrets/batch")
|
||||
.set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
.send({
|
||||
workspaceId: testWorkspaceId,
|
||||
environment: "dev",
|
||||
requests: batchSecretRequestWithBadRequest
|
||||
})
|
||||
|
||||
const allSecretsInDB = await getAllSecrets()
|
||||
// expect(secret).toHaveProperty('secretValueIV');
|
||||
// expect(secret.secretValueIV).toBeTruthy();
|
||||
|
||||
// expect(secret).toHaveProperty('secretValueTag');
|
||||
// expect(secret.secretValueTag).toBeTruthy();
|
||||
|
||||
// expect(secret).toHaveProperty('secretCommentCiphertext');
|
||||
// expect(secret.secretCommentCiphertext).toBeFalsy();
|
||||
|
||||
// expect(secret).toHaveProperty('secretCommentIV');
|
||||
// expect(secret.secretCommentIV).toBeTruthy();
|
||||
|
||||
// expect(secret).toHaveProperty('secretCommentTag');
|
||||
// expect(secret.secretCommentTag).toBeTruthy();
|
||||
|
||||
// expect(secret).toHaveProperty('createdAt');
|
||||
// expect(secret.createdAt).toBeTruthy();
|
||||
|
||||
// expect(secret).toHaveProperty('updatedAt');
|
||||
// expect(secret.updatedAt).toBeTruthy();
|
||||
// });
|
||||
// } finally {
|
||||
// // clean up
|
||||
// await deleteAllSecrets()
|
||||
// }
|
||||
// })
|
||||
// })
|
||||
|
||||
// describe("create secrets via JWT", () => {
|
||||
// test("Create secrets via jwt when some requests have missing required parameters", async () => {
|
||||
// // get login details
|
||||
// const loginResponse = await getJWTFromTestUser()
|
||||
|
||||
// // create creates
|
||||
// const createSecretsResponse = await request(server)
|
||||
// .post("/api/v2/secrets/batch")
|
||||
// .set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
// .send({
|
||||
// workspaceId: testWorkspaceId,
|
||||
// environment: "dev",
|
||||
// requests: batchSecretRequestWithBadRequest
|
||||
// })
|
||||
|
||||
// const allSecretsInDB = await getAllSecrets()
|
||||
|
||||
expect(createSecretsResponse.statusCode).toBe(500) // TODO should be set to 400
|
||||
expect(allSecretsInDB).toHaveLength(0)
|
||||
})
|
||||
})
|
||||
})
|
||||
// expect(createSecretsResponse.statusCode).toBe(500) // TODO should be set to 400
|
||||
// expect(allSecretsInDB).toHaveLength(0)
|
||||
// })
|
||||
// })
|
||||
// })
|
@ -28,14 +28,14 @@ describe('Crypto', () => {
|
||||
test('should throw error if publicKey is undefined', () => {
|
||||
expect(() => {
|
||||
encryptAsymmetric({ plaintext, publicKey, privateKey });
|
||||
}).toThrowError('Failed to perform asymmetric encryption');
|
||||
}).toThrowError('invalid encoding');
|
||||
});
|
||||
|
||||
test('should throw error if publicKey is empty string', () => {
|
||||
publicKey = '';
|
||||
expect(() => {
encryptAsymmetric({ plaintext, publicKey, privateKey });
}).toThrowError('Failed to perform asymmetric encryption');
}).toThrowError('bad public key size');
});
});

@@ -47,14 +47,14 @@ describe('Crypto', () => {
test('should throw error if privateKey is undefined', () => {
expect(() => {
encryptAsymmetric({ plaintext, publicKey, privateKey });
}).toThrowError('Failed to perform asymmetric encryption');
}).toThrowError('invalid encoding');
});

test('should throw error if privateKey is empty string', () => {
privateKey = '';
expect(() => {
encryptAsymmetric({ plaintext, publicKey, privateKey });
}).toThrowError('Failed to perform asymmetric encryption');
}).toThrowError('bad secret key size');
});
});

@@ -66,7 +66,7 @@ describe('Crypto', () => {
test('should throw error if plaintext is undefined', () => {
expect(() => {
encryptAsymmetric({ plaintext, publicKey, privateKey });
}).toThrowError('Failed to perform asymmetric encryption');
}).toThrowError('expected string');
});

test('should encrypt plaintext containing special characters', () => {
@@ -130,7 +130,7 @@ describe('Crypto', () => {
publicKey,
privateKey
});
}).toThrowError('Failed to perform asymmetric decryption');
}).toThrowError('invalid encoding');
});

test('should throw error if nonce is modified', () => {
@@ -149,7 +149,7 @@ describe('Crypto', () => {
publicKey,
privateKey
});
}).toThrowError('Failed to perform asymmetric decryption');
}).toThrowError('invalid encoding');
});
});
});
@@ -170,7 +170,7 @@ describe('Crypto', () => {
const invalidKey = 'invalid-key';
expect(() => {
encryptSymmetric({ plaintext, key: invalidKey });
}).toThrowError('Failed to perform symmetric encryption');
}).toThrowError('Invalid key length');
});

test('should throw an error when invalid key is provided', () => {
@@ -179,7 +179,7 @@ describe('Crypto', () => {

expect(() => {
encryptSymmetric({ plaintext, key: invalidKey });
}).toThrowError('Failed to perform symmetric encryption');
}).toThrowError('Invalid key length');
});
});

@@ -209,7 +209,7 @@ describe('Crypto', () => {
tag,
key
});
}).toThrowError('Failed to perform symmetric decryption');
}).toThrowError('Unsupported state or unable to authenticate data');
});

test('should fail if iv is modified', () => {
@@ -221,7 +221,7 @@ describe('Crypto', () => {
tag,
key
});
}).toThrowError('Failed to perform symmetric decryption');
}).toThrowError('Unsupported state or unable to authenticate data');
});

test('should fail if tag is modified', () => {
@@ -233,7 +233,7 @@ describe('Crypto', () => {
tag: modifiedTag,
key
});
}).toThrowError('Failed to perform symmetric decryption');
}).toThrowError(/Invalid authentication tag length: \d+/);
});

test('should throw an error when decryption fails', () => {
@@ -245,7 +245,7 @@ describe('Crypto', () => {
tag,
key: invalidKey
});
}).toThrowError('Failed to perform symmetric decryption');
}).toThrowError('Invalid key length');
});
});
});
cli/.infisicalignore (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
bea0ff6e05a4de73a5db625d4ae181a015b50855:frontend/components/utilities/attemptLogin.js:stripe-access-token:147
bea0ff6e05a4de73a5db625d4ae181a015b50855:backend/src/json/integrations.json:generic-api-key:5
1961b92340e5d2613acae528b886c842427ce5d0:frontend/components/utilities/attemptLogin.js:stripe-access-token:148
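Each entry above is a finding fingerprint of the form commit:path:rule-id:line. The sketch below shows one plausible way such an entry could be parsed; the struct and function names are hypothetical and this is not the CLI's actual implementation.

package main

import (
	"fmt"
	"strings"
)

// ignoreEntry is a hypothetical representation of one .infisicalignore line.
type ignoreEntry struct {
	Commit string
	Path   string
	RuleID string
	Line   string
}

// parseIgnoreEntry splits a fingerprint of the form commit:path:rule-id:line.
func parseIgnoreEntry(s string) (ignoreEntry, error) {
	parts := strings.Split(strings.TrimSpace(s), ":")
	if len(parts) != 4 {
		return ignoreEntry{}, fmt.Errorf("unexpected fingerprint format: %q", s)
	}
	return ignoreEntry{Commit: parts[0], Path: parts[1], RuleID: parts[2], Line: parts[3]}, nil
}

func main() {
	e, err := parseIgnoreEntry("bea0ff6e05a4de73a5db625d4ae181a015b50855:backend/src/json/integrations.json:generic-api-key:5")
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("%+v\n", e)
}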
cli/config/allowlist.go (Normal file, 85 lines added)
@@ -0,0 +1,85 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package config

import (
	"regexp"
	"strings"
)

// Allowlist allows a rule to be ignored for specific
// regexes, paths, and/or commits
type Allowlist struct {
	// Short human readable description of the allowlist.
	Description string

	// Regexes is slice of content regular expressions that are allowed to be ignored.
	Regexes []*regexp.Regexp

	// RegexTarget
	RegexTarget string

	// Paths is a slice of path regular expressions that are allowed to be ignored.
	Paths []*regexp.Regexp

	// Commits is a slice of commit SHAs that are allowed to be ignored.
	Commits []string

	// StopWords is a slice of stop words that are allowed to be ignored.
	// This targets the _secret_, not the content of the regex match like the
	// Regexes slice.
	StopWords []string
}

// CommitAllowed returns true if the commit is allowed to be ignored.
func (a *Allowlist) CommitAllowed(c string) bool {
	if c == "" {
		return false
	}
	for _, commit := range a.Commits {
		if commit == c {
			return true
		}
	}
	return false
}

// PathAllowed returns true if the path is allowed to be ignored.
func (a *Allowlist) PathAllowed(path string) bool {
	return anyRegexMatch(path, a.Paths)
}

// RegexAllowed returns true if the regex is allowed to be ignored.
func (a *Allowlist) RegexAllowed(s string) bool {
	return anyRegexMatch(s, a.Regexes)
}

func (a *Allowlist) ContainsStopWord(s string) bool {
	s = strings.ToLower(s)
	for _, stopWord := range a.StopWords {
		if strings.Contains(s, strings.ToLower(stopWord)) {
			return true
		}
	}
	return false
}
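A short usage sketch of the Allowlist type above, written as if it lived alongside allowlist.go in package config. PathAllowed and RegexAllowed rely on anyRegexMatch, a package helper that is not part of this diff; the values below are illustrative only, not taken from the repository.

package config

import (
	"fmt"
	"regexp"
)

// exampleAllowlistUsage is illustrative only; it is not part of the diff.
func exampleAllowlistUsage() {
	allow := Allowlist{
		Description: "ignore test fixtures and known dummy keys",
		Commits:     []string{"bea0ff6e05a4de73a5db625d4ae181a015b50855"},
		Regexes:     []*regexp.Regexp{regexp.MustCompile(`EXAMPLE_KEY_[0-9]+`)},
		Paths:       []*regexp.Regexp{regexp.MustCompile(`_test\.go$`)},
		StopWords:   []string{"example"},
	}

	fmt.Println(allow.CommitAllowed("bea0ff6e05a4de73a5db625d4ae181a015b50855")) // true: exact SHA match
	fmt.Println(allow.PathAllowed("cli/config/config_test.go"))                  // true: matches _test\.go$
	fmt.Println(allow.RegexAllowed("token=EXAMPLE_KEY_42"))                      // true: content regex matches
	fmt.Println(allow.ContainsStopWord("my-EXAMPLE-secret"))                     // true: stop-word check is case-insensitive
}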
cli/config/allowlist_test.go (Normal file, 115 lines added)
@@ -0,0 +1,115 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package config

import (
	"regexp"
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestCommitAllowed(t *testing.T) {
	tests := []struct {
		allowlist     Allowlist
		commit        string
		commitAllowed bool
	}{
		{
			allowlist: Allowlist{
				Commits: []string{"commitA"},
			},
			commit:        "commitA",
			commitAllowed: true,
		},
		{
			allowlist: Allowlist{
				Commits: []string{"commitB"},
			},
			commit:        "commitA",
			commitAllowed: false,
		},
		{
			allowlist: Allowlist{
				Commits: []string{"commitB"},
			},
			commit:        "",
			commitAllowed: false,
		},
	}
	for _, tt := range tests {
		assert.Equal(t, tt.commitAllowed, tt.allowlist.CommitAllowed(tt.commit))
	}
}

func TestRegexAllowed(t *testing.T) {
	tests := []struct {
		allowlist    Allowlist
		secret       string
		regexAllowed bool
	}{
		{
			allowlist: Allowlist{
				Regexes: []*regexp.Regexp{regexp.MustCompile("matchthis")},
			},
			secret:       "a secret: matchthis, done",
			regexAllowed: true,
		},
		{
			allowlist: Allowlist{
				Regexes: []*regexp.Regexp{regexp.MustCompile("matchthis")},
			},
			secret:       "a secret",
			regexAllowed: false,
		},
	}
	for _, tt := range tests {
		assert.Equal(t, tt.regexAllowed, tt.allowlist.RegexAllowed(tt.secret))
	}
}

func TestPathAllowed(t *testing.T) {
	tests := []struct {
		allowlist   Allowlist
		path        string
		pathAllowed bool
	}{
		{
			allowlist: Allowlist{
				Paths: []*regexp.Regexp{regexp.MustCompile("path")},
			},
			path:        "a path",
			pathAllowed: true,
		},
		{
			allowlist: Allowlist{
				Paths: []*regexp.Regexp{regexp.MustCompile("path")},
			},
			path:        "a ???",
			pathAllowed: false,
		},
	}
	for _, tt := range tests {
		assert.Equal(t, tt.pathAllowed, tt.allowlist.PathAllowed(tt.path))
	}
}
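The file above covers CommitAllowed, RegexAllowed, and PathAllowed, but not ContainsStopWord. A sketch of a matching table-driven test in the same style might look like the following; it is not part of the diff.

package config

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestContainsStopWord(t *testing.T) {
	tests := []struct {
		allowlist    Allowlist
		secret       string
		containsWord bool
	}{
		{
			allowlist: Allowlist{
				StopWords: []string{"example"},
			},
			secret:       "an EXAMPLE secret",
			containsWord: true, // comparison is case-insensitive
		},
		{
			allowlist: Allowlist{
				StopWords: []string{"example"},
			},
			secret:       "a real secret",
			containsWord: false,
		},
	}
	for _, tt := range tests {
		assert.Equal(t, tt.containsWord, tt.allowlist.ContainsStopWord(tt.secret))
	}
}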
cli/config/config.go (Normal file, 279 lines added)
@@ -0,0 +1,279 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package config

import (
	_ "embed"
	"fmt"
	"regexp"
	"strings"

	"github.com/rs/zerolog/log"
	"github.com/spf13/viper"
)

//go:embed infisical-scan.toml
var DefaultConfig string

// use to keep track of how many configs we can extend
// yea I know, globals bad
var extendDepth int

const maxExtendDepth = 2

const DefaultScanConfigFileName = ".infisical-scan.toml"
const DefaultScanConfigEnvName = "INFISICAL_SCAN_CONFIG"
const DefaultInfisicalIgnoreFineName = ".infisicalignore"

// ViperConfig is the config struct used by the Viper config package
// to parse the config file. This struct does not include regular expressions.
// It is used as an intermediary to convert the Viper config to the Config struct.
type ViperConfig struct {
	Description string
	Extend      Extend
	Rules       []struct {
		ID          string
		Description string
		Entropy     float64
		SecretGroup int
		Regex       string
		Keywords    []string
		Path        string
		Tags        []string

		Allowlist struct {
			RegexTarget string
			Regexes     []string
			Paths       []string
			Commits     []string
			StopWords   []string
		}
	}
	Allowlist struct {
		RegexTarget string
		Regexes     []string
		Paths       []string
		Commits     []string
		StopWords   []string
	}
}

// Config is a configuration struct that contains rules and an allowlist if present.
type Config struct {
	Extend      Extend
	Path        string
	Description string
	Rules       map[string]Rule
	Allowlist   Allowlist
	Keywords    []string

	// used to keep sarif results consistent
	orderedRules []string
}

// Extend is a struct that allows users to define how they want their
// configuration extended by other configuration files.
type Extend struct {
	Path       string
	URL        string
	UseDefault bool
}

func (vc *ViperConfig) Translate() (Config, error) {
	var (
		keywords     []string
		orderedRules []string
	)
	rulesMap := make(map[string]Rule)

	for _, r := range vc.Rules {
		var allowlistRegexes []*regexp.Regexp
		for _, a := range r.Allowlist.Regexes {
			allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
		}
		var allowlistPaths []*regexp.Regexp
		for _, a := range r.Allowlist.Paths {
			allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
		}

		if r.Keywords == nil {
			r.Keywords = []string{}
		} else {
			for _, k := range r.Keywords {
				keywords = append(keywords, strings.ToLower(k))
			}
		}

		if r.Tags == nil {
			r.Tags = []string{}
		}

		var configRegex *regexp.Regexp
		var configPathRegex *regexp.Regexp
		if r.Regex == "" {
			configRegex = nil
		} else {
			configRegex = regexp.MustCompile(r.Regex)
		}
		if r.Path == "" {
			configPathRegex = nil
		} else {
			configPathRegex = regexp.MustCompile(r.Path)
		}
		r := Rule{
			Description: r.Description,
			RuleID:      r.ID,
			Regex:       configRegex,
			Path:        configPathRegex,
			SecretGroup: r.SecretGroup,
			Entropy:     r.Entropy,
			Tags:        r.Tags,
			Keywords:    r.Keywords,
			Allowlist: Allowlist{
				RegexTarget: r.Allowlist.RegexTarget,
				Regexes:     allowlistRegexes,
				Paths:       allowlistPaths,
				Commits:     r.Allowlist.Commits,
				StopWords:   r.Allowlist.StopWords,
			},
		}
		orderedRules = append(orderedRules, r.RuleID)

		if r.Regex != nil && r.SecretGroup > r.Regex.NumSubexp() {
			return Config{}, fmt.Errorf("%s invalid regex secret group %d, max regex secret group %d", r.Description, r.SecretGroup, r.Regex.NumSubexp())
		}
		rulesMap[r.RuleID] = r
	}
	var allowlistRegexes []*regexp.Regexp
	for _, a := range vc.Allowlist.Regexes {
		allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
	}
	var allowlistPaths []*regexp.Regexp
	for _, a := range vc.Allowlist.Paths {
		allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
	}
	c := Config{
		Description: vc.Description,
		Extend:      vc.Extend,
		Rules:       rulesMap,
		Allowlist: Allowlist{
			RegexTarget: vc.Allowlist.RegexTarget,
			Regexes:     allowlistRegexes,
			Paths:       allowlistPaths,
			Commits:     vc.Allowlist.Commits,
			StopWords:   vc.Allowlist.StopWords,
		},
		Keywords:     keywords,
		orderedRules: orderedRules,
	}

	if maxExtendDepth != extendDepth {
		// disallow both usedefault and path from being set
		if c.Extend.Path != "" && c.Extend.UseDefault {
			log.Fatal().Msg("unable to load config due to extend.path and extend.useDefault being set")
		}
		if c.Extend.UseDefault {
			c.extendDefault()
		} else if c.Extend.Path != "" {
			c.extendPath()
		}

	}

	return c, nil
}

func (c *Config) OrderedRules() []Rule {
	var orderedRules []Rule
	for _, id := range c.orderedRules {
		if _, ok := c.Rules[id]; ok {
			orderedRules = append(orderedRules, c.Rules[id])
		}
	}
	return orderedRules
}

func (c *Config) extendDefault() {
	extendDepth++
	viper.SetConfigType("toml")
	if err := viper.ReadConfig(strings.NewReader(DefaultConfig)); err != nil {
		log.Fatal().Msgf("failed to load extended config, err: %s", err)
		return
	}
	defaultViperConfig := ViperConfig{}
	if err := viper.Unmarshal(&defaultViperConfig); err != nil {
		log.Fatal().Msgf("failed to load extended config, err: %s", err)
		return
	}
	cfg, err := defaultViperConfig.Translate()
	if err != nil {
		log.Fatal().Msgf("failed to load extended config, err: %s", err)
		return
	}
	log.Debug().Msg("extending config with default config")
	c.extend(cfg)

}

func (c *Config) extendPath() {
	extendDepth++
	viper.SetConfigFile(c.Extend.Path)
	if err := viper.ReadInConfig(); err != nil {
		log.Fatal().Msgf("failed to load extended config, err: %s", err)
		return
	}
	extensionViperConfig := ViperConfig{}
	if err := viper.Unmarshal(&extensionViperConfig); err != nil {
		log.Fatal().Msgf("failed to load extended config, err: %s", err)
		return
	}
	cfg, err := extensionViperConfig.Translate()
	if err != nil {
		log.Fatal().Msgf("failed to load extended config, err: %s", err)
		return
	}
	log.Debug().Msgf("extending config with %s", c.Extend.Path)
	c.extend(cfg)
}

func (c *Config) extendURL() {
	// TODO
}

func (c *Config) extend(extensionConfig Config) {
	for ruleID, rule := range extensionConfig.Rules {
		if _, ok := c.Rules[ruleID]; !ok {
			log.Trace().Msgf("adding %s to base config", ruleID)
			c.Rules[ruleID] = rule
			c.Keywords = append(c.Keywords, rule.Keywords...)
		}
	}

	// append allowlists, not attempting to merge
	c.Allowlist.Commits = append(c.Allowlist.Commits,
		extensionConfig.Allowlist.Commits...)
	c.Allowlist.Paths = append(c.Allowlist.Paths,
		extensionConfig.Allowlist.Paths...)
	c.Allowlist.Regexes = append(c.Allowlist.Regexes,
		extensionConfig.Allowlist.Regexes...)
}
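To see how these pieces fit together, here is a minimal sketch, assuming it sits in package config next to config.go: an inline TOML document in the shape ViperConfig expects is read with viper, converted by Translate, and the resulting rules are walked in declaration order via OrderedRules. The TOML content, rule ID, and regex are invented for illustration and are not the embedded infisical-scan.toml defaults; Rule is the package's rule type referenced by config.go but not shown in this diff.

package config

import (
	"fmt"
	"strings"

	"github.com/spf13/viper"
)

// loadExampleScanConfig is illustrative only; it is not part of the diff.
func loadExampleScanConfig() error {
	raw := `
description = "example scan config"

[[rules]]
id = "example-api-key"
description = "Example API key"
regex = 'example_[0-9a-f]{32}'
keywords = ["example_"]

[allowlist]
paths = ['(^|/)node_modules/']
`
	// Feed the inline TOML through viper, mirroring how extendDefault reads DefaultConfig.
	viper.Reset()
	viper.SetConfigType("toml")
	if err := viper.ReadConfig(strings.NewReader(raw)); err != nil {
		return err
	}

	var vc ViperConfig
	if err := viper.Unmarshal(&vc); err != nil {
		return err
	}

	// Translate compiles the regex strings and builds the rules map and allowlist.
	cfg, err := vc.Translate()
	if err != nil {
		return err
	}

	// OrderedRules preserves the declaration order of [[rules]] entries.
	for _, rule := range cfg.OrderedRules() {
		fmt.Println(rule.RuleID, rule.Description)
	}
	return nil
}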
cli/config/config_test.go (Normal file, 170 lines added)
@@ -0,0 +1,170 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package config

import (
	"fmt"
	"regexp"
	"testing"

	"github.com/spf13/viper"
	"github.com/stretchr/testify/assert"
)

const configPath = "../testdata/config/"

func TestTranslate(t *testing.T) {
	tests := []struct {
		cfgName   string
		cfg       Config
		wantError error
	}{
		{
			cfgName: "allow_aws_re",
			cfg: Config{
				Rules: map[string]Rule{"aws-access-key": {
					Description: "AWS Access Key",
					Regex:       regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
					Tags:        []string{"key", "AWS"},
					Keywords:    []string{},
					RuleID:      "aws-access-key",
					Allowlist: Allowlist{
						Regexes: []*regexp.Regexp{
							regexp.MustCompile("AKIALALEMEL33243OLIA"),
						},
					},
				},
				},
			},
		},
		{
			cfgName: "allow_commit",
			cfg: Config{
				Rules: map[string]Rule{"aws-access-key": {
					Description: "AWS Access Key",
					Regex:       regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
					Tags:        []string{"key", "AWS"},
					Keywords:    []string{},
					RuleID:      "aws-access-key",
					Allowlist: Allowlist{
						Commits: []string{"allowthiscommit"},
					},
				},
				},
			},
		},
		{
			cfgName: "allow_path",
			cfg: Config{
				Rules: map[string]Rule{"aws-access-key": {
					Description: "AWS Access Key",
					Regex:       regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
					Tags:        []string{"key", "AWS"},
					Keywords:    []string{},
					RuleID:      "aws-access-key",
					Allowlist: Allowlist{
						Paths: []*regexp.Regexp{
							regexp.MustCompile(".go"),
						},
					},
				},
				},
			},
		},
		{
			cfgName: "entropy_group",
			cfg: Config{
				Rules: map[string]Rule{"discord-api-key": {
					Description: "Discord API key",
					Regex:       regexp.MustCompile(`(?i)(discord[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{64})['\"]`),
					RuleID:      "discord-api-key",
					Allowlist:   Allowlist{},
					Entropy:     3.5,
					SecretGroup: 3,
					Tags:        []string{},
					Keywords:    []string{},
				},
				},
			},
		},
		{
			cfgName:   "bad_entropy_group",
			cfg:       Config{},
			wantError: fmt.Errorf("Discord API key invalid regex secret group 5, max regex secret group 3"),
		},
		{
			cfgName: "base",
			cfg: Config{
				Rules: map[string]Rule{
					"aws-access-key": {
						Description: "AWS Access Key",
						Regex:       regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
						Tags:        []string{"key", "AWS"},
						Keywords:    []string{},
						RuleID:      "aws-access-key",
					},
					"aws-secret-key": {
						Description: "AWS Secret Key",
						Regex:       regexp.MustCompile(`(?i)aws_(.{0,20})?=?.[\'\"0-9a-zA-Z\/+]{40}`),
						Tags:        []string{"key", "AWS"},
						Keywords:    []string{},
						RuleID:      "aws-secret-key",
					},
					"aws-secret-key-again": {
						Description: "AWS Secret Key",
						Regex:       regexp.MustCompile(`(?i)aws_(.{0,20})?=?.[\'\"0-9a-zA-Z\/+]{40}`),
						Tags:        []string{"key", "AWS"},
						Keywords:    []string{},
						RuleID:      "aws-secret-key-again",
					},
				},
			},
		},
	}

	for _, tt := range tests {
		viper.Reset()
		viper.AddConfigPath(configPath)
		viper.SetConfigName(tt.cfgName)
		viper.SetConfigType("toml")
		err := viper.ReadInConfig()
		if err != nil {
			t.Error(err)
		}

		var vc ViperConfig
		err = viper.Unmarshal(&vc)
		if err != nil {
			t.Error(err)
		}
		cfg, err := vc.Translate()
		if tt.wantError != nil {
			if err == nil {
				t.Errorf("expected error")
			}
			assert.Equal(t, tt.wantError, err)
		}

		assert.Equal(t, cfg.Rules, tt.cfg.Rules)
	}
}
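For reference, a plausible shape for one of the fixtures this test reads from ../testdata/config/, inferred only from the expected Config in the allow_aws_re case; the fixture files themselves are not part of this section, so the actual contents may differ.

package config

// allowAwsReFixture is a hypothetical reconstruction of
// ../testdata/config/allow_aws_re.toml, inferred from the expected Config above.
const allowAwsReFixture = `
[[rules]]
id = "aws-access-key"
description = "AWS Access Key"
regex = '(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}'
tags = ["key", "AWS"]

[rules.allowlist]
regexes = ['AKIALALEMEL33243OLIA']
`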
Some files were not shown because too many files have changed in this diff.