Mirror of https://github.com/Infisical/infisical.git
Synced 2025-03-29 13:26:20 +00:00

Compare commits — 129 commits
snyk-upgra ... snyk-upgra
SHA1:
b4f336a5bb 43e61c94f0 69fa4a80c5 cf9e8b8a6b c6d5498a42 ad7972e7e1
c6d8f24968 d8ff0bef0d 29b96246b9 8503c9355b ddf0a272f6 e3980f8666
d52534b185 db07a033e1 3c71bcaa8d 476d0be101 2eff7b6128 d8a781af1f
8b42f4f998 da127a3c0a d4aa75a182 d097003e9b b615a5084e 379f086828
f11a7d0f87 f5aeb85c62 2966aa6eda b1f2515731 c5094ec37d 6c745f617d
82995fbd02 8d09a45454 38f578c4ae 65b12eee5e 9043db4727 0eceeb6aa9
2d2bbbd0ad c9b4e11539 fd4ea97e18 49d2ecc460 ca31a70032 3334338eaa
6d5e281811 87d36ac47a b72e1198df 837ea2ef40 b462ca3e89 f639f682c9
365fcb3044 01d9695153 21eb1815c4 85f3ae95b6 e888eed1bf addac63700
efd13e6b19 4ac74e6e9a 1d422fa82c 8ba3f8d1f7 6b83393952 da07d71e15
82d3971d9e 3dd21374e7 c5fe41ae57 9f0313f50b a6e670e93a ec97e1a930
55ca6938db 1401c7f6bc bb6d0fd7c6 689a20dca2 e4b4126971 04b04cba5c
89e5f644a4 c5619d27d7 12a1d8e822 a85a7d1b00 fc2846534f 2b605856a3
191582ef26 213b5d465b 75f550caf2 daabf5ab70 7b11976a60 39be52c6b2
bced5d0151 939d7eb433 6de25174aa 2aa79d4ad6 44b4de754a db0f0d0d9c
3471e387ae aadd964409 102e45891c b9ae224aef e5cb0cbca3 330968c7af
68e8e727cd 3b94ee42e9 09286b4421 04a9604ba9 d86f88db92 fc53c094b7
6726ca1882 ddbe4d7040 3f6b0a9e66 c3a47597b6 a696a99232 8b1e64f75e
f137087ef1 2157fab181 d2acab57e0 811929987b 4ac13f61e0 3d2b0fa3fc
242809ce26 492bf39243 dbfa4f5277 3fd2e22cbd 150eb1f5ee 6314a949f8
660c5806e3 c6d2828262 a5c5ec1f4d 9e42a7a33e 34c79b08bc aacdaf4556
a7484f8be5 e1bf31b371 3817831577
Changed files:
.env.example
ecosystem.config.js
.github/workflows
.goreleaser.yaml
Dockerfile.standalone-infisical
README.md
SECURITY.md
backend
  package-lock.json
  package.json
  src
    config
    controllers
      v1
        authController.ts
        integrationAuthController.ts
        membershipController.ts
        membershipOrgController.ts
        organizationController.ts
        passwordController.ts
        secretController.ts
        secretsFolderController.ts
        serviceTokenController.ts
        signupController.ts
        stripeController.ts
      v2
    ee
    helpers
      auth.ts
      bot.ts
      database.ts
      event.ts
      integration.ts
      key.ts
      membershipOrg.ts
      nodemailer.ts
      organization.ts
      secret.ts
      secrets.ts
      token.ts
      user.ts
      workspace.ts
    index.ts
    integrations
    middleware
      requestErrorHandler.ts
      requireIntegrationAuthorizationAuth.ts
      requireMfaAuth.ts
      requireServiceTokenAuth.ts
      requireSignupAuth.ts
    models
    routes
    services
    templates
    utils
    variables
  tests
cli/packages
docs
  api-reference/overview
    authentication.mdx
  examples
  cli
  documentation
    getting-started
    guides
    platform
  getting-started
  images
  integrations
  mint.json
  sdks
  self-hosting
frontend
  package-lock.json
  package.json
  public/lotties
    system-outline-109-slider-toggle-settings.json
    system-outline-168-view-headline.json
    system-outline-82-extension.json
    system-outline-90-lock-closed.json
    system-outline-96-groups.json
  src
    components
      navigation
      v2
        Button
        IconButton
        Input
        Menu
        Popoverv2
    hooks/api/secrets
    layouts/AppLayout
    pages
    reactQuery.ts
    views
      DashboardPage
        DashboardEnvOverview.tsx
        DashboardPage.tsx
        DashboardPage.utils.ts
        components
          EnvComparisonRow
          SecretDetailDrawer
          SecretDropzone
          SecretInputRow
          SecretTableHeader
      Settings
        OrgSettingsPage/components/OrgMembersTable
        ProjectSettingsPage/components/CopyProjectIDSection
helm-charts/infisical/templates
i18n
  README.de.md
  README.en.md
  README.es.md
  README.hi.md
  README.id.md
  README.it.md
  README.ja.md
  README.ko.md
  README.pt-br.md
  README.tr.md
nginx
render.yaml
15  .env.example
@@ -1,5 +1,6 @@
# Keys
# Required key for platform encryption/decryption ops
# THIS IS A SAMPLE ENCRYPTION KEY AND SHOULD NOT BE USED FOR PRODUCTION
ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218

# JWT
@@ -30,14 +31,12 @@ MONGO_PASSWORD=example
# Required
SITE_URL=http://localhost:8080

# Mail/SMTP
SMTP_HOST=
SMTP_USERNAME=
SMTP_PASSWORD=
SMTP_PORT=587
SMTP_SECURE=false
SMTP_FROM_ADDRESS=
SMTP_FROM_NAME=Infisical
# Mail/SMTP
SMTP_HOST=
SMTP_PORT=
SMTP_NAME=
SMTP_USERNAME=
SMTP_PASSWORD=

# Integration
# Optional only if integration is used
21  .github/workflows/docker-image.yml (vendored)
@@ -1,12 +1,17 @@
name: Build, Publish and Deploy to Gamma
on: [workflow_dispatch]
on:
push:
tags:
- "infisical/v*.*.*"

jobs:
backend-image:
name: Build backend image
runs-on: ubuntu-latest

steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 📦 Install dependencies to test all dependencies
@@ -51,15 +56,19 @@ jobs:
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: backend
tags: infisical/backend:${{ steps.commit.outputs.short }},
tags: |
infisical/backend:${{ steps.commit.outputs.short }}
infisical/backend:latest
infisical/backend:${{ steps.extract_version.outputs.version }}
platforms: linux/amd64,linux/arm64

frontend-image:
name: Build frontend image
runs-on: ubuntu-latest

steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: Save commit hashes for tag
@@ -100,8 +109,10 @@ jobs:
push: true
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
context: frontend
tags: infisical/frontend:${{ steps.commit.outputs.short }},
tags: |
infisical/frontend:${{ steps.commit.outputs.short }}
infisical/frontend:latest
infisical/frontend:${{ steps.extract_version.outputs.version }}
platforms: linux/amd64,linux/arm64
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
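The new trigger publishes on tags matching infisical/v*.*.*, and the "Extract version from tag" step strips the infisical/ prefix with shell parameter expansion (${GITHUB_REF_NAME#infisical/}) so the remainder can be used as an image tag. A small TypeScript sketch of the same transformation; the example tag value is illustrative only, not taken from this compare:

// Mirrors `${GITHUB_REF_NAME#infisical/}`: drop the "infisical/" prefix if present.
const extractVersion = (refName: string): string =>
  refName.startsWith('infisical/') ? refName.slice('infisical/'.length) : refName;

// e.g. a tag ref name of "infisical/v1.2.3" (hypothetical) yields "v1.2.3",
// which then becomes the infisical/backend:<version> and infisical/frontend:<version> tags.
console.log(extractVersion('infisical/v1.2.3'));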
68
.github/workflows/release-standalone-docker-img.yml
vendored
Normal file
68
.github/workflows/release-standalone-docker-img.yml
vendored
Normal file
@ -0,0 +1,68 @@
|
||||
name: Release standalone docker image
|
||||
on: [workflow_dispatch]
|
||||
|
||||
jobs:
|
||||
infisical-standalone:
|
||||
name: Build infisical standalone image
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: 📦 Install dependencies to test all dependencies
|
||||
run: npm ci --only-production
|
||||
working-directory: backend
|
||||
- uses: paulhatch/semantic-version@v5.0.2
|
||||
id: version
|
||||
with:
|
||||
# The prefix to use to identify tags
|
||||
tag_prefix: "infisical-standalone/v"
|
||||
# A string which, if present in a git commit, indicates that a change represents a
|
||||
# major (breaking) change, supports regular expressions wrapped with '/'
|
||||
major_pattern: "(MAJOR)"
|
||||
# Same as above except indicating a minor change, supports regular expressions wrapped with '/'
|
||||
minor_pattern: "(MINOR)"
|
||||
# A string to determine the format of the version output
|
||||
version_format: "${major}.${minor}.${patch}-prerelease${increment}"
|
||||
# Optional path to check for changes. If any changes are detected in the path the
|
||||
# 'changed' output will true. Enter multiple paths separated by spaces.
|
||||
change_path: "backend,frontend"
|
||||
# Prevents pre-v1.0.0 version from automatically incrementing the major version.
|
||||
# If enabled, when the major version is 0, major releases will be treated as minor and minor as patch. Note that the version_type output is unchanged.
|
||||
enable_prerelease_mode: true
|
||||
# - name: 🧪 Run tests
|
||||
# run: npm run test:ci
|
||||
# working-directory: backend
|
||||
- name: version output
|
||||
run: |
|
||||
echo "Output Value: ${{ steps.version.outputs.major }}"
|
||||
echo "Output Value: ${{ steps.version.outputs.minor }}"
|
||||
echo "Output Value: ${{ steps.version.outputs.patch }}"
|
||||
echo "Output Value: ${{ steps.version.outputs.version }}"
|
||||
echo "Output Value: ${{ steps.version.outputs.version_type }}"
|
||||
echo "Output Value: ${{ steps.version.outputs.increment }}"
|
||||
- name: Save commit hashes for tag
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v2
|
||||
- name: 🔧 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@v1
|
||||
- name: 📦 Build backend and export to Docker
|
||||
uses: depot/build-push-action@v1
|
||||
with:
|
||||
project: 64mmf0n610
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
push: true
|
||||
context: .
|
||||
tags: |
|
||||
infisical/infisical:latest
|
||||
infisical/infisical:${{ steps.commit.outputs.short }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
file: Dockerfile.standalone-infisical
|
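The semantic-version step assembles its output from the configured version_format ("${major}.${minor}.${patch}-prerelease${increment}"), bumping major or minor only when a commit message since the last infisical-standalone/v* tag contains (MAJOR) or (MINOR). A rough TypeScript sketch of how that template expands; the numbers used are purely illustrative, not real outputs of this workflow:

// Illustrative expansion of version_format: "${major}.${minor}.${patch}-prerelease${increment}".
const formatVersion = (major: number, minor: number, patch: number, increment: number): string =>
  `${major}.${minor}.${patch}-prerelease${increment}`;

// With no "(MAJOR)" or "(MINOR)" marker in the relevant commits, only patch/increment advance:
console.log(formatVersion(0, 1, 4, 7)); // "0.1.4-prerelease7" (example values only)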
5  .github/workflows/release_build.yml (vendored)
@@ -4,7 +4,7 @@ on:
push:
# run only against tags
tags:
- "v*"
- "infisical-cli/v*.*.*"

permissions:
contents: write
@@ -41,13 +41,14 @@ jobs:
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser
distribution: goreleaser-pro
version: latest
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- name: Publish to CloudSmith
.goreleaser.yaml
@@ -11,6 +11,10 @@ before:
- ./cli/scripts/completions.sh
- ./cli/scripts/manpages.sh

monorepo:
tag_prefix: infisical-cli/
dir: cli

builds:
- id: darwin-build
binary: infisical
@@ -61,10 +65,10 @@ archives:
- goos: windows
format: zip
files:
- README*
- LICENSE*
- manpages/*
- completions/*
- ../README*
- ../LICENSE*
- ../manpages/*
- ../completions/*

release:
replace_existing_draft: true
@@ -74,14 +78,7 @@ checksum:
name_template: "checksums.txt"

snapshot:
name_template: "{{ incpatch .Version }}-devel"

changelog:
sort: asc
filters:
exclude:
- "^docs:"
- "^test:"
name_template: "{{ .Version }}-devel"

# publishers:
# - name: fury.io
@@ -164,7 +161,7 @@ aurs:
mkdir -p "${pkgdir}/usr/share/zsh/site-functions/"
mkdir -p "${pkgdir}/usr/share/fish/vendor_completions.d/"
install -Dm644 "./completions/infisical.bash" "${pkgdir}/usr/share/bash-completion/completions/infisical"
install -Dm644 "./completions/infisical.zsh" "${pkgdir}/usr/share/zsh/site-functions/infisical"
install -Dm644 "./completions/infisical.zsh" "${pkgdir}/usr/share/zsh/site-functions/_infisical"
install -Dm644 "./completions/infisical.fish" "${pkgdir}/usr/share/fish/vendor_completions.d/infisical.fish"
# man pages
install -Dm644 "./manpages/infisical.1.gz" "${pkgdir}/usr/share/man/man1/infisical.1.gz"
102  Dockerfile.standalone-infisical (new file)
@@ -0,0 +1,102 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key

FROM node:16-alpine AS frontend-dependencies

WORKDIR /app

COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./

# Install dependencies
RUN npm ci --only-production --ignore-scripts

# Rebuild the source code only when needed
FROM node:16-alpine AS frontend-builder
WORKDIR /app

# Copy dependencies
COPY --from=frontend-dependencies /app/node_modules ./node_modules
# Copy all files
COPY /frontend .

ENV NODE_ENV production
ENV NEXT_PUBLIC_ENV production
ARG POSTHOG_HOST
ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY

# Build
RUN npm run build

# Production image
FROM node:16-alpine AS frontend-runner
WORKDIR /app

RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 nextjs

RUN mkdir -p /app/.next/cache/images && chown nextjs:nodejs /app/.next/cache/images
VOLUME /app/.next/cache/images

ARG POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY

COPY --chown=nextjs:nodejs --chmod=555 frontend/scripts ./scripts
COPY --from=frontend-builder /app/public ./public
RUN chown nextjs:nodejs ./public/data
COPY --from=frontend-builder --chown=nextjs:nodejs /app/.next/standalone ./
COPY --from=frontend-builder --chown=nextjs:nodejs /app/.next/static ./.next/static

USER nextjs

ENV NEXT_TELEMETRY_DISABLED 1

##
## BACKEND
##
FROM node:16-alpine AS backend-build

WORKDIR /app

COPY backend/package*.json ./
RUN npm ci --only-production

COPY /backend .
RUN npm run build

# Production stage
FROM node:16-alpine AS backend-runner

WORKDIR /app

COPY backend/package*.json ./
RUN npm ci --only-production

COPY --from=backend-build /app .

# Production stage
FROM node:14-alpine AS production

WORKDIR /

# Install PM2
RUN npm install -g pm2
# Copy ecosystem.config.js
COPY ecosystem.config.js .

RUN apk add --no-cache nginx

COPY nginx/default-stand-alone-docker.conf /etc/nginx/nginx.conf

COPY --from=backend-runner /app /backend

COPY --from=frontend-runner /app/ /app/

EXPOSE 80
ENV HTTPS_ENABLED false

CMD ["pm2-runtime", "start", "ecosystem.config.js"]
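The final stage runs everything in one container: pm2-runtime starts the processes declared in ecosystem.config.js, with nginx (using nginx/default-stand-alone-docker.conf) fronting the frontend under /app and the backend under /backend on port 80. The ecosystem file itself is not part of this compare, so the following TypeScript-flavoured sketch is purely hypothetical — app names, working directories, and commands are guesses used only to illustrate the PM2 layout:

// Hypothetical sketch of an ecosystem.config.js consumed by `pm2-runtime start ecosystem.config.js`.
// None of these entries are confirmed by the diff; they only show how one container could run all three processes.
const apps = [
  { name: 'backend', cwd: '/backend', script: 'npm', args: 'run start' },   // Express API copied from backend-runner
  { name: 'frontend', cwd: '/app', script: 'scripts/start.sh' },            // Next.js standalone output
  { name: 'nginx', script: 'nginx', args: '-g "daemon off;"' },             // reverse proxy listening on port 80
];

module.exports = { apps };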
356  README.md — file diff suppressed because one or more lines are too long
10  SECURITY.md
@@ -1,9 +1,13 @@
# Security Policy

## Supported Versions
## Supported versions

We always recommend using the latest version of Infisical to ensure you get all security updates.

## Reporting a Vulnerability
## Reporting vulnerabilities

Please report security vulnerabilities or concerns to team@infisical.com.
Please do not file GitHub issues or post on our public forum for security vulnerabilities, as they are public!

Infisical takes security issues very seriously. If you have any concerns about Infisical or believe you have uncovered a vulnerability, please get in touch via the e-mail address security@infisical.com. In the message, try to provide a description of the issue and ideally a way of reproducing it. The security team will get back to you as soon as possible.

Note that this security address should be used only for undisclosed vulnerabilities. Please report any security problems to us before disclosing them publicly.
2016  backend/package-lock.json (generated) — file diff suppressed because it is too large
backend/package.json
@@ -1,20 +1,19 @@
{
"dependencies": {
"@aws-sdk/client-secrets-manager": "^3.306.0",
"@aws-sdk/client-secrets-manager": "^3.312.0",
"@godaddy/terminus": "^4.11.2",
"@octokit/rest": "^19.0.5",
"@sentry/node": "^7.45.0",
"@sentry/tracing": "^7.46.0",
"@sentry/node": "^7.41.0",
"@sentry/tracing": "^7.48.0",
"@types/crypto-js": "^4.1.1",
"@types/libsodium-wrappers": "^0.7.10",
"argon2": "^0.30.3",
"await-to-js": "^3.0.0",
"aws-sdk": "^2.1338.0",
"axios": "^1.1.3",
"aws-sdk": "^2.1360.0",
"axios": "^1.3.5",
"axios-retry": "^3.4.0",
"bcrypt": "^5.1.0",
"bigint-conversion": "^2.2.2",
"bigint-conversion": "^2.4.0",
"builder-pattern": "^2.2.0",
"cookie-parser": "^1.4.6",
"cors": "^2.8.5",
@@ -25,13 +24,13 @@
"express-validator": "^6.14.2",
"handlebars": "^4.7.7",
"helmet": "^5.1.1",
"infisical-node": "^1.0.37",
"infisical-node": "^1.1.3",
"js-yaml": "^4.1.0",
"jsonwebtoken": "^9.0.0",
"jsrp": "^0.2.4",
"libsodium-wrappers": "^0.7.10",
"lodash": "^4.17.21",
"mongoose": "^6.10.4",
"mongoose": "^6.10.5",
"nodemailer": "^6.8.0",
"posthog-node": "^2.6.0",
"query-string": "^7.1.3",
backend/src/config/index.ts
@@ -1,64 +1,69 @@
import infisical from 'infisical-node';
export const getPort = () => infisical.get('PORT')! || 4000;
export const getInviteOnlySignup = () => infisical.get('INVITE_ONLY_SIGNUP')! == undefined ? false : infisical.get('INVITE_ONLY_SIGNUP');
export const getEncryptionKey = () => infisical.get('ENCRYPTION_KEY')!;
export const getSaltRounds = () => parseInt(infisical.get('SALT_ROUNDS')!) || 10;
export const getJwtAuthLifetime = () => infisical.get('JWT_AUTH_LIFETIME')! || '10d';
export const getJwtAuthSecret = () => infisical.get('JWT_AUTH_SECRET')!;
export const getJwtMfaLifetime = () => infisical.get('JWT_MFA_LIFETIME')! || '5m';
export const getJwtMfaSecret = () => infisical.get('JWT_MFA_LIFETIME')! || '5m';
export const getJwtRefreshLifetime = () => infisical.get('JWT_REFRESH_LIFETIME')! || '90d';
export const getJwtRefreshSecret = () => infisical.get('JWT_REFRESH_SECRET')!;
export const getJwtServiceSecret = () => infisical.get('JWT_SERVICE_SECRET')!;
export const getJwtSignupLifetime = () => infisical.get('JWT_SIGNUP_LIFETIME')! || '15m';
export const getJwtSignupSecret = () => infisical.get('JWT_SIGNUP_SECRET')!;
export const getMongoURL = () => infisical.get('MONGO_URL')!;
export const getNodeEnv = () => infisical.get('NODE_ENV')! || 'production';
export const getVerboseErrorOutput = () => infisical.get('VERBOSE_ERROR_OUTPUT')! === 'true' && true;
export const getLokiHost = () => infisical.get('LOKI_HOST')!;
export const getClientIdAzure = () => infisical.get('CLIENT_ID_AZURE')!;
export const getClientIdHeroku = () => infisical.get('CLIENT_ID_HEROKU')!;
export const getClientIdVercel = () => infisical.get('CLIENT_ID_VERCEL')!;
export const getClientIdNetlify = () => infisical.get('CLIENT_ID_NETLIFY')!;
export const getClientIdGitHub = () => infisical.get('CLIENT_ID_GITHUB')!;
export const getClientIdGitLab = () => infisical.get('CLIENT_ID_GITLAB')!;
export const getClientSecretAzure = () => infisical.get('CLIENT_SECRET_AZURE')!;
export const getClientSecretHeroku = () => infisical.get('CLIENT_SECRET_HEROKU')!;
export const getClientSecretVercel = () => infisical.get('CLIENT_SECRET_VERCEL')!;
export const getClientSecretNetlify = () => infisical.get('CLIENT_SECRET_NETLIFY')!;
export const getClientSecretGitHub = () => infisical.get('CLIENT_SECRET_GITHUB')!;
export const getClientSecretGitLab = () => infisical.get('CLIENT_SECRET_GITLAB')!;
export const getClientSlugVercel = () => infisical.get('CLIENT_SLUG_VERCEL')!;
export const getPostHogHost = () => infisical.get('POSTHOG_HOST')! || 'https://app.posthog.com';
export const getPostHogProjectApiKey = () => infisical.get('POSTHOG_PROJECT_API_KEY')! || 'phc_nSin8j5q2zdhpFDI1ETmFNUIuTG4DwKVyIigrY10XiE';
export const getSentryDSN = () => infisical.get('SENTRY_DSN')!;
export const getSiteURL = () => infisical.get('SITE_URL')!;
export const getSmtpHost = () => infisical.get('SMTP_HOST')!;
export const getSmtpSecure = () => infisical.get('SMTP_SECURE')! === 'true' || false;
export const getSmtpPort = () => parseInt(infisical.get('SMTP_PORT')!) || 587;
export const getSmtpUsername = () => infisical.get('SMTP_USERNAME')!;
export const getSmtpPassword = () => infisical.get('SMTP_PASSWORD')!;
export const getSmtpFromAddress = () => infisical.get('SMTP_FROM_ADDRESS')!;
export const getSmtpFromName = () => infisical.get('SMTP_FROM_NAME')! || 'Infisical';
export const getStripeProductStarter = () => infisical.get('STRIPE_PRODUCT_STARTER')!;
export const getStripeProductPro = () => infisical.get('STRIPE_PRODUCT_PRO')!;
export const getStripeProductTeam = () => infisical.get('STRIPE_PRODUCT_TEAM')!;
export const getStripePublishableKey = () => infisical.get('STRIPE_PUBLISHABLE_KEY')!;
export const getStripeSecretKey = () => infisical.get('STRIPE_SECRET_KEY')!;
export const getStripeWebhookSecret = () => infisical.get('STRIPE_WEBHOOK_SECRET')!;
export const getTelemetryEnabled = () => infisical.get('TELEMETRY_ENABLED')! !== 'false' && true;
export const getLoopsApiKey = () => infisical.get('LOOPS_API_KEY')!;
export const getSmtpConfigured = () => infisical.get('SMTP_HOST') == '' || infisical.get('SMTP_HOST') == undefined ? false : true
export const getHttpsEnabled = () => {
if (getNodeEnv() != "production") {
import InfisicalClient from 'infisical-node';

const client = new InfisicalClient({
token: process.env.INFISICAL_TOKEN!
});

export const getPort = async () => (await client.getSecret('PORT')).secretValue || 4000;
export const getInviteOnlySignup = async () => (await client.getSecret('INVITE_ONLY_SIGNUP')).secretValue == undefined ? false : (await client.getSecret('INVITE_ONLY_SIGNUP')).secretValue;
export const getEncryptionKey = async () => (await client.getSecret('ENCRYPTION_KEY')).secretValue;
export const getSaltRounds = async () => parseInt((await client.getSecret('SALT_ROUNDS')).secretValue) || 10;
export const getJwtAuthLifetime = async () => (await client.getSecret('JWT_AUTH_LIFETIME')).secretValue || '10d';
export const getJwtAuthSecret = async () => (await client.getSecret('JWT_AUTH_SECRET')).secretValue;
export const getJwtMfaLifetime = async () => (await client.getSecret('JWT_MFA_LIFETIME')).secretValue || '5m';
export const getJwtMfaSecret = async () => (await client.getSecret('JWT_MFA_LIFETIME')).secretValue || '5m';
export const getJwtRefreshLifetime = async () => (await client.getSecret('JWT_REFRESH_LIFETIME')).secretValue || '90d';
export const getJwtRefreshSecret = async () => (await client.getSecret('JWT_REFRESH_SECRET')).secretValue;
export const getJwtServiceSecret = async () => (await client.getSecret('JWT_SERVICE_SECRET')).secretValue;
export const getJwtSignupLifetime = async () => (await client.getSecret('JWT_SIGNUP_LIFETIME')).secretValue || '15m';
export const getJwtSignupSecret = async () => (await client.getSecret('JWT_SIGNUP_SECRET')).secretValue;
export const getMongoURL = async () => (await client.getSecret('MONGO_URL')).secretValue;
export const getNodeEnv = async () => (await client.getSecret('NODE_ENV')).secretValue || 'production';
export const getVerboseErrorOutput = async () => (await client.getSecret('VERBOSE_ERROR_OUTPUT')).secretValue === 'true' && true;
export const getLokiHost = async () => (await client.getSecret('LOKI_HOST')).secretValue;
export const getClientIdAzure = async () => (await client.getSecret('CLIENT_ID_AZURE')).secretValue;
export const getClientIdHeroku = async () => (await client.getSecret('CLIENT_ID_HEROKU')).secretValue;
export const getClientIdVercel = async () => (await client.getSecret('CLIENT_ID_VERCEL')).secretValue;
export const getClientIdNetlify = async () => (await client.getSecret('CLIENT_ID_NETLIFY')).secretValue;
export const getClientIdGitHub = async () => (await client.getSecret('CLIENT_ID_GITHUB')).secretValue;
export const getClientIdGitLab = async () => (await client.getSecret('CLIENT_ID_GITLAB')).secretValue;
export const getClientSecretAzure = async () => (await client.getSecret('CLIENT_SECRET_AZURE')).secretValue;
export const getClientSecretHeroku = async () => (await client.getSecret('CLIENT_SECRET_HEROKU')).secretValue;
export const getClientSecretVercel = async () => (await client.getSecret('CLIENT_SECRET_VERCEL')).secretValue;
export const getClientSecretNetlify = async () => (await client.getSecret('CLIENT_SECRET_NETLIFY')).secretValue;
export const getClientSecretGitHub = async () => (await client.getSecret('CLIENT_SECRET_GITHUB')).secretValue;
export const getClientSecretGitLab = async () => (await client.getSecret('CLIENT_SECRET_GITLAB')).secretValue;
export const getClientSlugVercel = async () => (await client.getSecret('CLIENT_SLUG_VERCEL')).secretValue;
export const getPostHogHost = async () => (await client.getSecret('POSTHOG_HOST')).secretValue || 'https://app.posthog.com';
export const getPostHogProjectApiKey = async () => (await client.getSecret('POSTHOG_PROJECT_API_KEY')).secretValue || 'phc_nSin8j5q2zdhpFDI1ETmFNUIuTG4DwKVyIigrY10XiE';
export const getSentryDSN = async () => (await client.getSecret('SENTRY_DSN')).secretValue;
export const getSiteURL = async () => (await client.getSecret('SITE_URL')).secretValue;
export const getSmtpHost = async () => (await client.getSecret('SMTP_HOST')).secretValue;
export const getSmtpSecure = async () => (await client.getSecret('SMTP_SECURE')).secretValue === 'true' || false;
export const getSmtpPort = async () => parseInt((await client.getSecret('SMTP_PORT')).secretValue) || 587;
export const getSmtpUsername = async () => (await client.getSecret('SMTP_USERNAME')).secretValue;
export const getSmtpPassword = async () => (await client.getSecret('SMTP_PASSWORD')).secretValue;
export const getSmtpFromAddress = async () => (await client.getSecret('SMTP_FROM_ADDRESS')).secretValue;
export const getSmtpFromName = async () => (await client.getSecret('SMTP_FROM_NAME')).secretValue || 'Infisical';
export const getStripeProductStarter = async () => (await client.getSecret('STRIPE_PRODUCT_STARTER')).secretValue;
export const getStripeProductPro = async () => (await client.getSecret('STRIPE_PRODUCT_PRO')).secretValue;
export const getStripeProductTeam = async () => (await client.getSecret('STRIPE_PRODUCT_TEAM')).secretValue;
export const getStripePublishableKey = async () => (await client.getSecret('STRIPE_PUBLISHABLE_KEY')).secretValue;
export const getStripeSecretKey = async () => (await client.getSecret('STRIPE_SECRET_KEY')).secretValue;
export const getStripeWebhookSecret = async () => (await client.getSecret('STRIPE_WEBHOOK_SECRET')).secretValue;
export const getTelemetryEnabled = async () => (await client.getSecret('TELEMETRY_ENABLED')).secretValue !== 'false' && true;
export const getLoopsApiKey = async () => (await client.getSecret('LOOPS_API_KEY')).secretValue;
export const getSmtpConfigured = async () => (await client.getSecret('SMTP_HOST')).secretValue == '' || (await client.getSecret('SMTP_HOST')).secretValue == undefined ? false : true
export const getHttpsEnabled = async () => {
if ((await getNodeEnv()) != "production") {
// no https for anything other than prod
return false
}

if (infisical.get('HTTPS_ENABLED') == undefined || infisical.get('HTTPS_ENABLED') == "") {
if ((await client.getSecret('HTTPS_ENABLED')).secretValue == undefined || (await client.getSecret('HTTPS_ENABLED')).secretValue == "") {
// default when no value present
return true
}

return infisical.get('HTTPS_ENABLED') === 'true' && true
return (await client.getSecret('HTTPS_ENABLED')).secretValue === 'true' && true
}
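Because every getter in backend/src/config/index.ts now reads through client.getSecret() instead of the synchronous infisical.get(), each one returns a Promise, and call sites must await it — which is exactly what the controller diffs below do. A minimal TypeScript sketch of the new calling convention (function and variable names here are illustrative, and the relative import path is assumed):

// Sketch only: consuming the now-async config getters, mirroring the cookie and JWT
// option objects changed in the controller diffs that follow.
import { getHttpsEnabled, getJwtAuthLifetime, getJwtAuthSecret } from './config';

const buildCookieOptions = async () => ({
  httpOnly: true,
  path: '/',
  sameSite: 'strict' as const,
  // Previously a plain boolean; now the Promise must be awaited.
  secure: await getHttpsEnabled(),
});

const buildTokenParams = async (userId: string) => ({
  payload: { userId },
  expiresIn: await getJwtAuthLifetime(),
  secret: await getJwtAuthSecret(),
});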
backend/src/controllers/v1/authController.ts
@@ -126,7 +126,7 @@ export const login2 = async (req: Request, res: Response) => {
httpOnly: true,
path: '/',
sameSite: 'strict',
secure: getHttpsEnabled()
secure: await getHttpsEnabled()
});

const loginAction = await EELogService.createAction({
@@ -182,7 +182,7 @@ export const logout = async (req: Request, res: Response) => {
httpOnly: true,
path: '/',
sameSite: 'strict',
secure: getHttpsEnabled() as boolean
secure: (await getHttpsEnabled()) as boolean
});

const logoutAction = await EELogService.createAction({
@@ -237,7 +237,7 @@ export const getNewToken = async (req: Request, res: Response) => {
}

const decodedToken = <jwt.UserIDJwtPayload>(
jwt.verify(refreshToken, getJwtRefreshSecret())
jwt.verify(refreshToken, await getJwtRefreshSecret())
);

const user = await User.findOne({
@@ -252,8 +252,8 @@ export const getNewToken = async (req: Request, res: Response) => {
payload: {
userId: decodedToken.userId
},
expiresIn: getJwtAuthLifetime(),
secret: getJwtAuthSecret()
expiresIn: await getJwtAuthLifetime(),
secret: await getJwtAuthSecret()
});

return res.status(200).send({
backend/src/controllers/v1/integrationAuthController.ts
@@ -44,7 +44,7 @@ export const getIntegrationAuth = async (req: Request, res: Response) => {
}

export const getIntegrationOptions = async (req: Request, res: Response) => {
const INTEGRATION_OPTIONS = getIntegrationOptionsFunc();
const INTEGRATION_OPTIONS = await getIntegrationOptionsFunc();

return res.status(200).send({
integrationOptions: INTEGRATION_OPTIONS,
backend/src/controllers/v1/membershipController.ts
@@ -215,7 +215,7 @@ export const inviteUserToWorkspace = async (req: Request, res: Response) => {
inviterFirstName: req.user.firstName,
inviterEmail: req.user.email,
workspaceName: req.membership.workspace.name,
callback_url: getSiteURL() + '/login'
callback_url: (await getSiteURL()) + '/login'
}
});
} catch (err) {
backend/src/controllers/v1/membershipOrgController.ts
@@ -1,3 +1,4 @@
import { Types } from 'mongoose';
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import { MembershipOrg, Organization, User } from '../../models';
@@ -139,7 +140,7 @@ export const inviteUserToOrganization = async (req: Request, res: Response) => {
inviteEmail: inviteeEmail,
organization: organizationId,
role: MEMBER,
status: invitee?.publicKey ? ACCEPTED : INVITED
status: INVITED
}).save();
}
} else {
@@ -164,6 +165,7 @@ export const inviteUserToOrganization = async (req: Request, res: Response) => {
const organization = await Organization.findOne({ _id: organizationId });

if (organization) {

const token = await TokenService.createToken({
type: TOKEN_EMAIL_ORG_INVITATION,
email: inviteeEmail,
@@ -179,13 +181,14 @@ export const inviteUserToOrganization = async (req: Request, res: Response) => {
inviterEmail: req.user.email,
organizationName: organization.name,
email: inviteeEmail,
organizationId: organization._id.toString(),
token,
callback_url: getSiteURL() + '/signupinvite'
callback_url: (await getSiteURL()) + '/signupinvite'
}
});

if (!getSmtpConfigured()) {
completeInviteLink = `${siteUrl + '/signupinvite'}?token=${token}&to=${inviteeEmail}`
if (!(await getSmtpConfigured())) {
completeInviteLink = `${siteUrl + '/signupinvite'}?token=${token}&to=${inviteeEmail}&organization_id=${organization._id}`
}
}

@@ -214,13 +217,18 @@ export const inviteUserToOrganization = async (req: Request, res: Response) => {
export const verifyUserToOrganization = async (req: Request, res: Response) => {
let user, token;
try {
const { email, code } = req.body;
const {
email,
organizationId,
code
} = req.body;

user = await User.findOne({ email }).select('+publicKey');

const membershipOrg = await MembershipOrg.findOne({
inviteEmail: email,
status: INVITED
status: INVITED,
organization: new Types.ObjectId(organizationId)
});

if (!membershipOrg)
@@ -257,8 +265,8 @@ export const verifyUserToOrganization = async (req: Request, res: Response) => {
payload: {
userId: user._id.toString()
},
expiresIn: getJwtSignupLifetime(),
secret: getJwtSignupSecret()
expiresIn: await getJwtSignupLifetime(),
secret: await getJwtSignupSecret()
});
} catch (err) {
Sentry.setUser(null);
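The invite flow is now scoped to an organization: the fallback invite link gains an organization_id query parameter, and verifyUserToOrganization expects email, organizationId and code in the request body so it can match the pending MembershipOrg by organization as well. A rough TypeScript sketch of a client consuming that link; the helper names are hypothetical and the endpoint itself is deliberately not named, since it is not shown in this diff:

// Parse the sign-up invite link produced above and build the body now expected
// by verifyUserToOrganization ({ email, organizationId, code }).
const parseInviteLink = (link: string) => {
  const url = new URL(link);
  return {
    email: url.searchParams.get('to') ?? '',
    organizationId: url.searchParams.get('organization_id') ?? '',
    token: url.searchParams.get('token') ?? '',
  };
};

const buildVerifyRequestBody = (link: string, code: string) => {
  const { email, organizationId } = parseInviteLink(link);
  // `code` is the email verification code the invitee receives separately.
  return { email, organizationId, code };
};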
backend/src/controllers/v1/organizationController.ts
@@ -85,7 +85,7 @@ export const createOrganization = async (req: Request, res: Response) => {
export const getOrganization = async (req: Request, res: Response) => {
let organization;
try {
organization = req.membershipOrg.organization;
organization = req.organization
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
@@ -317,29 +317,29 @@ export const createOrganizationPortalSession = async (
) => {
let session;
try {
const stripe = new Stripe(getStripeSecretKey(), {
const stripe = new Stripe(await getStripeSecretKey(), {
apiVersion: '2022-08-01'
});

// check if there is a payment method on file
const paymentMethods = await stripe.paymentMethods.list({
customer: req.membershipOrg.organization.customerId,
customer: req.organization.customerId,
type: 'card'
});


if (paymentMethods.data.length < 1) {
// case: no payment method on file
session = await stripe.checkout.sessions.create({
customer: req.membershipOrg.organization.customerId,
customer: req.organization.customerId,
mode: 'setup',
payment_method_types: ['card'],
success_url: getSiteURL() + '/dashboard',
cancel_url: getSiteURL() + '/dashboard'
success_url: (await getSiteURL()) + '/dashboard',
cancel_url: (await getSiteURL()) + '/dashboard'
});
} else {
session = await stripe.billingPortal.sessions.create({
customer: req.membershipOrg.organization.customerId,
return_url: getSiteURL() + '/dashboard'
customer: req.organization.customerId,
return_url: (await getSiteURL()) + '/dashboard'
});
}

@@ -365,12 +365,12 @@ export const getOrganizationSubscriptions = async (
) => {
let subscriptions;
try {
const stripe = new Stripe(getStripeSecretKey(), {
const stripe = new Stripe(await getStripeSecretKey(), {
apiVersion: '2022-08-01'
});

subscriptions = await stripe.subscriptions.list({
customer: req.membershipOrg.organization.customerId
customer: req.organization.customerId
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
backend/src/controllers/v1/passwordController.ts
@@ -44,7 +44,7 @@ export const emailPasswordReset = async (req: Request, res: Response) => {
substitutions: {
email,
token,
callback_url: getSiteURL() + '/password-reset'
callback_url: (await getSiteURL()) + '/password-reset'
}
});
} catch (err) {
@@ -91,8 +91,8 @@ export const emailPasswordResetVerify = async (req: Request, res: Response) => {
payload: {
userId: user._id.toString()
},
expiresIn: getJwtSignupLifetime(),
secret: getJwtSignupSecret()
expiresIn: await getJwtSignupLifetime(),
secret: await getJwtSignupSecret()
});
} catch (err) {
Sentry.setUser(null);
backend/src/controllers/v1/secretController.ts
@@ -39,7 +39,7 @@ export const pushSecrets = async (req: Request, res: Response) => {
// upload (encrypted) secrets to workspace with id [workspaceId]

try {
const postHogClient = TelemetryService.getPostHogClient();
const postHogClient = await TelemetryService.getPostHogClient();
let { secrets }: { secrets: PushSecret[] } = req.body;
const { keys, environment, channel } = req.body;
const { workspaceId } = req.params;
@@ -114,7 +114,7 @@ export const pullSecrets = async (req: Request, res: Response) => {
let secrets;
let key;
try {
const postHogClient = TelemetryService.getPostHogClient();
const postHogClient = await TelemetryService.getPostHogClient();
const environment: string = req.query.environment as string;
const channel: string = req.query.channel as string;
const { workspaceId } = req.params;
@@ -183,7 +183,7 @@ export const pullSecretsServiceToken = async (req: Request, res: Response) => {
let secrets;
let key;
try {
const postHogClient = TelemetryService.getPostHogClient();
const postHogClient = await TelemetryService.getPostHogClient();
const environment: string = req.query.environment as string;
const channel: string = req.query.channel as string;
const { workspaceId } = req.params;
backend/src/controllers/v1/secretsFolderController.ts
@@ -86,4 +86,12 @@ export const deleteFolder = async (req: Request, res: Response) => {
}

res.send()
}

export const getFolderById = async (req: Request, res: Response) => {
const { folderId } = req.params

const folder = await Folder.findById(folderId)

res.send({ folder })
}
backend/src/controllers/v1/serviceTokenController.ts
@@ -61,7 +61,7 @@ export const createServiceToken = async (req: Request, res: Response) => {
workspaceId
},
expiresIn: expiresIn,
secret: getJwtServiceSecret()
secret: await getJwtServiceSecret()
});
} catch (err) {
return res.status(400).send({
backend/src/controllers/v1/signupController.ts
@@ -21,7 +21,7 @@ export const beginEmailSignup = async (req: Request, res: Response) => {
try {
email = req.body.email;

if (getInviteOnlySignup()) {
if (await getInviteOnlySignup()) {
// Only one user can create an account without being invited. The rest need to be invited in order to make an account
const userCount = await User.countDocuments({})
if (userCount != 0) {
@@ -75,7 +75,7 @@ export const verifyEmailSignup = async (req: Request, res: Response) => {
}

// verify email
if (getSmtpConfigured()) {
if (await getSmtpConfigured()) {
await checkEmailVerification({
email,
code
@@ -93,8 +93,8 @@ export const verifyEmailSignup = async (req: Request, res: Response) => {
payload: {
userId: user._id.toString()
},
expiresIn: getJwtSignupLifetime(),
secret: getJwtSignupSecret()
expiresIn: await getJwtSignupLifetime(),
secret: await getJwtSignupSecret()
});
} catch (err) {
Sentry.setUser(null);
backend/src/controllers/v1/stripeController.ts
@@ -13,7 +13,7 @@ export const handleWebhook = async (req: Request, res: Response) => {
let event;
try {
// check request for valid stripe signature
const stripe = new Stripe(getStripeSecretKey(), {
const stripe = new Stripe(await getStripeSecretKey(), {
apiVersion: '2022-08-01'
});

@@ -21,7 +21,7 @@ export const handleWebhook = async (req: Request, res: Response) => {
event = stripe.webhooks.constructEvent(
req.body,
sig,
getStripeWebhookSecret()
await getStripeWebhookSecret()
);
} catch (err) {
Sentry.setUser({ email: req.user.email });
@@ -43,7 +43,7 @@ export const createAPIKeyData = async (req: Request, res: Response) => {
const { name, expiresIn } = req.body;

const secret = crypto.randomBytes(16).toString('hex');
const secretHash = await bcrypt.hash(secret, getSaltRounds());
const secretHash = await bcrypt.hash(secret, await getSaltRounds());

const expiresAt = new Date();
expiresAt.setSeconds(expiresAt.getSeconds() + expiresIn);
@@ -124,8 +124,8 @@ export const login2 = async (req: Request, res: Response) => {
payload: {
userId: user._id.toString()
},
expiresIn: getJwtMfaLifetime(),
secret: getJwtMfaSecret()
expiresIn: await getJwtMfaLifetime(),
secret: await getJwtMfaSecret()
});

const code = await TokenService.createToken({
@@ -163,7 +163,7 @@ export const login2 = async (req: Request, res: Response) => {
httpOnly: true,
path: '/',
sameSite: 'strict',
secure: getHttpsEnabled()
secure: await getHttpsEnabled()
});

// case: user does not have MFA enabled
@@ -302,7 +302,7 @@ export const verifyMfaToken = async (req: Request, res: Response) => {
httpOnly: true,
path: '/',
sameSite: 'strict',
secure: getHttpsEnabled()
secure: await getHttpsEnabled()
});

interface VerifyMfaTokenRes {
@@ -17,7 +17,7 @@ import { AccountNotFoundError } from '../../utils/errors';
* @param res
*/
export const createSecret = async (req: Request, res: Response) => {
const postHogClient = TelemetryService.getPostHogClient();
const postHogClient = await TelemetryService.getPostHogClient();
const secretToCreate: CreateSecretRequestBody = req.body.secret;
const { workspaceId, environment } = req.params
const sanitizedSecret: SanitizedSecretForCreate = {
@@ -70,7 +70,7 @@ export const createSecret = async (req: Request, res: Response) => {
* @param res
*/
export const createSecrets = async (req: Request, res: Response) => {
const postHogClient = TelemetryService.getPostHogClient();
const postHogClient = await TelemetryService.getPostHogClient();
const secretsToCreate: CreateSecretRequestBody[] = req.body.secrets;
const { workspaceId, environment } = req.params
const sanitizedSecretesToCreate: SanitizedSecretForCreate[] = []
@@ -132,7 +132,7 @@ export const createSecrets = async (req: Request, res: Response) => {
* @param res
*/
export const deleteSecrets = async (req: Request, res: Response) => {
const postHogClient = TelemetryService.getPostHogClient();
const postHogClient = await TelemetryService.getPostHogClient();
const { workspaceId, environmentName } = req.params
const secretIdsToDelete: string[] = req.body.secretIds

@@ -186,7 +186,7 @@ export const deleteSecrets = async (req: Request, res: Response) => {
* @param res
*/
export const deleteSecret = async (req: Request, res: Response) => {
const postHogClient = TelemetryService.getPostHogClient();
const postHogClient = await TelemetryService.getPostHogClient();
await Secret.findByIdAndDelete(req._secret._id)

if (postHogClient) {
@@ -215,7 +215,7 @@ export const deleteSecret = async (req: Request, res: Response) => {
* @returns
*/
export const updateSecrets = async (req: Request, res: Response) => {
const postHogClient = TelemetryService.getPostHogClient();
const postHogClient = await TelemetryService.getPostHogClient();
const { workspaceId, environmentName } = req.params
const secretsModificationsRequested: ModifySecretRequestBody[] = req.body.secrets;
const [secretIdsUserCanModifyError, secretIdsUserCanModify] = await to(Secret.find({ workspace: workspaceId, environment: environmentName }, { _id: 1 }).then())
@@ -283,7 +283,7 @@ export const updateSecrets = async (req: Request, res: Response) => {
* @returns
*/
export const updateSecret = async (req: Request, res: Response) => {
const postHogClient = TelemetryService.getPostHogClient();
const postHogClient = await TelemetryService.getPostHogClient();
const { workspaceId, environmentName } = req.params
const secretModificationsRequested: ModifySecretRequestBody = req.body.secret;

@@ -337,7 +337,7 @@ export const updateSecret = async (req: Request, res: Response) => {
* @returns
*/
export const getSecrets = async (req: Request, res: Response) => {
const postHogClient = TelemetryService.getPostHogClient();
const postHogClient = await TelemetryService.getPostHogClient();
const { environment } = req.query;
const { workspaceId } = req.params;

@@ -25,6 +25,8 @@ import {
BatchSecretRequest,
BatchSecret
} from '../../types/secret';
import { getFolderPath, getFoldersInDirectory, normalizePath } from '../../utils/folder';
import { ROOT_FOLDER_PATH } from '../../utils/folder';

/**
* Perform a batch of any specified CUD secret operations
@@ -35,7 +37,7 @@ import {
export const batchSecrets = async (req: Request, res: Response) => {

const channel = getChannelFromUserAgent(req.headers['user-agent']);
const postHogClient = TelemetryService.getPostHogClient();
const postHogClient = await TelemetryService.getPostHogClient();

const {
workspaceId,
@@ -51,13 +53,18 @@ export const batchSecrets = async (req: Request, res: Response) => {
const updateSecrets: BatchSecret[] = [];
const deleteSecrets: Types.ObjectId[] = [];
const actions: IAction[] = [];


// get secret blind index salt
const salt = await SecretService.getSecretBlindIndexSalt({
workspaceId: new Types.ObjectId(workspaceId)
});

for await (const request of requests) {
const folderId = request.secret.folderId

// TODO: need to auth folder
const fullFolderPath = await getFolderPath(folderId)

let secretBlindIndex = '';
switch (request.method) {
case 'POST':
@@ -72,19 +79,23 @@ export const batchSecrets = async (req: Request, res: Response) => {
user: request.secret.type === SECRET_PERSONAL ? req.user : undefined,
environment,
workspace: new Types.ObjectId(workspaceId),
path: fullFolderPath,
folder: folderId,
secretBlindIndex
});
break;
case 'PATCH':
secretBlindIndex = await SecretService.generateSecretBlindIndexWithSalt({
secretName: request.secret.secretName,
salt
salt,
});

updateSecrets.push({
...request.secret,
_id: new Types.ObjectId(request.secret._id),
secretBlindIndex
secretBlindIndex,
folder: folderId,
path: fullFolderPath,
});
break;
case 'DELETE':
@@ -437,9 +448,9 @@ export const createSecrets = async (req: Request, res: Response) => {
});
})
);


const newlyCreatedSecrets: ISecret[] = (await Secret.insertMany(secretsToInsert)).map((insertedSecret) => insertedSecret.toObject());


setTimeout(async () => {
// trigger event - push secrets
await EventService.handleEvent({
@@ -508,7 +519,7 @@ export const createSecrets = async (req: Request, res: Response) => {
workspaceId: new Types.ObjectId(workspaceId)
});

const postHogClient = TelemetryService.getPostHogClient();
const postHogClient = await TelemetryService.getPostHogClient();
if (postHogClient) {
postHogClient.capture({
event: 'secrets added',
@@ -578,9 +589,11 @@ export const getSecrets = async (req: Request, res: Response) => {
}
*/

const { tagSlugs } = req.query;
const { tagSlugs, secretsPath } = req.query;
const workspaceId = req.query.workspaceId as string;
const environment = req.query.environment as string;
const normalizedPath = normalizePath(secretsPath as string)
const folders = await getFoldersInDirectory(workspaceId as string, environment as string, normalizedPath)

// secrets to return
let secrets: ISecret[] = [];
@@ -613,6 +626,12 @@ export const getSecrets = async (req: Request, res: Response) => {
]
}

if (normalizedPath == ROOT_FOLDER_PATH) {
secretQuery.path = { $in: [ROOT_FOLDER_PATH, null, undefined] }
} else if (normalizedPath) {
secretQuery.path = normalizedPath
}

if (tagIds.length > 0) {
secretQuery.tags = { $in: tagIds };
}
@@ -638,6 +657,13 @@ export const getSecrets = async (req: Request, res: Response) => {
]
}

// TODO: check if user can query for given path
if (normalizedPath == ROOT_FOLDER_PATH) {
secretQuery.path = { $in: [ROOT_FOLDER_PATH, null, undefined] }
} else if (normalizedPath) {
secretQuery.path = normalizedPath
}

if (tagIds.length > 0) {
secretQuery.tags = { $in: tagIds };
}
@@ -655,6 +681,12 @@ export const getSecrets = async (req: Request, res: Response) => {
user: { $exists: false } // shared secrets only from workspace
}

if (normalizedPath == ROOT_FOLDER_PATH) {
secretQuery.path = { $in: [ROOT_FOLDER_PATH, null, undefined] }
} else if (normalizedPath) {
secretQuery.path = normalizedPath
}

if (tagIds.length > 0) {
secretQuery.tags = { $in: tagIds };
}
@@ -683,7 +715,7 @@ export const getSecrets = async (req: Request, res: Response) => {
ipAddress: req.ip
});

const postHogClient = TelemetryService.getPostHogClient();
const postHogClient = await TelemetryService.getPostHogClient();
if (postHogClient) {
postHogClient.capture({
event: 'secrets pulled',
@@ -701,7 +733,8 @@ export const getSecrets = async (req: Request, res: Response) => {
}

return res.status(200).send({
secrets
secrets,
folders
});
}

@@ -905,7 +938,7 @@ export const updateSecrets = async (req: Request, res: Response) => {
workspaceId: new Types.ObjectId(key)
})

const postHogClient = TelemetryService.getPostHogClient();
const postHogClient = await TelemetryService.getPostHogClient();
if (postHogClient) {
postHogClient.capture({
event: 'secrets modified',
@@ -1039,7 +1072,7 @@ export const deleteSecrets = async (req: Request, res: Response) => {
workspaceId: new Types.ObjectId(key)
});

const postHogClient = TelemetryService.getPostHogClient();
const postHogClient = await TelemetryService.getPostHogClient();
if (postHogClient) {
postHogClient.capture({
event: 'secrets deleted',
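The repeated blocks above gate the Mongo secret query on the normalized folder path taken from the new secretsPath query parameter: at the root, the query also matches legacy secrets that predate folders and have no path set. A condensed TypeScript sketch of that branch; the helper name and the value of ROOT_FOLDER_PATH are assumptions (the real constant is exported from backend/src/utils/folder and is not shown here):

// Hypothetical condensation of the path-filter logic repeated in getSecrets.
const ROOT_FOLDER_PATH = '/'; // assumption: actual value defined in utils/folder

type SecretQuery = Record<string, unknown>;

const applyPathFilter = (secretQuery: SecretQuery, normalizedPath?: string): SecretQuery => {
  if (normalizedPath === ROOT_FOLDER_PATH) {
    // Root of the environment: also match older secrets whose path was never set.
    secretQuery.path = { $in: [ROOT_FOLDER_PATH, null, undefined] };
  } else if (normalizedPath) {
    secretQuery.path = normalizedPath;
  }
  return secretQuery;
};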
@@ -72,7 +72,7 @@ export const createServiceAccount = async (req: Request, res: Response) => {
}

const secret = crypto.randomBytes(16).toString('base64');
const secretHash = await bcrypt.hash(secret, getSaltRounds());
const secretHash = await bcrypt.hash(secret, await getSaltRounds());

// create service account
const serviceAccount = await new ServiceAccount({
@@ -84,7 +84,7 @@ export const createServiceTokenData = async (req: Request, res: Response) => {
} = req.body;

const secret = crypto.randomBytes(16).toString('hex');
const secretHash = await bcrypt.hash(secret, getSaltRounds());
const secretHash = await bcrypt.hash(secret, await getSaltRounds());

let expiresAt;
if (expiresIn) {
@@ -108,7 +108,7 @@ export const completeAccountSignup = async (req: Request, res: Response) => {
token = tokens.token;

// sending a welcome email to new users
if (getLoopsApiKey()) {
if (await getLoopsApiKey()) {
await request.post("https://app.loops.so/api/v1/events/send", {
"email": email,
"eventName": "Sign Up",
@@ -117,7 +117,7 @@ export const completeAccountSignup = async (req: Request, res: Response) => {
}, {
headers: {
"Accept": "application/json",
"Authorization": "Bearer " + getLoopsApiKey()
"Authorization": "Bearer " + (await getLoopsApiKey())
},
});
}
@@ -127,7 +127,7 @@ export const completeAccountSignup = async (req: Request, res: Response) => {
httpOnly: true,
path: '/',
sameSite: 'strict',
secure: getHttpsEnabled()
secure: await getHttpsEnabled()
});
} catch (err) {
Sentry.setUser(null);
@@ -232,7 +232,7 @@ export const completeAccountInvite = async (req: Request, res: Response) => {
httpOnly: true,
path: '/',
sameSite: 'strict',
secure: getHttpsEnabled()
secure: await getHttpsEnabled()
});
} catch (err) {
Sentry.setUser(null);
@@ -48,7 +48,7 @@ interface V2PushSecret {
export const pushWorkspaceSecrets = async (req: Request, res: Response) => {
// upload (encrypted) secrets to workspace with id [workspaceId]
try {
const postHogClient = TelemetryService.getPostHogClient();
const postHogClient = await TelemetryService.getPostHogClient();
let { secrets }: { secrets: V2PushSecret[] } = req.body;
const { keys, environment, channel } = req.body;
const { workspaceId } = req.params;
@@ -123,7 +123,7 @@ export const pushWorkspaceSecrets = async (req: Request, res: Response) => {
export const pullSecrets = async (req: Request, res: Response) => {
let secrets;
try {
const postHogClient = TelemetryService.getPostHogClient();
const postHogClient = await TelemetryService.getPostHogClient();
const environment: string = req.query.environment as string;
const channel: string = req.query.channel as string;
const { workspaceId } = req.params;
@@ -12,7 +12,7 @@ import { getStripeSecretKey, getStripeWebhookSecret } from '../../../config';
export const handleWebhook = async (req: Request, res: Response) => {
let event;
try {
const stripe = new Stripe(getStripeSecretKey(), {
const stripe = new Stripe(await getStripeSecretKey(), {
apiVersion: '2022-08-01'
});

@@ -21,7 +21,7 @@ export const handleWebhook = async (req: Request, res: Response) => {
event = stripe.webhooks.constructEvent(
req.body,
sig,
getStripeWebhookSecret()
await getStripeWebhookSecret()
);
} catch (err) {
Sentry.setUser({ email: req.user.email });
@ -1,4 +1,3 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Types } from 'mongoose';
|
||||
import { Action } from '../models';
|
||||
import {
|
||||
@ -36,33 +35,25 @@ const createActionUpdateSecret = async ({
|
||||
workspaceId: Types.ObjectId;
secretIds: Types.ObjectId[];
}) => {
let action;
try {
const latestSecretVersions = (await getLatestNSecretSecretVersionIds({
secretIds,
n: 2
}))
.map((s) => ({
oldSecretVersion: s.versions[0]._id,
newSecretVersion: s.versions[1]._id
}));

action = await new Action({
name,
user: userId,
serviceAccount: serviceAccountId,
serviceTokenData: serviceTokenDataId,
workspace: workspaceId,
payload: {
secretVersions: latestSecretVersions
}
}).save();

} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed to create update secret action');
}
const latestSecretVersions = (await getLatestNSecretSecretVersionIds({
secretIds,
n: 2
}))
.map((s) => ({
oldSecretVersion: s.versions[0]._id,
newSecretVersion: s.versions[1]._id
}));

const action = await new Action({
name,
user: userId,
serviceAccount: serviceAccountId,
serviceTokenData: serviceTokenDataId,
workspace: workspaceId,
payload: {
secretVersions: latestSecretVersions
}
}).save();

return action;
}
@@ -90,33 +81,25 @@ const createActionSecret = async ({

workspaceId: Types.ObjectId;
secretIds: Types.ObjectId[];
}) => {
let action;
try {
// case: action is adding, deleting, or reading secrets
// -> add new secret versions
const latestSecretVersions = (await getLatestSecretVersionIds({
secretIds
}))
.map((s) => ({
newSecretVersion: s.versionId
}));

action = await new Action({
name,
user: userId,
serviceAccount: serviceAccountId,
serviceTokenData: serviceTokenDataId,
workspace: workspaceId,
payload: {
secretVersions: latestSecretVersions
}
}).save();

} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed to create action create/read/delete secret action');
}
// case: action is adding, deleting, or reading secrets
// -> add new secret versions
const latestSecretVersions = (await getLatestSecretVersionIds({
secretIds
}))
.map((s) => ({
newSecretVersion: s.versionId
}));

const action = await new Action({
name,
user: userId,
serviceAccount: serviceAccountId,
serviceTokenData: serviceTokenDataId,
workspace: workspaceId,
payload: {
secretVersions: latestSecretVersions
}
}).save();

return action;
}
@@ -140,19 +123,12 @@ const createActionClient = ({

serviceAccountId?: Types.ObjectId;
serviceTokenDataId?: Types.ObjectId;
}) => {
let action;
try {
action = new Action({
name,
user: userId,
serviceAccount: serviceAccountId,
serviceTokenData: serviceTokenDataId
}).save();
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed to create client action');
}
const action = new Action({
name,
user: userId,
serviceAccount: serviceAccountId,
serviceTokenData: serviceTokenDataId
}).save();

return action;
}
@@ -181,40 +157,34 @@ const createActionHelper = async ({

secretIds?: Types.ObjectId[];
}) => {
let action;
try {
switch (name) {
case ACTION_LOGIN:
case ACTION_LOGOUT:
action = await createActionClient({
name,
userId
});
break;
case ACTION_ADD_SECRETS:
case ACTION_READ_SECRETS:
case ACTION_DELETE_SECRETS:
if (!workspaceId || !secretIds) throw new Error('Missing required params workspace id or secret ids to create action secret');
action = await createActionSecret({
name,
userId,
workspaceId,
secretIds
});
break;
case ACTION_UPDATE_SECRETS:
if (!workspaceId || !secretIds) throw new Error('Missing required params workspace id or secret ids to create action secret');
action = await createActionUpdateSecret({
name,
userId,
workspaceId,
secretIds
});
break;
}
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed to create action');
switch (name) {
case ACTION_LOGIN:
case ACTION_LOGOUT:
action = await createActionClient({
name,
userId
});
break;
case ACTION_ADD_SECRETS:
case ACTION_READ_SECRETS:
case ACTION_DELETE_SECRETS:
if (!workspaceId || !secretIds) throw new Error('Missing required params workspace id or secret ids to create action secret');
action = await createActionSecret({
name,
userId,
workspaceId,
secretIds
});
break;
case ACTION_UPDATE_SECRETS:
if (!workspaceId || !secretIds) throw new Error('Missing required params workspace id or secret ids to create action secret');
action = await createActionUpdateSecret({
name,
userId,
workspaceId,
secretIds
});
break;
}

return action;

@@ -222,4 +192,4 @@ const createActionHelper = async ({

export {
createActionHelper
};
};
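The switch above routes every audit action through one of three creators, and the refactor drops the per-helper try/catch in favour of letting errors propagate. A hedged usage sketch; parameter names mirror the hunks above, while the import paths are assumptions.

// Sketch: recording a login action and a secrets-update action.
import { Types } from 'mongoose';
import { createActionHelper } from '../ee/helpers/action';   // assumed path
import { ACTION_LOGIN, ACTION_UPDATE_SECRETS } from '../variables';

// login/logout actions need no workspace or secret context
const recordLogin = (userId: Types.ObjectId) =>
  createActionHelper({ name: ACTION_LOGIN, userId });

// secret actions require workspaceId and secretIds, otherwise the helper throws
const recordSecretsUpdate = (
  userId: Types.ObjectId,
  workspaceId: Types.ObjectId,
  secretIds: Types.ObjectId[]
) => createActionHelper({ name: ACTION_UPDATE_SECRETS, userId, workspaceId, secretIds });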
@@ -1,4 +1,3 @@
import * as Sentry from '@sentry/node';
import { Types } from 'mongoose';
import {
Log,

@@ -32,27 +31,20 @@ const createLogHelper = async ({

channel: string;
ipAddress: string;
}) => {
let log;
try {
log = await new Log({
user: userId,
serviceAccount: serviceAccountId,
serviceTokenData: serviceTokenDataId,
workspace: workspaceId ?? undefined,
actionNames: actions.map((a) => a.name),
actions,
channel,
ipAddress
}).save();
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed to create log');
}
const log = await new Log({
user: userId,
serviceAccount: serviceAccountId,
serviceTokenData: serviceTokenDataId,
workspace: workspaceId ?? undefined,
actionNames: actions.map((a) => a.name),
actions,
channel,
ipAddress
}).save();

return log;
}

export {
createLogHelper
}
}
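createLogHelper persists a Log document that references one or more Action documents. A hedged sketch of how the two helpers compose; field names follow the hunks above, the import paths and the channel value are illustrative assumptions.

// Sketch: create an action for a read, then log it with channel and IP.
import { Types } from 'mongoose';
import { createActionHelper } from '../ee/helpers/action';   // assumed path
import { createLogHelper } from '../ee/helpers/log';         // assumed path
import { ACTION_READ_SECRETS } from '../variables';

const logSecretsRead = async (
  userId: Types.ObjectId,
  workspaceId: Types.ObjectId,
  secretIds: Types.ObjectId[],
  ipAddress: string
) => {
  const action = await createActionHelper({ name: ACTION_READ_SECRETS, userId, workspaceId, secretIds });

  return createLogHelper({
    userId,
    workspaceId,
    actions: [action],
    channel: 'cli',   // illustrative channel value
    ipAddress
  });
};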
@ -1,14 +1,6 @@
|
||||
import { Types } from 'mongoose';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import {
|
||||
Secret,
|
||||
ISecret,
|
||||
} from '../../models';
|
||||
import {
|
||||
SecretSnapshot,
|
||||
SecretVersion,
|
||||
ISecretVersion
|
||||
} from '../models';
|
||||
import { Types } from "mongoose";
|
||||
import { Secret, ISecret } from "../../models";
|
||||
import { SecretSnapshot, SecretVersion, ISecretVersion } from "../models";
|
||||
|
||||
/**
|
||||
* Save a secret snapshot that is a copy of the current state of secrets in workspace with id
|
||||
@ -19,56 +11,53 @@ import {
|
||||
* @returns {SecretSnapshot} secretSnapshot - new secret snapshot
|
||||
*/
|
||||
const takeSecretSnapshotHelper = async ({
|
||||
workspaceId
|
||||
workspaceId,
|
||||
}: {
|
||||
workspaceId: Types.ObjectId;
|
||||
workspaceId: Types.ObjectId;
|
||||
}) => {
|
||||
const secretIds = (
|
||||
await Secret.find(
|
||||
{
|
||||
workspace: workspaceId,
|
||||
},
|
||||
"_id"
|
||||
)
|
||||
).map((s) => s._id);
|
||||
|
||||
let secretSnapshot;
|
||||
try {
|
||||
const secretIds = (await Secret.find({
|
||||
workspace: workspaceId
|
||||
}, '_id')).map((s) => s._id);
|
||||
const latestSecretVersions = (
|
||||
await SecretVersion.aggregate([
|
||||
{
|
||||
$match: {
|
||||
secret: {
|
||||
$in: secretIds,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: "$secret",
|
||||
version: { $max: "$version" },
|
||||
versionId: { $max: "$_id" }, // secret version id
|
||||
},
|
||||
},
|
||||
{
|
||||
$sort: { version: -1 },
|
||||
},
|
||||
]).exec()
|
||||
).map((s) => s.versionId);
|
||||
|
||||
const latestSecretVersions = (await SecretVersion.aggregate([
|
||||
{
|
||||
$match: {
|
||||
secret: {
|
||||
$in: secretIds
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: '$secret',
|
||||
version: { $max: '$version' },
|
||||
versionId: { $max: '$_id' } // secret version id
|
||||
}
|
||||
},
|
||||
{
|
||||
$sort: { version: -1 }
|
||||
}
|
||||
])
|
||||
.exec())
|
||||
.map((s) => s.versionId);
|
||||
const latestSecretSnapshot = await SecretSnapshot.findOne({
|
||||
workspace: workspaceId,
|
||||
}).sort({ version: -1 });
|
||||
|
||||
const latestSecretSnapshot = await SecretSnapshot.findOne({
|
||||
workspace: workspaceId
|
||||
}).sort({ version: -1 });
|
||||
const secretSnapshot = await new SecretSnapshot({
|
||||
workspace: workspaceId,
|
||||
version: latestSecretSnapshot ? latestSecretSnapshot.version + 1 : 1,
|
||||
secretVersions: latestSecretVersions,
|
||||
}).save();
|
||||
|
||||
secretSnapshot = await new SecretSnapshot({
|
||||
workspace: workspaceId,
|
||||
version: latestSecretSnapshot ? latestSecretSnapshot.version + 1 : 1,
|
||||
secretVersions: latestSecretVersions
|
||||
}).save();
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to take a secret snapshot');
|
||||
}
|
||||
|
||||
return secretSnapshot;
|
||||
}
|
||||
return secretSnapshot;
|
||||
};
|
||||
|
||||
/**
|
||||
* Add secret versions [secretVersions] to the SecretVersion collection.
|
||||
@ -77,93 +66,79 @@ const takeSecretSnapshotHelper = async ({
|
||||
* @returns {SecretVersion[]} newSecretVersions - new secret versions
|
||||
*/
|
||||
const addSecretVersionsHelper = async ({
|
||||
secretVersions
|
||||
secretVersions,
|
||||
}: {
|
||||
secretVersions: ISecretVersion[]
|
||||
secretVersions: ISecretVersion[];
|
||||
}) => {
|
||||
let newSecretVersions;
|
||||
try {
|
||||
newSecretVersions = await SecretVersion.insertMany(secretVersions);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error(`Failed to add secret versions [err=${err}]`);
|
||||
}
|
||||
const newSecretVersions = await SecretVersion.insertMany(secretVersions);
|
||||
|
||||
return newSecretVersions;
|
||||
}
|
||||
return newSecretVersions;
|
||||
};
|
||||
|
||||
const markDeletedSecretVersionsHelper = async ({
|
||||
secretIds
|
||||
secretIds,
|
||||
}: {
|
||||
secretIds: Types.ObjectId[];
|
||||
secretIds: Types.ObjectId[];
|
||||
}) => {
|
||||
try {
|
||||
await SecretVersion.updateMany({
|
||||
secret: { $in: secretIds }
|
||||
}, {
|
||||
isDeleted: true
|
||||
}, {
|
||||
new: true
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to mark secret versions as deleted');
|
||||
}
|
||||
}
|
||||
await SecretVersion.updateMany(
|
||||
{
|
||||
secret: { $in: secretIds },
|
||||
},
|
||||
{
|
||||
isDeleted: true,
|
||||
},
|
||||
{
|
||||
new: true,
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* Initialize secret versioning by setting previously unversioned
|
||||
* secrets to version 1 and begin populating secret versions.
|
||||
*/
|
||||
const initSecretVersioningHelper = async () => {
|
||||
try {
|
||||
await Secret.updateMany(
|
||||
{ version: { $exists: false } },
|
||||
{ $set: { version: 1 } }
|
||||
);
|
||||
|
||||
await Secret.updateMany(
|
||||
{ version: { $exists: false } },
|
||||
{ $set: { version: 1 } }
|
||||
);
|
||||
const unversionedSecrets: ISecret[] = await Secret.aggregate([
|
||||
{
|
||||
$lookup: {
|
||||
from: "secretversions",
|
||||
localField: "_id",
|
||||
foreignField: "secret",
|
||||
as: "versions",
|
||||
},
|
||||
},
|
||||
{
|
||||
$match: {
|
||||
versions: { $size: 0 },
|
||||
},
|
||||
},
|
||||
]);
|
||||
|
||||
const unversionedSecrets: ISecret[] = await Secret.aggregate([
|
||||
{
|
||||
$lookup: {
|
||||
from: 'secretversions',
|
||||
localField: '_id',
|
||||
foreignField: 'secret',
|
||||
as: 'versions',
|
||||
},
|
||||
},
|
||||
{
|
||||
$match: {
|
||||
versions: { $size: 0 },
|
||||
},
|
||||
},
|
||||
]);
|
||||
|
||||
if (unversionedSecrets.length > 0) {
|
||||
await addSecretVersionsHelper({
|
||||
secretVersions: unversionedSecrets.map((s, idx) => new SecretVersion({
|
||||
...s,
|
||||
secret: s._id,
|
||||
version: s.version ? s.version : 1,
|
||||
isDeleted: false,
|
||||
workspace: s.workspace,
|
||||
environment: s.environment
|
||||
}))
|
||||
});
|
||||
}
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to ensure that secrets are versioned');
|
||||
}
|
||||
}
|
||||
if (unversionedSecrets.length > 0) {
|
||||
await addSecretVersionsHelper({
|
||||
secretVersions: unversionedSecrets.map(
|
||||
(s, idx) =>
|
||||
new SecretVersion({
|
||||
...s,
|
||||
secret: s._id,
|
||||
version: s.version ? s.version : 1,
|
||||
isDeleted: false,
|
||||
workspace: s.workspace,
|
||||
environment: s.environment,
|
||||
})
|
||||
),
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export {
|
||||
takeSecretSnapshotHelper,
|
||||
addSecretVersionsHelper,
|
||||
markDeletedSecretVersionsHelper,
|
||||
initSecretVersioningHelper
|
||||
}
|
||||
takeSecretSnapshotHelper,
|
||||
addSecretVersionsHelper,
|
||||
markDeletedSecretVersionsHelper,
|
||||
initSecretVersioningHelper,
|
||||
};
|
||||
|
@ -1,4 +1,3 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Types } from 'mongoose';
|
||||
import { SecretVersion } from '../models';
|
||||
|
||||
@ -13,41 +12,32 @@ const getLatestSecretVersionIds = async ({
|
||||
}: {
|
||||
secretIds: Types.ObjectId[];
|
||||
}) => {
|
||||
|
||||
interface LatestSecretVersionId {
|
||||
_id: Types.ObjectId;
|
||||
version: number;
|
||||
versionId: Types.ObjectId;
|
||||
}
|
||||
|
||||
let latestSecretVersionIds: LatestSecretVersionId[];
|
||||
try {
|
||||
latestSecretVersionIds = (await SecretVersion.aggregate([
|
||||
{
|
||||
$match: {
|
||||
secret: {
|
||||
$in: secretIds
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: '$secret',
|
||||
version: { $max: '$version' },
|
||||
versionId: { $max: '$_id' } // id of latest secret version
|
||||
}
|
||||
},
|
||||
{
|
||||
$sort: { version: -1 }
|
||||
const latestSecretVersionIds = (await SecretVersion.aggregate([
|
||||
{
|
||||
$match: {
|
||||
secret: {
|
||||
$in: secretIds
|
||||
}
|
||||
}
|
||||
])
|
||||
.exec());
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get latest secret versions');
|
||||
}
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: '$secret',
|
||||
version: { $max: '$version' },
|
||||
versionId: { $max: '$_id' } // id of latest secret version
|
||||
}
|
||||
},
|
||||
{
|
||||
$sort: { version: -1 }
|
||||
}
|
||||
])
|
||||
.exec());
|
||||
|
||||
return latestSecretVersionIds;
|
||||
}
|
||||
@ -66,40 +56,32 @@ const getLatestNSecretSecretVersionIds = async ({
|
||||
secretIds: Types.ObjectId[];
|
||||
n: number;
|
||||
}) => {
|
||||
|
||||
// TODO: optimize query
|
||||
let latestNSecretVersions;
|
||||
try {
|
||||
latestNSecretVersions = (await SecretVersion.aggregate([
|
||||
{
|
||||
$match: {
|
||||
secret: {
|
||||
$in: secretIds,
|
||||
},
|
||||
},
|
||||
const latestNSecretVersions = (await SecretVersion.aggregate([
|
||||
{
|
||||
$match: {
|
||||
secret: {
|
||||
$in: secretIds,
|
||||
},
|
||||
{
|
||||
$sort: { version: -1 },
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: "$secret",
|
||||
versions: { $push: "$$ROOT" },
|
||||
},
|
||||
},
|
||||
{
|
||||
$sort: { version: -1 },
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: "$secret",
|
||||
versions: { $push: "$$ROOT" },
|
||||
},
|
||||
{
|
||||
$project: {
|
||||
_id: 0,
|
||||
secret: "$_id",
|
||||
versions: { $slice: ["$versions", n] },
|
||||
},
|
||||
}
|
||||
]));
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get latest n secret versions');
|
||||
}
|
||||
},
|
||||
{
|
||||
$project: {
|
||||
_id: 0,
|
||||
secret: "$_id",
|
||||
versions: { $slice: ["$versions", n] },
|
||||
},
|
||||
}
|
||||
]));
|
||||
|
||||
return latestNSecretVersions;
|
||||
}
|
||||
|
@@ -1,4 +1,3 @@
import * as Sentry from '@sentry/node';
import { Types } from 'mongoose';
import jwt from 'jsonwebtoken';
import bcrypt from 'bcrypt';

@@ -104,7 +103,7 @@ const getAuthUserPayload = async ({

authTokenValue: string;
}) => {
const decodedToken = <jwt.UserIDJwtPayload>(
jwt.verify(authTokenValue, getJwtAuthSecret())
jwt.verify(authTokenValue, await getJwtAuthSecret())
);

const user = await User.findOne({

@@ -263,16 +262,16 @@ const issueAuthTokens = async ({ userId }: { userId: string }) => {

payload: {
userId
},
expiresIn: getJwtAuthLifetime(),
secret: getJwtAuthSecret()
expiresIn: await getJwtAuthLifetime(),
secret: await getJwtAuthSecret()
});

const refreshToken = createToken({
payload: {
userId
},
expiresIn: getJwtRefreshLifetime(),
secret: getJwtRefreshSecret()
expiresIn: await getJwtRefreshLifetime(),
secret: await getJwtRefreshSecret()
});

return {
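issueAuthTokens now awaits the JWT lifetime and secret before signing. A minimal sketch, assuming createToken wraps jsonwebtoken's jwt.sign; the parameter shape mirrors the calls above, but the body is illustrative rather than the repository's actual helper.

// Sketch: a thin wrapper over jwt.sign with the same call shape as above.
import jwt from 'jsonwebtoken';

const createToken = ({
  payload,
  expiresIn,
  secret
}: {
  payload: Record<string, unknown>;
  expiresIn: string | number;
  secret: string;
}) => jwt.sign(payload, secret, { expiresIn });

// usage mirrors the diff: both config getters are awaited before signing
// const token = createToken({
//   payload: { userId },
//   expiresIn: await getJwtAuthLifetime(),
//   secret: await getJwtAuthSecret()
// });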
@ -1,41 +1,34 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Types } from 'mongoose';
|
||||
import { Types } from "mongoose";
|
||||
import {
|
||||
Bot,
|
||||
BotKey,
|
||||
Secret,
|
||||
ISecret,
|
||||
IUser,
|
||||
User,
|
||||
IServiceAccount,
|
||||
ServiceAccount,
|
||||
IServiceTokenData,
|
||||
ServiceTokenData
|
||||
} from '../models';
|
||||
import {
|
||||
generateKeyPair,
|
||||
encryptSymmetric,
|
||||
decryptSymmetric,
|
||||
decryptAsymmetric
|
||||
} from '../utils/crypto';
|
||||
Bot,
|
||||
BotKey,
|
||||
Secret,
|
||||
ISecret,
|
||||
IUser,
|
||||
User,
|
||||
IServiceAccount,
|
||||
ServiceAccount,
|
||||
IServiceTokenData,
|
||||
ServiceTokenData,
|
||||
} from "../models";
|
||||
import {
|
||||
SECRET_SHARED,
|
||||
AUTH_MODE_JWT,
|
||||
AUTH_MODE_SERVICE_ACCOUNT,
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
AUTH_MODE_API_KEY
|
||||
} from '../variables';
|
||||
import { getEncryptionKey } from '../config';
|
||||
import { BotNotFoundError, UnauthorizedRequestError } from '../utils/errors';
|
||||
generateKeyPair,
|
||||
encryptSymmetric,
|
||||
decryptSymmetric,
|
||||
decryptAsymmetric,
|
||||
} from "../utils/crypto";
|
||||
import {
|
||||
validateMembership
|
||||
} from '../helpers/membership';
|
||||
import {
|
||||
validateUserClientForWorkspace
|
||||
} from '../helpers/user';
|
||||
import {
|
||||
validateServiceAccountClientForWorkspace
|
||||
} from '../helpers/serviceAccount';
|
||||
SECRET_SHARED,
|
||||
AUTH_MODE_JWT,
|
||||
AUTH_MODE_SERVICE_ACCOUNT,
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
AUTH_MODE_API_KEY,
|
||||
} from "../variables";
|
||||
import { getEncryptionKey } from "../config";
|
||||
import { BotNotFoundError, UnauthorizedRequestError } from "../utils/errors";
|
||||
import { validateMembership } from "../helpers/membership";
|
||||
import { validateUserClientForWorkspace } from "../helpers/user";
|
||||
import { validateServiceAccountClientForWorkspace } from "../helpers/serviceAccount";
|
||||
|
||||
/**
|
||||
* Validate authenticated clients for bot with id [botId] based
|
||||
@ -46,99 +39,104 @@ import {
|
||||
* @param {Array<'admin' | 'member'>} obj.acceptedRoles - accepted workspace roles
|
||||
*/
|
||||
const validateClientForBot = async ({
|
||||
authData,
|
||||
botId,
|
||||
acceptedRoles
|
||||
authData,
|
||||
botId,
|
||||
acceptedRoles,
|
||||
}: {
|
||||
authData: {
|
||||
authMode: string;
|
||||
authPayload: IUser | IServiceAccount | IServiceTokenData;
|
||||
};
|
||||
botId: Types.ObjectId;
|
||||
acceptedRoles: Array<'admin' | 'member'>;
|
||||
authData: {
|
||||
authMode: string;
|
||||
authPayload: IUser | IServiceAccount | IServiceTokenData;
|
||||
};
|
||||
botId: Types.ObjectId;
|
||||
acceptedRoles: Array<"admin" | "member">;
|
||||
}) => {
|
||||
const bot = await Bot.findById(botId);
|
||||
|
||||
if (!bot) throw BotNotFoundError();
|
||||
|
||||
if (authData.authMode === AUTH_MODE_JWT && authData.authPayload instanceof User) {
|
||||
await validateUserClientForWorkspace({
|
||||
user: authData.authPayload,
|
||||
workspaceId: bot.workspace,
|
||||
acceptedRoles
|
||||
});
|
||||
|
||||
return bot;
|
||||
}
|
||||
const bot = await Bot.findById(botId);
|
||||
|
||||
if (authData.authMode === AUTH_MODE_SERVICE_ACCOUNT && authData.authPayload instanceof ServiceAccount) {
|
||||
await validateServiceAccountClientForWorkspace({
|
||||
serviceAccount: authData.authPayload,
|
||||
workspaceId: bot.workspace
|
||||
});
|
||||
if (!bot) throw BotNotFoundError();
|
||||
|
||||
return bot;
|
||||
}
|
||||
|
||||
if (authData.authMode === AUTH_MODE_SERVICE_TOKEN && authData.authPayload instanceof ServiceTokenData) {
|
||||
throw UnauthorizedRequestError({
|
||||
message: 'Failed service token authorization for bot'
|
||||
});
|
||||
}
|
||||
|
||||
if (authData.authMode === AUTH_MODE_API_KEY && authData.authPayload instanceof User) {
|
||||
await validateUserClientForWorkspace({
|
||||
user: authData.authPayload,
|
||||
workspaceId: bot.workspace,
|
||||
acceptedRoles
|
||||
});
|
||||
|
||||
return bot;
|
||||
}
|
||||
|
||||
throw BotNotFoundError({
|
||||
message: 'Failed client authorization for bot'
|
||||
if (
|
||||
authData.authMode === AUTH_MODE_JWT &&
|
||||
authData.authPayload instanceof User
|
||||
) {
|
||||
await validateUserClientForWorkspace({
|
||||
user: authData.authPayload,
|
||||
workspaceId: bot.workspace,
|
||||
acceptedRoles,
|
||||
});
|
||||
}
|
||||
|
||||
return bot;
|
||||
}
|
||||
|
||||
if (
|
||||
authData.authMode === AUTH_MODE_SERVICE_ACCOUNT &&
|
||||
authData.authPayload instanceof ServiceAccount
|
||||
) {
|
||||
await validateServiceAccountClientForWorkspace({
|
||||
serviceAccount: authData.authPayload,
|
||||
workspaceId: bot.workspace,
|
||||
});
|
||||
|
||||
return bot;
|
||||
}
|
||||
|
||||
if (
|
||||
authData.authMode === AUTH_MODE_SERVICE_TOKEN &&
|
||||
authData.authPayload instanceof ServiceTokenData
|
||||
) {
|
||||
throw UnauthorizedRequestError({
|
||||
message: "Failed service token authorization for bot",
|
||||
});
|
||||
}
|
||||
|
||||
if (
|
||||
authData.authMode === AUTH_MODE_API_KEY &&
|
||||
authData.authPayload instanceof User
|
||||
) {
|
||||
await validateUserClientForWorkspace({
|
||||
user: authData.authPayload,
|
||||
workspaceId: bot.workspace,
|
||||
acceptedRoles,
|
||||
});
|
||||
|
||||
return bot;
|
||||
}
|
||||
|
||||
throw BotNotFoundError({
|
||||
message: "Failed client authorization for bot",
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Create an inactive bot with name [name] for workspace with id [workspaceId]
|
||||
* @param {Object} obj
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.name - name of bot
|
||||
* @param {String} obj.workspaceId - id of workspace that bot belongs to
|
||||
*/
|
||||
const createBot = async ({
|
||||
name,
|
||||
workspaceId,
|
||||
name,
|
||||
workspaceId,
|
||||
}: {
|
||||
name: string;
|
||||
workspaceId: Types.ObjectId;
|
||||
name: string;
|
||||
workspaceId: Types.ObjectId;
|
||||
}) => {
|
||||
let bot;
|
||||
try {
|
||||
const { publicKey, privateKey } = generateKeyPair();
|
||||
const { ciphertext, iv, tag } = encryptSymmetric({
|
||||
plaintext: privateKey,
|
||||
key: getEncryptionKey()
|
||||
});
|
||||
const { publicKey, privateKey } = generateKeyPair();
|
||||
const { ciphertext, iv, tag } = encryptSymmetric({
|
||||
plaintext: privateKey,
|
||||
key: await getEncryptionKey(),
|
||||
});
|
||||
|
||||
bot = await new Bot({
|
||||
name,
|
||||
workspace: workspaceId,
|
||||
isActive: false,
|
||||
publicKey,
|
||||
encryptedPrivateKey: ciphertext,
|
||||
iv,
|
||||
tag
|
||||
}).save();
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to create bot');
|
||||
}
|
||||
|
||||
return bot;
|
||||
}
|
||||
const bot = await new Bot({
|
||||
name,
|
||||
workspace: workspaceId,
|
||||
isActive: false,
|
||||
publicKey,
|
||||
encryptedPrivateKey: ciphertext,
|
||||
iv,
|
||||
tag,
|
||||
}).save();
|
||||
|
||||
return bot;
|
||||
};
|
||||
|
||||
/**
|
||||
* Return decrypted secrets for workspace with id [workspaceId]
|
||||
@ -148,125 +146,105 @@ const createBot = async ({
|
||||
* @param {String} obj.environment - environment
|
||||
*/
|
||||
const getSecretsHelper = async ({
|
||||
workspaceId,
|
||||
environment
|
||||
workspaceId,
|
||||
environment,
|
||||
}: {
|
||||
workspaceId: Types.ObjectId;
|
||||
environment: string;
|
||||
workspaceId: Types.ObjectId;
|
||||
environment: string;
|
||||
}) => {
|
||||
const content = {} as any;
|
||||
try {
|
||||
const key = await getKey({ workspaceId });
|
||||
const secrets = await Secret.find({
|
||||
workspace: workspaceId,
|
||||
environment,
|
||||
type: SECRET_SHARED
|
||||
});
|
||||
|
||||
secrets.forEach((secret: ISecret) => {
|
||||
const secretKey = decryptSymmetric({
|
||||
ciphertext: secret.secretKeyCiphertext,
|
||||
iv: secret.secretKeyIV,
|
||||
tag: secret.secretKeyTag,
|
||||
key
|
||||
});
|
||||
const content = {} as any;
|
||||
const key = await getKey({ workspaceId: workspaceId.toString() });
|
||||
const secrets = await Secret.find({
|
||||
workspace: workspaceId,
|
||||
environment,
|
||||
type: SECRET_SHARED,
|
||||
});
|
||||
|
||||
const secretValue = decryptSymmetric({
|
||||
ciphertext: secret.secretValueCiphertext,
|
||||
iv: secret.secretValueIV,
|
||||
tag: secret.secretValueTag,
|
||||
key
|
||||
});
|
||||
secrets.forEach((secret: ISecret) => {
|
||||
const secretKey = decryptSymmetric({
|
||||
ciphertext: secret.secretKeyCiphertext,
|
||||
iv: secret.secretKeyIV,
|
||||
tag: secret.secretKeyTag,
|
||||
key,
|
||||
});
|
||||
|
||||
content[secretKey] = secretValue;
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get secrets');
|
||||
}
|
||||
const secretValue = decryptSymmetric({
|
||||
ciphertext: secret.secretValueCiphertext,
|
||||
iv: secret.secretValueIV,
|
||||
tag: secret.secretValueTag,
|
||||
key,
|
||||
});
|
||||
|
||||
return content;
|
||||
}
|
||||
content[secretKey] = secretValue;
|
||||
});
|
||||
|
||||
return content;
|
||||
};
|
||||
|
||||
/**
|
||||
* Return bot's copy of the workspace key for workspace
|
||||
* Return bot's copy of the workspace key for workspace
|
||||
* with id [workspaceId]
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.workspaceId - id of workspace
|
||||
* @returns {String} key - decrypted workspace key
|
||||
*/
|
||||
const getKey = async ({ workspaceId }: { workspaceId: Types.ObjectId }) => {
|
||||
let key;
|
||||
try {
|
||||
const botKey = await BotKey.findOne({
|
||||
workspace: workspaceId
|
||||
}).populate<{ sender: IUser }>('sender', 'publicKey');
|
||||
|
||||
if (!botKey) throw new Error('Failed to find bot key');
|
||||
|
||||
const bot = await Bot.findOne({
|
||||
workspace: workspaceId
|
||||
}).select('+encryptedPrivateKey +iv +tag');
|
||||
|
||||
if (!bot) throw new Error('Failed to find bot');
|
||||
if (!bot.isActive) throw new Error('Bot is not active');
|
||||
|
||||
const privateKeyBot = decryptSymmetric({
|
||||
ciphertext: bot.encryptedPrivateKey,
|
||||
iv: bot.iv,
|
||||
tag: bot.tag,
|
||||
key: getEncryptionKey()
|
||||
});
|
||||
|
||||
key = decryptAsymmetric({
|
||||
ciphertext: botKey.encryptedKey,
|
||||
nonce: botKey.nonce,
|
||||
publicKey: botKey.sender.publicKey as string,
|
||||
privateKey: privateKeyBot
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get workspace key');
|
||||
}
|
||||
|
||||
return key;
|
||||
}
|
||||
const getKey = async ({ workspaceId }: { workspaceId: string }) => {
|
||||
const botKey = await BotKey.findOne({
|
||||
workspace: workspaceId,
|
||||
}).populate<{ sender: IUser }>("sender", "publicKey");
|
||||
|
||||
if (!botKey) throw new Error("Failed to find bot key");
|
||||
|
||||
const bot = await Bot.findOne({
|
||||
workspace: workspaceId,
|
||||
}).select("+encryptedPrivateKey +iv +tag");
|
||||
|
||||
if (!bot) throw new Error("Failed to find bot");
|
||||
if (!bot.isActive) throw new Error("Bot is not active");
|
||||
|
||||
const privateKeyBot = decryptSymmetric({
|
||||
ciphertext: bot.encryptedPrivateKey,
|
||||
iv: bot.iv,
|
||||
tag: bot.tag,
|
||||
key: await getEncryptionKey(),
|
||||
});
|
||||
|
||||
const key = decryptAsymmetric({
|
||||
ciphertext: botKey.encryptedKey,
|
||||
nonce: botKey.nonce,
|
||||
publicKey: botKey.sender.publicKey as string,
|
||||
privateKey: privateKeyBot,
|
||||
});
|
||||
|
||||
return key;
|
||||
};
|
||||
|
||||
/**
|
||||
* Return symmetrically encrypted [plaintext] using the
|
||||
* key for workspace with id [workspaceId]
|
||||
* key for workspace with id [workspaceId]
|
||||
* @param {Object} obj1
|
||||
* @param {String} obj1.workspaceId - id of workspace
|
||||
* @param {String} obj1.plaintext - plaintext to encrypt
|
||||
*/
|
||||
const encryptSymmetricHelper = async ({
|
||||
workspaceId,
|
||||
plaintext
|
||||
workspaceId,
|
||||
plaintext,
|
||||
}: {
|
||||
workspaceId: Types.ObjectId;
|
||||
plaintext: string;
|
||||
workspaceId: Types.ObjectId;
|
||||
plaintext: string;
|
||||
}) => {
|
||||
|
||||
try {
|
||||
const key = await getKey({ workspaceId });
|
||||
const { ciphertext, iv, tag } = encryptSymmetric({
|
||||
plaintext,
|
||||
key
|
||||
});
|
||||
|
||||
return ({
|
||||
ciphertext,
|
||||
iv,
|
||||
tag
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to perform symmetric encryption with bot');
|
||||
}
|
||||
}
|
||||
const key = await getKey({ workspaceId: workspaceId.toString() });
|
||||
const { ciphertext, iv, tag } = encryptSymmetric({
|
||||
plaintext,
|
||||
key,
|
||||
});
|
||||
|
||||
return {
|
||||
ciphertext,
|
||||
iv,
|
||||
tag,
|
||||
};
|
||||
};
|
||||
/**
|
||||
* Return symmetrically decrypted [ciphertext] using the
|
||||
* key for workspace with id [workspaceId]
|
||||
@ -277,40 +255,31 @@ const encryptSymmetricHelper = async ({
|
||||
* @param {String} obj.tag - tag
|
||||
*/
|
||||
const decryptSymmetricHelper = async ({
|
||||
workspaceId,
|
||||
workspaceId,
|
||||
ciphertext,
|
||||
iv,
|
||||
tag,
|
||||
}: {
|
||||
workspaceId: Types.ObjectId;
|
||||
ciphertext: string;
|
||||
iv: string;
|
||||
tag: string;
|
||||
}) => {
|
||||
const key = await getKey({ workspaceId: workspaceId.toString() });
|
||||
const plaintext = decryptSymmetric({
|
||||
ciphertext,
|
||||
iv,
|
||||
tag
|
||||
}: {
|
||||
workspaceId: Types.ObjectId;
|
||||
ciphertext: string;
|
||||
iv: string;
|
||||
tag: string;
|
||||
}) => {
|
||||
let plaintext;
|
||||
try {
|
||||
const key = await getKey({ workspaceId });
|
||||
const plaintext = decryptSymmetric({
|
||||
ciphertext,
|
||||
iv,
|
||||
tag,
|
||||
key
|
||||
});
|
||||
|
||||
return plaintext;
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to perform symmetric decryption with bot');
|
||||
}
|
||||
|
||||
return plaintext;
|
||||
}
|
||||
tag,
|
||||
key,
|
||||
});
|
||||
|
||||
return plaintext;
|
||||
};
|
||||
|
||||
export {
|
||||
validateClientForBot,
|
||||
createBot,
|
||||
getSecretsHelper,
|
||||
encryptSymmetricHelper,
|
||||
decryptSymmetricHelper
|
||||
}
|
||||
validateClientForBot,
|
||||
createBot,
|
||||
getSecretsHelper,
|
||||
encryptSymmetricHelper,
|
||||
decryptSymmetricHelper,
|
||||
};
|
||||
|
@@ -20,12 +20,12 @@ const initDatabaseHelper = async ({

// allow empty strings to pass the required validator
mongoose.Schema.Types.String.checkRequired(v => typeof v === 'string');

getLogger("database").info("Database connection established");
(await getLogger("database")).info("Database connection established");

await EESecretService.initSecretVersioning();
await SecretService.initSecretBlindIndexDataHelper();
} catch (err) {
getLogger("database").error(`Unable to establish Database connection due to the error.\n${err}`);
(await getLogger("database")).error(`Unable to establish Database connection due to the error.\n${err}`);
}

return mongoose.connection;
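getLogger("database") is now awaited before .info()/.error() are called. A hedged sketch of why a logger factory ends up async, assuming a cached factory built on winston; this is an illustration of the calling pattern, not the repository's actual logger module.

// Sketch: an async, cached logger factory.
import winston from 'winston';

const loggers = new Map<string, winston.Logger>();

const getLogger = async (name: string): Promise<winston.Logger> => {
  const existing = loggers.get(name);
  if (existing) return existing;

  // a real setup might await remote configuration here before building transports
  const logger = winston.createLogger({
    defaultMeta: { service: name },
    transports: [new winston.transports.Console()]
  });

  loggers.set(name, logger);
  return logger;
};

// (await getLogger("database")).info("Database connection established");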
@ -1,14 +1,13 @@
|
||||
import { Types } from 'mongoose';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Bot, IBot } from '../models';
|
||||
import { EVENT_PUSH_SECRETS } from '../variables';
|
||||
import { IntegrationService } from '../services';
|
||||
import { Types } from "mongoose";
|
||||
import { Bot, IBot } from "../models";
|
||||
import { EVENT_PUSH_SECRETS } from "../variables";
|
||||
import { IntegrationService } from "../services";
|
||||
|
||||
interface Event {
|
||||
name: string;
|
||||
workspaceId: Types.ObjectId;
|
||||
environment?: string;
|
||||
payload: any;
|
||||
name: string;
|
||||
workspaceId: Types.ObjectId;
|
||||
environment?: string;
|
||||
payload: any;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -19,39 +18,25 @@ interface Event {
|
||||
* @param {String} obj.event.workspaceId - id of workspace that event is part of
|
||||
* @param {Object} obj.event.payload - payload of event (depends on event)
|
||||
*/
|
||||
const handleEventHelper = async ({
|
||||
event
|
||||
}: {
|
||||
event: Event;
|
||||
}) => {
|
||||
const {
|
||||
workspaceId,
|
||||
environment
|
||||
} = event;
|
||||
|
||||
// TODO: moduralize bot check into separate function
|
||||
const bot = await Bot.findOne({
|
||||
workspace: workspaceId,
|
||||
isActive: true
|
||||
});
|
||||
|
||||
if (!bot) return;
|
||||
|
||||
try {
|
||||
switch (event.name) {
|
||||
case EVENT_PUSH_SECRETS:
|
||||
IntegrationService.syncIntegrations({
|
||||
workspaceId,
|
||||
environment
|
||||
});
|
||||
break;
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
}
|
||||
}
|
||||
const handleEventHelper = async ({ event }: { event: Event }) => {
|
||||
const { workspaceId, environment } = event;
|
||||
|
||||
export {
|
||||
handleEventHelper
|
||||
}
|
||||
// TODO: moduralize bot check into separate function
|
||||
const bot = await Bot.findOne({
|
||||
workspace: workspaceId,
|
||||
isActive: true,
|
||||
});
|
||||
|
||||
if (!bot) return;
|
||||
|
||||
switch (event.name) {
|
||||
case EVENT_PUSH_SECRETS:
|
||||
IntegrationService.syncIntegrations({
|
||||
workspaceId,
|
||||
environment,
|
||||
});
|
||||
break;
|
||||
}
|
||||
};
|
||||
|
||||
export { handleEventHelper };
|
||||
|
@ -256,7 +256,7 @@ const syncIntegrationsHelper = async ({
|
||||
integration,
|
||||
integrationAuth,
|
||||
secrets,
|
||||
accessId: access.accessId,
|
||||
accessId: access.accessId === undefined ? null : access.accessId,
|
||||
accessToken: access.accessToken
|
||||
});
|
||||
}
|
||||
@ -482,4 +482,4 @@ export {
|
||||
getIntegrationAuthAccessHelper,
|
||||
setIntegrationAuthRefreshHelper,
|
||||
setIntegrationAuthAccessHelper
|
||||
}
|
||||
}
|
||||
|
@ -1,4 +1,3 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Key, IKey } from '../models';
|
||||
|
||||
interface Key {
|
||||
@ -27,36 +26,30 @@ const pushKeys = async ({
|
||||
workspaceId: string;
|
||||
keys: Key[];
|
||||
}): Promise<void> => {
|
||||
try {
|
||||
// filter out already-inserted keys
|
||||
const keysSet = new Set(
|
||||
(
|
||||
await Key.find(
|
||||
{
|
||||
workspace: workspaceId
|
||||
},
|
||||
'receiver'
|
||||
)
|
||||
).map((k: IKey) => k.receiver.toString())
|
||||
);
|
||||
// filter out already-inserted keys
|
||||
const keysSet = new Set(
|
||||
(
|
||||
await Key.find(
|
||||
{
|
||||
workspace: workspaceId
|
||||
},
|
||||
'receiver'
|
||||
)
|
||||
).map((k: IKey) => k.receiver.toString())
|
||||
);
|
||||
|
||||
keys = keys.filter((key) => !keysSet.has(key.userId));
|
||||
keys = keys.filter((key) => !keysSet.has(key.userId));
|
||||
|
||||
// add new shared keys only
|
||||
await Key.insertMany(
|
||||
keys.map((k) => ({
|
||||
encryptedKey: k.encryptedKey,
|
||||
nonce: k.nonce,
|
||||
sender: userId,
|
||||
receiver: k.userId,
|
||||
workspace: workspaceId
|
||||
}))
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to push access keys');
|
||||
}
|
||||
// add new shared keys only
|
||||
await Key.insertMany(
|
||||
keys.map((k) => ({
|
||||
encryptedKey: k.encryptedKey,
|
||||
nonce: k.nonce,
|
||||
sender: userId,
|
||||
receiver: k.userId,
|
||||
workspace: workspaceId
|
||||
}))
|
||||
);
|
||||
};
|
||||
|
||||
export { pushKeys };
|
||||
|
@ -1,4 +1,3 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Types } from 'mongoose';
|
||||
import {
|
||||
MembershipOrg,
|
||||
@ -144,15 +143,7 @@ const validateMembershipOrg = async ({
|
||||
* @return {Object} membershipOrg - membership
|
||||
*/
|
||||
const findMembershipOrg = (queryObj: any) => {
|
||||
let membershipOrg;
|
||||
try {
|
||||
membershipOrg = MembershipOrg.findOne(queryObj);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to find organization membership');
|
||||
}
|
||||
|
||||
const membershipOrg = MembershipOrg.findOne(queryObj);
|
||||
return membershipOrg;
|
||||
};
|
||||
|
||||
@ -175,33 +166,27 @@ const addMembershipsOrg = async ({
|
||||
roles: string[];
|
||||
statuses: string[];
|
||||
}) => {
|
||||
try {
|
||||
const operations = userIds.map((userId, idx) => {
|
||||
return {
|
||||
updateOne: {
|
||||
filter: {
|
||||
user: userId,
|
||||
organization: organizationId,
|
||||
role: roles[idx],
|
||||
status: statuses[idx]
|
||||
},
|
||||
update: {
|
||||
user: userId,
|
||||
organization: organizationId,
|
||||
role: roles[idx],
|
||||
status: statuses[idx]
|
||||
},
|
||||
upsert: true
|
||||
}
|
||||
};
|
||||
});
|
||||
const operations = userIds.map((userId, idx) => {
|
||||
return {
|
||||
updateOne: {
|
||||
filter: {
|
||||
user: userId,
|
||||
organization: organizationId,
|
||||
role: roles[idx],
|
||||
status: statuses[idx]
|
||||
},
|
||||
update: {
|
||||
user: userId,
|
||||
organization: organizationId,
|
||||
role: roles[idx],
|
||||
status: statuses[idx]
|
||||
},
|
||||
upsert: true
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
await MembershipOrg.bulkWrite(operations as any);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to add users to organization');
|
||||
}
|
||||
await MembershipOrg.bulkWrite(operations as any);
|
||||
};
|
||||
|
||||
/**
|
||||
@ -214,43 +199,36 @@ const deleteMembershipOrg = async ({
|
||||
}: {
|
||||
membershipOrgId: string;
|
||||
}) => {
|
||||
let deletedMembershipOrg;
|
||||
try {
|
||||
deletedMembershipOrg = await MembershipOrg.findOneAndDelete({
|
||||
_id: membershipOrgId
|
||||
});
|
||||
const deletedMembershipOrg = await MembershipOrg.findOneAndDelete({
|
||||
_id: membershipOrgId
|
||||
});
|
||||
|
||||
if (!deletedMembershipOrg) throw new Error('Failed to delete organization membership');
|
||||
if (!deletedMembershipOrg) throw new Error('Failed to delete organization membership');
|
||||
|
||||
// delete keys associated with organization membership
|
||||
if (deletedMembershipOrg?.user) {
|
||||
// case: organization membership had a registered user
|
||||
// delete keys associated with organization membership
|
||||
if (deletedMembershipOrg?.user) {
|
||||
// case: organization membership had a registered user
|
||||
|
||||
const workspaces = (
|
||||
await Workspace.find({
|
||||
organization: deletedMembershipOrg.organization
|
||||
})
|
||||
).map((w) => w._id.toString());
|
||||
const workspaces = (
|
||||
await Workspace.find({
|
||||
organization: deletedMembershipOrg.organization
|
||||
})
|
||||
).map((w) => w._id.toString());
|
||||
|
||||
await Membership.deleteMany({
|
||||
user: deletedMembershipOrg.user,
|
||||
workspace: {
|
||||
$in: workspaces
|
||||
}
|
||||
});
|
||||
await Membership.deleteMany({
|
||||
user: deletedMembershipOrg.user,
|
||||
workspace: {
|
||||
$in: workspaces
|
||||
}
|
||||
});
|
||||
|
||||
await Key.deleteMany({
|
||||
receiver: deletedMembershipOrg.user,
|
||||
workspace: {
|
||||
$in: workspaces
|
||||
}
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to delete organization membership');
|
||||
}
|
||||
await Key.deleteMany({
|
||||
receiver: deletedMembershipOrg.user,
|
||||
workspace: {
|
||||
$in: workspaces
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return deletedMembershipOrg;
|
||||
};
|
||||
|
@@ -25,7 +25,7 @@ const sendMail = async ({

recipients: string[];
substitutions: any;
}) => {
if (getSmtpConfigured()) {
if (await getSmtpConfigured()) {
try {
const html = fs.readFileSync(
path.resolve(__dirname, '../templates/' + template),

@@ -35,7 +35,7 @@ const sendMail = async ({

const htmlToSend = temp(substitutions);

await smtpTransporter.sendMail({
from: `"${getSmtpFromName()}" <${getSmtpFromAddress()}>`,
from: `"${await getSmtpFromName()}" <${await getSmtpFromAddress()}>`,
to: recipients.join(', '),
subject: subjectLine,
html: htmlToSend
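The mail helper compiles an HTML template and sends it through an SMTP transporter, with the sender name and address now awaited from config. A hedged sketch of that flow; the transporter options, template file name and sender values are assumptions added to make the example self-contained.

// Sketch: read a handlebars template, compile it, send via nodemailer.
import fs from 'fs';
import path from 'path';
import handlebars from 'handlebars';
import nodemailer from 'nodemailer';

const sendWelcomeEmail = async (recipient: string) => {
  const smtpTransporter = nodemailer.createTransport({
    host: process.env.SMTP_HOST,                 // assumed env-based transport config
    port: Number(process.env.SMTP_PORT ?? 587)
  });

  const html = fs.readFileSync(
    path.resolve(__dirname, '../templates/emailVerification.handlebars'), // illustrative template name
    'utf8'
  );
  const temp = handlebars.compile(html);
  const htmlToSend = temp({ firstName: 'Ada' });  // illustrative substitutions

  await smtpTransporter.sendMail({
    from: '"Infisical" <noreply@example.com>',    // placeholder sender
    to: recipient,
    subject: 'Welcome',
    html: htmlToSend
  });
};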
@ -1,39 +1,34 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import Stripe from 'stripe';
|
||||
import { Types } from 'mongoose';
|
||||
import Stripe from "stripe";
|
||||
import { Types } from "mongoose";
|
||||
import {
|
||||
IUser,
|
||||
User,
|
||||
IServiceAccount,
|
||||
ServiceAccount,
|
||||
IServiceTokenData,
|
||||
ServiceTokenData
|
||||
} from '../models';
|
||||
import { Organization, MembershipOrg } from '../models';
|
||||
import {
|
||||
ACCEPTED,
|
||||
AUTH_MODE_JWT,
|
||||
AUTH_MODE_SERVICE_ACCOUNT,
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
AUTH_MODE_API_KEY,
|
||||
OWNER
|
||||
} from '../variables';
|
||||
import {
|
||||
getStripeSecretKey,
|
||||
getStripeProductPro,
|
||||
getStripeProductTeam,
|
||||
getStripeProductStarter
|
||||
} from '../config';
|
||||
IUser,
|
||||
User,
|
||||
IServiceAccount,
|
||||
ServiceAccount,
|
||||
IServiceTokenData,
|
||||
ServiceTokenData,
|
||||
} from "../models";
|
||||
import { Organization, MembershipOrg } from "../models";
|
||||
import {
|
||||
UnauthorizedRequestError,
|
||||
OrganizationNotFoundError
|
||||
} from '../utils/errors';
|
||||
ACCEPTED,
|
||||
AUTH_MODE_JWT,
|
||||
AUTH_MODE_SERVICE_ACCOUNT,
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
AUTH_MODE_API_KEY,
|
||||
OWNER,
|
||||
} from "../variables";
|
||||
import {
|
||||
validateUserClientForOrganization
|
||||
} from '../helpers/user';
|
||||
getStripeSecretKey,
|
||||
getStripeProductPro,
|
||||
getStripeProductTeam,
|
||||
getStripeProductStarter,
|
||||
} from "../config";
|
||||
import {
|
||||
validateServiceAccountClientForOrganization
|
||||
} from '../helpers/serviceAccount';
|
||||
UnauthorizedRequestError,
|
||||
OrganizationNotFoundError,
|
||||
} from "../utils/errors";
|
||||
import { validateUserClientForOrganization } from "../helpers/user";
|
||||
import { validateServiceAccountClientForOrganization } from "../helpers/serviceAccount";
|
||||
|
||||
/**
|
||||
* Validate accepted clients for organization with id [organizationId]
|
||||
@ -42,69 +37,80 @@ import {
|
||||
* @param {Types.ObjectId} obj.organizationId - id of organization to validate against
|
||||
*/
|
||||
const validateClientForOrganization = async ({
|
||||
authData,
|
||||
organizationId,
|
||||
acceptedRoles,
|
||||
acceptedStatuses
|
||||
authData,
|
||||
organizationId,
|
||||
acceptedRoles,
|
||||
acceptedStatuses,
|
||||
}: {
|
||||
authData: {
|
||||
authMode: string;
|
||||
authPayload: IUser | IServiceAccount | IServiceTokenData;
|
||||
},
|
||||
organizationId: Types.ObjectId;
|
||||
acceptedRoles: Array<'owner' | 'admin' | 'member'>;
|
||||
acceptedStatuses: Array<'invited' | 'accepted'>;
|
||||
authData: {
|
||||
authMode: string;
|
||||
authPayload: IUser | IServiceAccount | IServiceTokenData;
|
||||
};
|
||||
organizationId: Types.ObjectId;
|
||||
acceptedRoles: Array<"owner" | "admin" | "member">;
|
||||
acceptedStatuses: Array<"invited" | "accepted">;
|
||||
}) => {
|
||||
|
||||
const organization = await Organization.findById(organizationId);
|
||||
|
||||
if (!organization) {
|
||||
throw OrganizationNotFoundError({
|
||||
message: 'Failed to find organization'
|
||||
});
|
||||
}
|
||||
|
||||
if (authData.authMode === AUTH_MODE_JWT && authData.authPayload instanceof User) {
|
||||
const membershipOrg = await validateUserClientForOrganization({
|
||||
user: authData.authPayload,
|
||||
organization,
|
||||
acceptedRoles,
|
||||
acceptedStatuses
|
||||
});
|
||||
|
||||
return ({ organization, membershipOrg });
|
||||
}
|
||||
const organization = await Organization.findById(organizationId);
|
||||
|
||||
if (authData.authMode === AUTH_MODE_SERVICE_ACCOUNT && authData.authPayload instanceof ServiceAccount) {
|
||||
await validateServiceAccountClientForOrganization({
|
||||
serviceAccount: authData.authPayload,
|
||||
organization
|
||||
});
|
||||
|
||||
return ({ organization });
|
||||
}
|
||||
if (!organization) {
|
||||
throw OrganizationNotFoundError({
|
||||
message: "Failed to find organization",
|
||||
});
|
||||
}
|
||||
|
||||
if (authData.authMode === AUTH_MODE_SERVICE_TOKEN && authData.authPayload instanceof ServiceTokenData) {
|
||||
throw UnauthorizedRequestError({
|
||||
message: 'Failed service token authorization for organization'
|
||||
});
|
||||
}
|
||||
if (
|
||||
authData.authMode === AUTH_MODE_JWT &&
|
||||
authData.authPayload instanceof User
|
||||
) {
|
||||
const membershipOrg = await validateUserClientForOrganization({
|
||||
user: authData.authPayload,
|
||||
organization,
|
||||
acceptedRoles,
|
||||
acceptedStatuses,
|
||||
});
|
||||
|
||||
if (authData.authMode === AUTH_MODE_API_KEY && authData.authPayload instanceof User) {
|
||||
const membershipOrg = await validateUserClientForOrganization({
|
||||
user: authData.authPayload,
|
||||
organization,
|
||||
acceptedRoles,
|
||||
acceptedStatuses
|
||||
});
|
||||
|
||||
return ({ organization, membershipOrg });
|
||||
}
|
||||
|
||||
throw UnauthorizedRequestError({
|
||||
message: 'Failed client authorization for organization'
|
||||
});
|
||||
}
|
||||
return { organization, membershipOrg };
|
||||
}
|
||||
|
||||
if (
|
||||
authData.authMode === AUTH_MODE_SERVICE_ACCOUNT &&
|
||||
authData.authPayload instanceof ServiceAccount
|
||||
) {
|
||||
await validateServiceAccountClientForOrganization({
|
||||
serviceAccount: authData.authPayload,
|
||||
organization,
|
||||
});
|
||||
|
||||
return { organization };
|
||||
}
|
||||
|
||||
if (
|
||||
authData.authMode === AUTH_MODE_SERVICE_TOKEN &&
|
||||
authData.authPayload instanceof ServiceTokenData
|
||||
) {
|
||||
throw UnauthorizedRequestError({
|
||||
message: "Failed service token authorization for organization",
|
||||
});
|
||||
}
|
||||
|
||||
if (
|
||||
authData.authMode === AUTH_MODE_API_KEY &&
|
||||
authData.authPayload instanceof User
|
||||
) {
|
||||
const membershipOrg = await validateUserClientForOrganization({
|
||||
user: authData.authPayload,
|
||||
organization,
|
||||
acceptedRoles,
|
||||
acceptedStatuses,
|
||||
});
|
||||
|
||||
return { organization, membershipOrg };
|
||||
}
|
||||
|
||||
throw UnauthorizedRequestError({
|
||||
message: "Failed client authorization for organization",
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Create an organization with name [name]
|
||||
@ -114,43 +120,37 @@ const validateClientForOrganization = async ({
|
||||
* @param {Object} organization - new organization
|
||||
*/
|
||||
const createOrganization = async ({
|
||||
name,
|
||||
email
|
||||
name,
|
||||
email,
|
||||
}: {
|
||||
name: string;
|
||||
email: string;
|
||||
name: string;
|
||||
email: string;
|
||||
}) => {
|
||||
let organization;
|
||||
try {
|
||||
// register stripe account
|
||||
const stripe = new Stripe(getStripeSecretKey(), {
|
||||
apiVersion: '2022-08-01'
|
||||
});
|
||||
let organization;
|
||||
// register stripe account
|
||||
const stripe = new Stripe(await getStripeSecretKey(), {
|
||||
apiVersion: "2022-08-01",
|
||||
});
|
||||
|
||||
if (getStripeSecretKey()) {
|
||||
const customer = await stripe.customers.create({
|
||||
email,
|
||||
description: name
|
||||
});
|
||||
if (await getStripeSecretKey()) {
|
||||
const customer = await stripe.customers.create({
|
||||
email,
|
||||
description: name,
|
||||
});
|
||||
|
||||
organization = await new Organization({
|
||||
name,
|
||||
customerId: customer.id
|
||||
}).save();
|
||||
} else {
|
||||
organization = await new Organization({
|
||||
name
|
||||
}).save();
|
||||
}
|
||||
organization = await new Organization({
|
||||
name,
|
||||
customerId: customer.id,
|
||||
}).save();
|
||||
} else {
|
||||
organization = await new Organization({
|
||||
name,
|
||||
}).save();
|
||||
}
|
||||
|
||||
await initSubscriptionOrg({ organizationId: organization._id });
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email });
|
||||
Sentry.captureException(err);
|
||||
throw new Error(`Failed to create organization [err=${err}]`);
|
||||
}
|
||||
await initSubscriptionOrg({ organizationId: organization._id });
|
||||
|
||||
return organization;
|
||||
return organization;
|
||||
};
|
||||
|
||||
/**
|
||||
@ -162,57 +162,52 @@ const createOrganization = async ({
|
||||
* @return {Subscription} obj.subscription - new subscription
|
||||
*/
|
||||
const initSubscriptionOrg = async ({
|
||||
organizationId
|
||||
organizationId,
|
||||
}: {
|
||||
organizationId: Types.ObjectId;
|
||||
organizationId: Types.ObjectId;
|
||||
}) => {
|
||||
let stripeSubscription;
|
||||
let subscription;
|
||||
try {
|
||||
// find organization
|
||||
const organization = await Organization.findOne({
|
||||
_id: organizationId
|
||||
});
|
||||
let stripeSubscription;
|
||||
let subscription;
|
||||
|
||||
if (organization) {
|
||||
if (organization.customerId) {
|
||||
// initialize starter subscription with quantity of 0
|
||||
const stripe = new Stripe(getStripeSecretKey(), {
|
||||
apiVersion: '2022-08-01'
|
||||
});
|
||||
// find organization
|
||||
const organization = await Organization.findOne({
|
||||
_id: organizationId,
|
||||
});
|
||||
|
||||
const productToPriceMap = {
|
||||
starter: getStripeProductStarter(),
|
||||
team: getStripeProductTeam(),
|
||||
pro: getStripeProductPro()
|
||||
};
|
||||
if (organization) {
|
||||
if (organization.customerId) {
|
||||
// initialize starter subscription with quantity of 0
|
||||
const stripe = new Stripe(await getStripeSecretKey(), {
|
||||
apiVersion: "2022-08-01",
|
||||
});
|
||||
|
||||
stripeSubscription = await stripe.subscriptions.create({
|
||||
customer: organization.customerId,
|
||||
items: [
|
||||
{
|
||||
price: productToPriceMap['starter'],
|
||||
quantity: 1
|
||||
}
|
||||
],
|
||||
payment_behavior: 'default_incomplete',
|
||||
proration_behavior: 'none',
|
||||
expand: ['latest_invoice.payment_intent']
|
||||
});
|
||||
}
|
||||
} else {
|
||||
throw new Error('Failed to initialize free organization subscription');
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to initialize free organization subscription');
|
||||
}
|
||||
const productToPriceMap = {
|
||||
starter: await getStripeProductStarter(),
|
||||
team: await getStripeProductTeam(),
|
||||
pro: await getStripeProductPro(),
|
||||
};
|
||||
|
||||
return {
|
||||
stripeSubscription,
|
||||
subscription
|
||||
};
|
||||
stripeSubscription = await stripe.subscriptions.create({
|
||||
customer: organization.customerId,
|
||||
items: [
|
||||
{
|
||||
price: productToPriceMap["starter"],
|
||||
quantity: 1,
|
||||
},
|
||||
],
|
||||
payment_behavior: "default_incomplete",
|
||||
proration_behavior: "none",
|
||||
expand: ["latest_invoice.payment_intent"],
|
||||
});
|
||||
}
|
||||
} else {
|
||||
throw new Error("Failed to initialize free organization subscription");
|
||||
}
|
||||
|
||||
return {
|
||||
stripeSubscription,
|
||||
subscription,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
@ -222,54 +217,49 @@ const initSubscriptionOrg = async ({
|
||||
* @param {Number} obj.organizationId - id of subscription's organization
|
||||
*/
|
||||
const updateSubscriptionOrgQuantity = async ({
|
||||
organizationId
|
||||
organizationId,
|
||||
}: {
|
||||
organizationId: string;
|
||||
organizationId: string;
|
||||
}) => {
|
||||
let stripeSubscription;
|
||||
try {
|
||||
// find organization
|
||||
const organization = await Organization.findOne({
|
||||
_id: organizationId
|
||||
});
|
||||
let stripeSubscription;
|
||||
// find organization
|
||||
const organization = await Organization.findOne({
|
||||
_id: organizationId,
|
||||
});
|
||||
|
||||
if (organization && organization.customerId) {
|
||||
const quantity = await MembershipOrg.countDocuments({
|
||||
organization: organizationId,
|
||||
status: ACCEPTED
|
||||
});
|
||||
if (organization && organization.customerId) {
|
||||
const quantity = await MembershipOrg.countDocuments({
|
||||
organization: organizationId,
|
||||
status: ACCEPTED,
|
||||
});
|
||||
|
||||
const stripe = new Stripe(getStripeSecretKey(), {
|
||||
apiVersion: '2022-08-01'
|
||||
});
|
||||
const stripe = new Stripe(await getStripeSecretKey(), {
|
||||
apiVersion: "2022-08-01",
|
||||
});
|
||||
|
||||
const subscription = (
|
||||
await stripe.subscriptions.list({
|
||||
customer: organization.customerId
|
||||
})
|
||||
).data[0];
|
||||
const subscription = (
|
||||
await stripe.subscriptions.list({
|
||||
customer: organization.customerId,
|
||||
})
|
||||
).data[0];
|
||||
|
||||
stripeSubscription = await stripe.subscriptions.update(subscription.id, {
|
||||
items: [
|
||||
{
|
||||
id: subscription.items.data[0].id,
|
||||
price: subscription.items.data[0].price.id,
|
||||
quantity
|
||||
}
|
||||
]
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
}
|
||||
stripeSubscription = await stripe.subscriptions.update(subscription.id, {
|
||||
items: [
|
||||
{
|
||||
id: subscription.items.data[0].id,
|
||||
price: subscription.items.data[0].price.id,
|
||||
quantity,
|
||||
},
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
return stripeSubscription;
|
||||
return stripeSubscription;
|
||||
};
|
||||
|
||||
export {
|
||||
validateClientForOrganization,
|
||||
createOrganization,
|
||||
initSubscriptionOrg,
|
||||
updateSubscriptionOrgQuantity
|
||||
};
|
||||
validateClientForOrganization,
|
||||
createOrganization,
|
||||
initSubscriptionOrg,
|
||||
updateSubscriptionOrgQuantity,
|
||||
};
|
||||
|
File diff suppressed because it is too large
@@ -233,27 +233,29 @@ const initSecretBlindIndexDataHelper = async () => {
     }
   });

-  const secretBlindIndexDataToInsert = workspaceIdsToBlindIndex.map((workspaceToBlindIndex) => {
+  const secretBlindIndexDataToInsert = await Promise.all(
+    workspaceIdsToBlindIndex.map(async (workspaceToBlindIndex) => {
      const salt = crypto.randomBytes(16).toString('base64');

      const {
        ciphertext: encryptedSaltCiphertext,
        iv: saltIV,
        tag: saltTag
      } = encryptSymmetric({
        plaintext: salt,
        key: await getEncryptionKey()
      });

      const secretBlindIndexData = new SecretBlindIndexData({
        workspace: workspaceToBlindIndex,
        encryptedSaltCiphertext,
        saltIV,
        saltTag
      })

      return secretBlindIndexData;
    })
  );

  if (secretBlindIndexDataToInsert.length > 0) {
    await SecretBlindIndexData.insertMany(secretBlindIndexDataToInsert);

@@ -280,7 +282,7 @@ const createSecretBlindIndexDataHelper = async ({
   } = encryptSymmetric({
     plaintext: salt,
-    key: getEncryptionKey()
+    key: await getEncryptionKey()
   });

@@ -316,7 +318,7 @@ const getSecretBlindIndexSaltHelper = async ({
     ciphertext: secretBlindIndexData.encryptedSaltCiphertext,
     iv: secretBlindIndexData.saltIV,
     tag: secretBlindIndexData.saltTag,
-    key: getEncryptionKey()
+    key: await getEncryptionKey()
   });

   return salt;

@@ -378,7 +380,7 @@ const generateSecretBlindIndexHelper = async ({
     ciphertext: secretBlindIndexData.encryptedSaltCiphertext,
     iv: secretBlindIndexData.saltIV,
     tag: secretBlindIndexData.saltTag,
-    key: getEncryptionKey()
+    key: await getEncryptionKey()
   });

@@ -508,7 +510,7 @@ const createSecretHelper = async ({
-  const postHogClient = TelemetryService.getPostHogClient();
+  const postHogClient = await TelemetryService.getPostHogClient();

@@ -578,7 +580,7 @@ const getSecretsHelper = async ({
-  const postHogClient = TelemetryService.getPostHogClient();
+  const postHogClient = await TelemetryService.getPostHogClient();

@@ -660,7 +662,7 @@ const getSecretHelper = async ({
-  const postHogClient = TelemetryService.getPostHogClient();
+  const postHogClient = await TelemetryService.getPostHogClient();

@@ -798,7 +800,7 @@ const updateSecretHelper = async ({
-  const postHogClient = TelemetryService.getPostHogClient();
+  const postHogClient = await TelemetryService.getPostHogClient();

@@ -905,7 +907,7 @@ const deleteSecretHelper = async ({
-  const postHogClient = TelemetryService.getPostHogClient();
+  const postHogClient = await TelemetryService.getPostHogClient();
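A minimal sketch of the per-workspace salt idea behind the blind-index hunks, using Node's built-in AES-256-GCM directly rather than the project's encryptSymmetric wrapper; encryptSalt and the 32-byte key parameter are assumptions for illustration.

import crypto from "crypto";

// Generate a random salt for one workspace and encrypt it with a symmetric root key.
const encryptSalt = (key: Buffer) => {
  // key must be 32 bytes for aes-256-gcm
  const salt = crypto.randomBytes(16).toString("base64");
  const iv = crypto.randomBytes(12);
  const cipher = crypto.createCipheriv("aes-256-gcm", key, iv);
  const ciphertext = Buffer.concat([cipher.update(salt, "utf8"), cipher.final()]);

  return {
    encryptedSaltCiphertext: ciphertext.toString("base64"),
    saltIV: iv.toString("base64"),
    saltTag: cipher.getAuthTag().toString("base64"),
  };
};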
@@ -1,16 +1,15 @@
-import * as Sentry from '@sentry/node';
 import { Types } from "mongoose";
 import { TokenData } from "../models";
 import crypto from "crypto";
 import bcrypt from "bcrypt";
 import {
   TOKEN_EMAIL_CONFIRMATION,
   TOKEN_EMAIL_MFA,
   TOKEN_EMAIL_ORG_INVITATION,
   TOKEN_EMAIL_PASSWORD_RESET,
 } from "../variables";
 import { UnauthorizedRequestError } from "../utils/errors";
 import { getSaltRounds } from "../config";

/**
 * Create and store a token in the database for purpose [type]
@@ -22,194 +21,197 @@ import { getSaltRounds } from '../config';
 * @returns {String} token - the created token
 */
createTokenHelper({ type, email, phoneNumber, organizationId }) keeps its switch on the
token type — a random 6-digit code for TOKEN_EMAIL_CONFIRMATION (24-hour expiry) and
TOKEN_EMAIL_MFA (5-minute expiry, triesLeft = 5), and a random 16-byte hex token for
TOKEN_EMAIL_ORG_INVITATION (3 days) and TOKEN_EMAIL_PASSWORD_RESET (24 hours) — and still
upserts TokenData from the TokenDataQuery/TokenDataUpdate pair built out of type, email,
phoneNumber, and organization. The try/catch that reported to Sentry and re-threw
"Failed to create token" is removed, and the bcrypt salt rounds are now awaited:

-    tokenHash: await bcrypt.hash(token, getSaltRounds()),
+    tokenHash: await bcrypt.hash(token, await getSaltRounds()),

-  await TokenData.findOneAndUpdate(
-    query,
-    update,
-    {
-      new: true,
-      upsert: true
-    }
-  );
+  await TokenData.findOneAndUpdate(query, update, {
+    new: true,
+    upsert: true,
+  });

/**
 *
 * @param {Object} obj
 * @param {String} obj.email - email associated with the token
 * @param {String} obj.token - value of the token
 */
validateTokenHelper({ type, email, phoneNumber, organizationId, token }) now reads:
build the query from type, email, phoneNumber, and organization; look up TokenData with
"+tokenHash" selected, throwing "Failed to find token to validate" when nothing matches;
if the record is expired, delete it and throw UnauthorizedRequestError("MFA session
expired. Please log in again", code "mfa_expired"); otherwise compare the token with
bcrypt. On a mismatch, a token with a try-limit is deleted when it is on its last try or
has triesLeft decremented via TokenData.findByIdAndUpdate, and
UnauthorizedRequestError("MFA code is invalid", code "mfa_invalid") is thrown, including
the remaining triesLeft when a try-limit exists. A valid token's record is deleted.

 export { createTokenHelper, validateTokenHelper };
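A minimal sketch of the hash-and-upsert pattern the token helper follows, with TokenModel, issueEmailCode, and saltRounds as illustrative stand-ins for the app's own model and configuration.

import crypto from "crypto";
import bcrypt from "bcrypt";

// Issue a 6-digit email code valid for 24 hours; only the bcrypt hash is persisted.
const issueEmailCode = async (TokenModel: any, email: string, saltRounds: number) => {
  const token = String(crypto.randomInt(Math.pow(10, 5), Math.pow(10, 6) - 1));
  const expiresAt = new Date(Date.now() + 86400000);

  await TokenModel.findOneAndUpdate(
    { type: "emailConfirmation", email },
    {
      type: "emailConfirmation",
      email,
      tokenHash: await bcrypt.hash(token, saltRounds),
      expiresAt,
    },
    { new: true, upsert: true }
  );

  return token; // sent to the user out of band; the plaintext is never stored
};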
@@ -1,4 +1,3 @@
-import * as Sentry from '@sentry/node';
 import { Types } from 'mongoose';
 import {
   IUser,

@@ -28,16 +27,9 @@ import {
  * @returns {Object} user - the initialized user
  */
 const setupAccount = async ({ email }: { email: string }) => {
-  let user;
-  try {
   const user = await new User({
     email
   }).save();
-  } catch (err) {
-    Sentry.setUser({ email });
-    Sentry.captureException(err);
-    throw new Error('Failed to set up account');
-  }

   return user;
 };

@@ -89,34 +81,27 @@ const completeAccount = async ({
   salt: string;
   verifier: string;
 }) => {
-  let user;
-  try {
   const options = {
     new: true
   };
   const user = await User.findByIdAndUpdate(
     userId,
     {
       firstName,
       lastName,
       encryptionVersion,
       protectedKey,
       protectedKeyIV,
       protectedKeyTag,
       publicKey,
       encryptedPrivateKey,
       iv: encryptedPrivateKeyIV,
       tag: encryptedPrivateKeyTag,
       salt,
       verifier
     },
     options
   );
-  } catch (err) {
-    Sentry.setUser(null);
-    Sentry.captureException(err);
-    throw new Error('Failed to complete account set up');
-  }

   return user;
 };
@@ -28,7 +28,6 @@ import {
   AUTH_MODE_SERVICE_TOKEN,
   AUTH_MODE_API_KEY
 } from '../variables';
 import { getEncryptionKey } from '../config';
 import { encryptSymmetric } from '../utils/crypto';
 import { SecretService } from '../services';
@@ -1,7 +1,6 @@
 import mongoose from 'mongoose';
 import dotenv from 'dotenv';
 dotenv.config();
 import infisical from 'infisical-node';
 import express from 'express';
 import helmet from 'helmet';
 import cors from 'cors';

@@ -45,7 +44,8 @@ import {
   password as v1PasswordRouter,
   stripe as v1StripeRouter,
   integration as v1IntegrationRouter,
-  integrationAuth as v1IntegrationAuthRouter
+  integrationAuth as v1IntegrationAuthRouter,
+  secretsFolder as v1SecretsFolder
 } from './routes/v1';
 import {
   signup as v2SignupRouter,

@@ -79,22 +79,16 @@ import {
 } from './config';

 const main = async () => {
-  if (process.env.INFISICAL_TOKEN != "" || process.env.INFISICAL_TOKEN != undefined) {
-    await infisical.connect({
-      token: process.env.INFISICAL_TOKEN!
-    });
-  }
-
   TelemetryService.logTelemetryMessage();
-  setTransporter(initSmtp());
+  setTransporter(await initSmtp());

-  await DatabaseService.initDatabase(getMongoURL());
-  if (getNodeEnv() !== 'test') {
+  await DatabaseService.initDatabase(await getMongoURL());
+  if ((await getNodeEnv()) !== 'test') {
     Sentry.init({
-      dsn: getSentryDSN(),
+      dsn: await getSentryDSN(),
       tracesSampleRate: 1.0,
-      debug: getNodeEnv() === 'production' ? false : true,
-      environment: getNodeEnv()
+      debug: await getNodeEnv() === 'production' ? false : true,
+      environment: await getNodeEnv()
     });
   }

@@ -106,13 +100,13 @@ const main = async () => {
   app.use(
     cors({
       credentials: true,
-      origin: getSiteURL()
+      origin: await getSiteURL()
     })
   );

   app.use(requestIp.mw());

-  if (getNodeEnv() === 'production') {
+  if ((await getNodeEnv()) === 'production') {
     // enable app-wide rate-limiting + helmet security
     // in production
     app.disable('x-powered-by');

@@ -144,6 +138,7 @@ const main = async () => {
   app.use('/api/v1/stripe', v1StripeRouter);
   app.use('/api/v1/integration', v1IntegrationRouter);
   app.use('/api/v1/integration-auth', v1IntegrationAuthRouter);
+  app.use('/api/v1/folder', v1SecretsFolder)

   // v2 routes (improvements)
   app.use('/api/v2/signup', v2SignupRouter);

@@ -158,7 +153,7 @@ const main = async () => {
   app.use('/api/v2/service-token', v2ServiceTokenDataRouter); // TODO: turn into plural route
   app.use('/api/v2/service-accounts', v2ServiceAccountsRouter); // new
   app.use('/api/v2/api-key', v2APIKeyDataRouter);

   // v3 routes (experimental)
   app.use('/api/v3/secrets', v3SecretsRouter);
   app.use('/api/v3/workspaces', v3WorkspacesRouter);

@@ -177,8 +172,8 @@ const main = async () => {
   app.use(requestErrorHandler)

-  const server = app.listen(getPort(), () => {
-    getLogger("backend-main").info(`Server started listening at port ${getPort()}`)
+  const server = app.listen(await getPort(), async () => {
+    (await getLogger("backend-main")).info(`Server started listening at port ${await getPort()}`)
   });

   await createTestUserForDevelopment();
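A minimal sketch of the startup pattern these hunks converge on, where configuration getters are async and therefore awaited before being handed to the server; getPort and getNodeEnv here are simplified stand-ins that only read process.env.

import express from "express";

// Illustrative async getters; in practice these may resolve values from a secrets backend.
const getPort = async () => Number(process.env.PORT ?? 4000);
const getNodeEnv = async () => process.env.NODE_ENV ?? "development";

const bootstrap = async () => {
  const app = express();

  if ((await getNodeEnv()) === "production") {
    // production-only hardening
    app.disable("x-powered-by");
  }

  const port = await getPort();
  app.listen(port, () => console.log(`Server started listening at port ${port}`));
};

bootstrap();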
@@ -1,7 +1,6 @@
-import * as Sentry from "@sentry/node";
 import { Octokit } from "@octokit/rest";
 import { IIntegrationAuth } from "../models";
-import request from '../config/request';
+import request from "../config/request";
 import {
   INTEGRATION_AZURE_KEY_VAULT,
   INTEGRATION_AWS_PARAMETER_STORE,

@@ -26,7 +25,7 @@ import {
   INTEGRATION_FLYIO_API_URL,
   INTEGRATION_CIRCLECI_API_URL,
   INTEGRATION_TRAVISCI_API_URL,
-  INTEGRATION_SUPABASE_API_URL
+  INTEGRATION_SUPABASE_API_URL,
 } from "../variables";

 interface App {

@@ -47,87 +46,80 @@ (getApps)
getApps({ integrationAuth, accessToken, teamId }) keeps the same switch — empty app
lists for INTEGRATION_AZURE_KEY_VAULT, INTEGRATION_AWS_PARAMETER_STORE, and
INTEGRATION_AWS_SECRET_MANAGER, and delegation to getAppsHeroku, getAppsVercel,
getAppsNetlify, getAppsGithub, getAppsGitlab (with teamId), getAppsRender,
getAppsRailway, getAppsFlyio, getAppsCircleCI, getAppsTravisCI, and getAppsSupabase —
but the try/catch that reported to Sentry and re-threw "Failed to get integration apps"
is removed, so provider errors now propagate to the caller.

@@ -141,25 +133,18 @@ (getAppsHeroku)
@@ -178,33 +163,26 @@ (getAppsVercel)
@@ -310,29 +309,20 @@ (getAppsRender)
@@ -345,49 +335,51 @@ (getAppsRailway)
@@ -397,41 +389,40 @@ (getAppsFlyio)
@@ -444,63 +435,43 @@ (getAppsCircleCI, getAppsTravisCI)
@@ -624,30 +581,23 @@ (getAppsSupabase)
The provider helpers change the same way: the Sentry try/catch wrappers and their thrown
errors ("Failed to get Heroku integration apps", "Failed to get Vercel integration apps",
"Failed to get Render services", "Failed to get Railway services", "Failed to get Fly.io
apps", "Failed to get CircleCI projects", "Failed to get TravisCI projects", "Failed to
get Supabase projects") are removed, `let apps` becomes `const apps`, and the bodies are
reformatted with double quotes and trailing commas; the underlying API requests are
unchanged.

@@ -218,43 +196,41 @@ (getAppsNetlify)
Besides dropping the Sentry try/catch ("Failed to get Netlify integration apps"), the
pagination parameters now request all sites:

       const params = new URLSearchParams({
         page: String(page),
-        per_page: String(perPage)
+        per_page: String(perPage),
+        filter: 'all'
       });

@@ -268,35 +244,58 @@ (getAppsGithub)
The GitHub helper is rewritten: the old single octokit.request call and its
"Failed to get Github repos" try/catch are replaced with a typed pagination loop:

  interface GitHubApp {
    id: string;
    name: string;
    permissions: {
      admin: boolean;
    };
    owner: {
      login: string;
    };
  }

  const octokit = new Octokit({
    auth: accessToken,
  });

  const getAllRepos = async () => {
    let repos: GitHubApp[] = [];
    let page = 1;
    const per_page = 100;
    let hasMore = true;

    while (hasMore) {
      const response = await octokit.request(
        "GET /user/repos{?visibility,affiliation,type,sort,direction,per_page,page,since,before}",
        {
          per_page,
          page,
        }
      );

      if (response.data.length > 0) {
        repos = repos.concat(response.data);
        page++;
      } else {
        hasMore = false;
      }
    }

    return repos;
  };

  const repos = await getAllRepos();

  const apps = repos
    .filter((a: GitHubApp) => a.permissions.admin === true)
    .map((a: GitHubApp) => {
      return {
        appId: a.id,
        name: a.name,
        owner: a.owner.login,
      };
    });

  return apps;
};

@@ -509,112 +480,98 @@ (getAppsGitlab)
The GitLab helper keeps both paths — group projects via
`${INTEGRATION_GITLAB_API_URL}/v4/groups/${teamId}/projects` when teamId is set,
otherwise `${INTEGRATION_GITLAB_API_URL}/v4/user` followed by
`${INTEGRATION_GITLAB_API_URL}/v4/users/${id}/projects` — paging with page/per_page
until a short page comes back; only the Sentry try/catch ("Failed to get GitLab
projects") and the single-quoted formatting are removed.
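A minimal sketch of the pagination loop the GitHub helper introduces, using the documented GET /user/repos route of @octokit/rest; listAdminRepos is an illustrative name, not the codebase's.

import { Octokit } from "@octokit/rest";

// Page through all repositories the token can see, then keep admin-level ones.
const listAdminRepos = async (accessToken: string) => {
  const octokit = new Octokit({ auth: accessToken });
  const repos: any[] = [];
  let page = 1;

  for (;;) {
    const { data } = await octokit.request("GET /user/repos", { per_page: 100, page });
    if (data.length === 0) break; // empty page means we have everything
    repos.push(...data);
    page++;
  }

  return repos
    .filter((r) => r.permissions?.admin === true)
    .map((r) => ({ appId: r.id, name: r.name, owner: r.owner.login }));
};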
@@ -1,5 +1,4 @@
-import * as Sentry from '@sentry/node';
-import request from '../config/request';
+import request from "../config/request";
 import {
   INTEGRATION_AZURE_KEY_VAULT,
   INTEGRATION_HEROKU,

@@ -12,8 +11,8 @@ import {
   INTEGRATION_VERCEL_TOKEN_URL,
   INTEGRATION_NETLIFY_TOKEN_URL,
   INTEGRATION_GITHUB_TOKEN_URL,
-  INTEGRATION_GITLAB_TOKEN_URL
-} from '../variables';
+  INTEGRATION_GITLAB_TOKEN_URL,
+} from "../variables";
 import {
   getSiteURL,
   getClientIdAzure,

@@ -26,8 +25,8 @@ import {
   getClientIdGitHub,
   getClientSecretGitHub,
   getClientIdGitLab,
-  getClientSecretGitLab
-} from '../config';
+  getClientSecretGitLab,
+} from "../config";

 interface ExchangeCodeAzureResponse {
   token_type: string;

@@ -93,49 +92,43 @@ (exchangeCode)
exchangeCode({ integration, code }) keeps the same switch — INTEGRATION_AZURE_KEY_VAULT,
INTEGRATION_HEROKU, INTEGRATION_VERCEL, INTEGRATION_NETLIFY, INTEGRATION_GITHUB, and
INTEGRATION_GITLAB each delegate to their provider-specific helper — but the surrounding
try/catch that reported to Sentry and re-threw "Failed OAuth2 code-token exchange" is
removed.

@@ -143,43 +136,33 @@ (exchangeCodeAzure)
const exchangeCodeAzure = async ({ code }: { code: string }) => {
  const accessExpiresAt = new Date();

  const res: ExchangeCodeAzureResponse = (
    await request.post(
      INTEGRATION_AZURE_TOKEN_URL,
      new URLSearchParams({
        grant_type: "authorization_code",
        code: code,
        scope: "https://vault.azure.net/.default openid offline_access",
        client_id: await getClientIdAzure(),
        client_secret: await getClientSecretAzure(),
        redirect_uri: `${await getSiteURL()}/integrations/azure-key-vault/oauth2/callback`,
      } as any)
    )
  ).data;

  accessExpiresAt.setSeconds(accessExpiresAt.getSeconds() + res.expires_in);

  return {
    accessToken: res.access_token,
    refreshToken: res.refresh_token,
    accessExpiresAt,
  };
};

@@ -191,38 +174,28 @@ (exchangeCodeHeroku)
@@ -235,30 +208,23 @@ (exchangeCodeVercel)
@@ -273,47 +239,39 @@ (exchangeCodeNetlify)
@@ -328,33 +286,25 @@ (exchangeCodeGithub)
@@ -369,42 +319,32 @@ (exchangeCodeGitlab)
The remaining helpers change the same way: `let res` plus a Sentry try/catch ("Failed
OAuth2 code-token exchange with Heroku/Vercel/Netlify/Github/Gitlab") becomes a plain
`const res = (await request.post(...)).data`, and the client id/secret and site-URL
config getters are awaited (`await getClientSecretHeroku()`, `await getClientIdVercel()`,
`await getClientSecretVercel()`, `await getClientIdNetlify()`,
`await getClientSecretNetlify()`, `await getClientIdGitHub()`,
`await getClientSecretGitHub()`, `await getClientIdGitLab()`,
`await getClientSecretGitLab()`, `await getSiteURL()`). Netlify additionally resolves
`const accountId = res3[0].id` from its /api/v1/accounts call, while Vercel and GitHub
still return `refreshToken: null` and `accessExpiresAt: null` (Vercel also returns
`teamId: res.team_id`).

 export { exchangeCode };
@ -1,29 +1,24 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import request from '../config/request';
|
||||
import request from "../config/request";
|
||||
import { IIntegrationAuth } from "../models";
|
||||
import {
|
||||
IIntegrationAuth
|
||||
} from '../models';
|
||||
import {
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_GITLAB,
|
||||
} from '../variables';
|
||||
} from "../variables";
|
||||
import {
|
||||
INTEGRATION_AZURE_TOKEN_URL,
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
INTEGRATION_GITLAB_TOKEN_URL
|
||||
} from '../variables';
|
||||
import {
|
||||
IntegrationService
|
||||
} from '../services';
|
||||
INTEGRATION_GITLAB_TOKEN_URL,
|
||||
} from "../variables";
|
||||
import { IntegrationService } from "../services";
|
||||
import {
|
||||
getSiteURL,
|
||||
getClientIdAzure,
|
||||
getClientSecretAzure,
|
||||
getClientSecretHeroku,
|
||||
getClientIdGitLab,
|
||||
getClientSecretGitLab
|
||||
} from '../config';
|
||||
getClientSecretGitLab,
|
||||
} from "../config";
|
||||
|
||||
interface RefreshTokenAzureResponse {
|
||||
token_type: string;
|
||||
@ -60,60 +55,57 @@ interface RefreshTokenGitLabResponse {
|
||||
*/
|
||||
const exchangeRefresh = async ({
|
||||
integrationAuth,
|
||||
refreshToken
|
||||
refreshToken,
|
||||
}: {
|
||||
integrationAuth: IIntegrationAuth;
|
||||
refreshToken: string;
|
||||
}) => {
|
||||
|
||||
interface TokenDetails {
|
||||
accessToken: string;
|
||||
refreshToken: string;
|
||||
accessExpiresAt: Date;
|
||||
}
|
||||
|
||||
|
||||
let tokenDetails: TokenDetails;
|
||||
try {
|
||||
switch (integrationAuth.integration) {
|
||||
case INTEGRATION_AZURE_KEY_VAULT:
|
||||
tokenDetails = await exchangeRefreshAzure({
|
||||
refreshToken
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_HEROKU:
|
||||
tokenDetails = await exchangeRefreshHeroku({
|
||||
refreshToken
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_GITLAB:
|
||||
tokenDetails = await exchangeRefreshGitLab({
|
||||
refreshToken
|
||||
});
|
||||
break;
|
||||
default:
|
||||
throw new Error('Failed to exchange token for incompatible integration');
|
||||
}
|
||||
|
||||
if (tokenDetails?.accessToken && tokenDetails?.refreshToken && tokenDetails?.accessExpiresAt) {
|
||||
await IntegrationService.setIntegrationAuthAccess({
|
||||
integrationAuthId: integrationAuth._id.toString(),
|
||||
accessId: null,
|
||||
accessToken: tokenDetails.accessToken,
|
||||
accessExpiresAt: tokenDetails.accessExpiresAt
|
||||
switch (integrationAuth.integration) {
|
||||
case INTEGRATION_AZURE_KEY_VAULT:
|
||||
tokenDetails = await exchangeRefreshAzure({
|
||||
refreshToken,
|
||||
});
|
||||
|
||||
await IntegrationService.setIntegrationAuthRefresh({
|
||||
integrationAuthId: integrationAuth._id.toString(),
|
||||
refreshToken: tokenDetails.refreshToken
|
||||
break;
|
||||
case INTEGRATION_HEROKU:
|
||||
tokenDetails = await exchangeRefreshHeroku({
|
||||
refreshToken,
|
||||
});
|
||||
}
|
||||
|
||||
return tokenDetails.accessToken;
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get new OAuth2 access token');
|
||||
break;
|
||||
case INTEGRATION_GITLAB:
|
||||
tokenDetails = await exchangeRefreshGitLab({
|
||||
refreshToken,
|
||||
});
|
||||
break;
|
||||
default:
|
||||
throw new Error("Failed to exchange token for incompatible integration");
|
||||
}
|
||||
|
||||
if (
|
||||
tokenDetails?.accessToken &&
|
||||
tokenDetails?.refreshToken &&
|
||||
tokenDetails?.accessExpiresAt
|
||||
) {
|
||||
await IntegrationService.setIntegrationAuthAccess({
|
||||
integrationAuthId: integrationAuth._id.toString(),
|
||||
accessId: null,
|
||||
accessToken: tokenDetails.accessToken,
|
||||
accessExpiresAt: tokenDetails.accessExpiresAt,
|
||||
});
|
||||
|
||||
await IntegrationService.setIntegrationAuthRefresh({
|
||||
integrationAuthId: integrationAuth._id.toString(),
|
||||
refreshToken: tokenDetails.refreshToken,
|
||||
});
|
||||
}
|
||||
|
||||
return tokenDetails.accessToken;
|
||||
};
|
||||
|
||||
/**
|
||||
@ -124,38 +116,30 @@ const exchangeRefresh = async ({
|
||||
* @returns
|
||||
*/
|
||||
const exchangeRefreshAzure = async ({
|
||||
refreshToken
|
||||
refreshToken,
|
||||
}: {
|
||||
refreshToken: string;
|
||||
}) => {
|
||||
try {
|
||||
const accessExpiresAt = new Date();
|
||||
const { data }: { data: RefreshTokenAzureResponse } = await request.post(
|
||||
INTEGRATION_AZURE_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
client_id: getClientIdAzure(),
|
||||
scope: 'openid offline_access',
|
||||
refresh_token: refreshToken,
|
||||
grant_type: 'refresh_token',
|
||||
client_secret: getClientSecretAzure()
|
||||
} as any)
|
||||
);
|
||||
|
||||
accessExpiresAt.setSeconds(
|
||||
accessExpiresAt.getSeconds() + data.expires_in
|
||||
);
|
||||
const accessExpiresAt = new Date();
|
||||
const { data }: { data: RefreshTokenAzureResponse } = await request.post(
|
||||
INTEGRATION_AZURE_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
client_id: await getClientIdAzure(),
|
||||
scope: "openid offline_access",
|
||||
refresh_token: refreshToken,
|
||||
grant_type: "refresh_token",
|
||||
client_secret: await getClientSecretAzure(),
|
||||
} as any)
|
||||
);
|
||||
|
||||
return ({
|
||||
accessToken: data.access_token,
|
||||
refreshToken: data.refresh_token,
|
||||
accessExpiresAt
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get refresh OAuth2 access token for Azure');
|
||||
}
|
||||
}
|
||||
accessExpiresAt.setSeconds(accessExpiresAt.getSeconds() + data.expires_in);
|
||||
|
||||
return {
|
||||
accessToken: data.access_token,
|
||||
refreshToken: data.refresh_token,
|
||||
accessExpiresAt,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Return new access token by exchanging refresh token [refreshToken] for the
|
||||
@ -165,39 +149,31 @@ const exchangeRefreshAzure = async ({
|
||||
* @returns
|
||||
*/
|
||||
const exchangeRefreshHeroku = async ({
|
||||
refreshToken
|
||||
refreshToken,
|
||||
}: {
|
||||
refreshToken: string;
|
||||
}) => {
|
||||
try {
|
||||
const accessExpiresAt = new Date();
|
||||
const {
|
||||
data
|
||||
}: {
|
||||
data: RefreshTokenHerokuResponse
|
||||
} = await request.post(
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: 'refresh_token',
|
||||
refresh_token: refreshToken,
|
||||
client_secret: getClientSecretHeroku()
|
||||
} as any)
|
||||
);
|
||||
const accessExpiresAt = new Date();
|
||||
const {
|
||||
data,
|
||||
}: {
|
||||
data: RefreshTokenHerokuResponse;
|
||||
} = await request.post(
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: "refresh_token",
|
||||
refresh_token: refreshToken,
|
||||
client_secret: await getClientSecretHeroku(),
|
||||
} as any)
|
||||
);
|
||||
|
||||
accessExpiresAt.setSeconds(
|
||||
accessExpiresAt.getSeconds() + data.expires_in
|
||||
);
|
||||
accessExpiresAt.setSeconds(accessExpiresAt.getSeconds() + data.expires_in);
|
||||
|
||||
return ({
|
||||
accessToken: data.access_token,
|
||||
refreshToken: data.refresh_token,
|
||||
accessExpiresAt
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to refresh OAuth2 access token for Heroku');
|
||||
}
|
||||
return {
|
||||
accessToken: data.access_token,
|
||||
refreshToken: data.refresh_token,
|
||||
accessExpiresAt,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
@ -208,45 +184,38 @@ const exchangeRefreshHeroku = async ({
|
||||
* @returns
|
||||
*/
|
||||
const exchangeRefreshGitLab = async ({
|
||||
refreshToken
|
||||
refreshToken,
|
||||
}: {
|
||||
refreshToken: string;
|
||||
}) => {
|
||||
try {
|
||||
const accessExpiresAt = new Date();
|
||||
const {
|
||||
data
|
||||
}: {
|
||||
data: RefreshTokenGitLabResponse
|
||||
} = await request.post(
|
||||
INTEGRATION_GITLAB_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: 'refresh_token',
|
||||
refresh_token: refreshToken,
|
||||
client_id: getClientIdGitLab,
|
||||
client_secret: getClientSecretGitLab(),
|
||||
redirect_uri: `${getSiteURL()}/integrations/gitlab/oauth2/callback`
|
||||
} as any),
|
||||
{
|
||||
headers: {
|
||||
"Accept-Encoding": "application/json",
|
||||
}
|
||||
});
|
||||
const accessExpiresAt = new Date();
|
||||
const {
|
||||
data,
|
||||
}: {
|
||||
data: RefreshTokenGitLabResponse;
|
||||
} = await request.post(
|
||||
INTEGRATION_GITLAB_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: "refresh_token",
|
||||
refresh_token: refreshToken,
|
||||
client_id: await getClientIdGitLab(),
|
||||
client_secret: await getClientSecretGitLab(),
|
||||
redirect_uri: `${await getSiteURL()}/integrations/gitlab/oauth2/callback`,
|
||||
} as any),
|
||||
{
|
||||
headers: {
|
||||
"Accept-Encoding": "application/json",
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
accessExpiresAt.setSeconds(
|
||||
accessExpiresAt.getSeconds() + data.expires_in
|
||||
);
|
||||
accessExpiresAt.setSeconds(accessExpiresAt.getSeconds() + data.expires_in);
|
||||
|
||||
return ({
|
||||
accessToken: data.access_token,
|
||||
refreshToken: data.refresh_token,
|
||||
accessExpiresAt
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to refresh OAuth2 access token for GitLab');
|
||||
}
|
||||
return {
|
||||
accessToken: data.access_token,
|
||||
refreshToken: data.refresh_token,
|
||||
accessExpiresAt,
|
||||
};
|
||||
};
|
||||
|
||||
export { exchangeRefresh };
|
||||
|
@ -1,4 +1,3 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import {
|
||||
IIntegrationAuth,
|
||||
IntegrationAuth,
|
||||
@ -22,34 +21,28 @@ const revokeAccess = async ({
|
||||
accessToken: string;
|
||||
}) => {
|
||||
let deletedIntegrationAuth;
|
||||
try {
|
||||
// add any integration-specific revocation logic
|
||||
switch (integrationAuth.integration) {
|
||||
case INTEGRATION_HEROKU:
|
||||
break;
|
||||
case INTEGRATION_VERCEL:
|
||||
break;
|
||||
case INTEGRATION_NETLIFY:
|
||||
break;
|
||||
case INTEGRATION_GITHUB:
|
||||
break;
|
||||
case INTEGRATION_GITLAB:
|
||||
break;
|
||||
}
|
||||
// add any integration-specific revocation logic
|
||||
switch (integrationAuth.integration) {
|
||||
case INTEGRATION_HEROKU:
|
||||
break;
|
||||
case INTEGRATION_VERCEL:
|
||||
break;
|
||||
case INTEGRATION_NETLIFY:
|
||||
break;
|
||||
case INTEGRATION_GITHUB:
|
||||
break;
|
||||
case INTEGRATION_GITLAB:
|
||||
break;
|
||||
}
|
||||
|
||||
deletedIntegrationAuth = await IntegrationAuth.findOneAndDelete({
|
||||
_id: integrationAuth._id
|
||||
deletedIntegrationAuth = await IntegrationAuth.findOneAndDelete({
|
||||
_id: integrationAuth._id
|
||||
});
|
||||
|
||||
if (deletedIntegrationAuth) {
|
||||
await Integration.deleteMany({
|
||||
integrationAuth: deletedIntegrationAuth._id
|
||||
});
|
||||
|
||||
if (deletedIntegrationAuth) {
|
||||
await Integration.deleteMany({
|
||||
integrationAuth: deletedIntegrationAuth._id
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to delete integration authorization');
|
||||
}
|
||||
|
||||
return deletedIntegrationAuth;
|
||||
|
@ -1,4 +1,3 @@
|
||||
import * as Sentry from "@sentry/node";
|
||||
import {
|
||||
IIntegrationAuth
|
||||
} from '../models';
|
||||
@ -31,21 +30,15 @@ const getTeams = async ({
|
||||
}) => {
|
||||
|
||||
let teams: Team[] = [];
|
||||
try {
|
||||
switch (integrationAuth.integration) {
|
||||
case INTEGRATION_GITLAB:
|
||||
teams = await getTeamsGitLab({
|
||||
accessToken
|
||||
});
|
||||
break;
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get integration teams');
|
||||
|
||||
switch (integrationAuth.integration) {
|
||||
case INTEGRATION_GITLAB:
|
||||
teams = await getTeamsGitLab({
|
||||
accessToken
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
|
||||
return teams;
|
||||
}
|
||||
|
||||
@ -63,30 +56,24 @@ const getTeamsGitLab = async ({
|
||||
accessToken: string;
|
||||
}) => {
|
||||
let teams: Team[] = [];
|
||||
try {
|
||||
const res = (await request.get(
|
||||
`${INTEGRATION_GITLAB_API_URL}/v4/groups`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept-Encoding": "application/json"
|
||||
}
|
||||
const res = (await request.get(
|
||||
`${INTEGRATION_GITLAB_API_URL}/v4/groups`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept-Encoding": "application/json"
|
||||
}
|
||||
)).data;
|
||||
|
||||
teams = res.map((t: any) => ({
|
||||
name: t.name,
|
||||
teamId: t.id
|
||||
}));
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error("Failed to get GitLab integration teams");
|
||||
}
|
||||
}
|
||||
)).data;
|
||||
|
||||
teams = res.map((t: any) => ({
|
||||
name: t.name,
|
||||
teamId: t.id
|
||||
}));
|
||||
|
||||
return teams;
|
||||
}
|
||||
|
||||
export {
|
||||
getTeams
|
||||
}
|
||||
}
|
||||
|
@ -5,9 +5,9 @@ import { getLogger } from "../utils/logger";
import RequestError, { LogLevel } from "../utils/requestError";
import { getNodeEnv } from '../config';

export const requestErrorHandler: ErrorRequestHandler = (error: RequestError | Error, req, res, next) => {
export const requestErrorHandler: ErrorRequestHandler = async (error: RequestError | Error, req, res, next) => {
  if (res.headersSent) return next();
  if (getNodeEnv() !== "production") {
  if ((await getNodeEnv()) !== "production") {
    /* eslint-disable no-console */
    console.log(error)
    /* eslint-enable no-console */
@ -15,8 +15,8 @@ export const requestErrorHandler: ErrorRequestHandler = (error: RequestError | E

  //TODO: Find better way to type check for error. In current setting you need to cast type to get the functions and variables from RequestError
  if (!(error instanceof RequestError)) {
    error = InternalServerError({ context: { exception: error.message }, stack: error.stack })
    getLogger('backend-main').log((<RequestError>error).levelName.toLowerCase(), (<RequestError>error).message)
    error = InternalServerError({ context: { exception: error.message }, stack: error.stack });
    (await getLogger('backend-main')).log((<RequestError>error).levelName.toLowerCase(), (<RequestError>error).message)
  }

  //* Set Sentry user identification if req.user is populated

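Since the handler is now async, a short sketch of how such an error handler is typically registered; Express accepts an async ErrorRequestHandler, and the app setup and import path below are placeholders rather than code from this repository.

// Illustrative wiring only; Infisical's actual app setup lives elsewhere in the codebase.
import express from "express";
import { requestErrorHandler } from "./middleware/requestErrorHandler"; // path assumed

const app = express();
// ...routes are mounted here...
app.use(requestErrorHandler); // error middleware goes last so it can catch everything above
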
@ -1,4 +1,3 @@
import * as Sentry from '@sentry/node';
import { Types } from 'mongoose';
import { Request, Response, NextFunction } from 'express';
import { IntegrationAuth, IWorkspace } from '../models';

@ -26,7 +26,7 @@ const requireMfaAuth = async (
  if(AUTH_TOKEN_VALUE === null) return next(BadRequestError({message: 'Missing Authorization Body in the request header'}))

  const decodedToken = <jwt.UserIDJwtPayload>(
    jwt.verify(AUTH_TOKEN_VALUE, getJwtMfaSecret())
    jwt.verify(AUTH_TOKEN_VALUE, await getJwtMfaSecret())
  );

  const user = await User.findOne({

@ -33,7 +33,7 @@ const requireServiceTokenAuth = async (
  if(AUTH_TOKEN_VALUE === null) return next(BadRequestError({message: 'Missing Authorization Body in the request header'}))

  const decodedToken = <jwt.UserIDJwtPayload>(
    jwt.verify(AUTH_TOKEN_VALUE, getJwtServiceSecret())
    jwt.verify(AUTH_TOKEN_VALUE, await getJwtServiceSecret())
  );

  const serviceToken = await ServiceToken.findOne({

@ -27,7 +27,7 @@ const requireSignupAuth = async (
  if(AUTH_TOKEN_VALUE === null) return next(BadRequestError({message: 'Missing Authorization Body in the request header'}))

  const decodedToken = <jwt.UserIDJwtPayload>(
    jwt.verify(AUTH_TOKEN_VALUE, getJwtSignupSecret())
    jwt.verify(AUTH_TOKEN_VALUE, await getJwtSignupSecret())
  );

  const user = await User.findOne({

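The three middleware hunks above all make the same change: the JWT secret getters are now async and must be awaited before jwt.verify. A condensed sketch of that pattern follows; the helper itself is illustrative rather than code from the repository.

// The getter stands in for getJwtMfaSecret / getJwtServiceSecret / getJwtSignupSecret.
import jwt from "jsonwebtoken";

const verifyWithAsyncSecret = async (token: string, getSecret: () => Promise<string>) => {
  const secret = await getSecret(); // config getters now return promises
  return jwt.verify(token, secret); // throws if the token is invalid or expired
};
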
@ -3,6 +3,7 @@ import {
  SECRET_SHARED,
  SECRET_PERSONAL,
} from '../variables';
import { ROOT_FOLDER_PATH } from '../utils/folder';

export interface ISecret {
  _id: Types.ObjectId;
@ -25,6 +26,8 @@ export interface ISecret {
  secretCommentTag?: string;
  secretCommentHash?: string;
  tags?: string[];
  path?: string;
  folder?: Types.ObjectId;
}

const secretSchema = new Schema<ISecret>(
@ -107,7 +110,18 @@ const secretSchema = new Schema<ISecret>(
    secretCommentHash: {
      type: String,
      required: false
    }
    },
    // the full path to the secret in relation to folders
    path: {
      type: String,
      required: false,
      default: ROOT_FOLDER_PATH
    },
    folder: {
      type: Schema.Types.ObjectId,
      ref: 'Folder',
      required: false,
    },
  },
  {
    timestamps: true

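A small sketch of what the two new schema fields look like at write time; the field names and the ROOT_FOLDER_PATH default come from the hunk above, while the update values and the model import path are illustrative.

// Hypothetical usage of the extended Secret schema.
import { Types } from "mongoose";
import { Secret } from "../models"; // path assumed

const placeSecretInFolder = async (secretId: Types.ObjectId, folderId: Types.ObjectId) => {
  // `path` defaults to ROOT_FOLDER_PATH when not set; `folder` is an optional ref to 'Folder'.
  return Secret.findByIdAndUpdate(
    secretId,
    { folder: folderId, path: "/example-folder" }, // path value is a placeholder
    { new: true }
  );
};
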
@ -5,11 +5,11 @@ const router = express.Router();

router.get(
  '/status',
  (req: Request, res: Response) => {
  async (req: Request, res: Response) => {
    res.status(200).json({
      date: new Date(),
      message: 'Ok',
      emailConfigured: getSmtpConfigured()
      emailConfigured: await getSmtpConfigured()
    })
  }
);

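An illustrative probe against the updated status handler; only the response fields (date, message, emailConfigured) come from the route above. The base URL, mount path, and use of the built-in fetch are assumptions.

// Hypothetical health probe (Node 18+ fetch or any HTTP client).
const checkBackendStatus = async (baseUrl: string) => {
  const res = await fetch(`${baseUrl}/api/status`); // mount path assumed
  const body: { date: string; message: string; emailConfigured: boolean } = await res.json();
  return body.emailConfigured; // true once SMTP is configured
};
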
@ -15,6 +15,7 @@ import password from './password';
import stripe from './stripe';
import integration from './integration';
import integrationAuth from './integrationAuth';
import secretsFolder from './secretsFolder'

export {
  signup,
@ -33,5 +34,6 @@ export {
  password,
  stripe,
  integration,
  integrationAuth
  integrationAuth,
  secretsFolder
};

@ -19,6 +19,7 @@ router.post(
router.post(
  '/verify',
  body('email').exists().trim().notEmpty(),
  body('organizationId').exists().trim().notEmpty(),
  body('code').exists().trim().notEmpty(),
  validateRequest,
  membershipOrgController.verifyUserToOrganization

backend/src/routes/v1/secretsFolder.ts (new file, 50 lines)
@ -0,0 +1,50 @@
import express, { Request, Response } from 'express';
const router = express.Router();
import {
  requireAuth,
  requireWorkspaceAuth,
  validateRequest
} from '../../middleware';
import { body, param } from 'express-validator';
import { createFolder, deleteFolder, getFolderById } from '../../controllers/v1/secretsFolderController';
import { ADMIN, MEMBER } from '../../variables';

router.post(
  '/',
  requireAuth({
    acceptedAuthModes: ['jwt']
  }),
  requireWorkspaceAuth({
    acceptedRoles: [ADMIN, MEMBER],
    locationWorkspaceId: 'body'
  }),
  body('workspaceId').exists(),
  body('environment').exists(),
  body('folderName').exists(),
  body('parentFolderId'),
  validateRequest,
  createFolder
);

router.delete(
  '/:folderId',
  requireAuth({
    acceptedAuthModes: ['jwt']
  }),
  param('folderId').exists(),
  validateRequest,
  deleteFolder
);

router.get(
  '/:folderId',
  requireAuth({
    acceptedAuthModes: ['jwt']
  }),
  param('folderId').exists(),
  validateRequest,
  getFolderById
);


export default router;
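An illustrative client call against the new folder routes; the /api/v1/folders mount path and the use of fetch are assumptions (the mount point is not shown in this diff), while the body fields mirror the validators in the router above.

// Hypothetical folder-creation request (Node 18+ fetch or any HTTP client).
const createSecretsFolder = async (baseUrl: string, jwtToken: string) => {
  const res = await fetch(`${baseUrl}/api/v1/folders`, { // mount path assumed
    method: "POST",
    headers: {
      Authorization: `Bearer ${jwtToken}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify({
      workspaceId: "<workspace-id>", // placeholder
      environment: "dev",
      folderName: "payments",        // placeholder
      parentFolderId: undefined,     // optional per body('parentFolderId')
    }),
  });
  return res.json();
};
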
@ -1,5 +1,3 @@
import mongoose from 'mongoose';
import { getLogger } from '../utils/logger';
import {
  initDatabaseHelper,
  closeDatabaseHelper

@ -24,9 +24,9 @@ class Telemetry {
  /**
   * Logs telemetry enable/disable notice.
   */
  static logTelemetryMessage = () => {
    if(!getTelemetryEnabled()){
      getLogger("backend-main").info([
  static logTelemetryMessage = async () => {
    if(!(await getTelemetryEnabled())){
      (await getLogger("backend-main")).info([
        "",
        "To improve, Infisical collects telemetry data about general usage.",
        "This helps us understand how the product is doing and guide our product development to create the best possible platform; it also helps us demonstrate growth as we support Infisical as open-source software.",
@ -39,12 +39,12 @@ class Telemetry {
   * Return an instance of the PostHog client initialized.
   * @returns
   */
  static getPostHogClient = () => {
  static getPostHogClient = async () => {
    let postHogClient: any;
    if (getNodeEnv() === 'production' && getTelemetryEnabled()) {
    if ((await getNodeEnv()) === 'production' && (await getTelemetryEnabled())) {
      // case: enable opt-out telemetry in production
      postHogClient = new PostHog(getPostHogProjectApiKey(), {
        host: getPostHogHost()
      postHogClient = new PostHog(await getPostHogProjectApiKey(), {
        host: await getPostHogHost()
      });
    }

@ -3,8 +3,8 @@ import { createTerminus } from '@godaddy/terminus';
import { getLogger } from '../utils/logger';

export const setUpHealthEndpoint = <T>(server: T) => {
  const onSignal = () => {
    getLogger('backend-main').info('Server is starting clean-up');
  const onSignal = async () => {
    (await getLogger('backend-main')).info('Server is starting clean-up');
    return Promise.all([
      new Promise((resolve) => {
        if (mongoose.connection && mongoose.connection.readyState == 1) {

@ -3,7 +3,8 @@ import {
|
||||
SMTP_HOST_SENDGRID,
|
||||
SMTP_HOST_MAILGUN,
|
||||
SMTP_HOST_SOCKETLABS,
|
||||
SMTP_HOST_ZOHOMAIL
|
||||
SMTP_HOST_ZOHOMAIL,
|
||||
SMTP_HOST_GMAIL
|
||||
} from '../variables';
|
||||
import SMTPConnection from 'nodemailer/lib/smtp-connection';
|
||||
import * as Sentry from '@sentry/node';
|
||||
@ -15,21 +16,21 @@ import {
|
||||
getSmtpPort
|
||||
} from '../config';
|
||||
|
||||
export const initSmtp = () => {
|
||||
export const initSmtp = async () => {
|
||||
const mailOpts: SMTPConnection.Options = {
|
||||
host: getSmtpHost(),
|
||||
port: getSmtpPort()
|
||||
host: await getSmtpHost(),
|
||||
port: await getSmtpPort()
|
||||
};
|
||||
|
||||
if (getSmtpUsername() && getSmtpPassword()) {
|
||||
if ((await getSmtpUsername()) && (await getSmtpPassword())) {
|
||||
mailOpts.auth = {
|
||||
user: getSmtpUsername(),
|
||||
pass: getSmtpPassword()
|
||||
user: await getSmtpUsername(),
|
||||
pass: await getSmtpPassword()
|
||||
};
|
||||
}
|
||||
|
||||
if (getSmtpSecure() ? getSmtpSecure() : false) {
|
||||
switch (getSmtpHost()) {
|
||||
if ((await getSmtpSecure()) ? (await getSmtpSecure()) : false) {
|
||||
switch (await getSmtpHost()) {
|
||||
case SMTP_HOST_SENDGRID:
|
||||
mailOpts.requireTLS = true;
|
||||
break;
|
||||
@ -46,13 +47,19 @@ export const initSmtp = () => {
|
||||
}
|
||||
break;
|
||||
case SMTP_HOST_ZOHOMAIL:
|
||||
mailOpts.requireTLS = true;
|
||||
mailOpts.tls = {
|
||||
ciphers: 'TLSv1.2'
|
||||
}
|
||||
break;
|
||||
case SMTP_HOST_GMAIL:
|
||||
mailOpts.requireTLS = true;
|
||||
mailOpts.tls = {
|
||||
ciphers: 'TLSv1.2'
|
||||
}
|
||||
break;
|
||||
default:
|
||||
if (getSmtpHost().includes('amazonaws.com')) {
|
||||
if ((await getSmtpHost()).includes('amazonaws.com')) {
|
||||
mailOpts.tls = {
|
||||
ciphers: 'TLSv1.2'
|
||||
}
|
||||
@ -70,10 +77,10 @@ export const initSmtp = () => {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureMessage('SMTP - Successfully connected');
|
||||
})
|
||||
.catch((err) => {
|
||||
.catch(async (err) => {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(
|
||||
`SMTP - Failed to connect to ${getSmtpHost()}:${getSmtpPort()} \n\t${err}`
|
||||
`SMTP - Failed to connect to ${await getSmtpHost()}:${await getSmtpPort()} \n\t${err}`
|
||||
);
|
||||
});
|
||||
|
||||
|
@ -9,7 +9,7 @@
  <body>
    <h2>Join your organization on Infisical</h2>
    <p>{{inviterFirstName}} ({{inviterEmail}}) has invited you to their Infisical organization — {{organizationName}}</p>
    <a href="{{callback_url}}?token={{token}}&to={{email}}">Join now</a>
    <a href="{{callback_url}}?token={{token}}&to={{email}}&organization_id={{organizationId}}">Join now</a>
    <h3>What is Infisical?</h3>
    <p>Infisical is an easy-to-use end-to-end encrypted tool that enables developers to sync and manage their secrets and configs.</p>
  </body>

@ -19,7 +19,7 @@ export const testWorkspaceKeyId = "63cf48f0225e6955acec5eff"
export const plainTextWorkspaceKey = "543fef8224813a46230b0a50a46c5fb2"

export const createTestUserForDevelopment = async () => {
  if (getNodeEnv() === "development" || getNodeEnv() === "test") {
  if ((await getNodeEnv()) === "development" || (await getNodeEnv()) === "test") {
    const testUser = {
      _id: testUserId,
      email: testUserEmail,

@ -1,7 +1,6 @@
|
||||
import nacl from 'tweetnacl';
|
||||
import util from 'tweetnacl-util';
|
||||
import AesGCM from './aes-gcm';
|
||||
import * as Sentry from '@sentry/node';
|
||||
|
||||
/**
|
||||
* Return new base64, NaCl, public-private key pair.
|
||||
@ -38,20 +37,13 @@ const encryptAsymmetric = ({
|
||||
publicKey: string;
|
||||
privateKey: string;
|
||||
}) => {
|
||||
let nonce, ciphertext;
|
||||
try {
|
||||
nonce = nacl.randomBytes(24);
|
||||
ciphertext = nacl.box(
|
||||
util.decodeUTF8(plaintext),
|
||||
nonce,
|
||||
util.decodeBase64(publicKey),
|
||||
util.decodeBase64(privateKey)
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to perform asymmetric encryption');
|
||||
}
|
||||
const nonce = nacl.randomBytes(24);
|
||||
const ciphertext = nacl.box(
|
||||
util.decodeUTF8(plaintext),
|
||||
nonce,
|
||||
util.decodeBase64(publicKey),
|
||||
util.decodeBase64(privateKey)
|
||||
);
|
||||
|
||||
return {
|
||||
ciphertext: util.encodeBase64(ciphertext),
|
||||
@ -80,19 +72,12 @@ const decryptAsymmetric = ({
|
||||
publicKey: string;
|
||||
privateKey: string;
|
||||
}): string => {
|
||||
let plaintext: any;
|
||||
try {
|
||||
plaintext = nacl.box.open(
|
||||
util.decodeBase64(ciphertext),
|
||||
util.decodeBase64(nonce),
|
||||
util.decodeBase64(publicKey),
|
||||
util.decodeBase64(privateKey)
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to perform asymmetric decryption');
|
||||
}
|
||||
const plaintext: any = nacl.box.open(
|
||||
util.decodeBase64(ciphertext),
|
||||
util.decodeBase64(nonce),
|
||||
util.decodeBase64(publicKey),
|
||||
util.decodeBase64(privateKey)
|
||||
);
|
||||
|
||||
return util.encodeUTF8(plaintext);
|
||||
};
|
||||
@ -110,17 +95,8 @@ const encryptSymmetric = ({
|
||||
plaintext: string;
|
||||
key: string;
|
||||
}) => {
|
||||
let ciphertext, iv, tag;
|
||||
try {
|
||||
const obj = AesGCM.encrypt(plaintext, key);
|
||||
ciphertext = obj.ciphertext;
|
||||
iv = obj.iv;
|
||||
tag = obj.tag;
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to perform symmetric encryption');
|
||||
}
|
||||
const obj = AesGCM.encrypt(plaintext, key);
|
||||
const { ciphertext, iv, tag } = obj;
|
||||
|
||||
return {
|
||||
ciphertext,
|
||||
@ -150,15 +126,7 @@ const decryptSymmetric = ({
|
||||
tag: string;
|
||||
key: string;
|
||||
}): string => {
|
||||
let plaintext;
|
||||
try {
|
||||
plaintext = AesGCM.decrypt(ciphertext, iv, tag, key);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to perform symmetric decryption');
|
||||
}
|
||||
|
||||
const plaintext = AesGCM.decrypt(ciphertext, iv, tag, key);
|
||||
return plaintext;
|
||||
};
|
||||
|
||||
|
@ -12,7 +12,7 @@ const logFormat = (prefix: string) => combine(
|
||||
printf((info) => `${info.timestamp} ${info.label} ${info.level}: ${info.message}`)
|
||||
);
|
||||
|
||||
const createLoggerWithLabel = (level: string, label: string) => {
|
||||
const createLoggerWithLabel = async (level: string, label: string) => {
|
||||
const _level = level.toLowerCase() || 'info'
|
||||
//* Always add Console output to transports
|
||||
const _transports: any[] = [
|
||||
@ -25,10 +25,10 @@ const createLoggerWithLabel = (level: string, label: string) => {
|
||||
})
|
||||
]
|
||||
//* Add LokiTransport if it's enabled
|
||||
if(getLokiHost() !== undefined){
|
||||
if((await getLokiHost()) !== undefined){
|
||||
_transports.push(
|
||||
new LokiTransport({
|
||||
host: getLokiHost(),
|
||||
host: await getLokiHost(),
|
||||
handleExceptions: true,
|
||||
handleRejections: true,
|
||||
batching: true,
|
||||
@ -40,7 +40,7 @@ const createLoggerWithLabel = (level: string, label: string) => {
|
||||
labels: {
|
||||
app: process.env.npm_package_name,
|
||||
version: process.env.npm_package_version,
|
||||
environment: getNodeEnv()
|
||||
environment: await getNodeEnv()
|
||||
},
|
||||
onConnectionError: (err: Error)=> console.error('Connection error while connecting to Loki Server.\n', err)
|
||||
})
|
||||
@ -58,12 +58,10 @@ const createLoggerWithLabel = (level: string, label: string) => {
|
||||
});
|
||||
}
|
||||
|
||||
const DEFAULT_LOGGERS = {
|
||||
"backend-main": createLoggerWithLabel('info', '[IFSC:backend-main]'),
|
||||
"database": createLoggerWithLabel('info', '[IFSC:database]'),
|
||||
}
|
||||
type LoggerNames = keyof typeof DEFAULT_LOGGERS
|
||||
|
||||
export const getLogger = (loggerName: LoggerNames) => {
|
||||
return DEFAULT_LOGGERS[loggerName]
|
||||
export const getLogger = async (loggerName: 'backend-main' | 'database') => {
|
||||
const logger = {
|
||||
"backend-main": await createLoggerWithLabel('info', '[IFSC:backend-main]'),
|
||||
"database": await createLoggerWithLabel('info', '[IFSC:database]'),
|
||||
}
|
||||
return logger[loggerName]
|
||||
}
|
||||
|
@ -81,13 +81,13 @@ export default class RequestError extends Error{
    return obj
  }

  public format(req: Request){
  public async format(req: Request){
    let _context = Object.assign({
      stacktrace: this.stacktrace
    }, this.context)

    //* Omit sensitive information from context that can leak internal workings of this program if user is not developer
    if(!getVerboseErrorOutput()){
    if(!(await getVerboseErrorOutput())){
      _context = this._omit(_context, [
        'stacktrace',
        'exception',

@ -55,7 +55,8 @@ import {
  SMTP_HOST_SENDGRID,
  SMTP_HOST_MAILGUN,
  SMTP_HOST_SOCKETLABS,
  SMTP_HOST_ZOHOMAIL
  SMTP_HOST_ZOHOMAIL,
  SMTP_HOST_GMAIL
} from './smtp';
import { PLAN_STARTER, PLAN_PRO } from './stripe';
import {
@ -138,6 +139,7 @@ export {
  SMTP_HOST_MAILGUN,
  SMTP_HOST_SOCKETLABS,
  SMTP_HOST_ZOHOMAIL,
  SMTP_HOST_GMAIL,
  PLAN_STARTER,
  PLAN_PRO,
  MFA_METHOD_EMAIL,

@ -61,7 +61,7 @@ const INTEGRATION_CIRCLECI_API_URL = "https://circleci.com/api";
|
||||
const INTEGRATION_TRAVISCI_API_URL = "https://api.travis-ci.com";
|
||||
const INTEGRATION_SUPABASE_API_URL = 'https://api.supabase.com';
|
||||
|
||||
const getIntegrationOptions = () => {
|
||||
const getIntegrationOptions = async () => {
|
||||
const INTEGRATION_OPTIONS = [
|
||||
{
|
||||
name: 'Heroku',
|
||||
@ -69,7 +69,7 @@ const getIntegrationOptions = () => {
|
||||
image: 'Heroku.png',
|
||||
isAvailable: true,
|
||||
type: 'oauth',
|
||||
clientId: getClientIdHeroku(),
|
||||
clientId: await getClientIdHeroku(),
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
@ -79,7 +79,7 @@ const getIntegrationOptions = () => {
|
||||
isAvailable: true,
|
||||
type: 'oauth',
|
||||
clientId: '',
|
||||
clientSlug: getClientSlugVercel(),
|
||||
clientSlug: await getClientSlugVercel(),
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
@ -88,7 +88,7 @@ const getIntegrationOptions = () => {
|
||||
image: 'Netlify.png',
|
||||
isAvailable: true,
|
||||
type: 'oauth',
|
||||
clientId: getClientIdNetlify(),
|
||||
clientId: await getClientIdNetlify(),
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
@ -97,7 +97,7 @@ const getIntegrationOptions = () => {
|
||||
image: 'GitHub.png',
|
||||
isAvailable: true,
|
||||
type: 'oauth',
|
||||
clientId: getClientIdGitHub(),
|
||||
clientId: await getClientIdGitHub(),
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
@ -151,7 +151,7 @@ const getIntegrationOptions = () => {
|
||||
image: 'Microsoft Azure.png',
|
||||
isAvailable: true,
|
||||
type: 'oauth',
|
||||
clientId: getClientIdAzure(),
|
||||
clientId: await getClientIdAzure(),
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
@ -169,7 +169,7 @@ const getIntegrationOptions = () => {
|
||||
image: 'GitLab.png',
|
||||
isAvailable: true,
|
||||
type: 'custom',
|
||||
clientId: getClientIdGitLab(),
|
||||
clientId: await getClientIdGitLab(),
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
|
@ -2,10 +2,12 @@ const SMTP_HOST_SENDGRID = 'smtp.sendgrid.net';
const SMTP_HOST_MAILGUN = 'smtp.mailgun.org';
const SMTP_HOST_SOCKETLABS = 'smtp.socketlabs.com';
const SMTP_HOST_ZOHOMAIL = 'smtp.zoho.com';
const SMTP_HOST_GMAIL = 'smtp.gmail.com';

export {
  SMTP_HOST_SENDGRID,
  SMTP_HOST_MAILGUN,
  SMTP_HOST_SOCKETLABS,
  SMTP_HOST_ZOHOMAIL
  SMTP_HOST_ZOHOMAIL,
  SMTP_HOST_GMAIL
}
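A minimal sketch of the Gmail case that the initSmtp hunk earlier in this diff adds, using the SMTPConnection.Options shape imported there; the host value matches SMTP_HOST_GMAIL above, while the port is a placeholder (the real values come from the async getSmtpHost/getSmtpPort getters).

// Illustrative: TLS options applied when the configured SMTP host is Gmail,
// mirroring the SMTP_HOST_GMAIL branch added to initSmtp.
import SMTPConnection from "nodemailer/lib/smtp-connection";

const gmailMailOpts: SMTPConnection.Options = {
  host: "smtp.gmail.com", // SMTP_HOST_GMAIL
  port: 587,              // placeholder; the real port comes from getSmtpPort()
  requireTLS: true,
  tls: {
    ciphers: "TLSv1.2",
  },
};
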
@ -1,408 +1,408 @@
|
||||
import request from 'supertest'
|
||||
import main from '../../../../src/index'
|
||||
import { testWorkspaceId } from '../../../../src/utils/addDevelopmentUser';
|
||||
import { deleteAllSecrets, getAllSecrets, getJWTFromTestUser, getServiceTokenFromTestUser } from '../../../helper/helper';
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const batchSecretRequestWithNoOverride = require('../../../data/batch-secrets-no-override.json');
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const batchSecretRequestWithOverrides = require('../../../data/batch-secrets-with-overrides.json');
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const batchSecretRequestWithBadRequest = require('../../../data/batch-create-secrets-with-some-missing-params.json');
|
||||
|
||||
let server: any;
|
||||
beforeAll(async () => {
|
||||
server = await main;
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
server.close();
|
||||
});
|
||||
|
||||
describe("GET /api/v2/secrets", () => {
|
||||
describe("Get secrets via JTW", () => {
|
||||
test("should create secrets and read secrets via jwt", async () => {
|
||||
try {
|
||||
// get login details
|
||||
const loginResponse = await getJWTFromTestUser()
|
||||
|
||||
// create creates
|
||||
const createSecretsResponse = await request(server)
|
||||
.post("/api/v2/secrets/batch")
|
||||
.set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
.send({
|
||||
workspaceId: testWorkspaceId,
|
||||
environment: "dev",
|
||||
requests: batchSecretRequestWithNoOverride
|
||||
})
|
||||
|
||||
expect(createSecretsResponse.statusCode).toBe(200)
|
||||
|
||||
|
||||
const getSecrets = await request(server)
|
||||
.get("/api/v2/secrets")
|
||||
.set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
.query({
|
||||
workspaceId: testWorkspaceId,
|
||||
environment: "dev"
|
||||
})
|
||||
|
||||
expect(getSecrets.statusCode).toBe(200)
|
||||
expect(getSecrets.body).toHaveProperty("secrets")
|
||||
expect(getSecrets.body.secrets).toHaveLength(3)
|
||||
expect(getSecrets.body.secrets).toBeInstanceOf(Array);
|
||||
|
||||
getSecrets.body.secrets.forEach((secret: any) => {
|
||||
expect(secret).toHaveProperty('_id');
|
||||
expect(secret._id).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('version');
|
||||
expect(secret.version).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('workspace');
|
||||
expect(secret.workspace).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('type');
|
||||
expect(secret.type).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('tags');
|
||||
expect(secret.tags).toHaveLength(0);
|
||||
|
||||
expect(secret).toHaveProperty('environment');
|
||||
expect(secret.environment).toEqual("dev");
|
||||
|
||||
expect(secret).toHaveProperty('secretKeyCiphertext');
|
||||
expect(secret.secretKeyCiphertext).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretKeyIV');
|
||||
expect(secret.secretKeyIV).toBeTruthy();
|
||||
// import request from 'supertest'
|
||||
// import main from '../../../../src/index'
|
||||
// import { testWorkspaceId } from '../../../../src/utils/addDevelopmentUser';
|
||||
// import { deleteAllSecrets, getAllSecrets, getJWTFromTestUser, getServiceTokenFromTestUser } from '../../../helper/helper';
|
||||
// // eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
// const batchSecretRequestWithNoOverride = require('../../../data/batch-secrets-no-override.json');
|
||||
// // eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
// const batchSecretRequestWithOverrides = require('../../../data/batch-secrets-with-overrides.json');
|
||||
|
||||
// // eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
// const batchSecretRequestWithBadRequest = require('../../../data/batch-create-secrets-with-some-missing-params.json');
|
||||
|
||||
// let server: any;
|
||||
// beforeAll(async () => {
|
||||
// server = await main;
|
||||
// });
|
||||
|
||||
// afterAll(async () => {
|
||||
// server.close();
|
||||
// });
|
||||
|
||||
// describe("GET /api/v2/secrets", () => {
|
||||
// describe("Get secrets via JTW", () => {
|
||||
// test("should create secrets and read secrets via jwt", async () => {
|
||||
// try {
|
||||
// // get login details
|
||||
// const loginResponse = await getJWTFromTestUser()
|
||||
|
||||
// // create creates
|
||||
// const createSecretsResponse = await request(server)
|
||||
// .post("/api/v2/secrets/batch")
|
||||
// .set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
// .send({
|
||||
// workspaceId: testWorkspaceId,
|
||||
// environment: "dev",
|
||||
// requests: batchSecretRequestWithNoOverride
|
||||
// })
|
||||
|
||||
// expect(createSecretsResponse.statusCode).toBe(200)
|
||||
|
||||
|
||||
// const getSecrets = await request(server)
|
||||
// .get("/api/v2/secrets")
|
||||
// .set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
// .query({
|
||||
// workspaceId: testWorkspaceId,
|
||||
// environment: "dev"
|
||||
// })
|
||||
|
||||
// expect(getSecrets.statusCode).toBe(200)
|
||||
// expect(getSecrets.body).toHaveProperty("secrets")
|
||||
// expect(getSecrets.body.secrets).toHaveLength(3)
|
||||
// expect(getSecrets.body.secrets).toBeInstanceOf(Array);
|
||||
|
||||
// getSecrets.body.secrets.forEach((secret: any) => {
|
||||
// expect(secret).toHaveProperty('_id');
|
||||
// expect(secret._id).toBeTruthy();
|
||||
|
||||
// expect(secret).toHaveProperty('version');
|
||||
// expect(secret.version).toBeTruthy();
|
||||
|
||||
// expect(secret).toHaveProperty('workspace');
|
||||
// expect(secret.workspace).toBeTruthy();
|
||||
|
||||
// expect(secret).toHaveProperty('type');
|
||||
// expect(secret.type).toBeTruthy();
|
||||
|
||||
// expect(secret).toHaveProperty('tags');
|
||||
// expect(secret.tags).toHaveLength(0);
|
||||
|
||||
// expect(secret).toHaveProperty('environment');
|
||||
// expect(secret.environment).toEqual("dev");
|
||||
|
||||
// expect(secret).toHaveProperty('secretKeyCiphertext');
|
||||
// expect(secret.secretKeyCiphertext).toBeTruthy();
|
||||
|
||||
// expect(secret).toHaveProperty('secretKeyIV');
|
||||
// expect(secret.secretKeyIV).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretKeyTag');
|
||||
expect(secret.secretKeyTag).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretKeyTag');
|
||||
// expect(secret.secretKeyTag).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretValueCiphertext');
|
||||
expect(secret.secretValueCiphertext).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretValueCiphertext');
|
||||
// expect(secret.secretValueCiphertext).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretValueIV');
|
||||
expect(secret.secretValueIV).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretValueIV');
|
||||
// expect(secret.secretValueIV).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretValueTag');
|
||||
expect(secret.secretValueTag).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretValueTag');
|
||||
// expect(secret.secretValueTag).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretCommentCiphertext');
|
||||
expect(secret.secretCommentCiphertext).toBeFalsy();
|
||||
// expect(secret).toHaveProperty('secretCommentCiphertext');
|
||||
// expect(secret.secretCommentCiphertext).toBeFalsy();
|
||||
|
||||
expect(secret).toHaveProperty('secretCommentIV');
|
||||
expect(secret.secretCommentIV).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretCommentTag');
|
||||
expect(secret.secretCommentTag).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('createdAt');
|
||||
expect(secret.createdAt).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('updatedAt');
|
||||
expect(secret.updatedAt).toBeTruthy();
|
||||
});
|
||||
} finally {
|
||||
// clean up
|
||||
await deleteAllSecrets()
|
||||
}
|
||||
})
|
||||
|
||||
test("Get secrets via jwt when personal overrides exist", async () => {
|
||||
try {
|
||||
// get login details
|
||||
const loginResponse = await getJWTFromTestUser()
|
||||
|
||||
// create creates
|
||||
const createSecretsResponse = await request(server)
|
||||
.post("/api/v2/secrets/batch")
|
||||
.set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
.send({
|
||||
workspaceId: testWorkspaceId,
|
||||
environment: "dev",
|
||||
requests: batchSecretRequestWithOverrides
|
||||
})
|
||||
|
||||
expect(createSecretsResponse.statusCode).toBe(200)
|
||||
|
||||
const getSecrets = await request(server)
|
||||
.get("/api/v2/secrets")
|
||||
.set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
.query({
|
||||
workspaceId: testWorkspaceId,
|
||||
environment: "dev"
|
||||
})
|
||||
// expect(secret).toHaveProperty('secretCommentIV');
|
||||
// expect(secret.secretCommentIV).toBeTruthy();
|
||||
|
||||
// expect(secret).toHaveProperty('secretCommentTag');
|
||||
// expect(secret.secretCommentTag).toBeTruthy();
|
||||
|
||||
// expect(secret).toHaveProperty('createdAt');
|
||||
// expect(secret.createdAt).toBeTruthy();
|
||||
|
||||
// expect(secret).toHaveProperty('updatedAt');
|
||||
// expect(secret.updatedAt).toBeTruthy();
|
||||
// });
|
||||
// } finally {
|
||||
// // clean up
|
||||
// await deleteAllSecrets()
|
||||
// }
|
||||
// })
|
||||
|
||||
// test("Get secrets via jwt when personal overrides exist", async () => {
|
||||
// try {
|
||||
// // get login details
|
||||
// const loginResponse = await getJWTFromTestUser()
|
||||
|
||||
// // create creates
|
||||
// const createSecretsResponse = await request(server)
|
||||
// .post("/api/v2/secrets/batch")
|
||||
// .set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
// .send({
|
||||
// workspaceId: testWorkspaceId,
|
||||
// environment: "dev",
|
||||
// requests: batchSecretRequestWithOverrides
|
||||
// })
|
||||
|
||||
// expect(createSecretsResponse.statusCode).toBe(200)
|
||||
|
||||
// const getSecrets = await request(server)
|
||||
// .get("/api/v2/secrets")
|
||||
// .set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
// .query({
|
||||
// workspaceId: testWorkspaceId,
|
||||
// environment: "dev"
|
||||
// })
|
||||
|
||||
expect(getSecrets.statusCode).toBe(200)
|
||||
expect(getSecrets.body).toHaveProperty("secrets")
|
||||
expect(getSecrets.body.secrets).toHaveLength(2)
|
||||
expect(getSecrets.body.secrets).toBeInstanceOf(Array);
|
||||
|
||||
getSecrets.body.secrets.forEach((secret: any) => {
|
||||
expect(secret).toHaveProperty('_id');
|
||||
expect(secret._id).toBeTruthy();
|
||||
// expect(getSecrets.statusCode).toBe(200)
|
||||
// expect(getSecrets.body).toHaveProperty("secrets")
|
||||
// expect(getSecrets.body.secrets).toHaveLength(2)
|
||||
// expect(getSecrets.body.secrets).toBeInstanceOf(Array);
|
||||
|
||||
// getSecrets.body.secrets.forEach((secret: any) => {
|
||||
// expect(secret).toHaveProperty('_id');
|
||||
// expect(secret._id).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('version');
|
||||
expect(secret.version).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('version');
|
||||
// expect(secret.version).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('workspace');
|
||||
expect(secret.workspace).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('workspace');
|
||||
// expect(secret.workspace).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('type');
|
||||
expect(secret.type).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('type');
|
||||
// expect(secret.type).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('tags');
|
||||
expect(secret.tags).toHaveLength(0);
|
||||
// expect(secret).toHaveProperty('tags');
|
||||
// expect(secret.tags).toHaveLength(0);
|
||||
|
||||
expect(secret).toHaveProperty('environment');
|
||||
expect(secret.environment).toEqual("dev");
|
||||
// expect(secret).toHaveProperty('environment');
|
||||
// expect(secret.environment).toEqual("dev");
|
||||
|
||||
expect(secret).toHaveProperty('secretKeyCiphertext');
|
||||
expect(secret.secretKeyCiphertext).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretKeyCiphertext');
|
||||
// expect(secret.secretKeyCiphertext).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretKeyIV');
|
||||
expect(secret.secretKeyIV).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretKeyIV');
|
||||
// expect(secret.secretKeyIV).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretKeyTag');
|
||||
expect(secret.secretKeyTag).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretKeyTag');
|
||||
// expect(secret.secretKeyTag).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretValueCiphertext');
|
||||
expect(secret.secretValueCiphertext).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretValueCiphertext');
|
||||
// expect(secret.secretValueCiphertext).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretValueIV');
|
||||
expect(secret.secretValueIV).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretValueIV');
|
||||
// expect(secret.secretValueIV).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretValueTag');
|
||||
expect(secret.secretValueTag).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretValueTag');
|
||||
// expect(secret.secretValueTag).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretCommentCiphertext');
|
||||
expect(secret.secretCommentCiphertext).toBeFalsy();
|
||||
// expect(secret).toHaveProperty('secretCommentCiphertext');
|
||||
// expect(secret.secretCommentCiphertext).toBeFalsy();
|
||||
|
||||
expect(secret).toHaveProperty('secretCommentIV');
|
||||
expect(secret.secretCommentIV).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretCommentIV');
|
||||
// expect(secret.secretCommentIV).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretCommentTag');
|
||||
expect(secret.secretCommentTag).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretCommentTag');
|
||||
// expect(secret.secretCommentTag).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('createdAt');
|
||||
expect(secret.createdAt).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('createdAt');
|
||||
// expect(secret.createdAt).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('updatedAt');
|
||||
expect(secret.updatedAt).toBeTruthy();
|
||||
});
|
||||
} finally {
|
||||
// clean up
|
||||
await deleteAllSecrets()
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe("fetch secrets via service token", () => {
|
||||
test("Get secrets via jwt when personal overrides exist", async () => {
|
||||
try {
|
||||
// get login details
|
||||
const loginResponse = await getJWTFromTestUser()
|
||||
|
||||
// create creates
|
||||
const createSecretsResponse = await request(server)
|
||||
.post("/api/v2/secrets/batch")
|
||||
.set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
.send({
|
||||
workspaceId: testWorkspaceId,
|
||||
environment: "dev",
|
||||
requests: batchSecretRequestWithOverrides
|
||||
})
|
||||
// expect(secret).toHaveProperty('updatedAt');
|
||||
// expect(secret.updatedAt).toBeTruthy();
|
||||
// });
|
||||
// } finally {
|
||||
// // clean up
|
||||
// await deleteAllSecrets()
|
||||
// }
|
||||
// })
|
||||
// })
|
||||
|
||||
// describe("fetch secrets via service token", () => {
|
||||
// test("Get secrets via jwt when personal overrides exist", async () => {
|
||||
// try {
|
||||
// // get login details
|
||||
// const loginResponse = await getJWTFromTestUser()
|
||||
|
||||
// // create creates
|
||||
// const createSecretsResponse = await request(server)
|
||||
// .post("/api/v2/secrets/batch")
|
||||
// .set('Authorization', `Bearer ${loginResponse.token}`)
|
||||
// .send({
|
||||
// workspaceId: testWorkspaceId,
|
||||
// environment: "dev",
|
||||
// requests: batchSecretRequestWithOverrides
|
||||
// })
|
||||
|
||||
expect(createSecretsResponse.statusCode).toBe(200)
|
||||
|
||||
// now use the service token to fetch secrets
|
||||
const serviceToken = await getServiceTokenFromTestUser()
|
||||
// expect(createSecretsResponse.statusCode).toBe(200)
|
||||
|
||||
// // now use the service token to fetch secrets
|
||||
// const serviceToken = await getServiceTokenFromTestUser()
|
||||
|
||||
const getSecrets = await request(server)
|
||||
.get("/api/v2/secrets")
|
||||
.set('Authorization', `Bearer ${serviceToken}`)
|
||||
.query({
|
||||
workspaceId: testWorkspaceId,
|
||||
environment: "dev"
|
||||
})
|
||||
|
||||
expect(getSecrets.statusCode).toBe(200)
|
||||
expect(getSecrets.body).toHaveProperty("secrets")
|
||||
expect(getSecrets.body.secrets).toHaveLength(2)
|
||||
expect(getSecrets.body.secrets).toBeInstanceOf(Array);
|
||||
// const getSecrets = await request(server)
|
||||
// .get("/api/v2/secrets")
|
||||
// .set('Authorization', `Bearer ${serviceToken}`)
|
||||
// .query({
|
||||
// workspaceId: testWorkspaceId,
|
||||
// environment: "dev"
|
||||
// })
|
||||
|
||||
// expect(getSecrets.statusCode).toBe(200)
|
||||
// expect(getSecrets.body).toHaveProperty("secrets")
|
||||
// expect(getSecrets.body.secrets).toHaveLength(2)
|
||||
// expect(getSecrets.body.secrets).toBeInstanceOf(Array);
|
||||
|
||||
getSecrets.body.secrets.forEach((secret: any) => {
|
||||
expect(secret).toHaveProperty('_id');
|
||||
expect(secret._id).toBeTruthy();
|
||||
// getSecrets.body.secrets.forEach((secret: any) => {
|
||||
// expect(secret).toHaveProperty('_id');
|
||||
// expect(secret._id).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('version');
|
||||
expect(secret.version).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('version');
|
||||
// expect(secret.version).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('workspace');
|
||||
expect(secret.workspace).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('workspace');
|
||||
// expect(secret.workspace).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('type');
|
||||
expect(secret.type).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('type');
|
||||
// expect(secret.type).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('tags');
|
||||
expect(secret.tags).toHaveLength(0);
|
||||
// expect(secret).toHaveProperty('tags');
|
||||
// expect(secret.tags).toHaveLength(0);
|
||||
|
||||
expect(secret).toHaveProperty('environment');
|
||||
expect(secret.environment).toEqual("dev");
|
||||
// expect(secret).toHaveProperty('environment');
|
||||
// expect(secret.environment).toEqual("dev");
|
||||
|
||||
expect(secret).toHaveProperty('secretKeyCiphertext');
|
||||
expect(secret.secretKeyCiphertext).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretKeyCiphertext');
|
||||
// expect(secret.secretKeyCiphertext).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretKeyIV');
|
||||
expect(secret.secretKeyIV).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretKeyIV');
|
||||
// expect(secret.secretKeyIV).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretKeyTag');
|
||||
expect(secret.secretKeyTag).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretKeyTag');
|
||||
// expect(secret.secretKeyTag).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretValueCiphertext');
|
||||
expect(secret.secretValueCiphertext).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretValueCiphertext');
|
||||
// expect(secret.secretValueCiphertext).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretValueIV');
|
||||
expect(secret.secretValueIV).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretValueIV');
|
||||
// expect(secret.secretValueIV).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretValueTag');
|
||||
expect(secret.secretValueTag).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretValueTag');
|
||||
// expect(secret.secretValueTag).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretCommentCiphertext');
|
||||
expect(secret.secretCommentCiphertext).toBeFalsy();
|
||||
// expect(secret).toHaveProperty('secretCommentCiphertext');
|
||||
// expect(secret.secretCommentCiphertext).toBeFalsy();
|
||||
|
||||
expect(secret).toHaveProperty('secretCommentIV');
|
||||
expect(secret.secretCommentIV).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretCommentIV');
|
||||
// expect(secret.secretCommentIV).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('secretCommentTag');
|
||||
expect(secret.secretCommentTag).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('secretCommentTag');
|
||||
// expect(secret.secretCommentTag).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('createdAt');
|
||||
expect(secret.createdAt).toBeTruthy();
|
||||
// expect(secret).toHaveProperty('createdAt');
|
||||
// expect(secret.createdAt).toBeTruthy();
|
||||
|
||||
expect(secret).toHaveProperty('updatedAt');
|
||||
expect(secret.updatedAt).toBeTruthy();
|
||||
});
|
||||
} finally {
|
||||
// clean up
|
||||
await deleteAllSecrets()
|
||||
}
|
||||
})
|

test("should create secrets and read secrets via service token when no overrides", async () => {
try {
// get login details
const loginResponse = await getJWTFromTestUser()

// create secrets
const createSecretsResponse = await request(server)
.post("/api/v2/secrets/batch")
.set('Authorization', `Bearer ${loginResponse.token}`)
.send({
workspaceId: testWorkspaceId,
environment: "dev",
requests: batchSecretRequestWithNoOverride
})

expect(createSecretsResponse.statusCode).toBe(200)
// expect(secret).toHaveProperty('updatedAt');
// expect(secret.updatedAt).toBeTruthy();
// });
// } finally {
// // clean up
// await deleteAllSecrets()
// }
// })

// test("should create secrets and read secrets via service token when no overrides", async () => {
// try {
// // get login details
// const loginResponse = await getJWTFromTestUser()

// // create secrets
// const createSecretsResponse = await request(server)
// .post("/api/v2/secrets/batch")
// .set('Authorization', `Bearer ${loginResponse.token}`)
// .send({
// workspaceId: testWorkspaceId,
// environment: "dev",
// requests: batchSecretRequestWithNoOverride
// })

// expect(createSecretsResponse.statusCode).toBe(200)


// now use the service token to fetch secrets
const serviceToken = await getServiceTokenFromTestUser()
// // now use the service token to fetch secrets
// const serviceToken = await getServiceTokenFromTestUser()

const getSecrets = await request(server)
.get("/api/v2/secrets")
.set('Authorization', `Bearer ${serviceToken}`)
.query({
workspaceId: testWorkspaceId,
environment: "dev"
})
// const getSecrets = await request(server)
// .get("/api/v2/secrets")
// .set('Authorization', `Bearer ${serviceToken}`)
// .query({
// workspaceId: testWorkspaceId,
// environment: "dev"
// })

expect(getSecrets.statusCode).toBe(200)
expect(getSecrets.body).toHaveProperty("secrets")
expect(getSecrets.body.secrets).toHaveLength(3)
expect(getSecrets.body.secrets).toBeInstanceOf(Array);
// expect(getSecrets.statusCode).toBe(200)
// expect(getSecrets.body).toHaveProperty("secrets")
// expect(getSecrets.body.secrets).toHaveLength(3)
// expect(getSecrets.body.secrets).toBeInstanceOf(Array);

getSecrets.body.secrets.forEach((secret: any) => {
expect(secret).toHaveProperty('_id');
expect(secret._id).toBeTruthy();
// getSecrets.body.secrets.forEach((secret: any) => {
// expect(secret).toHaveProperty('_id');
// expect(secret._id).toBeTruthy();

expect(secret).toHaveProperty('version');
expect(secret.version).toBeTruthy();
// expect(secret).toHaveProperty('version');
// expect(secret.version).toBeTruthy();

expect(secret).toHaveProperty('workspace');
expect(secret.workspace).toBeTruthy();
// expect(secret).toHaveProperty('workspace');
// expect(secret.workspace).toBeTruthy();

expect(secret).toHaveProperty('type');
expect(secret.type).toBeTruthy();
// expect(secret).toHaveProperty('type');
// expect(secret.type).toBeTruthy();

expect(secret).toHaveProperty('tags');
expect(secret.tags).toHaveLength(0);
// expect(secret).toHaveProperty('tags');
// expect(secret.tags).toHaveLength(0);

expect(secret).toHaveProperty('environment');
expect(secret.environment).toEqual("dev");
// expect(secret).toHaveProperty('environment');
// expect(secret.environment).toEqual("dev");

expect(secret).toHaveProperty('secretKeyCiphertext');
expect(secret.secretKeyCiphertext).toBeTruthy();
// expect(secret).toHaveProperty('secretKeyCiphertext');
// expect(secret.secretKeyCiphertext).toBeTruthy();

expect(secret).toHaveProperty('secretKeyIV');
expect(secret.secretKeyIV).toBeTruthy();
// expect(secret).toHaveProperty('secretKeyIV');
// expect(secret.secretKeyIV).toBeTruthy();

expect(secret).toHaveProperty('secretKeyTag');
expect(secret.secretKeyTag).toBeTruthy();
// expect(secret).toHaveProperty('secretKeyTag');
// expect(secret.secretKeyTag).toBeTruthy();

expect(secret).toHaveProperty('secretValueCiphertext');
expect(secret.secretValueCiphertext).toBeTruthy();
// expect(secret).toHaveProperty('secretValueCiphertext');
// expect(secret.secretValueCiphertext).toBeTruthy();

expect(secret).toHaveProperty('secretValueIV');
expect(secret.secretValueIV).toBeTruthy();

expect(secret).toHaveProperty('secretValueTag');
expect(secret.secretValueTag).toBeTruthy();

expect(secret).toHaveProperty('secretCommentCiphertext');
expect(secret.secretCommentCiphertext).toBeFalsy();

expect(secret).toHaveProperty('secretCommentIV');
expect(secret.secretCommentIV).toBeTruthy();

expect(secret).toHaveProperty('secretCommentTag');
expect(secret.secretCommentTag).toBeTruthy();

expect(secret).toHaveProperty('createdAt');
expect(secret.createdAt).toBeTruthy();

expect(secret).toHaveProperty('updatedAt');
expect(secret.updatedAt).toBeTruthy();
});
} finally {
// clean up
await deleteAllSecrets()
}
})
})

describe("create secrets via JWT", () => {
test("Create secrets via jwt when some requests have missing required parameters", async () => {
// get login details
const loginResponse = await getJWTFromTestUser()

// create creates
const createSecretsResponse = await request(server)
.post("/api/v2/secrets/batch")
.set('Authorization', `Bearer ${loginResponse.token}`)
.send({
workspaceId: testWorkspaceId,
environment: "dev",
requests: batchSecretRequestWithBadRequest
})

const allSecretsInDB = await getAllSecrets()
// expect(secret).toHaveProperty('secretValueIV');
// expect(secret.secretValueIV).toBeTruthy();

// expect(secret).toHaveProperty('secretValueTag');
// expect(secret.secretValueTag).toBeTruthy();

// expect(secret).toHaveProperty('secretCommentCiphertext');
// expect(secret.secretCommentCiphertext).toBeFalsy();

// expect(secret).toHaveProperty('secretCommentIV');
// expect(secret.secretCommentIV).toBeTruthy();

// expect(secret).toHaveProperty('secretCommentTag');
// expect(secret.secretCommentTag).toBeTruthy();

// expect(secret).toHaveProperty('createdAt');
// expect(secret.createdAt).toBeTruthy();

// expect(secret).toHaveProperty('updatedAt');
// expect(secret.updatedAt).toBeTruthy();
// });
// } finally {
// // clean up
// await deleteAllSecrets()
// }
// })
// })

// describe("create secrets via JWT", () => {
// test("Create secrets via jwt when some requests have missing required parameters", async () => {
// // get login details
// const loginResponse = await getJWTFromTestUser()

// // create creates
// const createSecretsResponse = await request(server)
// .post("/api/v2/secrets/batch")
// .set('Authorization', `Bearer ${loginResponse.token}`)
// .send({
// workspaceId: testWorkspaceId,
// environment: "dev",
// requests: batchSecretRequestWithBadRequest
// })

// const allSecretsInDB = await getAllSecrets()

expect(createSecretsResponse.statusCode).toBe(500) // TODO should be set to 400
expect(allSecretsInDB).toHaveLength(0)
})
})
})
// expect(createSecretsResponse.statusCode).toBe(500) // TODO should be set to 400
// expect(allSecretsInDB).toHaveLength(0)
// })
// })
// })
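The service-token read above goes through `GET /api/v2/secrets` with `workspaceId` and `environment` query parameters and the token supplied as a Bearer credential, and the response exposes only encrypted fields (ciphertext, IV, tag). For reference, a minimal Go sketch of the same request follows; the helper name, the environment-variable names in `main`, and the generic map decoding are illustrative only, and client-side decryption of the returned fields is out of scope here.

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
	"os"
)

// fetchSecrets mirrors the request the tests above make:
// GET /api/v2/secrets?workspaceId=...&environment=... with a service token as a Bearer token.
// The response contains only encrypted fields (ciphertext/IV/tag); decrypting them is not shown.
func fetchSecrets(baseURL, serviceToken, workspaceID, environment string) ([]map[string]any, error) {
	endpoint := fmt.Sprintf("%s/api/v2/secrets?workspaceId=%s&environment=%s",
		baseURL, url.QueryEscape(workspaceID), url.QueryEscape(environment))

	req, err := http.NewRequest(http.MethodGet, endpoint, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("Authorization", "Bearer "+serviceToken)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("unexpected status: %s", resp.Status)
	}

	var body struct {
		Secrets []map[string]any `json:"secrets"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&body); err != nil {
		return nil, err
	}
	return body.Secrets, nil
}

func main() {
	secrets, err := fetchSecrets(
		"https://app.infisical.com",      // or a self-hosted backend URL
		os.Getenv("INFISICAL_TOKEN"),     // service token
		os.Getenv("INFISICAL_WORKSPACE"), // hypothetical env var used only for this sketch
		"dev",
	)
	if err != nil {
		panic(err)
	}
	fmt.Println("fetched", len(secrets), "encrypted secrets")
}
```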
@@ -28,14 +28,14 @@ describe('Crypto', () => {
test('should throw error if publicKey is undefined', () => {
expect(() => {
encryptAsymmetric({ plaintext, publicKey, privateKey });
}).toThrowError('Failed to perform asymmetric encryption');
}).toThrowError('invalid encoding');
});

test('should throw error if publicKey is empty string', () => {
publicKey = '';
expect(() => {
encryptAsymmetric({ plaintext, publicKey, privateKey });
}).toThrowError('Failed to perform asymmetric encryption');
}).toThrowError('bad public key size');
});
});

@@ -47,14 +47,14 @@ describe('Crypto', () => {
test('should throw error if privateKey is undefined', () => {
expect(() => {
encryptAsymmetric({ plaintext, publicKey, privateKey });
}).toThrowError('Failed to perform asymmetric encryption');
}).toThrowError('invalid encoding');
});

test('should throw error if privateKey is empty string', () => {
privateKey = '';
expect(() => {
encryptAsymmetric({ plaintext, publicKey, privateKey });
}).toThrowError('Failed to perform asymmetric encryption');
}).toThrowError('bad secret key size');
});
});

@@ -66,7 +66,7 @@ describe('Crypto', () => {
test('should throw error if plaintext is undefined', () => {
expect(() => {
encryptAsymmetric({ plaintext, publicKey, privateKey });
}).toThrowError('Failed to perform asymmetric encryption');
}).toThrowError('expected string');
});

test('should encrypt plaintext containing special characters', () => {
@@ -130,7 +130,7 @@ describe('Crypto', () => {
publicKey,
privateKey
});
}).toThrowError('Failed to perform asymmetric decryption');
}).toThrowError('invalid encoding');
});

test('should throw error if nonce is modified', () => {
@@ -149,7 +149,7 @@ describe('Crypto', () => {
publicKey,
privateKey
});
}).toThrowError('Failed to perform asymmetric decryption');
}).toThrowError('invalid encoding');
});
});
});
@@ -170,7 +170,7 @@ describe('Crypto', () => {
const invalidKey = 'invalid-key';
expect(() => {
encryptSymmetric({ plaintext, key: invalidKey });
}).toThrowError('Failed to perform symmetric encryption');
}).toThrowError('Invalid key length');
});

test('should throw an error when invalid key is provided', () => {
@@ -179,7 +179,7 @@ describe('Crypto', () => {

expect(() => {
encryptSymmetric({ plaintext, key: invalidKey });
}).toThrowError('Failed to perform symmetric encryption');
}).toThrowError('Invalid key length');
});
});

@@ -209,7 +209,7 @@ describe('Crypto', () => {
tag,
key
});
}).toThrowError('Failed to perform symmetric decryption');
}).toThrowError('Unsupported state or unable to authenticate data');
});

test('should fail if iv is modified', () => {
@@ -221,7 +221,7 @@ describe('Crypto', () => {
tag,
key
});
}).toThrowError('Failed to perform symmetric decryption');
}).toThrowError('Unsupported state or unable to authenticate data');
});

test('should fail if tag is modified', () => {
@@ -233,7 +233,7 @@ describe('Crypto', () => {
tag: modifiedTag,
key
});
}).toThrowError('Failed to perform symmetric decryption');
}).toThrowError(/Invalid authentication tag length: \d+/);
});

test('should throw an error when decryption fails', () => {
@@ -245,7 +245,7 @@ describe('Crypto', () => {
tag,
key: invalidKey
});
}).toThrowError('Failed to perform symmetric decryption');
}).toThrowError('Invalid key length');
});
});
});
@@ -67,7 +67,7 @@ var loginCmd = &cobra.Command{

//override domain
domainQuery := true
if config.INFISICAL_URL_MANUAL_OVERRIDE != util.INFISICAL_DEFAULT_API_URL {
if config.INFISICAL_URL_MANUAL_OVERRIDE != "" && config.INFISICAL_URL_MANUAL_OVERRIDE != util.INFISICAL_DEFAULT_API_URL {
overrideDomain, err := DomainOverridePrompt()
if err != nil {
util.HandleError(err)

@@ -34,7 +34,9 @@ func init() {
rootCmd.PersistentFlags().BoolVarP(&debugLogging, "debug", "d", false, "Enable verbose logging")
rootCmd.PersistentFlags().StringVar(&config.INFISICAL_URL, "domain", util.INFISICAL_DEFAULT_API_URL, "Point the CLI to your own backend [can also set via environment variable name: INFISICAL_API_URL]")
rootCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
util.CheckForUpdate()
if !util.IsRunningInDocker() {
util.CheckForUpdate()
}
}

// if config.INFISICAL_URL is set to the default value, check if INFISICAL_URL is set in the environment

@@ -4,12 +4,15 @@ import (
"encoding/json"
"errors"
"fmt"
log "github.com/sirupsen/logrus"
"io"
"io/ioutil"
"net/http"
"os"
"os/exec"
"runtime"
"strings"

log "github.com/sirupsen/logrus"

"github.com/fatih/color"
)
@@ -49,7 +52,7 @@ func CheckForUpdate() {
}

func getLatestTag(repoOwner string, repoName string) (string, error) {
url := fmt.Sprintf("https://api.github.com/repos/%s/%s/tags", repoOwner, repoName)
url := fmt.Sprintf("https://api.github.com/repos/%s/%s/releases/latest", repoOwner, repoName)
resp, err := http.Get(url)
if err != nil {
return "", err
@@ -65,15 +68,20 @@ func getLatestTag(repoOwner string, repoName string) (string, error) {
return "", err
}

var tags []struct {
Name string `json:"name"`
var releaseTag struct {
TagName string `json:"tag_name"`
}

if err := json.Unmarshal(body, &tags); err != nil {
if err := json.Unmarshal(body, &releaseTag); err != nil {
return "", fmt.Errorf("failed to unmarshal github response: %w", err)
}

return tags[0].Name[1:], nil
tag_prefix := "infisical-cli/v"

// Extract the version from the first valid tag
version := strings.TrimPrefix(releaseTag.TagName, tag_prefix)

return version, nil
}

func GetUpdateInstructions() string {
@@ -125,3 +133,16 @@ func getLinuxPackageManager() string {

return ""
}

func IsRunningInDocker() bool {
if _, err := os.Stat("/.dockerenv"); err == nil {
return true
}

cgroup, err := ioutil.ReadFile("/proc/self/cgroup")
if err != nil {
return false
}

return strings.Contains(string(cgroup), "docker")
}
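The CLI changes above do two things: the version check now resolves the latest release from GitHub's `releases/latest` endpoint and trims the `infisical-cli/v` tag prefix (instead of taking the first entry of the tags list), and the check is skipped entirely when the CLI detects it is running inside a Docker container. A minimal standalone Go sketch of that flow, assuming the same endpoint and prefix, is below; the function names and `main` wiring are illustrative, and it uses `os.ReadFile` rather than the deprecated `ioutil` call.

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"os"
	"strings"
)

// latestCLIVersion queries GitHub's "latest release" endpoint and trims the
// monorepo tag prefix (e.g. "infisical-cli/v0.4.0" -> "0.4.0").
func latestCLIVersion(repoOwner, repoName string) (string, error) {
	url := fmt.Sprintf("https://api.github.com/repos/%s/%s/releases/latest", repoOwner, repoName)
	resp, err := http.Get(url)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()

	var release struct {
		TagName string `json:"tag_name"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&release); err != nil {
		return "", fmt.Errorf("failed to unmarshal github response: %w", err)
	}
	return strings.TrimPrefix(release.TagName, "infisical-cli/v"), nil
}

// runningInDocker reports whether the process appears to run inside a container,
// using the same signals as the diff: the /.dockerenv marker or "docker" in the cgroup file.
func runningInDocker() bool {
	if _, err := os.Stat("/.dockerenv"); err == nil {
		return true
	}
	cgroup, err := os.ReadFile("/proc/self/cgroup")
	if err != nil {
		return false
	}
	return strings.Contains(string(cgroup), "docker")
}

func main() {
	if runningInDocker() {
		return // skip the update check inside containers, as the CLI now does
	}
	if version, err := latestCLIVersion("Infisical", "infisical"); err == nil {
		fmt.Println("latest infisical CLI version:", version)
	}
}
```

Reading `/.dockerenv` and `/proc/self/cgroup` keeps the container detection dependency-free, at the cost of missing some non-Docker container runtimes.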
@@ -5,11 +5,11 @@ description: "How to authenticate with the Infisical Public API"

## Essentials

The Public API accepts multiple modes of authentication: API Key, Service Account credentials, or [Infisical Token](../../getting-started/dashboard/token).
The Public API accepts multiple modes of authentication: API Key, Service Account credentials, or [Infisical Token](/documentation/platform/token).

- API Key: Provides full access to all endpoints representing the user.
- [Service Account](): Provides scoped access to an organization and select projects representing a machine such as a VM or application client.
- [Infisical Token](../../getting-started/dashboard/token): Provides short-lived, scoped CRUD access to the secrets of a specific project and environment.
- Service Account: Provides scoped access to an organization and select projects representing a machine such as a VM or application client.
- [Infisical Token](/documentation/platform/token): Provides short-lived, scoped CRUD access to the secrets of a specific project and environment.

<AccordionGroup>
<Accordion title="API Key">

@@ -6,7 +6,7 @@ description: "How to add a secret using an Infisical Token scoped to a project a
Prerequisites:

- Set up and add envars to [Infisical Cloud](https://app.infisical.com).
- Create an [Infisical Token](../../../getting-started/dashboard/token) for your project and environment with write access enabled.
- Create an [Infisical Token](/documentation/platform/token) for your project and environment with write access enabled.
- Grasp a basic understanding of the system and its underlying cryptography [here](/api-reference/overview/introduction).
- [Ensure that your project is blind-indexed](../blind-indices).

@@ -6,7 +6,7 @@ description: "How to delete a secret using an Infisical Token scoped to a projec
Prerequisites:

- Set up and add envars to [Infisical Cloud](https://app.infisical.com).
- Create either an [API Key](/api-reference/overview/authentication) or [Infisical Token](../../../getting-started/dashboard/token) for your project and environment with write access enabled.
- Create either an [API Key](/api-reference/overview/authentication) or [Infisical Token](/documentation/platform/token) for your project and environment with write access enabled.
- Grasp a basic understanding of the system and its underlying cryptography [here](/api-reference/overview/introduction).
- [Ensure that your project is blind-indexed](../blind-indices).

@@ -6,7 +6,7 @@ description: "How to get a secret using an Infisical Token scoped to a project a
Prerequisites:

- Set up and add envars to [Infisical Cloud](https://app.infisical.com).
- Create an [Infisical Token](../../../getting-started/dashboard/token) for your project and environment.
- Create an [Infisical Token](/documentation/platform/token) for your project and environment.
- Grasp a basic understanding of the system and its underlying cryptography [here](/api-reference/overview/introduction).
- [Ensure that your project is blind-indexed](../blind-indices).

@@ -6,7 +6,7 @@ description: "How to get all secrets using an Infisical Token scoped to a projec
Prerequisites:

- Set up and add envars to [Infisical Cloud](https://app.infisical.com).
- Create an [Infisical Token](../../../getting-started/dashboard/token) for your project and environment.
- Create an [Infisical Token](/documentation/platform/token) for your project and environment.
- Grasp a basic understanding of the system and its underlying cryptography [here](/api-reference/overview/introduction).
- [Ensure that your project is blind-indexed](../blind-indices).

@@ -6,7 +6,7 @@ description: "How to update a secret using an Infisical Token scoped to a projec
Prerequisites:

- Set up and add envars to [Infisical Cloud](https://app.infisical.com).
- Create an [Infisical Token](../../../getting-started/dashboard/token) for your project and environment with write access enabled.
- Create an [Infisical Token](/documentation/platform/token) for your project and environment with write access enabled.
- Grasp a basic understanding of the system and its underlying cryptography [here](/api-reference/overview/introduction).
- [Ensure that your project is blind-indexed](../blind-indices).

@@ -37,7 +37,7 @@ Export environment variables from the platform into a file format.

### Environment variables
<Accordion title="INFISICAL_TOKEN">
Used to fetch secrets via a [service token](/getting-started/dashboard/token) as opposed to logged-in credentials. Simply export this variable in the terminal before running this command.
Used to fetch secrets via a [service token](/documentation/platform/token) as opposed to logged-in credentials. Simply export this variable in the terminal before running this command.

```bash
# Example

@@ -42,7 +42,7 @@ Inject secrets from Infisical into your application process.

### Environment variables
<Accordion title="INFISICAL_TOKEN">
Used to fetch secrets via a [service token](/getting-started/dashboard/token) as opposed to logged-in credentials. Simply export this variable in the terminal before running this command.
Used to fetch secrets via a [service token](/documentation/platform/token) as opposed to logged-in credentials. Simply export this variable in the terminal before running this command.

```bash
# Example
@@ -72,7 +72,7 @@ Inject secrets from Infisical into your application process.
</Accordion>

<Accordion title="--token">
If you are using a [service token](../../getting-started/dashboard/token) to authenticate, you can pass the token as a flag
If you are using a [service token](/documentation/platform/token) to authenticate, you can pass the token as a flag

```bash
# Example

@@ -20,7 +20,7 @@ This command enables you to perform CRUD (create, read, update, delete) operatio

### Environment variables
<Accordion title="INFISICAL_TOKEN">
Used to fetch secrets via a [service token](/getting-started/dashboard/token) as opposed to logged-in credentials. Simply export this variable in the terminal before running this command.
Used to fetch secrets via a [service token](/documentation/platform/token) as opposed to logged-in credentials. Simply export this variable in the terminal before running this command.

```bash
# Example

@@ -3,7 +3,7 @@ title: "Infisical Token"
description: "How to use the Infisical service token within the CLI."
---

Prerequisite: [Infisical Token and How to Generate One](../../getting-started/dashboard/token).
Prerequisite: [Infisical Token and How to Generate One](/documentation/platform/token).

It's possible to use the CLI to sync environment variables without manually entering login credentials by using a service token from the prerequisite link above.

@@ -18,7 +18,7 @@ Prerequisite: [Install the CLI](/cli/overview)

<Tab title="Infisical Token">
To use the Infisical CLI in environments where you cannot run the `infisical login` command, you can authenticate via an
Infisical Token instead. Learn more about [Infisical Token](../getting-started/dashboard/token).
Infisical Token instead. Learn more about [Infisical Token](/documentation/platform/token).
</Tab>
</Tabs>
docs/documentation/getting-started/cli.mdx (new file, 128 lines)
@@ -0,0 +1,128 @@
|
||||
---
|
||||
title: "CLI"
|
||||
---
|
||||
|
||||
The Infisical CLI can be used to inject secrets into any framework, such as Next.js, Express, or Django, during local development.

It can also be used to expose secrets from Infisical as environment variables in CI/CD pipelines and [Docker containers](/documentation/getting-started/docker).
|
||||
|
||||
Prerequisites:
|
||||
|
||||
- Have a project with secrets ready in [Infisical Cloud](https://app.infisical.com).
|
||||
|
||||
## Installation
|
||||
|
||||
Follow the instructions for your operating system to install the Infisical CLI.
|
||||
|
||||
<Tabs>
|
||||
<Tab title="MacOS">
|
||||
Use [brew](https://brew.sh/) package manager
|
||||
|
||||
```console
|
||||
$ brew install infisical/get-cli/infisical
|
||||
```
|
||||
</Tab>
|
||||
<Tab title="Windows">
|
||||
Use [Scoop](https://scoop.sh/) package manager
|
||||
|
||||
```console
|
||||
$ scoop bucket add org https://github.com/Infisical/scoop-infisical.git
|
||||
```
|
||||
|
||||
```console
|
||||
$ scoop install infisical
|
||||
```
|
||||
</Tab>
|
||||
<Tab title="Alpine">
|
||||
Install prerequisite
|
||||
```console
|
||||
$ apk add --no-cache bash sudo
|
||||
```
|
||||
|
||||
Add Infisical repository
|
||||
```console
|
||||
$ curl -1sLf \
|
||||
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' \
|
||||
| bash
|
||||
```
|
||||
|
||||
Then install CLI
|
||||
```console
|
||||
$ apk update && sudo apk add infisical
|
||||
```
|
||||
|
||||
</Tab>
|
||||
<Tab title="RedHat/CentOs/Amazon">
|
||||
Add Infisical repository
|
||||
```console
|
||||
$ curl -1sLf \
|
||||
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.rpm.sh' \
|
||||
| sudo -E bash
|
||||
```
|
||||
|
||||
Then install CLI
|
||||
```console
|
||||
$ sudo yum install infisical
|
||||
```
|
||||
|
||||
</Tab>
|
||||
<Tab title="Debian/Ubuntu">
|
||||
Add Infisical repository
|
||||
|
||||
```console
|
||||
$ curl -1sLf \
|
||||
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' \
|
||||
| sudo -E bash
|
||||
```
|
||||
|
||||
Then install CLI
|
||||
```console
|
||||
$ sudo apt-get update && sudo apt-get install -y infisical
|
||||
```
|
||||
|
||||
</Tab>
|
||||
<Tab title="Arch Linux">
|
||||
Use the `yay` package manager to install from the [Arch User Repository](https://aur.archlinux.org/packages/infisical-bin)
|
||||
|
||||
```console
|
||||
$ yay -S infisical-bin
|
||||
```
|
||||
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
## Login
|
||||
|
||||
Authenticate the CLI with the Infisical platform using your email and password.
|
||||
|
||||
```console
|
||||
$ infisical login
|
||||
```
|
||||
|
||||
## Initialization
|
||||
|
||||
Navigate to the root of your project directory and run the `init` command. This step connects your local project to a project on the Infisical platform and creates an `infisical.json` file containing a reference to that project.
|
||||
|
||||
```console
|
||||
$ infisical init
|
||||
```
|
||||
|
||||
## Start your app with environment variables injected
|
||||
|
||||
```console
|
||||
$ infisical run -- <your_application_start_command>
|
||||
```
|
||||
|
||||
## Example Start Commands
|
||||
|
||||
```console
|
||||
$ infisical run -- npm run dev
|
||||
$ infisical run -- flask run
|
||||
$ infisical run -- ./your_bash_script.sh
|
||||
```
|
||||
|
||||
Your app should now be running with the secrets from Infisical injected as environment variables.
|
||||
|
||||
See also:
|
||||
|
||||
- [Full documentation for the Infisical CLI](/cli/overview)
|
docs/documentation/getting-started/docker.mdx (new file, 185 lines)
@@ -0,0 +1,185 @@
|
||||
---
|
||||
title: "Docker"
|
||||
---
|
||||
|
||||
The [Infisical CLI](/cli/overview) can be added to Dockerfiles to fetch secrets from Infisical and make them available as environment variables within containers at runtime.
|
||||
|
||||
Prerequisites:
|
||||
|
||||
- Have a project with secrets ready in [Infisical Cloud](https://app.infisical.com).
|
||||
- Create an [Infisical Token](/documentation/platform/token) scoped to an environment in your project in Infisical.
|
||||
|
||||
|
||||
<Tabs>
|
||||
<Tab title="Docker">
|
||||
|
||||
## Dockerfile Modification
|
||||
|
||||
Follow the instructions for your specific Linux distribution to add the Infisical CLI to your Dockerfile.
|
||||
|
||||
<Tabs>
|
||||
<Tab title="Alpine">
|
||||
```dockerfile
|
||||
RUN apk add --no-cache bash curl && curl -1sLf \
|
||||
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
|
||||
&& apk add infisical
|
||||
```
|
||||
|
||||
</Tab>
|
||||
<Tab title="RedHat/CentOs/Amazon-linux">
|
||||
```dockerfile
|
||||
RUN curl -1sLf \
|
||||
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.rpm.sh' | sh \
|
||||
&& yum install -y infisical
|
||||
```
|
||||
</Tab>
|
||||
<Tab title="Debian/Ubuntu">
|
||||
```dockerfile
|
||||
RUN apt-get update && apt-get install -y bash curl && curl -1sLf \
|
||||
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
|
||||
&& apt-get update && apt-get install -y infisical
|
||||
```
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
Next, modify the start command of your Dockerfile:
|
||||
|
||||
```dockerfile
|
||||
CMD ["infisical", "run", "--", "[your service start command]"]
|
||||
```
|
||||
|
||||
## Launch
|
||||
|
||||
Spin up your container with the `docker run` command and feed in your Infisical Token.
|
||||
|
||||
```console
|
||||
docker run --env INFISICAL_TOKEN=<your_infisical_token> <DOCKER-IMAGE>
|
||||
```
|
||||
|
||||
Your containerized application should now be up and running with secrets from Infisical exposed as environment variables within your application's process.
|
||||
|
||||
## Example Dockerfile
|
||||
|
||||
```dockerfile
|
||||
# Select your base image (based on your Linux distribution, e.g., Alpine, Debian, Ubuntu, etc.)
|
||||
FROM alpine
|
||||
|
||||
# Add the Infisical CLI to your Dockerfile (choose the appropriate block based on your base image)
|
||||
RUN apk add --no-cache bash curl && curl -1sLf \
|
||||
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
|
||||
&& apk add infisical
|
||||
|
||||
# Install any additional dependencies or packages your service requires
|
||||
# RUN <additional commands for your service>
|
||||
|
||||
# Copy your service files to the container
|
||||
COPY . /app
|
||||
|
||||
# Set the working directory
|
||||
WORKDIR /app
|
||||
|
||||
# Modify the start command of your Dockerfile
|
||||
CMD ["infisical", "run", "--", "npm run start"]
|
||||
```
|
||||
|
||||
</Tab>
|
||||
<Tab title="Docker Compose">
|
||||
|
||||
## Dockerfile Modifications
|
||||
|
||||
Follow the instructions for your specific Linux distribution to add the Infisical CLI to your Dockerfiles.
|
||||
|
||||
<Tabs>
|
||||
<Tab title="Alpine">
|
||||
```dockerfile
|
||||
RUN apk add --no-cache bash curl && curl -1sLf \
|
||||
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
|
||||
&& apk add infisical
|
||||
```
|
||||
|
||||
</Tab>
|
||||
<Tab title="RedHat/CentOs/Amazon-linux">
|
||||
```dockerfile
|
||||
RUN curl -1sLf \
|
||||
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.rpm.sh' | sh \
|
||||
&& yum install -y infisical
|
||||
```
|
||||
</Tab>
|
||||
<Tab title="Debian/Ubuntu">
|
||||
```dockerfile
|
||||
RUN apt-get update && apt-get install -y bash curl && curl -1sLf \
|
||||
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
|
||||
&& apt-get update && apt-get install -y infisical
|
||||
```
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
Next, modify the start commands of your Dockerfiles:
|
||||
|
||||
```dockerfile
|
||||
CMD ["infisical", "run", "--", "[your service start command]"]
|
||||
```
|
||||
|
||||
## Example Dockerfile
|
||||
|
||||
```dockerfile
|
||||
# Select your base image (based on your Linux distribution, e.g., Alpine, Debian, Ubuntu, etc.)
|
||||
FROM alpine
|
||||
|
||||
# Add the Infisical CLI to your Dockerfile (choose the appropriate block based on your base image)
|
||||
RUN apk add --no-cache bash curl && curl -1sLf \
|
||||
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
|
||||
&& apk add infisical
|
||||
|
||||
# Install any additional dependencies or packages your service requires
|
||||
# RUN <additional commands for your service>
|
||||
|
||||
# Copy your service files to the container
|
||||
COPY . /app
|
||||
|
||||
# Set the working directory
|
||||
WORKDIR /app
|
||||
|
||||
# Modify the start command of your Dockerfile
|
||||
CMD ["infisical", "run", "--", "[your service start command]"]
|
||||
```
|
||||
|
||||
## Docker Compose File Modification
|
||||
|
||||
For each service you want to inject secrets into, set an environment variable called `INFISICAL_TOKEN` equal to a unique identifier variable. For example:
|
||||
|
||||
```yaml
|
||||
services:
|
||||
api:
|
||||
build: .
|
||||
image: example-service-2
|
||||
environment:
|
||||
- INFISICAL_TOKEN=${INFISICAL_TOKEN_FOR_API}
|
||||
...
|
||||
```
|
||||
|
||||
## Export shell variables
|
||||
|
||||
Next, set the shell variables you defined in your compose file. Continuing from the previous example:
|
||||
|
||||
```console
|
||||
export INFISICAL_TOKEN_FOR_API=<your_infisical_token>
|
||||
```
|
||||
|
||||
## Launch
|
||||
|
||||
Spin up your containers with the `docker-compose up` command.
|
||||
|
||||
```console
|
||||
docker-compose up
|
||||
```
|
||||
|
||||
Your containers should now be running with the secrets from Infisical available inside as environment variables.
|
||||
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
See also:
|
||||
|
||||
- [Documentation for Docker](/integrations/platforms/docker)
|
||||
- [Documentation for Docker Compose](/integrations/platforms/docker-compose)
|
docs/documentation/getting-started/introduction.mdx (new file, 90 lines)
@@ -0,0 +1,90 @@
|
||||
---
|
||||
title: "Introduction"
|
||||
---
|
||||
|
||||
Infisical is an [open-source](https://opensource.com/resources/what-open-source), [end-to-end encrypted](https://en.wikipedia.org/wiki/End-to-end_encryption) secret management platform that enables teams to easily manage and sync their environment variables.
|
||||
|
||||
Start syncing environment variables with [Infisical Cloud](https://app.infisical.com) or learn how to [host Infisical](/self-hosting/overview) yourself.
|
||||
|
||||
## Learn about Infisical
|
||||
|
||||
<Card
|
||||
href="/documentation/getting-started/platform"
|
||||
title="Platform"
|
||||
icon="laptop"
|
||||
color="#dc2626"
|
||||
>
|
||||
Store secrets like API keys, database credentials, environment variables with Infisical
|
||||
</Card>
|
||||
|
||||
## Integrate with Infisical
|
||||
|
||||
<CardGroup cols={2}>
|
||||
<Card
|
||||
title="SDKs"
|
||||
href="/documentation/getting-started/sdks"
|
||||
icon="boxes-stacked"
|
||||
color="#3c8639"
|
||||
>
|
||||
Fetch secrets with any programming language on demand
|
||||
</Card>
|
||||
<Card href="/documentation/getting-started/cli" title="Command Line Interface" icon="square-terminal" color="#3775a9">
|
||||
Inject secrets into any application process/environment
|
||||
</Card>
|
||||
<Card href="/documentation/getting-started/docker" title="Docker" icon="docker" color="#0078d3">
|
||||
Inject secrets into Docker containers
|
||||
</Card>
|
||||
<Card
|
||||
href="/documentation/getting-started/kubernetes"
|
||||
title="Kubernetes"
|
||||
icon="server"
|
||||
color="#3775a9"
|
||||
>
|
||||
Fetch and save secrets as native Kubernetes secrets
|
||||
</Card>
|
||||
</CardGroup>
|
||||
|
||||
## Resources
|
||||
|
||||
<CardGroup cols={2}>
|
||||
<Card
|
||||
href="/self-hosting/overview"
|
||||
title="Self-hosting"
|
||||
icon="server"
|
||||
color="#0285c7"
|
||||
>
|
||||
Learn how to configure and deploy Infisical
|
||||
</Card>
|
||||
<Card
|
||||
href="/documentation/guides/introduction"
|
||||
title="Guide"
|
||||
icon="book-open"
|
||||
color="#dc2626"
|
||||
>
|
||||
Explore guides for every language and stack
|
||||
</Card>
|
||||
<Card
|
||||
href="/integrations/overview"
|
||||
title="Native Integrations"
|
||||
icon="clouds"
|
||||
color="#dc2626"
|
||||
>
|
||||
Explore integrations for GitHub, Vercel, Netlify, and more
|
||||
</Card>
|
||||
<Card
|
||||
href="/integrations/overview"
|
||||
title="Frameworks"
|
||||
icon="plug"
|
||||
color="#dc2626"
|
||||
>
|
||||
Explore integrations for Next.js, Express, Django, and more
|
||||
</Card>
|
||||
<Card
|
||||
href="https://calendly.com/team-infisical/infisical-demo"
|
||||
title="Contact Us"
|
||||
icon="user-headset"
|
||||
color="#0285c7"
|
||||
>
|
||||
Questions? Need help setting up? Book a 1x1 meeting with us
|
||||
</Card>
|
||||
</CardGroup>
|
docs/documentation/getting-started/kubernetes.mdx (new file, 83 lines)
@@ -0,0 +1,83 @@
|
||||
---
|
||||
title: "Kubernetes"
|
||||
---
|
||||
|
||||
The Infisical Secrets Operator fetches secrets from Infisical and saves them as Kubernetes secrets using the custom `InfisicalSecret` resource to define authentication and storage methods.
|
||||
The operator updates secrets continuously and can reload dependent deployments automatically on secret changes.
|
||||
|
||||
Prerequisites:
|
||||
|
||||
- Connected to your cluster via kubectl
|
||||
- Have a project with secrets ready in [Infisical Cloud](https://app.infisical.com).
|
||||
- Create an [Infisical Token](/documentation/platform/token) scoped to an environment in your project in Infisical.
|
||||
|
||||
## Installation
|
||||
|
||||
Follow the instructions for either [Helm](https://helm.sh/) or [kubectl](https://github.com/kubernetes/kubectl) to install the Infisical Secrets Operator.
|
||||
|
||||
<Tabs>
|
||||
<Tab title="Helm">
|
||||
Install the Infisical Helm repository
|
||||
|
||||
```console
|
||||
helm repo add infisical-helm-charts 'https://dl.cloudsmith.io/public/infisical/helm-charts/helm/charts/'
|
||||
|
||||
helm repo update
|
||||
```
|
||||
|
||||
Install the Helm chart
|
||||
```console
|
||||
helm install --generate-name infisical-helm-charts/secrets-operator
|
||||
```
|
||||
|
||||
</Tab>
|
||||
<Tab title="Kubectl">
|
||||
The operator will be installed in the `infisical-operator-system` namespace
|
||||
```
|
||||
kubectl apply -f https://raw.githubusercontent.com/Infisical/infisical/main/k8-operator/kubectl-install/install-secrets-operator.yaml
|
||||
```
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
**Step 1: Create Kubernetes secret containing service token**
|
||||
|
||||
Once you have generated the service token, create a Kubernetes secret containing it by running the command below.
|
||||
|
||||
``` bash
|
||||
kubectl create secret generic service-token --from-literal=infisicalToken=<your-service-token-here>
|
||||
```
|
||||
|
||||
**Step 2: Fill out the InfisicalSecrets CRD and apply it to your cluster**
|
||||
|
||||
```yaml infisical-secrets-config.yaml
|
||||
apiVersion: secrets.infisical.com/v1alpha1
|
||||
kind: InfisicalSecret
|
||||
metadata:
|
||||
# Name of this InfisicalSecret resource
|
||||
name: infisicalsecret-sample
|
||||
spec:
|
||||
# The host that should be used to pull secrets from. If left empty, the value specified in Global configuration will be used
|
||||
hostAPI: https://app.infisical.com/api
|
||||
authentication:
|
||||
serviceToken:
|
||||
serviceTokenSecretReference: # <-- The secret's namespaced name that holds the project token for authentication in step 1
|
||||
secretName: service-token
|
||||
secretNamespace: option
|
||||
managedSecretReference:
|
||||
secretName: managed-secret # <-- the name of kubernetes secret that will be created
|
||||
secretNamespace: default # <-- in what namespace it will be created in
|
||||
```
|
||||
|
||||
```
|
||||
kubectl apply -f infisical-secrets-config.yaml
|
||||
```
|
||||
|
||||
You should now see a new kubernetes secret automatically created in the namespace you defined in the `managedSecretReference` property above.
|
||||
|
||||
See also:
|
||||
|
||||
- [Documentation for the Infisical Kubernetes Operator](../../integrations/platforms/kubernetes)
|
||||
|
docs/documentation/getting-started/platform.mdx (new file, 57 lines)
@@ -0,0 +1,57 @@
|
||||
---
|
||||
title: "Platform"
|
||||
---
|
||||
|
||||
Infisical is an [open-source](https://opensource.com/resources/what-open-source), [end-to-end encrypted](https://en.wikipedia.org/wiki/End-to-end_encryption) secret management platform that enables teams to easily store, manage, and sync secrets like API keys, database credentials, and environment variables across their apps and infrastructure.
|
||||
|
||||
This quickstart provides an overview of the functionalities offered by Infisical.
|
||||
|
||||
## Projects
|
||||
|
||||
Projects hold secrets for applications, which are further organized into environments such as development, testing and production.
|
||||
|
||||
### Secrets Overview
|
||||
|
||||
The secrets overview provides a bird's-eye view of all the secrets in a project and is particularly useful for identifying missing secrets across environments.
|
||||
|
||||

|
||||
|
||||
### Secrets Dashboard
|
||||
|
||||
The secrets dashboard lets you manage secrets for a specific environment in a project.
|
||||
Here, developers can [override secrets](//project#personal-overrides), [version secrets](/documentation/platform/secret-versioning), [rollback projects to any point in time](/documentation/platform/pit-recovery), and much more.
|
||||
|
||||

|
||||
|
||||
### Integrations
|
||||
|
||||
The integrations page provides native integrations to sync secrets from a project environment to a [host of ever-expanding integrations](/integrations/overview).
|
||||
|
||||
<Tip>
|
||||
Depending on your infrastructure setup and compliance requirements, you may or may not prefer to use these native integrations since they break end-to-end encryption (E2EE).
|
||||
|
||||
You will learn about various ways to integrate with Infisical and maintain E2EE in subsequent quickstart sections.
|
||||
</Tip>
|
||||
|
||||

|
||||
|
||||
### Access Control
|
||||
|
||||
The members page lets you add/remove members for a project and provision them access to environments (access levels include `No Access`, `Read Only`, and `Read and Write`).
|
||||
|
||||

|
||||
|
||||
## Organizations
|
||||
|
||||
Organizations house projects and members.
|
||||
|
||||
### Organization Settings
|
||||
|
||||
At the organization-level, you can add/remove members and manage their access to projects.
|
||||
|
||||

|
||||

|
||||
|
||||
That's it for the platform quickstart! We encourage you to continue exploring the documentation to gain a deeper understanding of the extensive features and functionality that Infisical has to offer.
|
||||
|
||||
Next, head back to [Getting Started > Introduction](/documentation/getting-started/overview) to explore ways to fetch secrets from Infisical to your apps and infrastructure.
|
docs/documentation/getting-started/sdks.mdx (new file, 145 lines)
@@ -0,0 +1,145 @@
|
||||
---
|
||||
title: "SDKs"
|
||||
---
|
||||
|
||||
From local development to production, Infisical's language-specific SDKs provide the easiest way for your app to fetch secrets on demand.
|
||||
|
||||
Prerequisites:
|
||||
|
||||
- Have a project with secrets ready in [Infisical Cloud](https://app.infisical.com).
|
||||
- Create an [Infisical Token](/documentation/platform/token) scoped to an environment in your project in Infisical.
|
||||
|
||||
## Installation
|
||||
|
||||
Follow the instructions for your language to install the SDK for it.
|
||||
|
||||
<Tabs>
|
||||
<Tab title="Node">
|
||||
|
||||
Run `npm` to add [infisical-node](https://github.com/Infisical/infisical-node) to your project.
|
||||
|
||||
```console
|
||||
$ npm install infisical-node --save
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
Import the SDK and create a client instance with your [Infisical Token](/documentation/platform/token).
|
||||
|
||||
<Tabs>
|
||||
<Tab title="ES6">
|
||||
```js
|
||||
import InfisicalClient from "infisical-node";
|
||||
|
||||
const client = new InfisicalClient({
|
||||
token: "your_infisical_token"
|
||||
});
|
||||
```
|
||||
|
||||
</Tab>
|
||||
<Tab title="ES5">
|
||||
```js
|
||||
const InfisicalClient = require("infisical-node");
|
||||
|
||||
const client = new InfisicalClient({
|
||||
token: "your_infisical_token"
|
||||
});
|
||||
````
|
||||
</Tab>
|
||||
</Tabs>
|
||||
## Get a Secret
|
||||
|
||||
```js
|
||||
const secret = await client.getSecret("API_KEY");
|
||||
const value = secret.secretValue; // get its value
|
||||
```
|
||||
|
||||
## Basic Usage
|
||||
|
||||
```js
|
||||
import express from "express";
|
||||
import InfisicalClient from "infisical-node";
|
||||
const app = express();
|
||||
const PORT = 3000;
|
||||
|
||||
const client = new InfisicalClient({
|
||||
token: "YOUR_INFISICAL_TOKEN"
|
||||
});
|
||||
|
||||
app.get("/", async (req, res) => {
|
||||
// access value
|
||||
const name = await client.getSecret("NAME");
|
||||
res.send(`Hello! My name is: ${name.secretValue}`);
|
||||
});
|
||||
|
||||
app.listen(PORT, async () => {
|
||||
console.log(`App listening on port ${PORT}`);
|
||||
});
|
||||
```
|
||||
|
||||
This example demonstrates how to use the Infisical Node SDK with an Express application. The application retrieves a secret named "NAME" and responds to requests with a greeting that includes the secret value.
|
||||
</Tab>
|
||||
<Tab title="Python">
|
||||
|
||||
## Installation
|
||||
|
||||
Run `pip` to add [infisical-python](https://github.com/Astropilot/infisical-python) to your project
|
||||
|
||||
```console
|
||||
$ pip install infisical
|
||||
```
|
||||
|
||||
Note: You need Python 3.7+.
|
||||
|
||||
## Configuration
|
||||
|
||||
Import the SDK and create a client instance with your [Infisical Token](/documentation/platform/token).
|
||||
|
||||
```py
|
||||
from infisical import InfisicalClient
|
||||
|
||||
client = InfisicalClient(token="your_infisical_token")
|
||||
```
|
||||
|
||||
## Get a Secret
|
||||
|
||||
```py
|
||||
secret = client.get_secret("API_KEY")
|
||||
value = secret.secret_value # get its value
|
||||
```
|
||||
|
||||
## Basic Usage
|
||||
|
||||
```py
|
||||
from flask import Flask
|
||||
from infisical import InfisicalClient
|
||||
|
||||
app = Flask(__name__)
|
||||
|
||||
client = InfisicalClient(token="your_infisical_token")
|
||||
|
||||
@app.route("/")
|
||||
def hello_world():
|
||||
# access value
|
||||
name = client.get_secret("NAME")
|
||||
return f"Hello! My name is: {name.secret_value}"
|
||||
```
|
||||
|
||||
This example demonstrates how to use the Infisical Python SDK with a Flask application. The application retrieves a secret named "NAME" and responds to requests with a greeting that includes the secret value.
|
||||
</Tab>
|
||||
<Tab title="Other">
|
||||
We're currently working on SDKs for other languages. Follow the GitHub issue for your needed language below:
|
||||
- [Java](https://github.com/Infisical/infisical/issues/434)
|
||||
- [Ruby](https://github.com/Infisical/infisical/issues/435)
|
||||
- [Go](https://github.com/Infisical/infisical/issues/436)
|
||||
- [Rust](https://github.com/Infisical/infisical/issues/437)
|
||||
- [PHP](https://github.com/Infisical/infisical/issues/531)
|
||||
|
||||
Missing a language? [Throw in a request](https://github.com/Infisical/infisical/issues).
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
See also:
|
||||
|
||||
- Explore the [Node SDK](https://github.com/Infisical/infisical-node)
|
||||
- Explore the [Python SDK](https://github.com/Infisical/infisical-python)
|
docs/documentation/guides/introduction.mdx (new file, 41 lines)
@@ -0,0 +1,41 @@
|
||||
---
|
||||
title: "Introduction"
|
||||
---
|
||||
|
||||
Whether you're running a Node application on Heroku, Next.js application with Vercel, or Kubernetes on AWS, Infisical has a secret management strategy from local development to production ready for you.
|
||||
|
||||
## Guides by Language
|
||||
|
||||
<CardGroup cols={2}>
|
||||
<Card
|
||||
title="Node"
|
||||
href="/documentation/guides/node"
|
||||
icon="node"
|
||||
color="#3c8639"
|
||||
>
|
||||
Manage secrets across your Node stack
|
||||
</Card>
|
||||
<Card
|
||||
href="/documentation/guides/python"
|
||||
title="Python"
|
||||
icon="python"
|
||||
color="#3775a9"
|
||||
>
|
||||
Manage secrets across your Python stack
|
||||
</Card>
|
||||
</CardGroup>
|
||||
|
||||
## Guides by Stack
|
||||
|
||||
<CardGroup cols={2}>
|
||||
<Card
|
||||
title="Next.js + Vercel"
|
||||
href="/documentation/guides/nextjs-vercel"
|
||||
icon="cloud"
|
||||
color="#3c8639"
|
||||
>
|
||||
Manage secrets for your Next.js + Vercel stack
|
||||
</Card>
|
||||
</CardGroup>
|
||||
|
||||
Want a guide? [Throw in a request](https://github.com/Infisical/infisical/issues).
|
Some files were not shown because too many files have changed in this diff.