mirror of
https://github.com/Infisical/infisical.git
synced 2025-03-24 00:15:26 +00:00
Compare commits
292 Commits
gen-exampl
...
v0.3.3
Author | SHA1 | Date | |
---|---|---|---|
12364005c1 | |||
98573e9e05 | |||
c1a4ca6203 | |||
21c2fd8542 | |||
b27bc8fc1b | |||
091115e6ba | |||
d9c055872d | |||
f73d18ddc7 | |||
eb47126f68 | |||
4750767268 | |||
b0ed772885 | |||
7fdab81b5f | |||
c17bf13f8c | |||
0e17c9a6db | |||
1c4dd78dea | |||
23418b3a09 | |||
0f143adbde | |||
1f3f4b7900 | |||
2c5f26380e | |||
8f974fb087 | |||
a0722b4ca5 | |||
41e039578a | |||
c89e8e8a96 | |||
cac83ab927 | |||
0f0b894363 | |||
43f9af1bc6 | |||
f5ed14c84c | |||
2dd57d7c73 | |||
0b1891b64a | |||
5614b0f58a | |||
3bb178976d | |||
1777f98aef | |||
45e3706335 | |||
337ed1fc46 | |||
d1ea76e5a0 | |||
4a72d725b1 | |||
1693db3199 | |||
1ff42991b3 | |||
978423ba5b | |||
4d0dc0d7b7 | |||
3817e666a9 | |||
b61350f6a4 | |||
0fb1a1dc6f | |||
9eefc87b7a | |||
53d35757ee | |||
e80e8e00b1 | |||
0b08e574c7 | |||
499323d0e3 | |||
89ad2f163a | |||
7f04617b7d | |||
44904628bc | |||
fafde7b1ad | |||
7e65314670 | |||
df52c56e83 | |||
4276fb54cc | |||
bb5a0db79c | |||
b906048ea1 | |||
7ce9c816c5 | |||
3fef6e4849 | |||
e7ce1e36e7 | |||
734c915206 | |||
783174adc6 | |||
d769db7668 | |||
00e532fce4 | |||
7cf8cba54b | |||
70b26811d9 | |||
e7aafecbc2 | |||
949fb052cd | |||
fcb1f5a51b | |||
e24f70b891 | |||
bd233ebe9b | |||
f92269f2ec | |||
2143db5eb5 | |||
0c72f50b5e | |||
3c4c616242 | |||
153baad49f | |||
75a2ab636c | |||
05a77e612c | |||
d02bc06dce | |||
e1f88f1a7b | |||
86a2647134 | |||
621b640af4 | |||
40c80f417c | |||
7bb2c1c278 | |||
a5278affe6 | |||
2f953192d6 | |||
af64582efd | |||
6ad70f24a2 | |||
8bf8968588 | |||
7e9ce0360a | |||
1d35c41dcb | |||
824315f773 | |||
8a74799d64 | |||
f0f6e8a988 | |||
89bc9a823c | |||
40250b7ecf | |||
2d6d32923d | |||
7cb6aee3f7 | |||
469d042f4b | |||
c38ccdb915 | |||
baaa92427f | |||
1ff2c61b3a | |||
0b356e0e83 | |||
eb55c053eb | |||
07b307e4b1 | |||
5bee6a5e24 | |||
bdc99e34cc | |||
cee10fb507 | |||
74e78bb967 | |||
ea5811c24c | |||
d31b7ae4af | |||
75eac1b972 | |||
c65ce14de3 | |||
f8c4ccd64c | |||
43ce222725 | |||
c7ebeecb6b | |||
243c6ca22e | |||
66f1c57a2a | |||
c0d1495761 | |||
e5f6ed3dc7 | |||
ab62d91b09 | |||
59beabb445 | |||
d5bc377e3d | |||
2bdb20f42f | |||
0062df58a2 | |||
b6bbfc08ad | |||
5baccc73c9 | |||
20e7eae4fe | |||
8432f71d58 | |||
604c22d64d | |||
c1deb08df8 | |||
66f201746f | |||
1c61ffbd36 | |||
e5ba8eb281 | |||
f542e07c33 | |||
1082d7f869 | |||
4a3adaa347 | |||
1659dab87d | |||
d88599714f | |||
71bf56a2b7 | |||
0fba78ad16 | |||
92560f5e1f | |||
0d484b93eb | |||
5f3b8c55b8 | |||
553416689c | |||
b0744fd21d | |||
be38844a5b | |||
54e2b661bc | |||
b81d8eba25 | |||
dbcd2b0988 | |||
1d11f11eaf | |||
f2d7401d1d | |||
91cb9750b4 | |||
3e0d4cb70a | |||
dab677b360 | |||
625c0785b5 | |||
540a8b4201 | |||
11f86da1f6 | |||
ab5ffa9ee6 | |||
65bec23292 | |||
635ae941d7 | |||
a9753fb784 | |||
b587d9b35a | |||
aa68bc05d9 | |||
66566a401f | |||
5aa75ecd3f | |||
0a77f9a0c8 | |||
b5d4cfed03 | |||
c57394bdab | |||
da857f321b | |||
754ea09400 | |||
f28a2ea151 | |||
c7dd028771 | |||
3c94bacda9 | |||
8e85847de3 | |||
0c10bbb569 | |||
b710944630 | |||
280f482fc8 | |||
e1ad8fbee8 | |||
56ca6039ba | |||
fba54ae0c6 | |||
e243c72ca6 | |||
23ea6fd4f9 | |||
3f9f2ef238 | |||
77cb20f5c7 | |||
ddf630c269 | |||
39adb9a0c2 | |||
97fde96b7b | |||
190391e493 | |||
d3fcb69c50 | |||
2db4a29ad7 | |||
4df82a6ff1 | |||
cdf73043e1 | |||
ca07d1c50e | |||
868011479b | |||
6f6df3e63a | |||
23c740d225 | |||
702d4de3b5 | |||
445fa35ab5 | |||
9868476965 | |||
bfa6b955ca | |||
13b1805d04 | |||
c233fd8ed1 | |||
90f5934440 | |||
30b2b85446 | |||
0adc3d2027 | |||
e53fd110f6 | |||
edf0294d51 | |||
8850b44115 | |||
17f9e53779 | |||
a61233d2ba | |||
2022988e77 | |||
409de81bd2 | |||
2b289ddf77 | |||
b066a55ead | |||
8dfc0138f5 | |||
517f508e44 | |||
2f1a671121 | |||
2fb4b261a8 | |||
9c3c745fdf | |||
6a75147719 | |||
295b363d8a | |||
d96b5943b9 | |||
17406e413d | |||
9b219f67b0 | |||
8fd2578a6d | |||
cc809a6bc0 | |||
66659c8fc8 | |||
31293bbe06 | |||
1c3488f8db | |||
20e536cec0 | |||
e8b498ca6d | |||
b82f8606a8 | |||
ab27fbccf7 | |||
d50de9366b | |||
4c56bca4e7 | |||
a60774a3f4 | |||
03426ee7f2 | |||
428022d1a2 | |||
b5bcd0a308 | |||
03c72ea00f | |||
a486390015 | |||
8dc47110a0 | |||
52a6fe64a7 | |||
081ef94399 | |||
eebde3ad12 | |||
669861d7a8 | |||
6ab6147ac8 | |||
dd7e8d254b | |||
2765f7e488 | |||
2d3a276dc2 | |||
55eddee6ce | |||
ab751d0db3 | |||
b2bd0ba340 | |||
224fa25fdf | |||
e6539a5566 | |||
6115a311ad | |||
cd0b2e3a26 | |||
80a3c196ae | |||
6188b04544 | |||
8ba4f964d4 | |||
0d2caddb12 | |||
4570c35658 | |||
72f7d81b80 | |||
231fa61805 | |||
9f74affd3a | |||
f58e1e1d6c | |||
074cf695b2 | |||
07c056523f | |||
65eb037020 | |||
a37cf91702 | |||
80d219c3e0 | |||
3ec68daf2e | |||
9fafe02e16 | |||
86fd876850 | |||
b56d9287e4 | |||
a35e235744 | |||
77a44b4490 | |||
594f846943 | |||
b0ffac2f00 | |||
5ba851adff | |||
d7acd7aef6 | |||
860b8efd7d | |||
6ca3fc5ad2 | |||
189af07ff5 | |||
caf7426f86 | |||
75cd7a0f15 | |||
4722bb8fcd | |||
bb752863fa | |||
cf5603c8e3 | |||
77b1011207 | |||
5cadb9e2f9 |
.github
.goreleaser.yamlREADME.mdbackend
Dockerfilepackage-lock.jsonpackage.json
src
app.ts
config
controllers
v1
authController.tsintegrationAuthController.tsintegrationController.tsmembershipOrgController.tsorganizationController.tspasswordController.tssignupController.tsworkspaceController.ts
v2
ee
helpers
integrations
middleware
models
index.tsintegration.tsintegrationAuth.tssecret.tssecretApprovalRequest.tstoken.tstokenData.tsuser.tsworkspace.ts
routes
v1
v2
services
templates
emailMfa.handlebarsemailVerification.handlebarsnewDevice.handlebarsorganizationInvitation.handlebarspasswordReset.handlebarsworkspaceInvitation.handlebars
types/secret
variables
cli
docker-compose.ymldocs
cli
getting-started
images
email-socketlabs-credentials.pngemail-socketlabs-dashboard.pngemail-socketlabs-domains.pngintegrations-aws-access-key-1.pngintegrations-aws-access-key-2.pngintegrations-aws-access-key-3.pngintegrations-aws-iam-1.pngintegrations-aws-parameter-store-auth.pngintegrations-aws-parameter-store-create.pngintegrations-aws-parameter-store-iam-2.pngintegrations-aws-parameter-store-iam-3.pngintegrations-aws-parameter-store.pngintegrations-aws-secret-manager-auth.pngintegrations-aws-secret-manager-create.pngintegrations-aws-secret-manager-iam-2.pngintegrations-aws-secret-manager-iam-3.pngintegrations-aws-secret-manager.pngintegrations-circleci-auth.pngintegrations-circleci-create.pngintegrations-circleci-token.pngintegrations-circleci.pngintegrations-flyio-auth.pngintegrations-flyio-create.pngintegrations-flyio.pngintegrations-heroku-create.pngintegrations-heroku.pngintegrations-netlify-create.pngintegrations-netlify.pngintegrations-render-auth.pngintegrations-render-create.pngintegrations-render.pngintegrations-travisci-auth.pngintegrations-travisci-create.pngintegrations-travisci-token.pngintegrations-travisci.pngintegrations-vercel-create.pngintegrations-vercel.pngintegrations.pngmfa-email.png
integrations
mint.jsonsecurity
self-hosting
frontend
.eslintrc.jstailwind.config.js
.storybook
next-i18next.config.jsnext.config.jspackage-lock.jsonpackage.jsonpublic
data
images
locales
src
components
basic
context/Notifications
dashboard
AddTagsMenu.tsxCommentField.tsxConfirmEnvOverwriteModal.tsxDashboardInputField.tsxDropZone.tsxGenerateSecretMenu.tsxKeyPair.tsxSideBar.tsx
integrations
login
navigation
signup
utilities
SecurityClient.tsattemptLogin.tsattemptLoginMfa.ts
config
cryptography
generateBackupPDF.tssaveTokenToLocalStorage.tssecrets
v2
Button
Card
Checkbox
DeleteActionModal
EmptyState
HoverCard
Input
Menu
Modal
Popover
Select
Skeleton
Table
Tabs
Tag
index.tsxconfig
const.tscontext
ee/components
helpers
hooks
api
index.tsuseLeaveConfirm.tsxlayouts
pages
_app.tsx
reactQuery.tsactivity
api
auth
ChangePassword2.tsCompleteAccountInformationSignup.tsCompleteAccountInformationSignupInvite.tsLogin1.tsLogin2.tsLogout.tsresetPasswordOnAccountRecovery.tsverifyMfaToken.ts
files
integrations
organization
user
workspace
dashboard
home
integrations
[id].tsx
login.tsxpassword-reset.tsxrequestnewinvite.tsxaws-parameter-store
aws-secret-manager
azure-key-vault
circleci
flyio
github
heroku
netlify
render
travisci
vercel
settings
signup.tsxsignupinvite.tsxservices
views/Settings
OrgSettingsPage
OrgSettingsPage.tsx
components
index.tsxPersonalSettingsPage/SecuritySection
ProjectSettingsPage
ProjectSettingsPage.tsx
components
AutoCapitalizationSection
EnvironmentSection
SecretTagsSection
ServiceTokenSection
helm-charts
README.md
infisical
secrets-operator
upload-to-cloudsmith.shi18n
img
k8-operator
22
.github/pull_request_template.md
vendored
Normal file
22
.github/pull_request_template.md
vendored
Normal file
@ -0,0 +1,22 @@
|
||||
# Description 📣
|
||||
|
||||
*Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change.*
|
||||
|
||||
## Type ✨
|
||||
|
||||
- [ ] Bug fix
|
||||
- [ ] New feature
|
||||
- [ ] Breaking change
|
||||
- [ ] Documentation
|
||||
|
||||
# Tests 🛠️
|
||||
|
||||
*Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration. You may want to add screenshots when relevant and possible*
|
||||
|
||||
```sh
|
||||
# Here's some code block to paste some code snippets
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
- [ ] I have read the [contributing guide](https://infisical.com/docs/contributing/overview), agreed and acknowledged the [code of conduct](https://infisical.com/docs/contributing/code-of-conduct). 📝
|
86
.github/values.yaml
vendored
86
.github/values.yaml
vendored
@ -1,11 +1,5 @@
|
||||
#####
|
||||
# INFISICAL K8 DEFAULT VALUES FILE
|
||||
# PLEASE REPLACE VALUES/EDIT AS REQUIRED
|
||||
#####
|
||||
|
||||
nameOverride: ""
|
||||
|
||||
frontend:
|
||||
enabled: true
|
||||
name: frontend
|
||||
podAnnotations: {}
|
||||
deploymentAnnotations:
|
||||
@ -13,17 +7,18 @@ frontend:
|
||||
replicaCount: 2
|
||||
image:
|
||||
repository: infisical/frontend
|
||||
pullPolicy: Always
|
||||
tag: "latest"
|
||||
pullPolicy: Always
|
||||
kubeSecretRef: managed-secret-frontend
|
||||
service:
|
||||
# type of the frontend service
|
||||
type: ClusterIP
|
||||
# define the nodePort if service type is NodePort
|
||||
# nodePort:
|
||||
annotations: {}
|
||||
type: ClusterIP
|
||||
nodePort: ""
|
||||
|
||||
frontendEnvironmentVariables: null
|
||||
|
||||
backend:
|
||||
enabled: true
|
||||
name: backend
|
||||
podAnnotations: {}
|
||||
deploymentAnnotations:
|
||||
@ -31,63 +26,46 @@ backend:
|
||||
replicaCount: 2
|
||||
image:
|
||||
repository: infisical/backend
|
||||
pullPolicy: Always
|
||||
tag: "latest"
|
||||
pullPolicy: Always
|
||||
kubeSecretRef: managed-backend-secret
|
||||
service:
|
||||
annotations: {}
|
||||
type: ClusterIP
|
||||
nodePort: ""
|
||||
|
||||
backendEnvironmentVariables: null
|
||||
|
||||
## Mongo DB persistence
|
||||
mongodb:
|
||||
name: mongodb
|
||||
podAnnotations: {}
|
||||
image:
|
||||
repository: mongo
|
||||
pullPolicy: IfNotPresent
|
||||
tag: "latest"
|
||||
service:
|
||||
annotations: {}
|
||||
enabled: true
|
||||
persistence:
|
||||
enabled: false
|
||||
|
||||
# By default the backend will be connected to a Mongo instance in the cluster.
|
||||
# However, it is recommended to add a managed document DB connection string because the DB instance in the cluster does not have persistence yet ( data will be deleted on next deploy).
|
||||
# Learn about connection string type here https://www.mongodb.com/docs/manual/reference/connection-string/
|
||||
mongodbConnection: {}
|
||||
# externalMongoDBConnectionString: <>
|
||||
## By default the backend will be connected to a Mongo instance within the cluster
|
||||
## However, it is recommended to add a managed document DB connection string for production-use (DBaaS)
|
||||
## Learn about connection string type here https://www.mongodb.com/docs/manual/reference/connection-string/
|
||||
## e.g. "mongodb://<user>:<pass>@<host>:<port>/<database-name>"
|
||||
mongodbConnection:
|
||||
externalMongoDBConnectionString: ""
|
||||
|
||||
ingress:
|
||||
enabled: true
|
||||
annotations:
|
||||
kubernetes.io/ingress.class: "nginx"
|
||||
hostName: gamma.infisical.com # replace with your domain
|
||||
frontend:
|
||||
# cert-manager.io/issuer: letsencrypt-nginx
|
||||
hostName: gamma.infisical.com ## <- Replace with your own domain
|
||||
frontend:
|
||||
path: /
|
||||
pathType: Prefix
|
||||
backend:
|
||||
path: /api
|
||||
pathType: Prefix
|
||||
tls: []
|
||||
tls:
|
||||
[]
|
||||
# - secretName: letsencrypt-nginx
|
||||
# hosts:
|
||||
# - infisical.local
|
||||
|
||||
|
||||
## Complete Ingress example
|
||||
# ingress:
|
||||
# enabled: true
|
||||
# annotations:
|
||||
# kubernetes.io/ingress.class: "nginx"
|
||||
# cert-manager.io/issuer: letsencrypt-nginx
|
||||
# hostName: k8.infisical.com
|
||||
# frontend:
|
||||
# path: /
|
||||
# pathType: Prefix
|
||||
# backend:
|
||||
# path: /api
|
||||
# pathType: Prefix
|
||||
# tls:
|
||||
# - secretName: letsencrypt-nginx
|
||||
# hosts:
|
||||
# - k8.infisical.com
|
||||
|
||||
###
|
||||
### YOU MUST FILL IN ALL SECRETS BELOW
|
||||
###
|
||||
backendEnvironmentVariables: {}
|
||||
|
||||
frontendEnvironmentVariables: {}
|
||||
mailhog:
|
||||
enabled: false
|
||||
|
12
.github/workflows/release_build.yml
vendored
12
.github/workflows/release_build.yml
vendored
@ -4,7 +4,7 @@ on:
|
||||
push:
|
||||
# run only against tags
|
||||
tags:
|
||||
- 'v*'
|
||||
- "v*"
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
@ -18,11 +18,16 @@ jobs:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- run: git fetch --force --tags
|
||||
- run: echo "Ref name ${{github.ref_name}}"
|
||||
- uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: '>=1.19.3'
|
||||
go-version: ">=1.19.3"
|
||||
cache: true
|
||||
cache-dependency-path: cli/go.sum
|
||||
- name: libssl1.1 => libssl1.0-dev for OSXCross
|
||||
@ -45,8 +50,7 @@ jobs:
|
||||
AUR_KEY: ${{ secrets.AUR_KEY }}
|
||||
- uses: actions/setup-python@v4
|
||||
- run: pip install --upgrade cloudsmith-cli
|
||||
- name: Publish to CloudSmith
|
||||
- name: Publish to CloudSmith
|
||||
run: sh cli/upload_to_cloudsmith.sh
|
||||
env:
|
||||
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
|
||||
|
||||
|
@ -68,10 +68,10 @@ archives:
|
||||
|
||||
release:
|
||||
replace_existing_draft: true
|
||||
mode: 'replace'
|
||||
mode: "replace"
|
||||
|
||||
checksum:
|
||||
name_template: 'checksums.txt'
|
||||
name_template: "checksums.txt"
|
||||
|
||||
snapshot:
|
||||
name_template: "{{ incpatch .Version }}-devel"
|
||||
@ -80,8 +80,8 @@ changelog:
|
||||
sort: asc
|
||||
filters:
|
||||
exclude:
|
||||
- '^docs:'
|
||||
- '^test:'
|
||||
- "^docs:"
|
||||
- "^test:"
|
||||
|
||||
# publishers:
|
||||
# - name: fury.io
|
||||
@ -109,30 +109,30 @@ brews:
|
||||
man1.install "manpages/infisical.1.gz"
|
||||
|
||||
nfpms:
|
||||
- id: infisical
|
||||
package_name: infisical
|
||||
builds:
|
||||
- all-other-builds
|
||||
vendor: Infisical, Inc
|
||||
homepage: https://infisical.com/
|
||||
maintainer: Infisical, Inc
|
||||
description: The offical Infisical CLI
|
||||
license: MIT
|
||||
formats:
|
||||
- rpm
|
||||
- deb
|
||||
- apk
|
||||
- archlinux
|
||||
bindir: /usr/bin
|
||||
contents:
|
||||
- src: ./completions/infisical.bash
|
||||
dst: /etc/bash_completion.d/infisical
|
||||
- src: ./completions/infisical.fish
|
||||
dst: /usr/share/fish/vendor_completions.d/infisical.fish
|
||||
- src: ./completions/infisical.zsh
|
||||
dst: /usr/share/zsh/site-functions/_infisical
|
||||
- src: ./manpages/infisical.1.gz
|
||||
dst: /usr/share/man/man1/infisical.1.gz
|
||||
- id: infisical
|
||||
package_name: infisical
|
||||
builds:
|
||||
- all-other-builds
|
||||
vendor: Infisical, Inc
|
||||
homepage: https://infisical.com/
|
||||
maintainer: Infisical, Inc
|
||||
description: The offical Infisical CLI
|
||||
license: MIT
|
||||
formats:
|
||||
- rpm
|
||||
- deb
|
||||
- apk
|
||||
- archlinux
|
||||
bindir: /usr/bin
|
||||
contents:
|
||||
- src: ./completions/infisical.bash
|
||||
dst: /etc/bash_completion.d/infisical
|
||||
- src: ./completions/infisical.fish
|
||||
dst: /usr/share/fish/vendor_completions.d/infisical.fish
|
||||
- src: ./completions/infisical.zsh
|
||||
dst: /usr/share/zsh/site-functions/_infisical
|
||||
- src: ./manpages/infisical.1.gz
|
||||
dst: /usr/share/man/man1/infisical.1.gz
|
||||
|
||||
scoop:
|
||||
bucket:
|
||||
@ -146,15 +146,14 @@ scoop:
|
||||
license: MIT
|
||||
|
||||
aurs:
|
||||
-
|
||||
name: infisical-bin
|
||||
- name: infisical-bin
|
||||
homepage: "https://infisical.com"
|
||||
description: "The official Infisical CLI"
|
||||
maintainers:
|
||||
- Infisical, Inc <support@infisical.com>
|
||||
license: MIT
|
||||
private_key: '{{ .Env.AUR_KEY }}'
|
||||
git_url: 'ssh://aur@aur.archlinux.org/infisical-bin.git'
|
||||
private_key: "{{ .Env.AUR_KEY }}"
|
||||
git_url: "ssh://aur@aur.archlinux.org/infisical-bin.git"
|
||||
package: |-
|
||||
# bin
|
||||
install -Dm755 "./infisical" "${pkgdir}/usr/bin/infisical"
|
||||
@ -169,19 +168,13 @@ aurs:
|
||||
install -Dm644 "./completions/infisical.fish" "${pkgdir}/usr/share/fish/vendor_completions.d/infisical.fish"
|
||||
# man pages
|
||||
install -Dm644 "./manpages/infisical.1.gz" "${pkgdir}/usr/share/man/man1/infisical.1.gz"
|
||||
|
||||
# dockers:
|
||||
# - dockerfile: goreleaser.dockerfile
|
||||
# - dockerfile: cli/docker/Dockerfile
|
||||
# goos: linux
|
||||
# goarch: amd64
|
||||
# ids:
|
||||
# - infisical
|
||||
# image_templates:
|
||||
# - "infisical/cli:{{ .Version }}"
|
||||
# - "infisical/cli:{{ .Major }}.{{ .Minor }}"
|
||||
# - "infisical/cli:{{ .Major }}"
|
||||
# - "infisical/cli:{{ .Version }}"
|
||||
# - "infisical/cli:latest"
|
||||
# build_flag_templates:
|
||||
# - "--label=org.label-schema.schema-version=1.0"
|
||||
# - "--label=org.label-schema.version={{.Version}}"
|
||||
# - "--label=org.label-schema.name={{.ProjectName}}"
|
||||
# - "--platform=linux/amd64"
|
129
README.md
129
README.md
File diff suppressed because one or more lines are too long
@ -1,18 +1,27 @@
|
||||
FROM node:16-bullseye-slim
|
||||
# Build stage
|
||||
FROM node:16-alpine AS build
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY package.json package-lock.json ./
|
||||
|
||||
# RUN npm ci --only-production --ignore-scripts
|
||||
# "prepare": "cd .. && npm install"
|
||||
|
||||
COPY package*.json ./
|
||||
RUN npm ci --only-production
|
||||
|
||||
COPY . .
|
||||
RUN npm run build
|
||||
|
||||
# Production stage
|
||||
FROM node:16-alpine
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY package*.json ./
|
||||
RUN npm ci --only-production
|
||||
|
||||
COPY --from=build /app .
|
||||
|
||||
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
|
||||
CMD node healthcheck.js
|
||||
|
||||
EXPOSE 4000
|
||||
|
||||
CMD ["npm", "run", "start"]
|
||||
CMD ["npm", "run", "start"]
|
7380
backend/package-lock.json
generated
7380
backend/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@ -1,11 +1,19 @@
|
||||
{
|
||||
"dependencies": {
|
||||
"@aws-sdk/client-secrets-manager": "^3.267.0",
|
||||
"@godaddy/terminus": "^4.11.2",
|
||||
"@sentry/node": "^7.21.1",
|
||||
"@sentry/tracing": "^7.21.1",
|
||||
"@octokit/rest": "^19.0.5",
|
||||
"@sentry/node": "^7.14.0",
|
||||
"@sentry/tracing": "^7.19.0",
|
||||
"@types/crypto-js": "^4.1.1",
|
||||
"axios": "^1.2.0",
|
||||
"@types/libsodium-wrappers": "^0.7.10",
|
||||
"await-to-js": "^3.0.0",
|
||||
"aws-sdk": "^2.1311.0",
|
||||
"axios": "^1.1.3",
|
||||
"axios-retry": "^3.4.0",
|
||||
"bcrypt": "^5.1.0",
|
||||
"bigint-conversion": "^2.2.2",
|
||||
"builder-pattern": "^2.2.0",
|
||||
"cookie-parser": "^1.4.6",
|
||||
"cors": "^2.8.5",
|
||||
"crypto-js": "^4.1.1",
|
||||
@ -15,23 +23,32 @@
|
||||
"express-validator": "^6.14.2",
|
||||
"handlebars": "^4.7.7",
|
||||
"helmet": "^5.1.1",
|
||||
"jsonwebtoken": "^8.5.1",
|
||||
"js-yaml": "^4.1.0",
|
||||
"jsonwebtoken": "^9.0.0",
|
||||
"jsrp": "^0.2.4",
|
||||
"mongoose": "^6.7.3",
|
||||
"libsodium-wrappers": "^0.7.10",
|
||||
"lodash": "^4.17.21",
|
||||
"mongoose": "^6.7.2",
|
||||
"nodemailer": "^6.8.0",
|
||||
"posthog-node": "^2.1.0",
|
||||
"posthog-node": "^2.2.2",
|
||||
"query-string": "^7.1.3",
|
||||
"request-ip": "^3.3.0",
|
||||
"rimraf": "^3.0.2",
|
||||
"stripe": "^10.7.0",
|
||||
"swagger-autogen": "^2.22.0",
|
||||
"swagger-ui-express": "^4.6.0",
|
||||
"tweetnacl": "^1.0.3",
|
||||
"tweetnacl-util": "^0.15.1",
|
||||
"typescript": "^4.9.3"
|
||||
"typescript": "^4.9.3",
|
||||
"utility-types": "^3.10.0",
|
||||
"winston": "^3.8.2",
|
||||
"winston-loki": "^6.0.6"
|
||||
},
|
||||
"name": "infisical-api",
|
||||
"version": "1.0.0",
|
||||
"main": "src/index.js",
|
||||
"scripts": {
|
||||
"start": "npm run build && node build/index.js",
|
||||
"start": "node build/index.js",
|
||||
"dev": "nodemon",
|
||||
"swagger-autogen": "node ./swagger/index.ts",
|
||||
"build": "rimraf ./build && tsc && cp -R ./src/templates ./build",
|
||||
@ -102,47 +119,5 @@
|
||||
"suiteNameTemplate": "{filepath}",
|
||||
"classNameTemplate": "{classname}",
|
||||
"titleTemplate": "{title}"
|
||||
},
|
||||
"dependencies": {
|
||||
"@godaddy/terminus": "^4.11.2",
|
||||
"@octokit/rest": "^19.0.5",
|
||||
"@sentry/node": "^7.14.0",
|
||||
"@sentry/tracing": "^7.19.0",
|
||||
"@types/crypto-js": "^4.1.1",
|
||||
"@types/libsodium-wrappers": "^0.7.10",
|
||||
"await-to-js": "^3.0.0",
|
||||
"axios": "^1.1.3",
|
||||
"bcrypt": "^5.1.0",
|
||||
"bigint-conversion": "^2.2.2",
|
||||
"builder-pattern": "^2.2.0",
|
||||
"cookie-parser": "^1.4.6",
|
||||
"cors": "^2.8.5",
|
||||
"crypto-js": "^4.1.1",
|
||||
"dotenv": "^16.0.1",
|
||||
"express": "^4.18.1",
|
||||
"express-rate-limit": "^6.7.0",
|
||||
"express-validator": "^6.14.2",
|
||||
"handlebars": "^4.7.7",
|
||||
"helmet": "^5.1.1",
|
||||
"js-yaml": "^4.1.0",
|
||||
"jsonwebtoken": "^9.0.0",
|
||||
"jsrp": "^0.2.4",
|
||||
"libsodium-wrappers": "^0.7.10",
|
||||
"lodash": "^4.17.21",
|
||||
"mongoose": "^6.7.2",
|
||||
"nodemailer": "^6.8.0",
|
||||
"posthog-node": "^2.2.2",
|
||||
"query-string": "^7.1.3",
|
||||
"request-ip": "^3.3.0",
|
||||
"rimraf": "^3.0.2",
|
||||
"stripe": "^10.7.0",
|
||||
"swagger-autogen": "^2.22.0",
|
||||
"swagger-ui-express": "^4.6.0",
|
||||
"tweetnacl": "^1.0.3",
|
||||
"tweetnacl-util": "^0.15.1",
|
||||
"typescript": "^4.9.3",
|
||||
"utility-types": "^3.10.0",
|
||||
"winston": "^3.8.2",
|
||||
"winston-loki": "^6.0.6"
|
||||
}
|
||||
}
|
||||
|
@ -1,7 +1,7 @@
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const { patchRouterParam } = require('./utils/patchAsyncRoutes');
|
||||
|
||||
import express, { Request, Response } from 'express';
|
||||
import express from 'express';
|
||||
import helmet from 'helmet';
|
||||
import cors from 'cors';
|
||||
import cookieParser from 'cookie-parser';
|
||||
@ -42,6 +42,8 @@ import {
|
||||
integrationAuth as v1IntegrationAuthRouter
|
||||
} from './routes/v1';
|
||||
import {
|
||||
signup as v2SignupRouter,
|
||||
auth as v2AuthRouter,
|
||||
users as v2UsersRouter,
|
||||
organizations as v2OrganizationsRouter,
|
||||
workspace as v2WorkspaceRouter,
|
||||
@ -110,6 +112,8 @@ app.use('/api/v1/integration', v1IntegrationRouter);
|
||||
app.use('/api/v1/integration-auth', v1IntegrationAuthRouter);
|
||||
|
||||
// v2 routes
|
||||
app.use('/api/v2/signup', v2SignupRouter);
|
||||
app.use('/api/v2/auth', v2AuthRouter);
|
||||
app.use('/api/v2/users', v2UsersRouter);
|
||||
app.use('/api/v2/organizations', v2OrganizationsRouter);
|
||||
app.use('/api/v2/workspace', v2EnvironmentRouter);
|
||||
|
@ -1,9 +1,12 @@
|
||||
const PORT = process.env.PORT || 4000;
|
||||
const EMAIL_TOKEN_LIFETIME = process.env.EMAIL_TOKEN_LIFETIME! || '86400';
|
||||
const EMAIL_TOKEN_LIFETIME = parseInt(process.env.EMAIL_TOKEN_LIFETIME! || '86400');
|
||||
const INVITE_ONLY_SIGNUP = process.env.INVITE_ONLY_SIGNUP == undefined ? false : process.env.INVITE_ONLY_SIGNUP
|
||||
const ENCRYPTION_KEY = process.env.ENCRYPTION_KEY!;
|
||||
const SALT_ROUNDS = parseInt(process.env.SALT_ROUNDS!) || 10;
|
||||
const JWT_AUTH_LIFETIME = process.env.JWT_AUTH_LIFETIME! || '10d';
|
||||
const JWT_AUTH_SECRET = process.env.JWT_AUTH_SECRET!;
|
||||
const JWT_MFA_LIFETIME = process.env.JWT_MFA_LIFETIME! || '5m';
|
||||
const JWT_MFA_SECRET = process.env.JWT_MFA_SECRET!;
|
||||
const JWT_REFRESH_LIFETIME = process.env.JWT_REFRESH_LIFETIME! || '90d';
|
||||
const JWT_REFRESH_SECRET = process.env.JWT_REFRESH_SECRET!;
|
||||
const JWT_SERVICE_SECRET = process.env.JWT_SERVICE_SECRET!;
|
||||
@ -24,7 +27,7 @@ const CLIENT_SECRET_HEROKU = process.env.CLIENT_SECRET_HEROKU!;
|
||||
const CLIENT_SECRET_VERCEL = process.env.CLIENT_SECRET_VERCEL!;
|
||||
const CLIENT_SECRET_NETLIFY = process.env.CLIENT_SECRET_NETLIFY!;
|
||||
const CLIENT_SECRET_GITHUB = process.env.CLIENT_SECRET_GITHUB!;
|
||||
const CLIENT_SLUG_VERCEL= process.env.CLIENT_SLUG_VERCEL!;
|
||||
const CLIENT_SLUG_VERCEL = process.env.CLIENT_SLUG_VERCEL!;
|
||||
const POSTHOG_HOST = process.env.POSTHOG_HOST! || 'https://app.posthog.com';
|
||||
const POSTHOG_PROJECT_API_KEY =
|
||||
process.env.POSTHOG_PROJECT_API_KEY! ||
|
||||
@ -50,10 +53,13 @@ const LICENSE_KEY = process.env.LICENSE_KEY!;
|
||||
export {
|
||||
PORT,
|
||||
EMAIL_TOKEN_LIFETIME,
|
||||
INVITE_ONLY_SIGNUP,
|
||||
ENCRYPTION_KEY,
|
||||
SALT_ROUNDS,
|
||||
JWT_AUTH_LIFETIME,
|
||||
JWT_AUTH_SECRET,
|
||||
JWT_MFA_LIFETIME,
|
||||
JWT_MFA_SECRET,
|
||||
JWT_REFRESH_LIFETIME,
|
||||
JWT_REFRESH_SECRET,
|
||||
JWT_SERVICE_SECRET,
|
||||
|
16
backend/src/config/request.ts
Normal file
16
backend/src/config/request.ts
Normal file
@ -0,0 +1,16 @@
|
||||
import axios from 'axios';
|
||||
import axiosRetry from 'axios-retry';
|
||||
|
||||
const axiosInstance = axios.create();
|
||||
|
||||
// add retry functionality to the axios instance
|
||||
axiosRetry(axiosInstance, {
|
||||
retries: 3,
|
||||
retryDelay: axiosRetry.exponentialDelay, // exponential back-off delay between retries
|
||||
retryCondition: (error) => {
|
||||
// only retry if the error is a network error or a 5xx server error
|
||||
return axiosRetry.isNetworkError(error) || axiosRetry.isRetryableError(error);
|
||||
},
|
||||
});
|
||||
|
||||
export default axiosInstance;
|
@ -5,7 +5,8 @@ import * as Sentry from '@sentry/node';
|
||||
import * as bigintConversion from 'bigint-conversion';
|
||||
const jsrp = require('jsrp');
|
||||
import { User, LoginSRPDetail } from '../../models';
|
||||
import { createToken, issueTokens, clearTokens } from '../../helpers/auth';
|
||||
import { createToken, issueAuthTokens, clearTokens } from '../../helpers/auth';
|
||||
import { checkUserDevice } from '../../helpers/user';
|
||||
import {
|
||||
ACTION_LOGIN,
|
||||
ACTION_LOGOUT
|
||||
@ -111,7 +112,14 @@ export const login2 = async (req: Request, res: Response) => {
|
||||
// compare server and client shared keys
|
||||
if (server.checkClientProof(clientProof)) {
|
||||
// issue tokens
|
||||
const tokens = await issueTokens({ userId: user._id.toString() });
|
||||
|
||||
await checkUserDevice({
|
||||
user,
|
||||
ip: req.ip,
|
||||
userAgent: req.headers['user-agent'] ?? ''
|
||||
});
|
||||
|
||||
const tokens = await issueAuthTokens({ userId: user._id.toString() });
|
||||
|
||||
// store (refresh) token in httpOnly cookie
|
||||
res.cookie('jid', tokens.refreshToken, {
|
||||
|
@ -35,14 +35,11 @@ export const getIntegrationAuth = async (req: Request, res: Response) => {
|
||||
});
|
||||
}
|
||||
|
||||
export const getIntegrationOptions = async (
|
||||
req: Request,
|
||||
res: Response
|
||||
) => {
|
||||
return res.status(200).send({
|
||||
integrationOptions: INTEGRATION_OPTIONS
|
||||
});
|
||||
}
|
||||
export const getIntegrationOptions = async (req: Request, res: Response) => {
|
||||
return res.status(200).send({
|
||||
integrationOptions: INTEGRATION_OPTIONS,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Perform OAuth2 code-token exchange as part of integration [integration] for workspace with id [workspaceId]
|
||||
@ -84,23 +81,28 @@ export const oAuthExchange = async (
|
||||
};
|
||||
|
||||
/**
|
||||
* Save integration access token as part of integration [integration] for workspace with id [workspaceId]
|
||||
* Save integration access token and (optionally) access id as part of integration
|
||||
* [integration] for workspace with id [workspaceId]
|
||||
* @param req
|
||||
* @param res
|
||||
*/
|
||||
export const saveIntegrationAccessToken = async (
|
||||
req: Request,
|
||||
res: Response
|
||||
req: Request,
|
||||
res: Response
|
||||
) => {
|
||||
// TODO: refactor
|
||||
// TODO: check if access token is valid for each integration
|
||||
|
||||
let integrationAuth;
|
||||
try {
|
||||
const {
|
||||
workspaceId,
|
||||
accessId,
|
||||
accessToken,
|
||||
integration
|
||||
}: {
|
||||
workspaceId: string;
|
||||
accessId: string | null;
|
||||
accessToken: string;
|
||||
integration: string;
|
||||
} = req.body;
|
||||
@ -123,9 +125,10 @@ export const saveIntegrationAccessToken = async (
|
||||
upsert: true
|
||||
});
|
||||
|
||||
// encrypt and save integration access token
|
||||
// encrypt and save integration access details
|
||||
integrationAuth = await IntegrationService.setIntegrationAuthAccess({
|
||||
integrationAuthId: integrationAuth._id.toString(),
|
||||
accessId,
|
||||
accessToken,
|
||||
accessExpiresAt: undefined
|
||||
});
|
||||
@ -151,23 +154,23 @@ export const saveIntegrationAccessToken = async (
|
||||
* @returns
|
||||
*/
|
||||
export const getIntegrationAuthApps = async (req: Request, res: Response) => {
|
||||
let apps;
|
||||
try {
|
||||
apps = await getApps({
|
||||
integrationAuth: req.integrationAuth,
|
||||
accessToken: req.accessToken
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to get integration authorization applications'
|
||||
});
|
||||
}
|
||||
let apps;
|
||||
try {
|
||||
apps = await getApps({
|
||||
integrationAuth: req.integrationAuth,
|
||||
accessToken: req.accessToken,
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to get integration authorization applications",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
apps
|
||||
});
|
||||
return res.status(200).send({
|
||||
apps,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@ -177,21 +180,21 @@ export const getIntegrationAuthApps = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const deleteIntegrationAuth = async (req: Request, res: Response) => {
|
||||
let integrationAuth;
|
||||
try {
|
||||
integrationAuth = await revokeAccess({
|
||||
integrationAuth: req.integrationAuth,
|
||||
accessToken: req.accessToken
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to delete integration authorization'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integrationAuth
|
||||
});
|
||||
}
|
||||
let integrationAuth;
|
||||
try {
|
||||
integrationAuth = await revokeAccess({
|
||||
integrationAuth: req.integrationAuth,
|
||||
accessToken: req.accessToken,
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to delete integration authorization",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integrationAuth,
|
||||
});
|
||||
};
|
||||
|
@ -12,9 +12,9 @@ import { eventPushSecrets } from '../../events';
|
||||
|
||||
/**
|
||||
* Create/initialize an (empty) integration for integration authorization
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const createIntegration = async (req: Request, res: Response) => {
|
||||
let integration;
|
||||
@ -26,7 +26,9 @@ export const createIntegration = async (req: Request, res: Response) => {
|
||||
isActive,
|
||||
sourceEnvironment,
|
||||
targetEnvironment,
|
||||
owner
|
||||
owner,
|
||||
path,
|
||||
region
|
||||
} = req.body;
|
||||
|
||||
// TODO: validate [sourceEnvironment] and [targetEnvironment]
|
||||
@ -40,6 +42,8 @@ export const createIntegration = async (req: Request, res: Response) => {
|
||||
appId,
|
||||
targetEnvironment,
|
||||
owner,
|
||||
path,
|
||||
region,
|
||||
integration: req.integrationAuth.integration,
|
||||
integrationAuth: new Types.ObjectId(integrationAuthId)
|
||||
}).save();
|
||||
@ -61,10 +65,10 @@ export const createIntegration = async (req: Request, res: Response) => {
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integration
|
||||
});
|
||||
}
|
||||
return res.status(200).send({
|
||||
integration,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Change environment or name of integration with id [integrationId]
|
||||
@ -73,57 +77,57 @@ export const createIntegration = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const updateIntegration = async (req: Request, res: Response) => {
|
||||
let integration;
|
||||
|
||||
// TODO: add integration-specific validation to ensure that each
|
||||
// integration has the correct fields populated in [Integration]
|
||||
|
||||
try {
|
||||
const {
|
||||
environment,
|
||||
isActive,
|
||||
app,
|
||||
appId,
|
||||
targetEnvironment,
|
||||
owner, // github-specific integration param
|
||||
} = req.body;
|
||||
|
||||
integration = await Integration.findOneAndUpdate(
|
||||
{
|
||||
_id: req.integration._id
|
||||
},
|
||||
{
|
||||
environment,
|
||||
isActive,
|
||||
app,
|
||||
appId,
|
||||
targetEnvironment,
|
||||
owner
|
||||
},
|
||||
{
|
||||
new: true
|
||||
}
|
||||
);
|
||||
|
||||
if (integration) {
|
||||
// trigger event - push secrets
|
||||
EventService.handleEvent({
|
||||
event: eventPushSecrets({
|
||||
workspaceId: integration.workspace.toString()
|
||||
})
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to update integration'
|
||||
});
|
||||
}
|
||||
let integration;
|
||||
|
||||
return res.status(200).send({
|
||||
integration
|
||||
});
|
||||
// TODO: add integration-specific validation to ensure that each
|
||||
// integration has the correct fields populated in [Integration]
|
||||
|
||||
try {
|
||||
const {
|
||||
environment,
|
||||
isActive,
|
||||
app,
|
||||
appId,
|
||||
targetEnvironment,
|
||||
owner, // github-specific integration param
|
||||
} = req.body;
|
||||
|
||||
integration = await Integration.findOneAndUpdate(
|
||||
{
|
||||
_id: req.integration._id,
|
||||
},
|
||||
{
|
||||
environment,
|
||||
isActive,
|
||||
app,
|
||||
appId,
|
||||
targetEnvironment,
|
||||
owner,
|
||||
},
|
||||
{
|
||||
new: true,
|
||||
}
|
||||
);
|
||||
|
||||
if (integration) {
|
||||
// trigger event - push secrets
|
||||
EventService.handleEvent({
|
||||
event: eventPushSecrets({
|
||||
workspaceId: integration.workspace.toString(),
|
||||
}),
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to update integration",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integration,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@ -134,24 +138,24 @@ export const updateIntegration = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const deleteIntegration = async (req: Request, res: Response) => {
|
||||
let integration;
|
||||
try {
|
||||
const { integrationId } = req.params;
|
||||
let integration;
|
||||
try {
|
||||
const { integrationId } = req.params;
|
||||
|
||||
integration = await Integration.findOneAndDelete({
|
||||
_id: integrationId
|
||||
});
|
||||
|
||||
if (!integration) throw new Error('Failed to find integration');
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to delete integration'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integration
|
||||
});
|
||||
integration = await Integration.findOneAndDelete({
|
||||
_id: integrationId,
|
||||
});
|
||||
|
||||
if (!integration) throw new Error("Failed to find integration");
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to delete integration",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integration,
|
||||
});
|
||||
};
|
||||
|
@ -1,14 +1,13 @@
|
||||
import { Request, Response } from 'express';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import crypto from 'crypto';
|
||||
import { SITE_URL, JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET } from '../../config';
|
||||
import { MembershipOrg, Organization, User, Token } from '../../models';
|
||||
import { MembershipOrg, Organization, User } from '../../models';
|
||||
import { deleteMembershipOrg as deleteMemberFromOrg } from '../../helpers/membershipOrg';
|
||||
import { checkEmailVerification } from '../../helpers/signup';
|
||||
import { createToken } from '../../helpers/auth';
|
||||
import { updateSubscriptionOrgQuantity } from '../../helpers/organization';
|
||||
import { sendMail } from '../../helpers/nodemailer';
|
||||
import { OWNER, ADMIN, MEMBER, ACCEPTED, INVITED } from '../../variables';
|
||||
import { TokenService } from '../../services';
|
||||
import { OWNER, ADMIN, MEMBER, ACCEPTED, INVITED, TOKEN_EMAIL_ORG_INVITATION } from '../../variables';
|
||||
|
||||
/**
|
||||
* Delete organization membership with id [membershipOrgId] from organization
|
||||
@ -113,14 +112,14 @@ export const inviteUserToOrganization = async (req: Request, res: Response) => {
|
||||
if (!membershipOrg) {
|
||||
throw new Error('Failed to validate organization membership');
|
||||
}
|
||||
|
||||
|
||||
invitee = await User.findOne({
|
||||
email: inviteeEmail
|
||||
}).select('+publicKey');
|
||||
|
||||
if (invitee) {
|
||||
// case: invitee is an existing user
|
||||
|
||||
|
||||
inviteeMembershipOrg = await MembershipOrg.findOne({
|
||||
user: invitee._id,
|
||||
organization: organizationId
|
||||
@ -163,17 +162,11 @@ export const inviteUserToOrganization = async (req: Request, res: Response) => {
|
||||
const organization = await Organization.findOne({ _id: organizationId });
|
||||
|
||||
if (organization) {
|
||||
const token = crypto.randomBytes(16).toString('hex');
|
||||
|
||||
await Token.findOneAndUpdate(
|
||||
{ email: inviteeEmail },
|
||||
{
|
||||
email: inviteeEmail,
|
||||
token,
|
||||
createdAt: new Date()
|
||||
},
|
||||
{ upsert: true, new: true }
|
||||
);
|
||||
const token = await TokenService.createToken({
|
||||
type: TOKEN_EMAIL_ORG_INVITATION,
|
||||
email: inviteeEmail,
|
||||
organizationId: organization._id
|
||||
});
|
||||
|
||||
await sendMail({
|
||||
template: 'organizationInvitation.handlebars',
|
||||
@ -225,10 +218,12 @@ export const verifyUserToOrganization = async (req: Request, res: Response) => {
|
||||
|
||||
if (!membershipOrg)
|
||||
throw new Error('Failed to find any invitations for email');
|
||||
|
||||
await checkEmailVerification({
|
||||
|
||||
await TokenService.validateToken({
|
||||
type: TOKEN_EMAIL_ORG_INVITATION,
|
||||
email,
|
||||
code
|
||||
organizationId: membershipOrg.organization,
|
||||
token: code
|
||||
});
|
||||
|
||||
if (user && user?.publicKey) {
|
||||
@ -241,7 +236,7 @@ export const verifyUserToOrganization = async (req: Request, res: Response) => {
|
||||
message: 'Successfully verified email',
|
||||
user,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (!user) {
|
||||
// initialize user account
|
||||
|
@ -14,11 +14,13 @@ import {
|
||||
MembershipOrg,
|
||||
Organization,
|
||||
Workspace,
|
||||
IncidentContactOrg
|
||||
IncidentContactOrg,
|
||||
IMembershipOrg
|
||||
} from '../../models';
|
||||
import { createOrganization as create } from '../../helpers/organization';
|
||||
import { addMembershipsOrg } from '../../helpers/membershipOrg';
|
||||
import { OWNER, ACCEPTED } from '../../variables';
|
||||
import _ from 'lodash';
|
||||
|
||||
export const getOrganizations = async (req: Request, res: Response) => {
|
||||
let organizations;
|
||||
@ -382,3 +384,44 @@ export const getOrganizationSubscriptions = async (
|
||||
subscriptions
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Given a org id, return the projects each member of the org belongs to
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const getOrganizationMembersAndTheirWorkspaces = async (
|
||||
req: Request,
|
||||
res: Response
|
||||
) => {
|
||||
const { organizationId } = req.params;
|
||||
|
||||
const workspacesSet = (
|
||||
await Workspace.find(
|
||||
{
|
||||
organization: organizationId
|
||||
},
|
||||
'_id'
|
||||
)
|
||||
).map((w) => w._id.toString());
|
||||
|
||||
const memberships = (
|
||||
await Membership.find({
|
||||
workspace: { $in: workspacesSet }
|
||||
}).populate('workspace')
|
||||
);
|
||||
const userToWorkspaceIds: any = {};
|
||||
|
||||
memberships.forEach(membership => {
|
||||
const user = membership.user.toString();
|
||||
if (userToWorkspaceIds[user]) {
|
||||
userToWorkspaceIds[user].push(membership.workspace);
|
||||
} else {
|
||||
userToWorkspaceIds[user] = [membership.workspace];
|
||||
}
|
||||
});
|
||||
|
||||
return res.json(userToWorkspaceIds);
|
||||
};
|
@ -1,14 +1,14 @@
|
||||
import { Request, Response } from 'express';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import crypto from 'crypto';
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const jsrp = require('jsrp');
|
||||
import * as bigintConversion from 'bigint-conversion';
|
||||
import { User, Token, BackupPrivateKey, LoginSRPDetail } from '../../models';
|
||||
import { checkEmailVerification } from '../../helpers/signup';
|
||||
import { User, BackupPrivateKey, LoginSRPDetail } from '../../models';
|
||||
import { createToken } from '../../helpers/auth';
|
||||
import { sendMail } from '../../helpers/nodemailer';
|
||||
import { TokenService } from '../../services';
|
||||
import { JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET, SITE_URL } from '../../config';
|
||||
import { TOKEN_EMAIL_PASSWORD_RESET } from '../../variables';
|
||||
import { BadRequestError } from '../../utils/errors';
|
||||
|
||||
/**
|
||||
@ -31,19 +31,12 @@ export const emailPasswordReset = async (req: Request, res: Response) => {
|
||||
error: 'Failed to send email verification for password reset'
|
||||
});
|
||||
}
|
||||
|
||||
const token = crypto.randomBytes(16).toString('hex');
|
||||
|
||||
await Token.findOneAndUpdate(
|
||||
{ email },
|
||||
{
|
||||
email,
|
||||
token,
|
||||
createdAt: new Date()
|
||||
},
|
||||
{ upsert: true, new: true }
|
||||
);
|
||||
|
||||
|
||||
const token = await TokenService.createToken({
|
||||
type: TOKEN_EMAIL_PASSWORD_RESET,
|
||||
email
|
||||
});
|
||||
|
||||
await sendMail({
|
||||
template: 'passwordReset.handlebars',
|
||||
subjectLine: 'Infisical password reset',
|
||||
@ -54,7 +47,6 @@ export const emailPasswordReset = async (req: Request, res: Response) => {
|
||||
callback_url: SITE_URL + '/password-reset'
|
||||
}
|
||||
});
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
@ -87,10 +79,11 @@ export const emailPasswordResetVerify = async (req: Request, res: Response) => {
|
||||
error: 'Failed email verification for password reset'
|
||||
});
|
||||
}
|
||||
|
||||
await checkEmailVerification({
|
||||
|
||||
await TokenService.validateToken({
|
||||
type: TOKEN_EMAIL_PASSWORD_RESET,
|
||||
email,
|
||||
code
|
||||
token: code
|
||||
});
|
||||
|
||||
// generate temporary password-reset token
|
||||
@ -173,8 +166,18 @@ export const srp1 = async (req: Request, res: Response) => {
|
||||
*/
|
||||
export const changePassword = async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { clientProof, encryptedPrivateKey, iv, tag, salt, verifier } =
|
||||
req.body;
|
||||
const {
|
||||
clientProof,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
encryptedPrivateKey,
|
||||
encryptedPrivateKeyIV,
|
||||
encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier
|
||||
} = req.body;
|
||||
|
||||
const user = await User.findOne({
|
||||
email: req.user.email
|
||||
}).select('+salt +verifier');
|
||||
@ -204,9 +207,13 @@ export const changePassword = async (req: Request, res: Response) => {
|
||||
await User.findByIdAndUpdate(
|
||||
req.user._id.toString(),
|
||||
{
|
||||
encryptionVersion: 2,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
encryptedPrivateKey,
|
||||
iv,
|
||||
tag,
|
||||
iv: encryptedPrivateKeyIV,
|
||||
tag: encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier
|
||||
},
|
||||
@ -340,9 +347,12 @@ export const getBackupPrivateKey = async (req: Request, res: Response) => {
|
||||
export const resetPassword = async (req: Request, res: Response) => {
|
||||
try {
|
||||
const {
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
encryptedPrivateKey,
|
||||
iv,
|
||||
tag,
|
||||
encryptedPrivateKeyIV,
|
||||
encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier,
|
||||
} = req.body;
|
||||
@ -350,9 +360,13 @@ export const resetPassword = async (req: Request, res: Response) => {
|
||||
await User.findByIdAndUpdate(
|
||||
req.user._id.toString(),
|
||||
{
|
||||
encryptionVersion: 2,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
encryptedPrivateKey,
|
||||
iv,
|
||||
tag,
|
||||
iv: encryptedPrivateKeyIV,
|
||||
tag: encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier
|
||||
},
|
||||
|
@ -1,16 +1,13 @@
|
||||
import { Request, Response } from 'express';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { NODE_ENV, JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET } from '../../config';
|
||||
import { User, MembershipOrg } from '../../models';
|
||||
import { completeAccount } from '../../helpers/user';
|
||||
import { User } from '../../models';
|
||||
import { JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET, INVITE_ONLY_SIGNUP } from '../../config';
|
||||
import {
|
||||
sendEmailVerification,
|
||||
checkEmailVerification,
|
||||
initializeDefaultOrg
|
||||
} from '../../helpers/signup';
|
||||
import { issueTokens, createToken } from '../../helpers/auth';
|
||||
import { INVITED, ACCEPTED } from '../../variables';
|
||||
import axios from 'axios';
|
||||
import { createToken } from '../../helpers/auth';
|
||||
import { BadRequestError } from '../../utils/errors';
|
||||
|
||||
/**
|
||||
* Signup step 1: Initialize account for user under email [email] and send a verification code
|
||||
@ -24,6 +21,14 @@ export const beginEmailSignup = async (req: Request, res: Response) => {
|
||||
try {
|
||||
email = req.body.email;
|
||||
|
||||
if (INVITE_ONLY_SIGNUP) {
|
||||
// Only one user can create an account without being invited. The rest need to be invited in order to make an account
|
||||
const userCount = await User.countDocuments({})
|
||||
if (userCount != 0) {
|
||||
throw BadRequestError({ message: "New user sign ups are not allowed at this time. You must be invited to sign up." })
|
||||
}
|
||||
}
|
||||
|
||||
const user = await User.findOne({ email }).select('+publicKey');
|
||||
if (user && user?.publicKey) {
|
||||
// case: user has already completed account
|
||||
@ -103,201 +108,3 @@ export const verifyEmailSignup = async (req: Request, res: Response) => {
|
||||
token
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Complete setting up user by adding their personal and auth information as part of the
|
||||
* signup flow
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const completeAccountSignup = async (req: Request, res: Response) => {
|
||||
let user, token, refreshToken;
|
||||
try {
|
||||
const {
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
publicKey,
|
||||
encryptedPrivateKey,
|
||||
iv,
|
||||
tag,
|
||||
salt,
|
||||
verifier,
|
||||
organizationName
|
||||
} = req.body;
|
||||
|
||||
// get user
|
||||
user = await User.findOne({ email });
|
||||
|
||||
if (!user || (user && user?.publicKey)) {
|
||||
// case 1: user doesn't exist.
|
||||
// case 2: user has already completed account
|
||||
return res.status(403).send({
|
||||
error: 'Failed to complete account for complete user'
|
||||
});
|
||||
}
|
||||
|
||||
// complete setting up user's account
|
||||
user = await completeAccount({
|
||||
userId: user._id.toString(),
|
||||
firstName,
|
||||
lastName,
|
||||
publicKey,
|
||||
encryptedPrivateKey,
|
||||
iv,
|
||||
tag,
|
||||
salt,
|
||||
verifier
|
||||
});
|
||||
|
||||
if (!user)
|
||||
throw new Error('Failed to complete account for non-existent user'); // ensure user is non-null
|
||||
|
||||
// initialize default organization and workspace
|
||||
await initializeDefaultOrg({
|
||||
organizationName,
|
||||
user
|
||||
});
|
||||
|
||||
// update organization membership statuses that are
|
||||
// invited to completed with user attached
|
||||
await MembershipOrg.updateMany(
|
||||
{
|
||||
inviteEmail: email,
|
||||
status: INVITED
|
||||
},
|
||||
{
|
||||
user,
|
||||
status: ACCEPTED
|
||||
}
|
||||
);
|
||||
|
||||
// issue tokens
|
||||
const tokens = await issueTokens({
|
||||
userId: user._id.toString()
|
||||
});
|
||||
|
||||
token = tokens.token;
|
||||
refreshToken = tokens.refreshToken;
|
||||
|
||||
// sending a welcome email to new users
|
||||
if (process.env.LOOPS_API_KEY) {
|
||||
await axios.post("https://app.loops.so/api/v1/events/send", {
|
||||
"email": email,
|
||||
"eventName": "Sign Up",
|
||||
"firstName": firstName,
|
||||
"lastName": lastName
|
||||
}, {
|
||||
headers: {
|
||||
"Accept": "application/json",
|
||||
"Authorization": "Bearer " + process.env.LOOPS_API_KEY
|
||||
},
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to complete account setup'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
message: 'Successfully set up account',
|
||||
user,
|
||||
token,
|
||||
refreshToken
|
||||
});
|
||||
};
|
||||
/**
|
||||
* Complete setting up user by adding their personal and auth information as part of the
|
||||
* invite flow
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const completeAccountInvite = async (req: Request, res: Response) => {
|
||||
let user, token, refreshToken;
|
||||
try {
|
||||
const {
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
publicKey,
|
||||
encryptedPrivateKey,
|
||||
iv,
|
||||
tag,
|
||||
salt,
|
||||
verifier
|
||||
} = req.body;
|
||||
|
||||
// get user
|
||||
user = await User.findOne({ email });
|
||||
|
||||
if (!user || (user && user?.publicKey)) {
|
||||
// case 1: user doesn't exist.
|
||||
// case 2: user has already completed account
|
||||
return res.status(403).send({
|
||||
error: 'Failed to complete account for complete user'
|
||||
});
|
||||
}
|
||||
|
||||
const membershipOrg = await MembershipOrg.findOne({
|
||||
inviteEmail: email,
|
||||
status: INVITED
|
||||
});
|
||||
|
||||
if (!membershipOrg) throw new Error('Failed to find invitations for email');
|
||||
|
||||
// complete setting up user's account
|
||||
user = await completeAccount({
|
||||
userId: user._id.toString(),
|
||||
firstName,
|
||||
lastName,
|
||||
publicKey,
|
||||
encryptedPrivateKey,
|
||||
iv,
|
||||
tag,
|
||||
salt,
|
||||
verifier
|
||||
});
|
||||
|
||||
if (!user)
|
||||
throw new Error('Failed to complete account for non-existent user');
|
||||
|
||||
// update organization membership statuses that are
|
||||
// invited to completed with user attached
|
||||
await MembershipOrg.updateMany(
|
||||
{
|
||||
inviteEmail: email,
|
||||
status: INVITED
|
||||
},
|
||||
{
|
||||
user,
|
||||
status: ACCEPTED
|
||||
}
|
||||
);
|
||||
|
||||
// issue tokens
|
||||
const tokens = await issueTokens({
|
||||
userId: user._id.toString()
|
||||
});
|
||||
|
||||
token = tokens.token;
|
||||
refreshToken = tokens.refreshToken;
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to complete account setup'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
message: 'Successfully set up account',
|
||||
user,
|
||||
token,
|
||||
refreshToken
|
||||
});
|
||||
};
|
||||
|
@ -1,21 +1,21 @@
|
||||
import { Request, Response } from 'express';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Request, Response } from "express";
|
||||
import * as Sentry from "@sentry/node";
|
||||
import {
|
||||
Workspace,
|
||||
Membership,
|
||||
MembershipOrg,
|
||||
Integration,
|
||||
IntegrationAuth,
|
||||
IUser,
|
||||
ServiceToken,
|
||||
ServiceTokenData
|
||||
} from '../../models';
|
||||
Workspace,
|
||||
Membership,
|
||||
MembershipOrg,
|
||||
Integration,
|
||||
IntegrationAuth,
|
||||
IUser,
|
||||
ServiceToken,
|
||||
ServiceTokenData,
|
||||
} from "../../models";
|
||||
import {
|
||||
createWorkspace as create,
|
||||
deleteWorkspace as deleteWork
|
||||
} from '../../helpers/workspace';
|
||||
import { addMemberships } from '../../helpers/membership';
|
||||
import { ADMIN } from '../../variables';
|
||||
createWorkspace as create,
|
||||
deleteWorkspace as deleteWork,
|
||||
} from "../../helpers/workspace";
|
||||
import { addMemberships } from "../../helpers/membership";
|
||||
import { ADMIN } from "../../variables";
|
||||
|
||||
/**
|
||||
* Return public keys of members of workspace with id [workspaceId]
|
||||
@ -24,32 +24,31 @@ import { ADMIN } from '../../variables';
|
||||
* @returns
|
||||
*/
|
||||
export const getWorkspacePublicKeys = async (req: Request, res: Response) => {
|
||||
let publicKeys;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
let publicKeys;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
|
||||
publicKeys = (
|
||||
await Membership.find({
|
||||
workspace: workspaceId
|
||||
}).populate<{ user: IUser }>('user', 'publicKey')
|
||||
)
|
||||
.map((member) => {
|
||||
return {
|
||||
publicKey: member.user.publicKey,
|
||||
userId: member.user._id
|
||||
};
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to get workspace member public keys'
|
||||
});
|
||||
}
|
||||
publicKeys = (
|
||||
await Membership.find({
|
||||
workspace: workspaceId,
|
||||
}).populate<{ user: IUser }>("user", "publicKey")
|
||||
).map((member) => {
|
||||
return {
|
||||
publicKey: member.user.publicKey,
|
||||
userId: member.user._id,
|
||||
};
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to get workspace member public keys",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
publicKeys
|
||||
});
|
||||
return res.status(200).send({
|
||||
publicKeys,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@ -59,24 +58,24 @@ export const getWorkspacePublicKeys = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const getWorkspaceMemberships = async (req: Request, res: Response) => {
|
||||
let users;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
let users;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
|
||||
users = await Membership.find({
|
||||
workspace: workspaceId
|
||||
}).populate('user', '+publicKey');
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to get workspace members'
|
||||
});
|
||||
}
|
||||
users = await Membership.find({
|
||||
workspace: workspaceId,
|
||||
}).populate("user", "+publicKey");
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to get workspace members",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
users
|
||||
});
|
||||
return res.status(200).send({
|
||||
users,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@ -86,24 +85,24 @@ export const getWorkspaceMemberships = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const getWorkspaces = async (req: Request, res: Response) => {
|
||||
let workspaces;
|
||||
try {
|
||||
workspaces = (
|
||||
await Membership.find({
|
||||
user: req.user._id
|
||||
}).populate('workspace')
|
||||
).map((m) => m.workspace);
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to get workspaces'
|
||||
});
|
||||
}
|
||||
let workspaces;
|
||||
try {
|
||||
workspaces = (
|
||||
await Membership.find({
|
||||
user: req.user._id,
|
||||
}).populate("workspace")
|
||||
).map((m) => m.workspace);
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to get workspaces",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
workspaces
|
||||
});
|
||||
return res.status(200).send({
|
||||
workspaces,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@ -113,24 +112,24 @@ export const getWorkspaces = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const getWorkspace = async (req: Request, res: Response) => {
|
||||
let workspace;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
let workspace;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
|
||||
workspace = await Workspace.findOne({
|
||||
_id: workspaceId
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to get workspace'
|
||||
});
|
||||
}
|
||||
workspace = await Workspace.findOne({
|
||||
_id: workspaceId,
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to get workspace",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
workspace
|
||||
});
|
||||
return res.status(200).send({
|
||||
workspace,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@ -141,46 +140,46 @@ export const getWorkspace = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const createWorkspace = async (req: Request, res: Response) => {
|
||||
let workspace;
|
||||
try {
|
||||
const { workspaceName, organizationId } = req.body;
|
||||
let workspace;
|
||||
try {
|
||||
const { workspaceName, organizationId } = req.body;
|
||||
|
||||
// validate organization membership
|
||||
const membershipOrg = await MembershipOrg.findOne({
|
||||
user: req.user._id,
|
||||
organization: organizationId
|
||||
});
|
||||
// validate organization membership
|
||||
const membershipOrg = await MembershipOrg.findOne({
|
||||
user: req.user._id,
|
||||
organization: organizationId,
|
||||
});
|
||||
|
||||
if (!membershipOrg) {
|
||||
throw new Error('Failed to validate organization membership');
|
||||
}
|
||||
if (!membershipOrg) {
|
||||
throw new Error("Failed to validate organization membership");
|
||||
}
|
||||
|
||||
if (workspaceName.length < 1) {
|
||||
throw new Error('Workspace names must be at least 1-character long');
|
||||
}
|
||||
if (workspaceName.length < 1) {
|
||||
throw new Error("Workspace names must be at least 1-character long");
|
||||
}
|
||||
|
||||
// create workspace and add user as member
|
||||
workspace = await create({
|
||||
name: workspaceName,
|
||||
organizationId
|
||||
});
|
||||
// create workspace and add user as member
|
||||
workspace = await create({
|
||||
name: workspaceName,
|
||||
organizationId,
|
||||
});
|
||||
|
||||
await addMemberships({
|
||||
userIds: [req.user._id],
|
||||
workspaceId: workspace._id.toString(),
|
||||
roles: [ADMIN]
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to create workspace'
|
||||
});
|
||||
}
|
||||
await addMemberships({
|
||||
userIds: [req.user._id],
|
||||
workspaceId: workspace._id.toString(),
|
||||
roles: [ADMIN],
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to create workspace",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
workspace
|
||||
});
|
||||
return res.status(200).send({
|
||||
workspace,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@ -190,24 +189,24 @@ export const createWorkspace = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const deleteWorkspace = async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
|
||||
// delete workspace
|
||||
await deleteWork({
|
||||
id: workspaceId
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to delete workspace'
|
||||
});
|
||||
}
|
||||
// delete workspace
|
||||
await deleteWork({
|
||||
id: workspaceId,
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to delete workspace",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
message: 'Successfully deleted workspace'
|
||||
});
|
||||
return res.status(200).send({
|
||||
message: "Successfully deleted workspace",
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@ -217,34 +216,34 @@ export const deleteWorkspace = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const changeWorkspaceName = async (req: Request, res: Response) => {
|
||||
let workspace;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
const { name } = req.body;
|
||||
let workspace;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
const { name } = req.body;
|
||||
|
||||
workspace = await Workspace.findOneAndUpdate(
|
||||
{
|
||||
_id: workspaceId
|
||||
},
|
||||
{
|
||||
name
|
||||
},
|
||||
{
|
||||
new: true
|
||||
}
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to change workspace name'
|
||||
});
|
||||
}
|
||||
workspace = await Workspace.findOneAndUpdate(
|
||||
{
|
||||
_id: workspaceId,
|
||||
},
|
||||
{
|
||||
name,
|
||||
},
|
||||
{
|
||||
new: true,
|
||||
}
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to change workspace name",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
message: 'Successfully changed workspace name',
|
||||
workspace
|
||||
});
|
||||
return res.status(200).send({
|
||||
message: "Successfully changed workspace name",
|
||||
workspace,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@ -254,24 +253,24 @@ export const changeWorkspaceName = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const getWorkspaceIntegrations = async (req: Request, res: Response) => {
|
||||
let integrations;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
let integrations;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
|
||||
integrations = await Integration.find({
|
||||
workspace: workspaceId
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to get workspace integrations'
|
||||
});
|
||||
}
|
||||
integrations = await Integration.find({
|
||||
workspace: workspaceId,
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to get workspace integrations",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integrations
|
||||
});
|
||||
return res.status(200).send({
|
||||
integrations,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@ -281,56 +280,56 @@ export const getWorkspaceIntegrations = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const getWorkspaceIntegrationAuthorizations = async (
|
||||
req: Request,
|
||||
res: Response
|
||||
req: Request,
|
||||
res: Response
|
||||
) => {
|
||||
let authorizations;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
let authorizations;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
|
||||
authorizations = await IntegrationAuth.find({
|
||||
workspace: workspaceId
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to get workspace integration authorizations'
|
||||
});
|
||||
}
|
||||
authorizations = await IntegrationAuth.find({
|
||||
workspace: workspaceId,
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to get workspace integration authorizations",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
authorizations
|
||||
});
|
||||
return res.status(200).send({
|
||||
authorizations,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Return service tokens for workspace [workspaceId] belonging to user
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const getWorkspaceServiceTokens = async (
|
||||
req: Request,
|
||||
res: Response
|
||||
req: Request,
|
||||
res: Response
|
||||
) => {
|
||||
let serviceTokens;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
// ?? FIX.
|
||||
serviceTokens = await ServiceToken.find({
|
||||
user: req.user._id,
|
||||
workspace: workspaceId
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to get workspace service tokens'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
serviceTokens
|
||||
});
|
||||
}
|
||||
let serviceTokens;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
// ?? FIX.
|
||||
serviceTokens = await ServiceToken.find({
|
||||
user: req.user._id,
|
||||
workspace: workspaceId,
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to get workspace service tokens",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
serviceTokens,
|
||||
});
|
||||
};
|
||||
|
351  backend/src/controllers/v2/authController.ts  Normal file
@@ -0,0 +1,351 @@
|
||||
/* eslint-disable @typescript-eslint/no-var-requires */
|
||||
import { Request, Response } from 'express';
|
||||
import jwt from 'jsonwebtoken';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import * as bigintConversion from 'bigint-conversion';
|
||||
const jsrp = require('jsrp');
|
||||
import { User, LoginSRPDetail } from '../../models';
|
||||
import { issueAuthTokens, createToken } from '../../helpers/auth';
|
||||
import { checkUserDevice } from '../../helpers/user';
|
||||
import { sendMail } from '../../helpers/nodemailer';
|
||||
import { TokenService } from '../../services';
|
||||
import { EELogService } from '../../ee/services';
|
||||
import {
|
||||
NODE_ENV,
|
||||
JWT_MFA_LIFETIME,
|
||||
JWT_MFA_SECRET
|
||||
} from '../../config';
|
||||
import { BadRequestError, InternalServerError } from '../../utils/errors';
|
||||
import {
|
||||
TOKEN_EMAIL_MFA,
|
||||
ACTION_LOGIN
|
||||
} from '../../variables';
|
||||
import { getChannelFromUserAgent } from '../../utils/posthog'; // TODO: move this
|
||||
|
||||
declare module 'jsonwebtoken' {
|
||||
export interface UserIDJwtPayload extends jwt.JwtPayload {
|
||||
userId: string;
|
||||
}
|
||||
}
|
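Note: the module augmentation above exists so that the result of jwt.verify() can be treated as a payload carrying userId without extra assertions. A minimal sketch of the intended usage, reusing the JWT_MFA_SECRET config imported in this file (the helper itself is illustrative, not part of the diff):

import jwt from 'jsonwebtoken';
import { JWT_MFA_SECRET } from '../../config';

// Decode a temporary MFA token and pull out the typed userId.
// jwt.verify() throws if the token is invalid or expired.
const decodeMfaToken = (token: string): string => {
  const decoded = <jwt.UserIDJwtPayload>jwt.verify(token, JWT_MFA_SECRET);
  return decoded.userId;
};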
||||
|
||||
const clientPublicKeys: any = {};
|
||||
|
||||
/**
|
||||
* Log in user step 1: Return [salt] and [serverPublicKey] as part of step 1 of SRP protocol
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const login1 = async (req: Request, res: Response) => {
|
||||
try {
|
||||
const {
|
||||
email,
|
||||
clientPublicKey
|
||||
}: { email: string; clientPublicKey: string } = req.body;
|
||||
|
||||
const user = await User.findOne({
|
||||
email
|
||||
}).select('+salt +verifier');
|
||||
|
||||
if (!user) throw new Error('Failed to find user');
|
||||
|
||||
const server = new jsrp.server();
|
||||
server.init(
|
||||
{
|
||||
salt: user.salt,
|
||||
verifier: user.verifier
|
||||
},
|
||||
async () => {
|
||||
// generate server-side public key
|
||||
const serverPublicKey = server.getPublicKey();
|
||||
|
||||
await LoginSRPDetail.findOneAndReplace({ email: email }, {
|
||||
email: email,
|
||||
clientPublicKey: clientPublicKey,
|
||||
serverBInt: bigintConversion.bigintToBuf(server.bInt),
|
||||
}, { upsert: true, returnNewDocument: false });
|
||||
|
||||
return res.status(200).send({
|
||||
serverPublicKey,
|
||||
salt: user.salt
|
||||
});
|
||||
}
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to start authentication process'
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Log in user step 2: complete step 2 of the SRP protocol and return the auth token and the user's (encrypted)
|
||||
* private key
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const login2 = async (req: Request, res: Response) => {
|
||||
try {
|
||||
|
||||
if (!req.headers['user-agent']) throw InternalServerError({ message: 'User-Agent header is required' });
|
||||
|
||||
const { email, clientProof } = req.body;
|
||||
const user = await User.findOne({
|
||||
email
|
||||
}).select('+salt +verifier +encryptionVersion +protectedKey +protectedKeyIV +protectedKeyTag +publicKey +encryptedPrivateKey +iv +tag');
|
||||
|
||||
if (!user) throw new Error('Failed to find user');
|
||||
|
||||
const loginSRPDetail = await LoginSRPDetail.findOneAndDelete({ email: email })
|
||||
|
||||
if (!loginSRPDetail) {
|
||||
return BadRequestError(Error("Failed to find login details for SRP"))
|
||||
}
|
||||
|
||||
const server = new jsrp.server();
|
||||
server.init(
|
||||
{
|
||||
salt: user.salt,
|
||||
verifier: user.verifier,
|
||||
b: loginSRPDetail.serverBInt
|
||||
},
|
||||
async () => {
|
||||
server.setClientPublicKey(loginSRPDetail.clientPublicKey);
|
||||
|
||||
// compare server and client shared keys
|
||||
if (server.checkClientProof(clientProof)) {
|
||||
|
||||
if (user.isMfaEnabled) {
|
||||
// case: user has MFA enabled
|
||||
|
||||
// generate temporary MFA token
|
||||
const token = createToken({
|
||||
payload: {
|
||||
userId: user._id.toString()
|
||||
},
|
||||
expiresIn: JWT_MFA_LIFETIME,
|
||||
secret: JWT_MFA_SECRET
|
||||
});
|
||||
|
||||
const code = await TokenService.createToken({
|
||||
type: TOKEN_EMAIL_MFA,
|
||||
email
|
||||
});
|
||||
|
||||
// send MFA code [code] to [email]
|
||||
await sendMail({
|
||||
template: 'emailMfa.handlebars',
|
||||
subjectLine: 'Infisical MFA code',
|
||||
recipients: [email],
|
||||
substitutions: {
|
||||
code
|
||||
}
|
||||
});
|
||||
|
||||
return res.status(200).send({
|
||||
mfaEnabled: true,
|
||||
token
|
||||
});
|
||||
}
|
||||
|
||||
await checkUserDevice({
|
||||
user,
|
||||
ip: req.ip,
|
||||
userAgent: req.headers['user-agent'] ?? ''
|
||||
});
|
||||
|
||||
// issue tokens
|
||||
const tokens = await issueAuthTokens({ userId: user._id.toString() });
|
||||
|
||||
// store (refresh) token in httpOnly cookie
|
||||
res.cookie('jid', tokens.refreshToken, {
|
||||
httpOnly: true,
|
||||
path: '/',
|
||||
sameSite: 'strict',
|
||||
secure: NODE_ENV === 'production' ? true : false
|
||||
});
|
||||
|
||||
// case: user does not have MFA enabled
|
||||
// return (access) token in response
|
||||
|
||||
interface ResponseData {
|
||||
mfaEnabled: boolean;
|
||||
encryptionVersion: any;
|
||||
protectedKey?: string;
|
||||
protectedKeyIV?: string;
|
||||
protectedKeyTag?: string;
|
||||
token: string;
|
||||
publicKey?: string;
|
||||
encryptedPrivateKey?: string;
|
||||
iv?: string;
|
||||
tag?: string;
|
||||
}
|
||||
|
||||
const response: ResponseData = {
|
||||
mfaEnabled: false,
|
||||
encryptionVersion: user.encryptionVersion,
|
||||
token: tokens.token,
|
||||
publicKey: user.publicKey,
|
||||
encryptedPrivateKey: user.encryptedPrivateKey,
|
||||
iv: user.iv,
|
||||
tag: user.tag
|
||||
}
|
||||
|
||||
if (
|
||||
user?.protectedKey &&
|
||||
user?.protectedKeyIV &&
|
||||
user?.protectedKeyTag
|
||||
) {
|
||||
response.protectedKey = user.protectedKey;
|
||||
response.protectedKeyIV = user.protectedKeyIV
|
||||
response.protectedKeyTag = user.protectedKeyTag;
|
||||
}
|
||||
|
||||
const loginAction = await EELogService.createAction({
|
||||
name: ACTION_LOGIN,
|
||||
userId: user._id
|
||||
});
|
||||
|
||||
loginAction && await EELogService.createLog({
|
||||
userId: user._id,
|
||||
actions: [loginAction],
|
||||
channel: getChannelFromUserAgent(req.headers['user-agent']),
|
||||
ipAddress: req.ip
|
||||
});
|
||||
|
||||
return res.status(200).send(response);
|
||||
}
|
||||
|
||||
return res.status(400).send({
|
||||
message: 'Failed to authenticate. Try again?'
|
||||
});
|
||||
}
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to authenticate. Try again?'
|
||||
});
|
||||
}
|
||||
};
|
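For orientation, below is a minimal client-side counterpart to the two SRP handlers in this file. It mirrors the server's jsrp.server() calls with the jsrp client API; the endpoint paths (/api/v2/auth/login1 and /api/v2/auth/login2) and the fetch-based transport are assumptions for illustration only.

const jsrp = require('jsrp');

const srpLogin = async (email: string, password: string) => {
  const client = new jsrp.client();
  await new Promise<void>((resolve) => client.init({ username: email, password }, resolve));

  // step 1: exchange public keys and receive the user's salt
  const res1 = await fetch('/api/v2/auth/login1', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ email, clientPublicKey: client.getPublicKey() })
  });
  const { serverPublicKey, salt } = await res1.json();

  // step 2: derive the shared proof and send it back
  client.setSalt(salt);
  client.setServerPublicKey(serverPublicKey);
  const res2 = await fetch('/api/v2/auth/login2', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    credentials: 'include', // so the httpOnly 'jid' refresh cookie is stored
    body: JSON.stringify({ email, clientProof: client.getProof() })
  });

  // either { mfaEnabled: true, token } or the full key material
  return res2.json();
};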
||||
|
||||
/**
|
||||
* Send MFA token to email [email]
|
||||
* @param req
|
||||
* @param res
|
||||
*/
|
||||
export const sendMfaToken = async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { email } = req.body;
|
||||
|
||||
const code = await TokenService.createToken({
|
||||
type: TOKEN_EMAIL_MFA,
|
||||
email
|
||||
});
|
||||
|
||||
// send MFA code [code] to [email]
|
||||
await sendMail({
|
||||
template: 'emailMfa.handlebars',
|
||||
subjectLine: 'Infisical MFA code',
|
||||
recipients: [email],
|
||||
substitutions: {
|
||||
code
|
||||
}
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to send MFA code'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
message: 'Successfully sent new MFA code'
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify MFA token [mfaToken] and issue JWT and refresh tokens if the
|
||||
* MFA token [mfaToken] is valid
|
||||
* @param req
|
||||
* @param res
|
||||
*/
|
||||
export const verifyMfaToken = async (req: Request, res: Response) => {
|
||||
const { email, mfaToken } = req.body;
|
||||
|
||||
await TokenService.validateToken({
|
||||
type: TOKEN_EMAIL_MFA,
|
||||
email,
|
||||
token: mfaToken
|
||||
});
|
||||
|
||||
const user = await User.findOne({
|
||||
email
|
||||
}).select('+salt +verifier +encryptionVersion +protectedKey +protectedKeyIV +protectedKeyTag +publicKey +encryptedPrivateKey +iv +tag');
|
||||
|
||||
if (!user) throw new Error('Failed to find user');
|
||||
|
||||
await checkUserDevice({
|
||||
user,
|
||||
ip: req.ip,
|
||||
userAgent: req.headers['user-agent'] ?? ''
|
||||
});
|
||||
|
||||
// issue tokens
|
||||
const tokens = await issueAuthTokens({ userId: user._id.toString() });
|
||||
|
||||
// store (refresh) token in httpOnly cookie
|
||||
res.cookie('jid', tokens.refreshToken, {
|
||||
httpOnly: true,
|
||||
path: '/',
|
||||
sameSite: 'strict',
|
||||
secure: NODE_ENV === 'production' ? true : false
|
||||
});
|
||||
|
||||
interface VerifyMfaTokenRes {
|
||||
encryptionVersion: number;
|
||||
protectedKey?: string;
|
||||
protectedKeyIV?: string;
|
||||
protectedKeyTag?: string;
|
||||
token: string;
|
||||
publicKey: string;
|
||||
encryptedPrivateKey: string;
|
||||
iv: string;
|
||||
tag: string;
|
||||
}
|
||||
|
||||
const resObj: VerifyMfaTokenRes = {
|
||||
encryptionVersion: user.encryptionVersion,
|
||||
token: tokens.token,
|
||||
publicKey: user.publicKey as string,
|
||||
encryptedPrivateKey: user.encryptedPrivateKey as string,
|
||||
iv: user.iv as string,
|
||||
tag: user.tag as string
|
||||
}
|
||||
|
||||
if (user?.protectedKey && user?.protectedKeyIV && user?.protectedKeyTag) {
|
||||
resObj.protectedKey = user.protectedKey;
|
||||
resObj.protectedKeyIV = user.protectedKeyIV;
|
||||
resObj.protectedKeyTag = user.protectedKeyTag;
|
||||
}
|
||||
|
||||
const loginAction = await EELogService.createAction({
|
||||
name: ACTION_LOGIN,
|
||||
userId: user._id
|
||||
});
|
||||
|
||||
loginAction && await EELogService.createLog({
|
||||
userId: user._id,
|
||||
actions: [loginAction],
|
||||
channel: getChannelFromUserAgent(req.headers['user-agent']),
|
||||
ipAddress: req.ip
|
||||
});
|
||||
|
||||
return res.status(200).send(resObj);
|
||||
}
|
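When login2 answers with mfaEnabled: true, the client finishes with the two handlers above: sendMfaToken re-issues an email code and verifyMfaToken exchanges it for the full key material. A hedged sketch follows; the route paths (/api/v2/auth/mfa/send and /api/v2/auth/mfa/verify) and the Bearer use of the temporary token from login2 are assumptions, since the actual paths and middleware live in the router rather than in this file.

const requestMfaCode = (email: string) =>
  fetch('/api/v2/auth/mfa/send', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ email })
  });

const verifyMfaCode = async (email: string, tempToken: string, code: string) => {
  const res = await fetch('/api/v2/auth/mfa/verify', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${tempToken}` // short-lived JWT returned by login2 (assumed header)
    },
    credentials: 'include', // so the refreshed 'jid' cookie is stored
    body: JSON.stringify({ email, mfaToken: code })
  });
  return res.json(); // VerifyMfaTokenRes: token, publicKey, encryptedPrivateKey, iv, tag, ...
};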
||||
|
@@ -246,13 +246,14 @@ export const getAllAccessibleEnvironmentsOfWorkspace = async (
   relatedWorkspace.environments.forEach(environment => {
     const isReadBlocked = _.some(deniedPermission, { environmentSlug: environment.slug, ability: ABILITY_READ })
     const isWriteBlocked = _.some(deniedPermission, { environmentSlug: environment.slug, ability: ABILITY_WRITE })
-    if (isReadBlocked) {
+    if (isReadBlocked && isWriteBlocked) {
       return
     } else {
       accessibleEnvironments.push({
         name: environment.name,
         slug: environment.slug,
-        isWriteDenied: isWriteBlocked
+        isWriteDenied: isWriteBlocked,
+        isReadDenied: isReadBlocked
       })
     }
   })
||||
|
@@ -1,3 +1,5 @@
+import * as authController from './authController';
+import * as signupController from './signupController';
 import * as usersController from './usersController';
 import * as organizationsController from './organizationsController';
 import * as workspaceController from './workspaceController';
@@ -9,6 +11,8 @@ import * as environmentController from './environmentController';
 import * as tagController from './tagController';
 
 export {
+  authController,
+  signupController,
   usersController,
   organizationsController,
   workspaceController,
@@ -1,7 +1,8 @@
 import to from 'await-to-js';
 import { Types } from 'mongoose';
 import { Request, Response } from 'express';
-import { ISecret, Membership, Secret, Workspace } from '../../models';
+import { ISecret, Secret } from '../../models';
+import { IAction } from '../../ee/models';
 import {
   SECRET_PERSONAL,
   SECRET_SHARED,
||||
@ -17,7 +18,255 @@ import { EESecretService, EELogService } from '../../ee/services';
|
||||
import { postHogClient } from '../../services';
|
||||
import { getChannelFromUserAgent } from '../../utils/posthog';
|
||||
import { ABILITY_READ, ABILITY_WRITE } from '../../variables/organization';
|
||||
import { userHasWorkspaceAccess } from '../../ee/helpers/checkMembershipPermissions';
|
||||
import { userHasNoAbility, userHasWorkspaceAccess, userHasWriteOnlyAbility } from '../../ee/helpers/checkMembershipPermissions';
|
||||
import Tag from '../../models/tag';
|
||||
import _ from 'lodash';
|
||||
import {
|
||||
BatchSecretRequest,
|
||||
BatchSecret
|
||||
} from '../../types/secret';
|
||||
|
||||
/**
|
||||
* Perform a batch of any specified CUD secret operations
|
||||
* @param req
|
||||
* @param res
|
||||
*/
|
||||
export const batchSecrets = async (req: Request, res: Response) => {
|
||||
const channel = getChannelFromUserAgent(req.headers['user-agent']);
|
||||
const {
|
||||
workspaceId,
|
||||
environment,
|
||||
requests
|
||||
}: {
|
||||
workspaceId: string;
|
||||
environment: string;
|
||||
requests: BatchSecretRequest[];
|
||||
} = req.body;
|
||||
|
||||
const createSecrets: BatchSecret[] = [];
|
||||
const updateSecrets: BatchSecret[] = [];
|
||||
const deleteSecrets: Types.ObjectId[] = [];
|
||||
const actions: IAction[] = [];
|
||||
|
||||
requests.forEach((request) => {
|
||||
switch (request.method) {
|
||||
case 'POST':
|
||||
createSecrets.push({
|
||||
...request.secret,
|
||||
version: 1,
|
||||
user: request.secret.type === SECRET_PERSONAL ? req.user : undefined,
|
||||
environment,
|
||||
workspace: new Types.ObjectId(workspaceId)
|
||||
});
|
||||
break;
|
||||
case 'PATCH':
|
||||
updateSecrets.push({
|
||||
...request.secret,
|
||||
_id: new Types.ObjectId(request.secret._id)
|
||||
});
|
||||
break;
|
||||
case 'DELETE':
|
||||
deleteSecrets.push(new Types.ObjectId(request.secret._id));
|
||||
break;
|
||||
}
|
||||
});
|
||||
|
||||
// handle create secrets
|
||||
let createdSecrets: ISecret[] = [];
|
||||
if (createSecrets.length > 0) {
|
||||
createdSecrets = await Secret.insertMany(createSecrets);
|
||||
// (EE) add secret versions for new secrets
|
||||
await EESecretService.addSecretVersions({
|
||||
secretVersions: createdSecrets.map((n: any) => {
|
||||
return ({
|
||||
...n._doc,
|
||||
_id: new Types.ObjectId(),
|
||||
secret: n._id,
|
||||
isDeleted: false
|
||||
});
|
||||
})
|
||||
});
|
||||
|
||||
const addAction = await EELogService.createAction({
|
||||
name: ACTION_ADD_SECRETS,
|
||||
userId: req.user._id,
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
secretIds: createdSecrets.map((n) => n._id)
|
||||
}) as IAction;
|
||||
actions.push(addAction);
|
||||
|
||||
if (postHogClient) {
|
||||
postHogClient.capture({
|
||||
event: 'secrets added',
|
||||
distinctId: req.user.email,
|
||||
properties: {
|
||||
numberOfSecrets: createdSecrets.length,
|
||||
environment,
|
||||
workspaceId,
|
||||
channel,
|
||||
userAgent: req.headers?.['user-agent']
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// handle update secrets
|
||||
let updatedSecrets: ISecret[] = [];
|
||||
if (updateSecrets.length > 0 && req.secrets) {
|
||||
// construct object containing all secrets
|
||||
let listedSecretsObj: {
|
||||
[key: string]: {
|
||||
version: number;
|
||||
type: string;
|
||||
}
|
||||
} = {};
|
||||
|
||||
listedSecretsObj = req.secrets.reduce((obj: any, secret: ISecret) => ({
|
||||
...obj,
|
||||
[secret._id.toString()]: secret
|
||||
}), {});
|
||||
|
||||
const updateOperations = updateSecrets.map((u) => ({
|
||||
updateOne: {
|
||||
filter: { _id: new Types.ObjectId(u._id) },
|
||||
update: {
|
||||
$inc: {
|
||||
version: 1
|
||||
},
|
||||
...u,
|
||||
_id: new Types.ObjectId(u._id)
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
||||
await Secret.bulkWrite(updateOperations);
|
||||
|
||||
const secretVersions = updateSecrets.map((u) => ({
|
||||
secret: new Types.ObjectId(u._id),
|
||||
version: listedSecretsObj[u._id.toString()].version,
|
||||
workspace: new Types.ObjectId(workspaceId),
|
||||
type: listedSecretsObj[u._id.toString()].type,
|
||||
environment,
|
||||
isDeleted: false,
|
||||
secretKeyCiphertext: u.secretKeyCiphertext,
|
||||
secretKeyIV: u.secretKeyIV,
|
||||
secretKeyTag: u.secretKeyTag,
|
||||
secretValueCiphertext: u.secretValueCiphertext,
|
||||
secretValueIV: u.secretValueIV,
|
||||
secretValueTag: u.secretValueTag,
|
||||
secretCommentCiphertext: u.secretCommentCiphertext,
|
||||
secretCommentIV: u.secretCommentIV,
|
||||
secretCommentTag: u.secretCommentTag,
|
||||
tags: u.tags
|
||||
}));
|
||||
|
||||
await EESecretService.addSecretVersions({
|
||||
secretVersions
|
||||
});
|
||||
|
||||
updatedSecrets = await Secret.find({
|
||||
_id: {
|
||||
$in: updateSecrets.map((u) => new Types.ObjectId(u._id))
|
||||
}
|
||||
});
|
||||
|
||||
const updateAction = await EELogService.createAction({
|
||||
name: ACTION_UPDATE_SECRETS,
|
||||
userId: req.user._id,
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
secretIds: updatedSecrets.map((u) => u._id)
|
||||
}) as IAction;
|
||||
actions.push(updateAction);
|
||||
|
||||
if (postHogClient) {
|
||||
postHogClient.capture({
|
||||
event: 'secrets modified',
|
||||
distinctId: req.user.email,
|
||||
properties: {
|
||||
numberOfSecrets: updateSecrets.length,
|
||||
environment,
|
||||
workspaceId,
|
||||
channel,
|
||||
userAgent: req.headers?.['user-agent']
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// handle delete secrets
|
||||
if (deleteSecrets.length > 0) {
|
||||
await Secret.deleteMany({
|
||||
_id: {
|
||||
$in: deleteSecrets
|
||||
}
|
||||
});
|
||||
|
||||
await EESecretService.markDeletedSecretVersions({
|
||||
secretIds: deleteSecrets
|
||||
});
|
||||
|
||||
const deleteAction = await EELogService.createAction({
|
||||
name: ACTION_DELETE_SECRETS,
|
||||
userId: req.user._id,
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
secretIds: deleteSecrets
|
||||
}) as IAction;
|
||||
actions.push(deleteAction);
|
||||
|
||||
if (postHogClient) {
|
||||
postHogClient.capture({
|
||||
event: 'secrets deleted',
|
||||
distinctId: req.user.email,
|
||||
properties: {
|
||||
numberOfSecrets: deleteSecrets.length,
|
||||
environment,
|
||||
workspaceId,
|
||||
channel: channel,
|
||||
userAgent: req.headers?.['user-agent']
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (actions.length > 0) {
|
||||
// (EE) create (audit) log
|
||||
await EELogService.createLog({
|
||||
userId: req.user._id.toString(),
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
actions,
|
||||
channel,
|
||||
ipAddress: req.ip
|
||||
});
|
||||
}
|
||||
|
||||
// // trigger event - push secrets
|
||||
await EventService.handleEvent({
|
||||
event: eventPushSecrets({
|
||||
workspaceId
|
||||
})
|
||||
});
|
||||
|
||||
// (EE) take a secret snapshot
|
||||
await EESecretService.takeSecretSnapshot({
|
||||
workspaceId
|
||||
});
|
||||
|
||||
const resObj: { [key: string]: ISecret[] | string[] } = {}
|
||||
|
||||
if (createSecrets.length > 0) {
|
||||
resObj['createdSecrets'] = createdSecrets;
|
||||
}
|
||||
|
||||
if (updateSecrets.length > 0) {
|
||||
resObj['updatedSecrets'] = updatedSecrets;
|
||||
}
|
||||
|
||||
if (deleteSecrets.length > 0) {
|
||||
resObj['deletedSecrets'] = deleteSecrets.map((d) => d.toString());
|
||||
}
|
||||
|
||||
return res.status(200).send(resObj);
|
||||
}
|
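For reference, batchSecrets expects a workspaceId, an environment, and a list of requests whose method field selects one of the three branches above. An illustrative payload is shown below; the ids and ciphertexts are placeholders, the 'shared' type value is assumed to match SECRET_SHARED, and the exact route path is defined in the router rather than here.

const body = {
  workspaceId: '64a1f0c2e4b0a1b2c3d4e5f6', // placeholder id
  environment: 'dev',
  requests: [
    {
      method: 'POST', // create a new shared secret at version 1
      secret: {
        type: 'shared',
        secretKeyCiphertext: '...', secretKeyIV: '...', secretKeyTag: '...',
        secretValueCiphertext: '...', secretValueIV: '...', secretValueTag: '...',
        tags: []
      }
    },
    {
      method: 'PATCH', // bump the version and overwrite the ciphertext fields
      secret: {
        _id: '64a1f0c2e4b0a1b2c3d4e5f7',
        secretValueCiphertext: '...', secretValueIV: '...', secretValueTag: '...'
      }
    },
    {
      method: 'DELETE', // remove by id
      secret: { _id: '64a1f0c2e4b0a1b2c3d4e5f8' }
    }
  ]
};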
||||
|
||||
/**
|
||||
* Create secret(s) for workspace with id [workspaceId] and environment [environment]
|
||||
@ -164,11 +413,9 @@ export const createSecrets = async (req: Request, res: Response) => {
|
||||
secretKeyCiphertext,
|
||||
secretKeyIV,
|
||||
secretKeyTag,
|
||||
secretKeyHash,
|
||||
secretValueCiphertext,
|
||||
secretValueIV,
|
||||
secretValueTag,
|
||||
secretValueHash,
|
||||
secretCommentCiphertext,
|
||||
secretCommentIV,
|
||||
secretCommentTag,
|
||||
@ -185,11 +432,9 @@ export const createSecrets = async (req: Request, res: Response) => {
|
||||
secretKeyCiphertext,
|
||||
secretKeyIV,
|
||||
secretKeyTag,
|
||||
secretKeyHash,
|
||||
secretValueCiphertext,
|
||||
secretValueIV,
|
||||
secretValueTag,
|
||||
secretValueHash,
|
||||
secretCommentCiphertext,
|
||||
secretCommentIV,
|
||||
secretCommentTag,
|
||||
@ -284,8 +529,10 @@ export const getSecrets = async (req: Request, res: Response) => {
|
||||
}
|
||||
}
|
||||
*/
|
||||
const { workspaceId, environment } = req.query;
|
||||
|
||||
|
||||
const { workspaceId, environment, tagSlugs } = req.query;
|
||||
const tagNamesList = typeof tagSlugs === 'string' && tagSlugs !== '' ? tagSlugs.split(',') : [];
|
||||
let userId = "" // used for getting personal secrets for user
|
||||
let userEmail = "" // used for posthog
|
||||
if (req.user) {
|
||||
@ -298,16 +545,38 @@ export const getSecrets = async (req: Request, res: Response) => {
|
||||
userEmail = req.serviceTokenData.user.email;
|
||||
}
|
||||
|
||||
// none service token case as service tokens are already scoped
|
||||
// none service token case as service tokens are already scoped to env and project
|
||||
let hasWriteOnlyAccess
|
||||
if (!req.serviceTokenData) {
|
||||
const hasAccess = await userHasWorkspaceAccess(userId, workspaceId, environment, ABILITY_READ)
|
||||
if (!hasAccess) {
|
||||
hasWriteOnlyAccess = await userHasWriteOnlyAbility(userId, workspaceId, environment)
|
||||
const hasNoAccess = await userHasNoAbility(userId, workspaceId, environment)
|
||||
if (hasNoAccess) {
|
||||
throw UnauthorizedRequestError({ message: "You do not have the necessary permission(s) to perform this action" })
|
||||
}
|
||||
}
|
||||
let secrets: any
|
||||
let secretQuery: any
|
||||
|
||||
const [err, secrets] = await to(Secret.find(
|
||||
{
|
||||
if (tagNamesList != undefined && tagNamesList.length != 0) {
|
||||
const workspaceFromDB = await Tag.find({ workspace: workspaceId })
|
||||
|
||||
const tagIds = _.map(tagNamesList, (tagName) => {
|
||||
const tag = _.find(workspaceFromDB, { slug: tagName });
|
||||
return tag ? tag.id : null;
|
||||
});
|
||||
|
||||
secretQuery = {
|
||||
workspace: workspaceId,
|
||||
environment,
|
||||
$or: [
|
||||
{ user: userId },
|
||||
{ user: { $exists: false } }
|
||||
],
|
||||
tags: { $in: tagIds },
|
||||
type: { $in: [SECRET_SHARED, SECRET_PERSONAL] }
|
||||
}
|
||||
} else {
|
||||
secretQuery = {
|
||||
workspace: workspaceId,
|
||||
environment,
|
||||
$or: [
|
||||
@ -316,9 +585,13 @@ export const getSecrets = async (req: Request, res: Response) => {
|
||||
],
|
||||
type: { $in: [SECRET_SHARED, SECRET_PERSONAL] }
|
||||
}
|
||||
).populate("tags").then())
|
||||
}
|
||||
|
||||
if (err) throw ValidationError({ message: 'Failed to get secrets', stack: err.stack });
|
||||
if (hasWriteOnlyAccess) {
|
||||
secrets = await Secret.find(secretQuery).select("secretKeyCiphertext secretKeyIV secretKeyTag")
|
||||
} else {
|
||||
secrets = await Secret.find(secretQuery).populate("tags")
|
||||
}
|
||||
|
||||
const channel = getChannelFromUserAgent(req.headers['user-agent'])
|
||||
|
||||
@ -356,6 +629,59 @@ export const getSecrets = async (req: Request, res: Response) => {
|
||||
});
|
||||
}
|
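The tagSlugs query parameter added here is a comma-separated list of tag slugs; when present, the query above is restricted to secrets carrying those tags. An illustrative call follows; the route path, the auth header, and the { secrets } response wrapper are assumptions for the example.

const serviceTokenOrJwt = process.env.INFISICAL_TOKEN ?? ''; // placeholder credential

const params = new URLSearchParams({
  workspaceId: '64a1f0c2e4b0a1b2c3d4e5f6', // placeholder id
  environment: 'dev',
  tagSlugs: 'backend,billing' // comma-separated, matching the split(',') above
});

const res = await fetch(`/api/v2/secrets?${params}`, {
  headers: { Authorization: `Bearer ${serviceTokenOrJwt}` }
});
const { secrets } = await res.json();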
||||
|
||||
|
||||
export const getOnlySecretKeys = async (req: Request, res: Response) => {
|
||||
const { workspaceId, environment } = req.query;
|
||||
|
||||
let userId = "" // used for getting personal secrets for user
|
||||
let userEmail = "" // used for posthog
|
||||
if (req.user) {
|
||||
userId = req.user._id;
|
||||
userEmail = req.user.email;
|
||||
}
|
||||
|
||||
if (req.serviceTokenData) {
|
||||
userId = req.serviceTokenData.user._id
|
||||
userEmail = req.serviceTokenData.user.email;
|
||||
}
|
||||
|
||||
// none service token case as service tokens are already scoped
|
||||
if (!req.serviceTokenData) {
|
||||
const hasAccess = await userHasWorkspaceAccess(userId, workspaceId, environment, ABILITY_READ)
|
||||
if (!hasAccess) {
|
||||
throw UnauthorizedRequestError({ message: "You do not have the necessary permission(s) to perform this action" })
|
||||
}
|
||||
}
|
||||
|
||||
const [err, secretKeys] = await to(Secret.find(
|
||||
{
|
||||
workspace: workspaceId,
|
||||
environment,
|
||||
$or: [
|
||||
{ user: userId },
|
||||
{ user: { $exists: false } }
|
||||
],
|
||||
type: { $in: [SECRET_SHARED, SECRET_PERSONAL] }
|
||||
}
|
||||
)
|
||||
.select("secretKeyIV secretKeyTag secretKeyCiphertext")
|
||||
.then())
|
||||
|
||||
if (err) throw ValidationError({ message: 'Failed to get secrets', stack: err.stack });
|
||||
|
||||
// readAction && await EELogService.createLog({
|
||||
// userId: new Types.ObjectId(userId),
|
||||
// workspaceId: new Types.ObjectId(workspaceId as string),
|
||||
// actions: [readAction],
|
||||
// channel,
|
||||
// ipAddress: req.ip
|
||||
// });
|
||||
|
||||
return res.status(200).send({
|
||||
secretKeys
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Update secret(s)
|
||||
* @param req
|
||||
@ -452,7 +778,7 @@ export const updateSecrets = async (req: Request, res: Response) => {
|
||||
secretValueTag,
|
||||
tags,
|
||||
...((
|
||||
secretCommentCiphertext &&
|
||||
secretCommentCiphertext !== undefined &&
|
||||
secretCommentIV &&
|
||||
secretCommentTag
|
||||
) ? {
|
||||
|
250  backend/src/controllers/v2/signupController.ts  Normal file
@@ -0,0 +1,250 @@
|
||||
import { Request, Response } from 'express';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { User, MembershipOrg } from '../../models';
|
||||
import { completeAccount } from '../../helpers/user';
|
||||
import {
|
||||
initializeDefaultOrg
|
||||
} from '../../helpers/signup';
|
||||
import { issueAuthTokens } from '../../helpers/auth';
|
||||
import { INVITED, ACCEPTED } from '../../variables';
|
||||
import { NODE_ENV } from '../../config';
|
||||
import request from '../../config/request';
|
||||
|
||||
/**
|
||||
* Complete setting up user by adding their personal and auth information as part of the
|
||||
* signup flow
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const completeAccountSignup = async (req: Request, res: Response) => {
|
||||
let user, token, refreshToken;
|
||||
try {
|
||||
const {
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
publicKey,
|
||||
encryptedPrivateKey,
|
||||
encryptedPrivateKeyIV,
|
||||
encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier,
|
||||
organizationName
|
||||
}: {
|
||||
email: string;
|
||||
firstName: string;
|
||||
lastName: string;
|
||||
protectedKey: string;
|
||||
protectedKeyIV: string;
|
||||
protectedKeyTag: string;
|
||||
publicKey: string;
|
||||
encryptedPrivateKey: string;
|
||||
encryptedPrivateKeyIV: string;
|
||||
encryptedPrivateKeyTag: string;
|
||||
salt: string;
|
||||
verifier: string;
|
||||
organizationName: string;
|
||||
} = req.body;
|
||||
|
||||
// get user
|
||||
user = await User.findOne({ email });
|
||||
|
||||
if (!user || (user && user?.publicKey)) {
|
||||
// case 1: user doesn't exist.
|
||||
// case 2: user has already completed account
|
||||
return res.status(403).send({
|
||||
error: 'Failed to complete account for complete user'
|
||||
});
|
||||
}
|
||||
|
||||
// complete setting up user's account
|
||||
user = await completeAccount({
|
||||
userId: user._id.toString(),
|
||||
firstName,
|
||||
lastName,
|
||||
encryptionVersion: 2,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
publicKey,
|
||||
encryptedPrivateKey,
|
||||
encryptedPrivateKeyIV,
|
||||
encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier
|
||||
});
|
||||
|
||||
if (!user)
|
||||
throw new Error('Failed to complete account for non-existent user'); // ensure user is non-null
|
||||
|
||||
// initialize default organization and workspace
|
||||
await initializeDefaultOrg({
|
||||
organizationName,
|
||||
user
|
||||
});
|
||||
|
||||
// update organization membership statuses that are
|
||||
// invited to completed with user attached
|
||||
await MembershipOrg.updateMany(
|
||||
{
|
||||
inviteEmail: email,
|
||||
status: INVITED
|
||||
},
|
||||
{
|
||||
user,
|
||||
status: ACCEPTED
|
||||
}
|
||||
);
|
||||
|
||||
// issue tokens
|
||||
const tokens = await issueAuthTokens({
|
||||
userId: user._id.toString()
|
||||
});
|
||||
|
||||
token = tokens.token;
|
||||
|
||||
// sending a welcome email to new users
|
||||
if (process.env.LOOPS_API_KEY) {
|
||||
await request.post("https://app.loops.so/api/v1/events/send", {
|
||||
"email": email,
|
||||
"eventName": "Sign Up",
|
||||
"firstName": firstName,
|
||||
"lastName": lastName
|
||||
}, {
|
||||
headers: {
|
||||
"Accept": "application/json",
|
||||
"Authorization": "Bearer " + process.env.LOOPS_API_KEY
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// store (refresh) token in httpOnly cookie
|
||||
res.cookie('jid', tokens.refreshToken, {
|
||||
httpOnly: true,
|
||||
path: '/',
|
||||
sameSite: 'strict',
|
||||
secure: NODE_ENV === 'production' ? true : false
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to complete account setup'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
message: 'Successfully set up account',
|
||||
user,
|
||||
token
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Complete setting up user by adding their personal and auth information as part of the
|
||||
* invite flow
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const completeAccountInvite = async (req: Request, res: Response) => {
|
||||
let user, token, refreshToken;
|
||||
try {
|
||||
const {
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
publicKey,
|
||||
encryptedPrivateKey,
|
||||
encryptedPrivateKeyIV,
|
||||
encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier
|
||||
} = req.body;
|
||||
|
||||
// get user
|
||||
user = await User.findOne({ email });
|
||||
|
||||
if (!user || (user && user?.publicKey)) {
|
||||
// case 1: user doesn't exist.
|
||||
// case 2: user has already completed account
|
||||
return res.status(403).send({
|
||||
error: 'Failed to complete account for complete user'
|
||||
});
|
||||
}
|
||||
|
||||
const membershipOrg = await MembershipOrg.findOne({
|
||||
inviteEmail: email,
|
||||
status: INVITED
|
||||
});
|
||||
|
||||
if (!membershipOrg) throw new Error('Failed to find invitations for email');
|
||||
|
||||
// complete setting up user's account
|
||||
user = await completeAccount({
|
||||
userId: user._id.toString(),
|
||||
firstName,
|
||||
lastName,
|
||||
encryptionVersion: 2,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
publicKey,
|
||||
encryptedPrivateKey,
|
||||
encryptedPrivateKeyIV,
|
||||
encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier
|
||||
});
|
||||
|
||||
if (!user)
|
||||
throw new Error('Failed to complete account for non-existent user');
|
||||
|
||||
// update organization membership statuses that are
|
||||
// invited to completed with user attached
|
||||
await MembershipOrg.updateMany(
|
||||
{
|
||||
inviteEmail: email,
|
||||
status: INVITED
|
||||
},
|
||||
{
|
||||
user,
|
||||
status: ACCEPTED
|
||||
}
|
||||
);
|
||||
|
||||
// issue tokens
|
||||
const tokens = await issueAuthTokens({
|
||||
userId: user._id.toString()
|
||||
});
|
||||
|
||||
token = tokens.token;
|
||||
|
||||
// store (refresh) token in httpOnly cookie
|
||||
res.cookie('jid', tokens.refreshToken, {
|
||||
httpOnly: true,
|
||||
path: '/',
|
||||
sameSite: 'strict',
|
||||
secure: NODE_ENV === 'production' ? true : false
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to complete account setup'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
message: 'Successfully set up account',
|
||||
user,
|
||||
token
|
||||
});
|
||||
};
|
@ -2,7 +2,7 @@ import { Request, Response } from 'express';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Types } from 'mongoose';
|
||||
import {
|
||||
Membership,
|
||||
Membership, Secret,
|
||||
} from '../../models';
|
||||
import Tag, { ITag } from '../../models/tag';
|
||||
import { Builder } from "builder-pattern"
|
||||
@ -54,6 +54,12 @@ export const deleteWorkspaceTag = async (req: Request, res: Response) => {
|
||||
|
||||
const result = await Tag.findByIdAndDelete(tagId);
|
||||
|
||||
// remove the tag from secrets
|
||||
await Secret.updateMany(
|
||||
{ tags: { $in: [tagId] } },
|
||||
{ $pull: { tags: tagId } }
|
||||
);
|
||||
|
||||
res.json(result);
|
||||
}
|
||||
|
||||
|
@ -55,6 +55,44 @@ export const getMe = async (req: Request, res: Response) => {
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the current user's MFA-enabled status [isMfaEnabled].
|
||||
* Note: Infisical currently supports email-based 2FA only; this will expand to
|
||||
* include SMS and authenticator app modes of authentication in the future.
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const updateMyMfaEnabled = async (req: Request, res: Response) => {
|
||||
let user;
|
||||
try {
|
||||
const { isMfaEnabled }: { isMfaEnabled: boolean } = req.body;
|
||||
req.user.isMfaEnabled = isMfaEnabled;
|
||||
|
||||
if (isMfaEnabled) {
|
||||
// TODO: adapt this route/controller
|
||||
// to work for different forms of MFA
|
||||
req.user.mfaMethods = ['email'];
|
||||
} else {
|
||||
req.user.mfaMethods = [];
|
||||
}
|
||||
|
||||
await req.user.save();
|
||||
|
||||
user = req.user;
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to update current user's MFA status"
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
user
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return organizations that the current user is part of.
|
||||
* @param req
|
||||
|
@ -467,4 +467,42 @@ export const deleteWorkspaceMembership = async (req: Request, res: Response) =>
|
||||
return res.status(200).send({
|
||||
membership
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Change the autoCapitalization rule of a workspace
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const toggleAutoCapitalization = async (req: Request, res: Response) => {
|
||||
let workspace;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
const { autoCapitalization } = req.body;
|
||||
|
||||
workspace = await Workspace.findOneAndUpdate(
|
||||
{
|
||||
_id: workspaceId
|
||||
},
|
||||
{
|
||||
autoCapitalization
|
||||
},
|
||||
{
|
||||
new: true
|
||||
}
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to change autoCapitalization setting'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
message: 'Successfully changed autoCapitalization setting',
|
||||
workspace
|
||||
});
|
||||
};
|
||||
|
||||
|
@ -158,11 +158,9 @@ export const rollbackSecretVersion = async (req: Request, res: Response) => {
|
||||
secretKeyCiphertext,
|
||||
secretKeyIV,
|
||||
secretKeyTag,
|
||||
secretKeyHash,
|
||||
secretValueCiphertext,
|
||||
secretValueIV,
|
||||
secretValueTag,
|
||||
secretValueHash
|
||||
} = oldSecretVersion;
|
||||
|
||||
// update secret
|
||||
@ -179,11 +177,9 @@ export const rollbackSecretVersion = async (req: Request, res: Response) => {
|
||||
secretKeyCiphertext,
|
||||
secretKeyIV,
|
||||
secretKeyTag,
|
||||
secretKeyHash,
|
||||
secretValueCiphertext,
|
||||
secretValueIV,
|
||||
secretValueTag,
|
||||
secretValueHash
|
||||
},
|
||||
{
|
||||
new: true
|
||||
@ -204,11 +200,9 @@ export const rollbackSecretVersion = async (req: Request, res: Response) => {
|
||||
secretKeyCiphertext,
|
||||
secretKeyIV,
|
||||
secretKeyTag,
|
||||
secretKeyHash,
|
||||
secretValueCiphertext,
|
||||
secretValueIV,
|
||||
secretValueTag,
|
||||
secretValueHash
|
||||
secretValueTag
|
||||
}).save();
|
||||
|
||||
// take secret snapshot
|
||||
|
@ -1,5 +1,6 @@
|
||||
import _ from "lodash";
|
||||
import { Membership } from "../../models";
|
||||
import { ABILITY_READ, ABILITY_WRITE } from "../../variables/organization";
|
||||
|
||||
export const userHasWorkspaceAccess = async (userId: any, workspaceId: any, environment: any, action: any) => {
|
||||
const membershipForWorkspace = await Membership.findOne({ workspace: workspaceId, user: userId })
|
||||
@ -15,4 +16,39 @@ export const userHasWorkspaceAccess = async (userId: any, workspaceId: any, envi
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
export const userHasWriteOnlyAbility = async (userId: any, workspaceId: any, environment: any) => {
|
||||
const membershipForWorkspace = await Membership.findOne({ workspace: workspaceId, user: userId })
|
||||
if (!membershipForWorkspace) {
|
||||
return false
|
||||
}
|
||||
|
||||
const deniedMembershipPermissions = membershipForWorkspace.deniedPermissions;
|
||||
const isWriteDisallowed = _.some(deniedMembershipPermissions, { environmentSlug: environment, ability: ABILITY_WRITE });
|
||||
const isReadDisallowed = _.some(deniedMembershipPermissions, { environmentSlug: environment, ability: ABILITY_READ });
|
||||
|
||||
// case: you have write only if read is blocked and write is not
|
||||
if (isReadDisallowed && !isWriteDisallowed) {
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
export const userHasNoAbility = async (userId: any, workspaceId: any, environment: any) => {
|
||||
const membershipForWorkspace = await Membership.findOne({ workspace: workspaceId, user: userId })
|
||||
if (!membershipForWorkspace) {
|
||||
return true
|
||||
}
|
||||
|
||||
const deniedMembershipPermissions = membershipForWorkspace.deniedPermissions;
|
||||
const isWriteDisallowed = _.some(deniedMembershipPermissions, { environmentSlug: environment, ability: ABILITY_WRITE });
|
||||
const isReadBlocked = _.some(deniedMembershipPermissions, { environmentSlug: environment, ability: ABILITY_READ });
|
||||
|
||||
if (isReadBlocked && isWriteDisallowed) {
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
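Taken together, userHasWorkspaceAccess, userHasWriteOnlyAbility, and userHasNoAbility partition a member's access to an environment into readable, write-only, and no access. A minimal sketch of how a caller such as getSecrets can combine them (helper names and the ABILITY_READ constant are reused from this file; the error is simplified to a plain Error):

const classifyAccess = async (userId: any, workspaceId: any, environment: any) => {
  if (await userHasWorkspaceAccess(userId, workspaceId, environment, ABILITY_READ)) {
    return 'read'; // read is not denied for this environment
  }
  if (await userHasWriteOnlyAbility(userId, workspaceId, environment)) {
    return 'write-only'; // read denied but write allowed: return only key ciphertexts
  }
  if (await userHasNoAbility(userId, workspaceId, environment)) {
    // both read and write denied, or no membership at all
    throw new Error('You do not have the necessary permission(s) to perform this action');
  }
  return 'read'; // fallback, not reachable with the checks above
};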
@ -5,22 +5,19 @@ import {
|
||||
} from '../../variables';
|
||||
|
||||
export interface ISecretVersion {
|
||||
_id: Types.ObjectId;
|
||||
secret: Types.ObjectId;
|
||||
version: number;
|
||||
workspace: Types.ObjectId; // new
|
||||
type: string; // new
|
||||
user: Types.ObjectId; // new
|
||||
user?: Types.ObjectId; // new
|
||||
environment: string; // new
|
||||
isDeleted: boolean;
|
||||
secretKeyCiphertext: string;
|
||||
secretKeyIV: string;
|
||||
secretKeyTag: string;
|
||||
secretKeyHash: string;
|
||||
secretValueCiphertext: string;
|
||||
secretValueIV: string;
|
||||
secretValueTag: string;
|
||||
secretValueHash: string;
|
||||
tags?: string[];
|
||||
}
|
||||
|
||||
@ -72,9 +69,6 @@ const secretVersionSchema = new Schema<ISecretVersion>(
|
||||
type: String, // symmetric
|
||||
required: true
|
||||
},
|
||||
secretKeyHash: {
|
||||
type: String
|
||||
},
|
||||
secretValueCiphertext: {
|
||||
type: String,
|
||||
required: true
|
||||
@ -87,9 +81,6 @@ const secretVersionSchema = new Schema<ISecretVersion>(
|
||||
type: String, // symmetric
|
||||
required: true
|
||||
},
|
||||
secretValueHash: {
|
||||
type: String
|
||||
},
|
||||
tags: {
|
||||
ref: 'Tag',
|
||||
type: [Schema.Types.ObjectId],
|
||||
|
@ -211,7 +211,7 @@ const getAuthAPIKeyPayload = async ({
|
||||
* @return {String} obj.token - issued JWT token
|
||||
* @return {String} obj.refreshToken - issued refresh token
|
||||
*/
|
||||
const issueTokens = async ({ userId }: { userId: string }) => {
|
||||
const issueAuthTokens = async ({ userId }: { userId: string }) => {
|
||||
let token: string;
|
||||
let refreshToken: string;
|
||||
try {
|
||||
@ -298,6 +298,6 @@ export {
|
||||
getAuthSTDPayload,
|
||||
getAuthAPIKeyPayload,
|
||||
createToken,
|
||||
issueTokens,
|
||||
issueAuthTokens,
|
||||
clearTokens
|
||||
};
|
||||
|
@ -94,6 +94,7 @@ const handleOAuthExchangeHelper = async ({
|
||||
// set integration auth access token
|
||||
await setIntegrationAuthAccessHelper({
|
||||
integrationAuthId: integrationAuth._id.toString(),
|
||||
accessId: null,
|
||||
accessToken: res.accessToken,
|
||||
accessExpiresAt: res.accessExpiresAt
|
||||
});
|
||||
@ -138,7 +139,7 @@ const syncIntegrationsHelper = async ({
|
||||
if (!integrationAuth) throw new Error('Failed to find integration auth');
|
||||
|
||||
// get integration auth access token
|
||||
const accessToken = await getIntegrationAuthAccessHelper({
|
||||
const access = await getIntegrationAuthAccessHelper({
|
||||
integrationAuthId: integration.integrationAuth.toString()
|
||||
});
|
||||
|
||||
@ -147,7 +148,8 @@ const syncIntegrationsHelper = async ({
|
||||
integration,
|
||||
integrationAuth,
|
||||
secrets,
|
||||
accessToken
|
||||
accessId: access.accessId,
|
||||
accessToken: access.accessToken
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
@ -203,12 +205,12 @@ const syncIntegrationsHelper = async ({
|
||||
* @returns {String} accessToken - decrypted access token
|
||||
*/
|
||||
const getIntegrationAuthAccessHelper = async ({ integrationAuthId }: { integrationAuthId: string }) => {
|
||||
let accessId;
|
||||
let accessToken;
|
||||
|
||||
try {
|
||||
const integrationAuth = await IntegrationAuth
|
||||
.findById(integrationAuthId)
|
||||
.select('workspace integration +accessCiphertext +accessIV +accessTag +accessExpiresAt + refreshCiphertext');
|
||||
.select('workspace integration +accessCiphertext +accessIV +accessTag +accessExpiresAt + refreshCiphertext +accessIdCiphertext +accessIdIV +accessIdTag');
|
||||
|
||||
if (!integrationAuth) throw UnauthorizedRequestError({message: 'Failed to locate Integration Authentication credentials'});
|
||||
|
||||
@ -232,6 +234,15 @@ const getIntegrationAuthAccessHelper = async ({ integrationAuthId }: { integrati
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (integrationAuth?.accessIdCiphertext && integrationAuth?.accessIdIV && integrationAuth?.accessIdTag) {
|
||||
accessId = await BotService.decryptSymmetric({
|
||||
workspaceId: integrationAuth.workspace.toString(),
|
||||
ciphertext: integrationAuth.accessIdCiphertext as string,
|
||||
iv: integrationAuth.accessIdIV as string,
|
||||
tag: integrationAuth.accessIdTag as string
|
||||
});
|
||||
}
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
@ -242,7 +253,10 @@ const getIntegrationAuthAccessHelper = async ({ integrationAuthId }: { integrati
|
||||
throw new Error('Failed to get integration access token');
|
||||
}
|
||||
|
||||
return accessToken;
|
||||
return ({
|
||||
accessId,
|
||||
accessToken
|
||||
});
|
||||
}
|
||||
|
||||
/**
@@ -292,9 +306,9 @@ const setIntegrationAuthRefreshHelper = async ({
}

/**
 * Encrypt access token [accessToken] using the bot's copy
 * of the workspace key for workspace belonging to integration auth
 * with id [integrationAuthId] and store it along with [accessExpiresAt]
 * Encrypt access token [accessToken] and (optionally) access id [accessId]
 * using the bot's copy of the workspace key for workspace belonging to
 * integration auth with id [integrationAuthId] and store it along with [accessExpiresAt]
 * @param {Object} obj
 * @param {String} obj.integrationAuthId - id of integration auth
 * @param {String} obj.accessToken - access token
@@ -302,10 +316,12 @@ const setIntegrationAuthRefreshHelper = async ({
 */
const setIntegrationAuthAccessHelper = async ({
  integrationAuthId,
  accessId,
  accessToken,
  accessExpiresAt
}: {
  integrationAuthId: string;
  accessId: string | null;
  accessToken: string;
  accessExpiresAt: Date | undefined;
}) => {
@@ -315,17 +331,28 @@ const setIntegrationAuthAccessHelper = async ({

    if (!integrationAuth) throw new Error('Failed to find integration auth');

    const obj = await BotService.encryptSymmetric({
    const encryptedAccessTokenObj = await BotService.encryptSymmetric({
      workspaceId: integrationAuth.workspace.toString(),
      plaintext: accessToken
    });

    let encryptedAccessIdObj;
    if (accessId) {
      encryptedAccessIdObj = await BotService.encryptSymmetric({
        workspaceId: integrationAuth.workspace.toString(),
        plaintext: accessId
      });
    }

    integrationAuth = await IntegrationAuth.findOneAndUpdate({
      _id: integrationAuthId
    }, {
      accessCiphertext: obj.ciphertext,
      accessIV: obj.iv,
      accessTag: obj.tag,
      accessIdCiphertext: encryptedAccessIdObj?.ciphertext ?? undefined,
      accessIdIV: encryptedAccessIdObj?.iv ?? undefined,
      accessIdTag: encryptedAccessIdObj?.tag ?? undefined,
      accessCiphertext: encryptedAccessTokenObj.ciphertext,
      accessIV: encryptedAccessTokenObj.iv,
      accessTag: encryptedAccessTokenObj.tag,
      accessExpiresAt
    }, {
      new: true
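As a reading aid for the two helpers above: setIntegrationAuthAccessHelper now accepts an optional accessId next to accessToken, and getIntegrationAuthAccessHelper hands both back as a pair. A minimal sketch of the updated call-site pattern follows, assuming the IntegrationService wrapper used later in this diff; the surrounding function name is illustrative only, not part of the change set.

import { IntegrationService } from '../services';

// Hypothetical caller: destructure the pair instead of treating the result as a bare string.
const loadIntegrationCredentials = async (integrationAuthId: string) => {
  const { accessId, accessToken } = await IntegrationService.getIntegrationAuthAccess({
    integrationAuthId
  });

  // accessId is only defined for providers that store an id/secret pair;
  // accessToken is always returned.
  return { accessId, accessToken };
};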
@@ -1,12 +1,11 @@
import * as Sentry from '@sentry/node';
import crypto from 'crypto';
import { Token, IToken, IUser } from '../models';
import { IUser } from '../models';
import { createOrganization } from './organization';
import { addMembershipsOrg } from './membershipOrg';
import { createWorkspace } from './workspace';
import { addMemberships } from './membership';
import { OWNER, ADMIN, ACCEPTED } from '../variables';
import { OWNER, ACCEPTED } from '../variables';
import { sendMail } from '../helpers/nodemailer';
import { TokenService } from '../services';
import { TOKEN_EMAIL_CONFIRMATION } from '../variables';

/**
 * Send magic link to verify email to [email]
@@ -14,21 +13,13 @@ import { sendMail } from '../helpers/nodemailer';
 * @param {Object} obj
 * @param {String} obj.email - email
 * @returns {Boolean} success - whether or not operation was successful
 *
 */
const sendEmailVerification = async ({ email }: { email: string }) => {
  try {
    const token = String(crypto.randomInt(Math.pow(10, 5), Math.pow(10, 6) - 1));

    await Token.findOneAndUpdate(
      { email },
      {
        email,
        token,
        createdAt: new Date()
      },
      { upsert: true, new: true }
    );
    const token = await TokenService.createToken({
      type: TOKEN_EMAIL_CONFIRMATION,
      email
    });

    // send mail
    await sendMail({
@@ -62,12 +53,11 @@ const checkEmailVerification = async ({
  code: string;
}) => {
  try {
    const token = await Token.findOneAndDelete({
    await TokenService.validateToken({
      type: TOKEN_EMAIL_CONFIRMATION,
      email,
      token: code
    });

    if (!token) throw new Error('Failed to find email verification token');
  } catch (err) {
    Sentry.setUser(null);
    Sentry.captureException(err);
@@ -103,18 +93,6 @@ const initializeDefaultOrg = async ({
      roles: [OWNER],
      statuses: [ACCEPTED]
    });

    // initialize a default workspace inside the new organization
    const workspace = await createWorkspace({
      name: `Example Project`,
      organizationId: organization._id.toString()
    });

    await addMemberships({
      userIds: [user._id.toString()],
      workspaceId: workspace._id.toString(),
      roles: [ADMIN]
    });
  } catch (err) {
    throw new Error(`Failed to initialize default organization and workspace [err=${err}]`);
  }

backend/src/helpers/token.ts (new file, 217 lines)
@@ -0,0 +1,217 @@
import * as Sentry from '@sentry/node';
import { Types } from 'mongoose';
import { TokenData } from '../models';
import crypto from 'crypto';
import bcrypt from 'bcrypt';
import {
  TOKEN_EMAIL_CONFIRMATION,
  TOKEN_EMAIL_MFA,
  TOKEN_EMAIL_ORG_INVITATION,
  TOKEN_EMAIL_PASSWORD_RESET
} from '../variables';
import {
  SALT_ROUNDS
} from '../config';
import { UnauthorizedRequestError } from '../utils/errors';

/**
 * Create and store a token in the database for purpose [type]
 * @param {Object} obj
 * @param {String} obj.type
 * @param {String} obj.email
 * @param {String} obj.phoneNumber
 * @param {Types.ObjectId} obj.organizationId
 * @returns {String} token - the created token
 */
const createTokenHelper = async ({
  type,
  email,
  phoneNumber,
  organizationId
}: {
  type: 'emailConfirmation' | 'emailMfa' | 'organizationInvitation' | 'passwordReset';
  email?: string;
  phoneNumber?: string;
  organizationId?: Types.ObjectId
}) => {
  let token, expiresAt, triesLeft;
  try {
    // generate random token based on specified token use-case
    // type [type]
    switch (type) {
      case TOKEN_EMAIL_CONFIRMATION:
        // generate random 6-digit code
        token = String(crypto.randomInt(Math.pow(10, 5), Math.pow(10, 6) - 1));
        expiresAt = new Date((new Date()).getTime() + 86400000);
        break;
      case TOKEN_EMAIL_MFA:
        // generate random 6-digit code
        token = String(crypto.randomInt(Math.pow(10, 5), Math.pow(10, 6) - 1));
        triesLeft = 5;
        expiresAt = new Date((new Date()).getTime() + 300000);
        break;
      case TOKEN_EMAIL_ORG_INVITATION:
        // generate random hex
        token = crypto.randomBytes(16).toString('hex');
        expiresAt = new Date((new Date()).getTime() + 259200000);
        break;
      case TOKEN_EMAIL_PASSWORD_RESET:
        // generate random hex
        token = crypto.randomBytes(16).toString('hex');
        expiresAt = new Date((new Date()).getTime() + 86400000);
        break;
      default:
        token = crypto.randomBytes(16).toString('hex');
        expiresAt = new Date();
        break;
    }

    interface TokenDataQuery {
      type: string;
      email?: string;
      phoneNumber?: string;
      organization?: Types.ObjectId;
    }

    interface TokenDataUpdate {
      type: string;
      email?: string;
      phoneNumber?: string;
      organization?: Types.ObjectId;
      tokenHash: string;
      triesLeft?: number;
      expiresAt: Date;
    }

    const query: TokenDataQuery = { type };
    const update: TokenDataUpdate = {
      type,
      tokenHash: await bcrypt.hash(token, SALT_ROUNDS),
      expiresAt
    }

    if (email) {
      query.email = email;
      update.email = email;
    }
    if (phoneNumber) {
      query.phoneNumber = phoneNumber;
      update.phoneNumber = phoneNumber;
    }
    if (organizationId) {
      query.organization = organizationId
      update.organization = organizationId
    }

    if (triesLeft) {
      update.triesLeft = triesLeft;
    }

    await TokenData.findOneAndUpdate(
      query,
      update,
      {
        new: true,
        upsert: true
      }
    );
  } catch (err) {
    Sentry.setUser(null);
    Sentry.captureException(err);
    throw new Error(
      "Failed to create token"
    );
  }

  return token;
}

/**
 *
 * @param {Object} obj
 * @param {String} obj.email - email associated with the token
 * @param {String} obj.token - value of the token
 */
const validateTokenHelper = async ({
  type,
  email,
  phoneNumber,
  organizationId,
  token
}: {
  type: 'emailConfirmation' | 'emailMfa' | 'organizationInvitation' | 'passwordReset';
  email?: string;
  phoneNumber?: string;
  organizationId?: Types.ObjectId;
  token: string;
}) => {
  interface Query {
    type: string;
    email?: string;
    phoneNumber?: string;
    organization?: Types.ObjectId;
  }

  const query: Query = { type };

  if (email) { query.email = email; }
  if (phoneNumber) { query.phoneNumber = phoneNumber; }
  if (organizationId) { query.organization = organizationId; }

  const tokenData = await TokenData.findOne(query).select('+tokenHash');

  if (!tokenData) throw new Error('Failed to find token to validate');

  if (tokenData.expiresAt < new Date()) {
    // case: token expired
    await TokenData.findByIdAndDelete(tokenData._id);
    throw UnauthorizedRequestError({
      message: 'MFA session expired. Please log in again',
      context: {
        code: 'mfa_expired'
      }
    });
  }

  const isValid = await bcrypt.compare(token, tokenData.tokenHash);
  if (!isValid) {
    // case: token is not valid
    if (tokenData?.triesLeft !== undefined) {
      // case: token has a try-limit
      if (tokenData.triesLeft === 1) {
        // case: token is out of tries
        await TokenData.findByIdAndDelete(tokenData._id);
      } else {
        // case: token has more than 1 try left
        await TokenData.findByIdAndUpdate(tokenData._id, {
          triesLeft: tokenData.triesLeft - 1
        }, {
          new: true
        });
      }

      throw UnauthorizedRequestError({
        message: 'MFA code is invalid',
        context: {
          code: 'mfa_invalid',
          triesLeft: tokenData.triesLeft - 1
        }
      });
    }

    throw UnauthorizedRequestError({
      message: 'MFA code is invalid',
      context: {
        code: 'mfa_invalid'
      }
    });
  }

  // case: token is valid
  await TokenData.findByIdAndDelete(tokenData._id);
}

export {
  createTokenHelper,
  validateTokenHelper
}
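A brief usage sketch of the helpers above as they are consumed through TokenService elsewhere in this diff (createToken at signup, validateToken when the code is checked). The wrapper function names below are illustrative assumptions; the createToken/validateToken call shapes match the signup helper changes.

import { TokenService } from '../services';
import { TOKEN_EMAIL_CONFIRMATION } from '../variables';

// Issue a one-time email confirmation code; for this type the helper stores a
// bcrypt hash with a 24-hour expiry and returns the 6-digit code to be mailed out.
const issueEmailConfirmationCode = async (email: string) => {
  const token = await TokenService.createToken({
    type: TOKEN_EMAIL_CONFIRMATION,
    email
  });
  return token;
};

// Validate the code the user typed in. validateToken throws an
// UnauthorizedRequestError on expiry or mismatch and deletes the stored token
// once it is consumed, so a resolved await means the code matched.
const confirmEmailCode = async (email: string, code: string) => {
  await TokenService.validateToken({
    type: TOKEN_EMAIL_CONFIRMATION,
    email,
    token: code
  });
};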
|
@ -1,5 +1,6 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { User, IUser } from '../models';
|
||||
import { IUser, User } from '../models';
|
||||
import { sendMail } from './nodemailer';
|
||||
|
||||
/**
|
||||
* Initialize a user under email [email]
|
||||
@ -28,10 +29,14 @@ const setupAccount = async ({ email }: { email: string }) => {
|
||||
* @param {String} obj.userId - id of user to finish setting up
|
||||
* @param {String} obj.firstName - first name of user
|
||||
* @param {String} obj.lastName - last name of user
|
||||
* @param {Number} obj.encryptionVersion - version of auth encryption scheme used
|
||||
* @param {String} obj.protectedKey - protected key in encryption version 2
|
||||
* @param {String} obj.protectedKeyIV - IV of protected key in encryption version 2
|
||||
* @param {String} obj.protectedKeyTag - tag of protected key in encryption version 2
|
||||
* @param {String} obj.publicKey - publickey of user
|
||||
* @param {String} obj.encryptedPrivateKey - (encrypted) private key of user
|
||||
* @param {String} obj.iv - iv for (encrypted) private key of user
|
||||
* @param {String} obj.tag - tag for (encrypted) private key of user
|
||||
* @param {String} obj.encryptedPrivateKeyIV - iv for (encrypted) private key of user
|
||||
* @param {String} obj.encryptedPrivateKeyTag - tag for (encrypted) private key of user
|
||||
* @param {String} obj.salt - salt for auth SRP
|
||||
* @param {String} obj.verifier - verifier for auth SRP
|
||||
* @returns {Object} user - the completed user
|
||||
@ -40,20 +45,28 @@ const completeAccount = async ({
|
||||
userId,
|
||||
firstName,
|
||||
lastName,
|
||||
encryptionVersion,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
publicKey,
|
||||
encryptedPrivateKey,
|
||||
iv,
|
||||
tag,
|
||||
encryptedPrivateKeyIV,
|
||||
encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier
|
||||
}: {
|
||||
userId: string;
|
||||
firstName: string;
|
||||
lastName: string;
|
||||
encryptionVersion: number;
|
||||
protectedKey: string;
|
||||
protectedKeyIV: string;
|
||||
protectedKeyTag: string;
|
||||
publicKey: string;
|
||||
encryptedPrivateKey: string;
|
||||
iv: string;
|
||||
tag: string;
|
||||
encryptedPrivateKeyIV: string;
|
||||
encryptedPrivateKeyTag: string;
|
||||
salt: string;
|
||||
verifier: string;
|
||||
}) => {
|
||||
@ -67,10 +80,14 @@ const completeAccount = async ({
|
||||
{
|
||||
firstName,
|
||||
lastName,
|
||||
encryptionVersion,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
publicKey,
|
||||
encryptedPrivateKey,
|
||||
iv,
|
||||
tag,
|
||||
iv: encryptedPrivateKeyIV,
|
||||
tag: encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier
|
||||
},
|
||||
@ -85,4 +102,48 @@ const completeAccount = async ({
|
||||
return user;
|
||||
};
|
||||
|
||||
export { setupAccount, completeAccount };
|
||||
/**
|
||||
* Check if device with ip [ip] and user-agent [userAgent] has been seen for user [user].
|
||||
* If the device is unseen, then notify the user of the new device
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.ip - login ip address
|
||||
* @param {String} obj.userAgent - login user-agent
|
||||
*/
|
||||
const checkUserDevice = async ({
|
||||
user,
|
||||
ip,
|
||||
userAgent
|
||||
}: {
|
||||
user: IUser;
|
||||
ip: string;
|
||||
userAgent: string;
|
||||
}) => {
|
||||
const isDeviceSeen = user.devices.some((device) => device.ip === ip && device.userAgent === userAgent);
|
||||
|
||||
if (!isDeviceSeen) {
|
||||
// case: unseen login ip detected for user
|
||||
// -> notify user about the sign-in from new ip
|
||||
|
||||
user.devices = user.devices.concat([{
|
||||
ip: String(ip),
|
||||
userAgent
|
||||
}]);
|
||||
|
||||
await user.save();
|
||||
|
||||
// send MFA code [code] to [email]
|
||||
await sendMail({
|
||||
template: 'newDevice.handlebars',
|
||||
subjectLine: `Successful login from new device`,
|
||||
recipients: [user.email],
|
||||
substitutions: {
|
||||
email: user.email,
|
||||
timestamp: new Date().toString(),
|
||||
ip,
|
||||
userAgent
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export { setupAccount, completeAccount, checkUserDevice };
|
||||
|
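A sketch of how checkUserDevice might sit in a login flow. Only the checkUserDevice({ user, ip, userAgent }) call comes from this diff; the surrounding handler is an assumption for illustration.

import { Request, Response } from 'express';
import { User } from '../models';
import { checkUserDevice } from '../helpers/user';

// Hypothetical post-authentication step: record the device and notify the
// user by email if this ip/user-agent pair has not been seen before.
const afterSuccessfulLogin = async (req: Request, res: Response) => {
  const user = await User.findOne({ email: req.body.email });
  if (!user) return res.status(401).send({ message: 'Failed to authenticate user' });

  await checkUserDevice({
    user,
    ip: req.ip,
    userAgent: req.headers['user-agent'] ?? ''
  });

  return res.status(200).send({ message: 'Login successful' });
};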
@ -1,21 +1,27 @@
|
||||
import axios from 'axios';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Octokit } from '@octokit/rest';
|
||||
import { IIntegrationAuth } from '../models';
|
||||
import * as Sentry from "@sentry/node";
|
||||
import { Octokit } from "@octokit/rest";
|
||||
import { IIntegrationAuth } from "../models";
|
||||
import request from '../config/request';
|
||||
import {
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_TRAVISCI,
|
||||
INTEGRATION_HEROKU_API_URL,
|
||||
INTEGRATION_VERCEL_API_URL,
|
||||
INTEGRATION_NETLIFY_API_URL,
|
||||
INTEGRATION_RENDER_API_URL,
|
||||
INTEGRATION_FLYIO_API_URL
|
||||
} from '../variables';
|
||||
INTEGRATION_FLYIO_API_URL,
|
||||
INTEGRATION_CIRCLECI_API_URL,
|
||||
INTEGRATION_TRAVISCI_API_URL,
|
||||
} from "../variables";
|
||||
|
||||
/**
|
||||
* Return list of names of apps for integration named [integration]
|
||||
@ -27,7 +33,7 @@ import {
|
||||
*/
|
||||
const getApps = async ({
|
||||
integrationAuth,
|
||||
accessToken
|
||||
accessToken,
|
||||
}: {
|
||||
integrationAuth: IIntegrationAuth;
|
||||
accessToken: string;
|
||||
@ -38,64 +44,69 @@ const getApps = async ({
|
||||
owner?: string;
|
||||
}
|
||||
|
||||
let apps: App[];
|
||||
let apps: App[] = [];
|
||||
try {
|
||||
switch (integrationAuth.integration) {
|
||||
case INTEGRATION_AZURE_KEY_VAULT:
|
||||
apps = await getAppsAzureKeyVault({
|
||||
accessToken
|
||||
});
|
||||
break;
|
||||
apps = [];
|
||||
break;
|
||||
case INTEGRATION_AWS_PARAMETER_STORE:
|
||||
apps = [];
|
||||
break;
|
||||
case INTEGRATION_AWS_SECRET_MANAGER:
|
||||
apps = [];
|
||||
break;
|
||||
case INTEGRATION_HEROKU:
|
||||
apps = await getAppsHeroku({
|
||||
accessToken
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_VERCEL:
|
||||
apps = await getAppsVercel({
|
||||
integrationAuth,
|
||||
accessToken
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_NETLIFY:
|
||||
apps = await getAppsNetlify({
|
||||
accessToken
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_GITHUB:
|
||||
apps = await getAppsGithub({
|
||||
accessToken
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_RENDER:
|
||||
apps = await getAppsRender({
|
||||
accessToken
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_FLYIO:
|
||||
apps = await getAppsFlyio({
|
||||
accessToken
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_CIRCLECI:
|
||||
apps = await getAppsCircleCI({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_TRAVISCI:
|
||||
apps = await getAppsTravisCI({
|
||||
accessToken,
|
||||
})
|
||||
break;
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get integration apps');
|
||||
throw new Error("Failed to get integration apps");
|
||||
}
|
||||
|
||||
return apps;
|
||||
};
|
||||
|
||||
const getAppsAzureKeyVault = async ({
|
||||
accessToken
|
||||
}: {
|
||||
accessToken: string;
|
||||
}) => {
|
||||
// TODO
|
||||
return [];
|
||||
}
|
||||
|
||||
/**
|
||||
* Return list of apps for Heroku integration
|
||||
* @param {Object} obj
|
||||
@ -107,21 +118,21 @@ const getAppsHeroku = async ({ accessToken }: { accessToken: string }) => {
|
||||
let apps;
|
||||
try {
|
||||
const res = (
|
||||
await axios.get(`${INTEGRATION_HEROKU_API_URL}/apps`, {
|
||||
await request.get(`${INTEGRATION_HEROKU_API_URL}/apps`, {
|
||||
headers: {
|
||||
Accept: 'application/vnd.heroku+json; version=3',
|
||||
Authorization: `Bearer ${accessToken}`
|
||||
}
|
||||
Accept: "application/vnd.heroku+json; version=3",
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
).data;
|
||||
|
||||
apps = res.map((a: any) => ({
|
||||
name: a.name
|
||||
name: a.name,
|
||||
}));
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get Heroku integration apps');
|
||||
throw new Error("Failed to get Heroku integration apps");
|
||||
}
|
||||
|
||||
return apps;
|
||||
@ -134,35 +145,38 @@ const getAppsHeroku = async ({ accessToken }: { accessToken: string }) => {
|
||||
* @returns {Object[]} apps - names of Vercel apps
|
||||
* @returns {String} apps.name - name of Vercel app
|
||||
*/
|
||||
const getAppsVercel = async ({
|
||||
const getAppsVercel = async ({
|
||||
integrationAuth,
|
||||
accessToken
|
||||
}: {
|
||||
accessToken,
|
||||
}: {
|
||||
integrationAuth: IIntegrationAuth;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
let apps;
|
||||
try {
|
||||
const res = (
|
||||
await axios.get(`${INTEGRATION_VERCEL_API_URL}/v9/projects`, {
|
||||
await request.get(`${INTEGRATION_VERCEL_API_URL}/v9/projects`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
'Accept-Encoding': 'application/json'
|
||||
},
|
||||
...( integrationAuth?.teamId ? {
|
||||
params: {
|
||||
teamId: integrationAuth.teamId
|
||||
}
|
||||
} : {})
|
||||
...(integrationAuth?.teamId
|
||||
? {
|
||||
params: {
|
||||
teamId: integrationAuth.teamId,
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
})
|
||||
).data;
|
||||
|
||||
|
||||
apps = res.projects.map((a: any) => ({
|
||||
name: a.name
|
||||
name: a.name,
|
||||
}));
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get Vercel integration apps');
|
||||
throw new Error("Failed to get Vercel integration apps");
|
||||
}
|
||||
|
||||
return apps;
|
||||
@ -175,29 +189,26 @@ const getAppsVercel = async ({
|
||||
* @returns {Object[]} apps - names of Netlify sites
|
||||
* @returns {String} apps.name - name of Netlify site
|
||||
*/
|
||||
const getAppsNetlify = async ({
|
||||
accessToken
|
||||
}: {
|
||||
accessToken: string;
|
||||
}) => {
|
||||
const getAppsNetlify = async ({ accessToken }: { accessToken: string }) => {
|
||||
let apps;
|
||||
try {
|
||||
const res = (
|
||||
await axios.get(`${INTEGRATION_NETLIFY_API_URL}/api/v1/sites`, {
|
||||
await request.get(`${INTEGRATION_NETLIFY_API_URL}/api/v1/sites`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
'Accept-Encoding': 'application/json'
|
||||
}
|
||||
})
|
||||
).data;
|
||||
|
||||
apps = res.map((a: any) => ({
|
||||
name: a.name,
|
||||
appId: a.site_id
|
||||
appId: a.site_id,
|
||||
}));
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get Netlify integration apps');
|
||||
throw new Error("Failed to get Netlify integration apps");
|
||||
}
|
||||
|
||||
return apps;
|
||||
@ -210,35 +221,32 @@ const getAppsNetlify = async ({
|
||||
* @returns {Object[]} apps - names of Github repos
|
||||
* @returns {String} apps.name - name of Github repo
|
||||
*/
|
||||
const getAppsGithub = async ({
|
||||
accessToken
|
||||
}: {
|
||||
accessToken: string;
|
||||
}) => {
|
||||
const getAppsGithub = async ({ accessToken }: { accessToken: string }) => {
|
||||
let apps;
|
||||
try {
|
||||
const octokit = new Octokit({
|
||||
auth: accessToken
|
||||
auth: accessToken,
|
||||
});
|
||||
|
||||
const repos = (await octokit.request(
|
||||
'GET /user/repos{?visibility,affiliation,type,sort,direction,per_page,page,since,before}',
|
||||
{
|
||||
per_page: 100
|
||||
}
|
||||
)).data;
|
||||
const repos = (
|
||||
await octokit.request(
|
||||
"GET /user/repos{?visibility,affiliation,type,sort,direction,per_page,page,since,before}",
|
||||
{
|
||||
per_page: 100,
|
||||
}
|
||||
)
|
||||
).data;
|
||||
|
||||
apps = repos
|
||||
.filter((a:any) => a.permissions.admin === true)
|
||||
.filter((a: any) => a.permissions.admin === true)
|
||||
.map((a: any) => ({
|
||||
name: a.name,
|
||||
owner: a.owner.login
|
||||
})
|
||||
);
|
||||
name: a.name,
|
||||
owner: a.owner.login,
|
||||
}));
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get Github repos');
|
||||
throw new Error("Failed to get Github repos");
|
||||
}
|
||||
|
||||
return apps;
|
||||
@ -252,20 +260,16 @@ const getAppsGithub = async ({
|
||||
* @returns {String} apps.name - name of Render service
|
||||
* @returns {String} apps.appId - id of Render service
|
||||
*/
|
||||
const getAppsRender = async ({
|
||||
accessToken
|
||||
}: {
|
||||
accessToken: string;
|
||||
}) => {
|
||||
const getAppsRender = async ({ accessToken }: { accessToken: string }) => {
|
||||
let apps: any;
|
||||
try {
|
||||
const res = (
|
||||
await axios.get(`${INTEGRATION_RENDER_API_URL}/v1/services`, {
|
||||
await request.get(`${INTEGRATION_RENDER_API_URL}/v1/services`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
Accept: 'application/json',
|
||||
'Accept-Encoding': 'application/json'
|
||||
}
|
||||
'Accept-Encoding': 'application/json',
|
||||
},
|
||||
})
|
||||
).data;
|
||||
|
||||
@ -278,11 +282,11 @@ const getAppsRender = async ({
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get Render services');
|
||||
throw new Error("Failed to get Render services");
|
||||
}
|
||||
|
||||
|
||||
return apps;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Return list of apps for Fly.io integration
|
||||
@ -291,11 +295,7 @@ const getAppsRender = async ({
|
||||
* @returns {Object[]} apps - names and ids of Fly.io apps
|
||||
* @returns {String} apps.name - name of Fly.io apps
|
||||
*/
|
||||
const getAppsFlyio = async ({
|
||||
accessToken
|
||||
}: {
|
||||
accessToken: string;
|
||||
}) => {
|
||||
const getAppsFlyio = async ({ accessToken }: { accessToken: string }) => {
|
||||
let apps;
|
||||
try {
|
||||
const query = `
|
||||
@ -309,31 +309,93 @@ const getAppsFlyio = async ({
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const res = (await axios({
|
||||
url: INTEGRATION_FLYIO_API_URL,
|
||||
method: 'post',
|
||||
headers: {
|
||||
'Authorization': 'Bearer ' + accessToken,
|
||||
'Accept': 'application/json',
|
||||
'Accept-Encoding': 'application/json'
|
||||
|
||||
const res = (await request.post(INTEGRATION_FLYIO_API_URL, {
|
||||
query,
|
||||
variables: {
|
||||
role: null,
|
||||
},
|
||||
}, {
|
||||
headers: {
|
||||
Authorization: "Bearer " + accessToken,
|
||||
'Accept': 'application/json',
|
||||
'Accept-Encoding': 'application/json',
|
||||
},
|
||||
data: {
|
||||
query,
|
||||
variables: {
|
||||
role: null
|
||||
}
|
||||
}
|
||||
})).data.data.apps.nodes;
|
||||
|
||||
apps = res
|
||||
.map((a: any) => ({
|
||||
name: a.name
|
||||
}));
|
||||
|
||||
apps = res.map((a: any) => ({
|
||||
name: a.name,
|
||||
}));
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get Fly.io apps');
|
||||
throw new Error("Failed to get Fly.io apps");
|
||||
}
|
||||
|
||||
return apps;
|
||||
};
|
||||
|
||||
/**
|
||||
* Return list of projects for CircleCI integration
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.accessToken - access token for CircleCI API
|
||||
* @returns {Object[]} apps - names of CircleCI projects
|
||||
* @returns {String} apps.name - name of CircleCI apps
|
||||
*/
|
||||
const getAppsCircleCI = async ({ accessToken }: { accessToken: string }) => {
|
||||
let apps: any;
|
||||
try {
|
||||
const res = (
|
||||
await request.get(
|
||||
`${INTEGRATION_CIRCLECI_API_URL}/v1.1/projects`,
|
||||
{
|
||||
headers: {
|
||||
"Circle-Token": accessToken,
|
||||
"Accept-Encoding": "application/json",
|
||||
},
|
||||
}
|
||||
)
|
||||
).data
|
||||
|
||||
apps = res?.map((a: any) => {
|
||||
return {
|
||||
name: a?.reponame
|
||||
}
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error("Failed to get CircleCI projects");
|
||||
}
|
||||
|
||||
return apps;
|
||||
};
|
||||
|
||||
const getAppsTravisCI = async ({ accessToken }: { accessToken: string }) => {
|
||||
let apps: any;
|
||||
try {
|
||||
const res = (
|
||||
await request.get(
|
||||
`${INTEGRATION_TRAVISCI_API_URL}/repos`,
|
||||
{
|
||||
headers: {
|
||||
"Authorization": `token ${accessToken}`,
|
||||
"Accept-Encoding": "application/json",
|
||||
},
|
||||
}
|
||||
)
|
||||
).data;
|
||||
|
||||
apps = res?.map((a: any) => {
|
||||
return {
|
||||
name: a?.slug?.split("/")[1],
|
||||
appId: a?.id,
|
||||
}
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error("Failed to get TravisCI projects");
|
||||
}
|
||||
|
||||
return apps;
|
||||
|
@ -1,4 +1,4 @@
|
||||
import axios from 'axios';
|
||||
import request from '../config/request';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import {
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
@ -136,27 +136,27 @@ const exchangeCodeAzure = async ({
|
||||
const accessExpiresAt = new Date();
|
||||
let res: ExchangeCodeAzureResponse;
|
||||
try {
|
||||
res = (await axios.post(
|
||||
res = (await request.post(
|
||||
INTEGRATION_AZURE_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
new URLSearchParams({
|
||||
grant_type: 'authorization_code',
|
||||
code: code,
|
||||
scope: 'https://vault.azure.net/.default openid offline_access', // TODO: do we need all these permissions?
|
||||
scope: 'https://vault.azure.net/.default openid offline_access',
|
||||
client_id: CLIENT_ID_AZURE,
|
||||
client_secret: CLIENT_SECRET_AZURE,
|
||||
redirect_uri: `${SITE_URL}/integrations/azure-key-vault/oauth2/callback`
|
||||
} as any)
|
||||
)).data;
|
||||
|
||||
|
||||
accessExpiresAt.setSeconds(
|
||||
accessExpiresAt.getSeconds() + res.expires_in
|
||||
accessExpiresAt.getSeconds() + res.expires_in
|
||||
);
|
||||
} catch (err: any) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed OAuth2 code-token exchange with Azure');
|
||||
}
|
||||
|
||||
|
||||
return ({
|
||||
accessToken: res.access_token,
|
||||
refreshToken: res.refresh_token,
|
||||
@ -175,36 +175,36 @@ const exchangeCodeAzure = async ({
|
||||
* @returns {Date} obj2.accessExpiresAt - date of expiration for access token
|
||||
*/
|
||||
const exchangeCodeHeroku = async ({
|
||||
code
|
||||
code
|
||||
}: {
|
||||
code: string;
|
||||
code: string;
|
||||
}) => {
|
||||
let res: ExchangeCodeHerokuResponse;
|
||||
const accessExpiresAt = new Date();
|
||||
try {
|
||||
res = (await axios.post(
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: 'authorization_code',
|
||||
code: code,
|
||||
client_secret: CLIENT_SECRET_HEROKU
|
||||
} as any)
|
||||
)).data;
|
||||
|
||||
accessExpiresAt.setSeconds(
|
||||
accessExpiresAt.getSeconds() + res.expires_in
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed OAuth2 code-token exchange with Heroku');
|
||||
}
|
||||
|
||||
return ({
|
||||
accessToken: res.access_token,
|
||||
refreshToken: res.refresh_token,
|
||||
accessExpiresAt
|
||||
});
|
||||
let res: ExchangeCodeHerokuResponse;
|
||||
const accessExpiresAt = new Date();
|
||||
try {
|
||||
res = (await request.post(
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: 'authorization_code',
|
||||
code: code,
|
||||
client_secret: CLIENT_SECRET_HEROKU
|
||||
} as any)
|
||||
)).data;
|
||||
|
||||
accessExpiresAt.setSeconds(
|
||||
accessExpiresAt.getSeconds() + res.expires_in
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed OAuth2 code-token exchange with Heroku');
|
||||
}
|
||||
|
||||
return ({
|
||||
accessToken: res.access_token,
|
||||
refreshToken: res.refresh_token,
|
||||
accessExpiresAt
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
@ -221,7 +221,7 @@ const exchangeCodeVercel = async ({ code }: { code: string }) => {
|
||||
let res: ExchangeCodeVercelResponse;
|
||||
try {
|
||||
res = (
|
||||
await axios.post(
|
||||
await request.post(
|
||||
INTEGRATION_VERCEL_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
code: code,
|
||||
@ -234,7 +234,7 @@ const exchangeCodeVercel = async ({ code }: { code: string }) => {
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed OAuth2 code-token exchange with Vercel');
|
||||
throw new Error(`Failed OAuth2 code-token exchange with Vercel [err=${err}]`);
|
||||
}
|
||||
|
||||
return {
|
||||
@ -260,7 +260,7 @@ const exchangeCodeNetlify = async ({ code }: { code: string }) => {
|
||||
let accountId;
|
||||
try {
|
||||
res = (
|
||||
await axios.post(
|
||||
await request.post(
|
||||
INTEGRATION_NETLIFY_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: 'authorization_code',
|
||||
@ -272,14 +272,14 @@ const exchangeCodeNetlify = async ({ code }: { code: string }) => {
|
||||
)
|
||||
).data;
|
||||
|
||||
const res2 = await axios.get('https://api.netlify.com/api/v1/sites', {
|
||||
const res2 = await request.get('https://api.netlify.com/api/v1/sites', {
|
||||
headers: {
|
||||
Authorization: `Bearer ${res.access_token}`
|
||||
}
|
||||
});
|
||||
|
||||
const res3 = (
|
||||
await axios.get('https://api.netlify.com/api/v1/accounts', {
|
||||
await request.get('https://api.netlify.com/api/v1/accounts', {
|
||||
headers: {
|
||||
Authorization: `Bearer ${res.access_token}`
|
||||
}
|
||||
@ -314,7 +314,7 @@ const exchangeCodeGithub = async ({ code }: { code: string }) => {
|
||||
let res: ExchangeCodeGithubResponse;
|
||||
try {
|
||||
res = (
|
||||
await axios.get(INTEGRATION_GITHUB_TOKEN_URL, {
|
||||
await request.get(INTEGRATION_GITHUB_TOKEN_URL, {
|
||||
params: {
|
||||
client_id: CLIENT_ID_GITHUB,
|
||||
client_secret: CLIENT_SECRET_GITHUB,
|
||||
|
@ -1,4 +1,4 @@
|
||||
import axios from 'axios';
|
||||
import request from '../config/request';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { INTEGRATION_AZURE_KEY_VAULT, INTEGRATION_HEROKU } from '../variables';
|
||||
import {
|
||||
@ -71,7 +71,7 @@ const exchangeRefreshAzure = async ({
|
||||
refreshToken: string;
|
||||
}) => {
|
||||
try {
|
||||
const res: RefreshTokenAzureResponse = (await axios.post(
|
||||
const res: RefreshTokenAzureResponse = (await request.post(
|
||||
INTEGRATION_AZURE_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
client_id: CLIENT_ID_AZURE,
|
||||
@ -105,7 +105,7 @@ const exchangeRefreshHeroku = async ({
|
||||
|
||||
let accessToken;
|
||||
try {
|
||||
const res = await axios.post(
|
||||
const res = await request.post(
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: 'refresh_token',
|
||||
|
File diff suppressed because it is too large
@ -1,4 +1,5 @@
|
||||
import requireAuth from './requireAuth';
|
||||
import requireMfaAuth from './requireMfaAuth';
|
||||
import requireBotAuth from './requireBotAuth';
|
||||
import requireSignupAuth from './requireSignupAuth';
|
||||
import requireWorkspaceAuth from './requireWorkspaceAuth';
|
||||
@ -15,6 +16,7 @@ import validateRequest from './validateRequest';
|
||||
|
||||
export {
|
||||
requireAuth,
|
||||
requireMfaAuth,
|
||||
requireBotAuth,
|
||||
requireSignupAuth,
|
||||
requireWorkspaceAuth,
|
||||
|
@ -1,11 +1,12 @@
|
||||
import { ErrorRequestHandler } from "express";
|
||||
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { InternalServerError } from "../utils/errors";
|
||||
import { InternalServerError, UnauthorizedRequestError } from "../utils/errors";
|
||||
import { getLogger } from "../utils/logger";
|
||||
import RequestError, { LogLevel } from "../utils/requestError";
|
||||
import { NODE_ENV } from "../config";
|
||||
|
||||
import { TokenExpiredError } from 'jsonwebtoken';
|
||||
|
||||
export const requestErrorHandler: ErrorRequestHandler = (error: RequestError | Error, req, res, next) => {
|
||||
if (res.headersSent) return next();
|
||||
|
@ -45,9 +45,10 @@ const requireIntegrationAuthorizationAuth = ({
|
||||
|
||||
req.integrationAuth = integrationAuth;
|
||||
if (attachAccessToken) {
|
||||
req.accessToken = await IntegrationService.getIntegrationAuthAccess({
|
||||
const access = await IntegrationService.getIntegrationAuthAccess({
|
||||
integrationAuthId: integrationAuth._id.toString()
|
||||
});
|
||||
req.accessToken = access.accessToken;
|
||||
}
|
||||
|
||||
return next();
|
||||
|
backend/src/middleware/requireMfaAuth.ts (new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
import jwt from 'jsonwebtoken';
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
import { User } from '../models';
|
||||
import { JWT_MFA_SECRET } from '../config';
|
||||
import { BadRequestError, UnauthorizedRequestError } from '../utils/errors';
|
||||
|
||||
declare module 'jsonwebtoken' {
|
||||
export interface UserIDJwtPayload extends jwt.JwtPayload {
|
||||
userId: string;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate if (MFA) JWT temporary token on request is valid (e.g. not expired)
|
||||
* and if there is an associated user.
|
||||
*/
|
||||
const requireMfaAuth = async (
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
) => {
|
||||
// JWT (temporary) authentication middleware for complete signup
|
||||
const [ AUTH_TOKEN_TYPE, AUTH_TOKEN_VALUE ] = <[string, string]>req.headers['authorization']?.split(' ', 2) ?? [null, null]
|
||||
if(AUTH_TOKEN_TYPE === null) return next(BadRequestError({message: `Missing Authorization Header in the request header.`}))
|
||||
if(AUTH_TOKEN_TYPE.toLowerCase() !== 'bearer') return next(BadRequestError({message: `The provided authentication type '${AUTH_TOKEN_TYPE}' is not supported.`}))
|
||||
if(AUTH_TOKEN_VALUE === null) return next(BadRequestError({message: 'Missing Authorization Body in the request header'}))
|
||||
|
||||
const decodedToken = <jwt.UserIDJwtPayload>(
|
||||
jwt.verify(AUTH_TOKEN_VALUE, JWT_MFA_SECRET)
|
||||
);
|
||||
|
||||
const user = await User.findOne({
|
||||
_id: decodedToken.userId
|
||||
}).select('+publicKey');
|
||||
|
||||
if (!user)
|
||||
return next(UnauthorizedRequestError({message: 'Unable to authenticate for User account completion. Try logging in again'}))
|
||||
|
||||
req.user = user;
|
||||
return next();
|
||||
};
|
||||
|
||||
export default requireMfaAuth;
|
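The middleware above only needs to be mounted in front of the route that consumes the temporary MFA token. A hypothetical wiring sketch follows; the route path, body field, and controller name are assumptions for illustration, not part of this diff.

import express from 'express';
import { body } from 'express-validator';
import { requireMfaAuth, validateRequest } from '../../middleware';
import { authController } from '../../controllers/v2';

const router = express.Router();

// verify the emailed MFA code; requireMfaAuth checks the temporary JWT and
// attaches req.user before the controller runs
router.post(
  '/mfa-verify',
  requireMfaAuth,
  body('mfaToken').exists().isString().trim().notEmpty(),
  validateRequest,
  authController.verifyMfaToken
);

export default router;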
@ -10,7 +10,7 @@ import MembershipOrg, { IMembershipOrg } from './membershipOrg';
|
||||
import Organization, { IOrganization } from './organization';
|
||||
import Secret, { ISecret } from './secret';
|
||||
import ServiceToken, { IServiceToken } from './serviceToken';
|
||||
import Token, { IToken } from './token';
|
||||
import TokenData, { ITokenData } from './tokenData';
|
||||
import User, { IUser } from './user';
|
||||
import UserAction, { IUserAction } from './userAction';
|
||||
import Workspace, { IWorkspace } from './workspace';
|
||||
@ -43,8 +43,8 @@ export {
|
||||
ISecret,
|
||||
ServiceToken,
|
||||
IServiceToken,
|
||||
Token,
|
||||
IToken,
|
||||
TokenData,
|
||||
ITokenData,
|
||||
User,
|
||||
IUser,
|
||||
UserAction,
|
||||
|
@ -1,13 +1,17 @@
|
||||
import { Schema, model, Types } from 'mongoose';
|
||||
import { Schema, model, Types } from "mongoose";
|
||||
import {
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO
|
||||
} from '../variables';
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_TRAVISCI,
|
||||
} from "../variables";
|
||||
|
||||
export interface IIntegration {
|
||||
_id: Types.ObjectId;
|
||||
@ -18,69 +22,98 @@ export interface IIntegration {
|
||||
owner: string;
|
||||
targetEnvironment: string;
|
||||
appId: string;
|
||||
integration: 'heroku' | 'vercel' | 'netlify' | 'github' | 'render' | 'flyio' | 'azure-key-vault';
|
||||
path: string;
|
||||
region: string;
|
||||
integration:
|
||||
| 'azure-key-vault'
|
||||
| 'aws-parameter-store'
|
||||
| 'aws-secret-manager'
|
||||
| 'heroku'
|
||||
| 'vercel'
|
||||
| 'netlify'
|
||||
| 'github'
|
||||
| 'render'
|
||||
| 'flyio'
|
||||
| 'circleci'
|
||||
| 'travisci';
|
||||
integrationAuth: Types.ObjectId;
|
||||
}
|
||||
|
||||
const integrationSchema = new Schema<IIntegration>(
|
||||
{
|
||||
workspace: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'Workspace',
|
||||
required: true
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: "Workspace",
|
||||
required: true,
|
||||
},
|
||||
environment: {
|
||||
type: String,
|
||||
required: true
|
||||
required: true,
|
||||
},
|
||||
isActive: {
|
||||
type: Boolean,
|
||||
required: true
|
||||
required: true,
|
||||
},
|
||||
app: {
|
||||
// name of app in provider
|
||||
type: String,
|
||||
default: null
|
||||
default: null,
|
||||
},
|
||||
appId: { // (new)
|
||||
appId: {
|
||||
// (new)
|
||||
// id of app in provider
|
||||
type: String,
|
||||
default: null
|
||||
default: null,
|
||||
},
|
||||
targetEnvironment: { // (new)
|
||||
// target environment
|
||||
targetEnvironment: {
|
||||
// (new)
|
||||
// target environment
|
||||
type: String,
|
||||
default: null
|
||||
default: null,
|
||||
},
|
||||
owner: {
|
||||
// github-specific repo owner-login
|
||||
type: String,
|
||||
default: null,
|
||||
},
|
||||
path: {
|
||||
// aws-parameter-store-specific path
|
||||
type: String,
|
||||
default: null
|
||||
},
|
||||
region: {
|
||||
// aws-parameter-store-specific region
|
||||
type: String,
|
||||
default: null
|
||||
},
|
||||
integration: {
|
||||
type: String,
|
||||
enum: [
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_TRAVISCI,
|
||||
],
|
||||
required: true
|
||||
required: true,
|
||||
},
|
||||
integrationAuth: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'IntegrationAuth',
|
||||
required: true
|
||||
}
|
||||
ref: "IntegrationAuth",
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true
|
||||
timestamps: true,
|
||||
}
|
||||
);
|
||||
|
||||
const Integration = model<IIntegration>('Integration', integrationSchema);
|
||||
const Integration = model<IIntegration>("Integration", integrationSchema);
|
||||
|
||||
export default Integration;
|
||||
|
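A short, hypothetical example of the new AWS Parameter Store fields (path and region) on this model; the concrete values and helper name are placeholders, not taken from the diff.

import { Types } from 'mongoose';
import Integration from './integration';

const createAwsParameterStoreIntegration = async (
  workspaceId: Types.ObjectId,
  integrationAuthId: Types.ObjectId
) => {
  // path and region are only meaningful for the aws-parameter-store provider;
  // they default to null for every other integration type.
  return await Integration.create({
    workspace: workspaceId,
    environment: 'prod',
    isActive: true,
    integration: 'aws-parameter-store',
    integrationAuth: integrationAuthId,
    path: '/example-app/prod/',
    region: 'us-east-1'
  });
};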
@ -1,21 +1,30 @@
|
||||
import { Schema, model, Types } from 'mongoose';
|
||||
import { Schema, model, Types } from "mongoose";
|
||||
import {
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB
|
||||
} from '../variables';
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_TRAVISCI,
|
||||
} from "../variables";
|
||||
|
||||
export interface IIntegrationAuth {
|
||||
_id: Types.ObjectId;
|
||||
workspace: Types.ObjectId;
|
||||
integration: 'heroku' | 'vercel' | 'netlify' | 'github' | 'render' | 'flyio' | 'azure-key-vault';
|
||||
integration: 'heroku' | 'vercel' | 'netlify' | 'github' | 'render' | 'flyio' | 'azure-key-vault' | 'circleci' | 'travisci' | 'aws-parameter-store' | 'aws-secret-manager';
|
||||
teamId: string;
|
||||
accountId: string;
|
||||
refreshCiphertext?: string;
|
||||
refreshIV?: string;
|
||||
refreshTag?: string;
|
||||
accessIdCiphertext?: string; // new
|
||||
accessIdIV?: string; // new
|
||||
accessIdTag?: string; // new
|
||||
accessCiphertext?: string;
|
||||
accessIV?: string;
|
||||
accessTag?: string;
|
||||
@ -25,65 +34,83 @@ export interface IIntegrationAuth {
|
||||
const integrationAuthSchema = new Schema<IIntegrationAuth>(
|
||||
{
|
||||
workspace: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'Workspace',
|
||||
required: true
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: "Workspace",
|
||||
required: true,
|
||||
},
|
||||
integration: {
|
||||
type: String,
|
||||
enum: [
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_TRAVISCI,
|
||||
],
|
||||
required: true
|
||||
required: true,
|
||||
},
|
||||
teamId: {
|
||||
// vercel-specific integration param
|
||||
type: String
|
||||
type: String,
|
||||
},
|
||||
accountId: {
|
||||
// netlify-specific integration param
|
||||
type: String
|
||||
type: String,
|
||||
},
|
||||
refreshCiphertext: {
|
||||
type: String,
|
||||
select: false
|
||||
select: false,
|
||||
},
|
||||
refreshIV: {
|
||||
type: String,
|
||||
select: false
|
||||
select: false,
|
||||
},
|
||||
refreshTag: {
|
||||
type: String,
|
||||
select: false,
|
||||
},
|
||||
accessIdCiphertext: {
|
||||
type: String,
|
||||
select: false
|
||||
},
|
||||
accessIdIV: {
|
||||
type: String,
|
||||
select: false
|
||||
},
|
||||
accessIdTag: {
|
||||
type: String,
|
||||
select: false
|
||||
},
|
||||
accessCiphertext: {
|
||||
type: String,
|
||||
select: false
|
||||
select: false,
|
||||
},
|
||||
accessIV: {
|
||||
type: String,
|
||||
select: false
|
||||
select: false,
|
||||
},
|
||||
accessTag: {
|
||||
type: String,
|
||||
select: false
|
||||
select: false,
|
||||
},
|
||||
accessExpiresAt: {
|
||||
type: Date,
|
||||
select: false
|
||||
}
|
||||
select: false,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true
|
||||
timestamps: true,
|
||||
}
|
||||
);
|
||||
|
||||
const IntegrationAuth = model<IIntegrationAuth>(
|
||||
'IntegrationAuth',
|
||||
"IntegrationAuth",
|
||||
integrationAuthSchema
|
||||
);
|
||||
|
||||
|
@ -109,6 +109,9 @@ const secretSchema = new Schema<ISecret>(
|
||||
}
|
||||
);
|
||||
|
||||
|
||||
secretSchema.index({ tags: 1 }, { background: true })
|
||||
|
||||
const Secret = model<ISecret>('Secret', secretSchema);
|
||||
|
||||
export default Secret;
|
||||
|
backend/src/models/secretApprovalRequest.ts (new file, 83 lines)
@@ -0,0 +1,83 @@
|
||||
import mongoose, { Schema, model } from 'mongoose';
|
||||
import Secret, { ISecret } from './secret';
|
||||
|
||||
interface ISecretApprovalRequest {
|
||||
secret: mongoose.Types.ObjectId;
|
||||
requestedChanges: ISecret;
|
||||
requestedBy: mongoose.Types.ObjectId;
|
||||
approvers: IApprover[];
|
||||
status: ApprovalStatus;
|
||||
timestamp: Date;
|
||||
requestType: RequestType;
|
||||
requestId: string;
|
||||
}
|
||||
|
||||
interface IApprover {
|
||||
userId: mongoose.Types.ObjectId;
|
||||
status: ApprovalStatus;
|
||||
}
|
||||
|
||||
export enum ApprovalStatus {
|
||||
PENDING = 'pending',
|
||||
APPROVED = 'approved',
|
||||
REJECTED = 'rejected'
|
||||
}
|
||||
|
||||
export enum RequestType {
|
||||
UPDATE = 'update',
|
||||
DELETE = 'delete',
|
||||
CREATE = 'create'
|
||||
}
|
||||
|
||||
const approverSchema = new mongoose.Schema({
|
||||
user: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: 'User',
|
||||
required: true
|
||||
},
|
||||
status: {
|
||||
type: String,
|
||||
enum: [ApprovalStatus],
|
||||
default: ApprovalStatus.PENDING
|
||||
}
|
||||
});
|
||||
|
||||
const secretApprovalRequestSchema = new Schema<ISecretApprovalRequest>(
|
||||
{
|
||||
secret: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: 'Secret'
|
||||
},
|
||||
requestedChanges: Secret,
|
||||
requestedBy: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: 'User'
|
||||
},
|
||||
approvers: [approverSchema],
|
||||
status: {
|
||||
type: String,
|
||||
enum: ApprovalStatus,
|
||||
default: ApprovalStatus.PENDING
|
||||
},
|
||||
timestamp: {
|
||||
type: Date,
|
||||
default: Date.now
|
||||
},
|
||||
requestType: {
|
||||
type: String,
|
||||
enum: RequestType,
|
||||
required: true
|
||||
},
|
||||
requestId: {
|
||||
type: String,
|
||||
required: false
|
||||
}
|
||||
},
|
||||
{
|
||||
timestamps: true
|
||||
}
|
||||
);
|
||||
|
||||
const SecretApprovalRequest = model<ISecretApprovalRequest>('SecretApprovalRequest', secretApprovalRequestSchema);
|
||||
|
||||
export default SecretApprovalRequest;
|
@ -5,6 +5,7 @@ export interface IToken {
|
||||
email: string;
|
||||
token: string;
|
||||
createdAt: Date;
|
||||
ttl: number;
|
||||
}
|
||||
|
||||
const tokenSchema = new Schema<IToken>({
|
||||
@ -19,14 +20,13 @@ const tokenSchema = new Schema<IToken>({
|
||||
createdAt: {
|
||||
type: Date,
|
||||
default: Date.now
|
||||
},
|
||||
ttl: {
|
||||
type: Number,
|
||||
}
|
||||
});
|
||||
|
||||
tokenSchema.index({
|
||||
createdAt: 1
|
||||
}, {
|
||||
expireAfterSeconds: parseInt(EMAIL_TOKEN_LIFETIME)
|
||||
});
|
||||
tokenSchema.index({ email: 1 });
|
||||
|
||||
const Token = model<IToken>('Token', tokenSchema);
|
||||
|
||||
|
backend/src/models/tokenData.ts (new file, 55 lines)
@@ -0,0 +1,55 @@
|
||||
import { Schema, Types, model } from 'mongoose';
|
||||
|
||||
export interface ITokenData {
|
||||
type: string;
|
||||
email?: string;
|
||||
phoneNumber?: string;
|
||||
organization?: Types.ObjectId;
|
||||
tokenHash: string;
|
||||
triesLeft?: number;
|
||||
expiresAt: Date;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
}
|
||||
|
||||
const tokenDataSchema = new Schema<ITokenData>({
|
||||
type: {
|
||||
type: String,
|
||||
enum: [
|
||||
'emailConfirmation',
|
||||
'emailMfa',
|
||||
'organizationInvitation',
|
||||
'passwordReset'
|
||||
],
|
||||
required: true
|
||||
},
|
||||
email: {
|
||||
type: String
|
||||
},
|
||||
phoneNumber: {
|
||||
type: String
|
||||
},
|
||||
organization: { // organizationInvitation-specific field
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'Organization'
|
||||
},
|
||||
tokenHash: {
|
||||
type: String,
|
||||
select: false,
|
||||
required: true
|
||||
},
|
||||
triesLeft: {
|
||||
type: Number
|
||||
},
|
||||
expiresAt: {
|
||||
type: Date,
|
||||
expires: 0,
|
||||
required: true
|
||||
}
|
||||
}, {
|
||||
timestamps: true
|
||||
});
|
||||
|
||||
const TokenData = model<ITokenData>('TokenData', tokenDataSchema);
|
||||
|
||||
export default TokenData;
|
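One point worth spelling out about the schema above: expiresAt is declared with expires: 0, so MongoDB's TTL monitor removes each document once its expiresAt has passed. A minimal sketch mirroring the emailMfa branch of createTokenHelper; the wrapper function is an assumption, while SALT_ROUNDS and the field shapes come from this diff.

import bcrypt from 'bcrypt';
import { TokenData } from '../models';
import { SALT_ROUNDS } from '../config';

// store a hashed 5-minute MFA code; the TTL index deletes the document
// automatically once expiresAt has passed, even if it is never validated
const storeEmailMfaCode = async (email: string, code: string) => {
  await TokenData.create({
    type: 'emailMfa',
    email,
    tokenHash: await bcrypt.hash(code, SALT_ROUNDS),
    triesLeft: 5,
    expiresAt: new Date(Date.now() + 5 * 60 * 1000)
  });
};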
@ -1,10 +1,14 @@
|
||||
import { Schema, model, Types } from 'mongoose';
|
||||
import { Schema, model, Types, Document } from 'mongoose';
|
||||
|
||||
export interface IUser {
|
||||
export interface IUser extends Document {
|
||||
_id: Types.ObjectId;
|
||||
email: string;
|
||||
firstName?: string;
|
||||
lastName?: string;
|
||||
encryptionVersion: number;
|
||||
protectedKey: string;
|
||||
protectedKeyIV: string;
|
||||
protectedKeyTag: string;
|
||||
publicKey?: string;
|
||||
encryptedPrivateKey?: string;
|
||||
iv?: string;
|
||||
@ -12,7 +16,12 @@ export interface IUser {
|
||||
salt?: string;
|
||||
verifier?: string;
|
||||
refreshVersion?: number;
|
||||
seenIps: [string];
|
||||
isMfaEnabled: boolean;
|
||||
mfaMethods: boolean;
|
||||
devices: {
|
||||
ip: string;
|
||||
userAgent: string;
|
||||
}[];
|
||||
}
|
||||
|
||||
const userSchema = new Schema<IUser>(
|
||||
@ -27,6 +36,23 @@ const userSchema = new Schema<IUser>(
|
||||
lastName: {
|
||||
type: String
|
||||
},
|
||||
encryptionVersion: {
|
||||
type: Number,
|
||||
select: false,
|
||||
default: 1 // to resolve backward-compatibility issues
|
||||
},
|
||||
protectedKey: { // introduced as part of encryption version 2
|
||||
type: String,
|
||||
select: false
|
||||
},
|
||||
protectedKeyIV: { // introduced as part of encryption version 2
|
||||
type: String,
|
||||
select: false
|
||||
},
|
||||
protectedKeyTag: { // introduced as part of encryption version 2
|
||||
type: String,
|
||||
select: false
|
||||
},
|
||||
publicKey: {
|
||||
type: String,
|
||||
select: false
|
||||
@ -35,11 +61,11 @@ const userSchema = new Schema<IUser>(
|
||||
type: String,
|
||||
select: false
|
||||
},
|
||||
iv: {
|
||||
iv: { // iv of [encryptedPrivateKey]
|
||||
type: String,
|
||||
select: false
|
||||
},
|
||||
tag: {
|
||||
tag: { // tag of [encryptedPrivateKey]
|
||||
type: String,
|
||||
select: false
|
||||
},
|
||||
@ -56,8 +82,21 @@ const userSchema = new Schema<IUser>(
|
||||
default: 0,
|
||||
select: false
|
||||
},
|
||||
seenIps: [String]
|
||||
},
|
||||
isMfaEnabled: {
|
||||
type: Boolean,
|
||||
default: false
|
||||
},
|
||||
mfaMethods: [{
|
||||
type: String
|
||||
}],
|
||||
devices: {
|
||||
type: [{
|
||||
ip: String,
|
||||
userAgent: String
|
||||
}],
|
||||
default: []
|
||||
}
|
||||
},
|
||||
{
|
||||
timestamps: true
|
||||
}
|
||||
|
@ -8,6 +8,7 @@ export interface IWorkspace {
|
||||
name: string;
|
||||
slug: string;
|
||||
}>;
|
||||
autoCapitalization: boolean;
|
||||
}
|
||||
|
||||
const workspaceSchema = new Schema<IWorkspace>({
|
||||
@ -15,6 +16,10 @@ const workspaceSchema = new Schema<IWorkspace>({
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
autoCapitalization: {
|
||||
type: Boolean,
|
||||
default: true,
|
||||
},
|
||||
organization: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'Organization',
|
||||
|
@ -7,7 +7,7 @@ import { authLimiter } from '../../helpers/rateLimiter';
|
||||
|
||||
router.post('/token', validateRequest, authController.getNewToken);
|
||||
|
||||
router.post(
|
||||
router.post( // deprecated (moved to api/v2/auth/login1)
|
||||
'/login1',
|
||||
authLimiter,
|
||||
body('email').exists().trim().notEmpty(),
|
||||
@ -16,7 +16,7 @@ router.post(
|
||||
authController.login1
|
||||
);
|
||||
|
||||
router.post(
|
||||
router.post( // deprecated (moved to api/v2/auth/login2)
|
||||
'/login2',
|
||||
authLimiter,
|
||||
body('email').exists().trim().notEmpty(),
|
||||
|
@ -31,7 +31,7 @@ router.patch(
|
||||
requireBotAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER]
|
||||
}),
|
||||
body('isActive').isBoolean(),
|
||||
body('isActive').exists().isBoolean(),
|
||||
body('botKey'),
|
||||
validateRequest,
|
||||
botController.setBotActiveState
|
||||
|
@ -20,12 +20,14 @@ router.post( // new: add new integration for integration auth
|
||||
location: 'body'
|
||||
}),
|
||||
body('integrationAuthId').exists().isString().trim(),
|
||||
body('app').isString().trim(),
|
||||
body('app').trim(),
|
||||
body('isActive').exists().isBoolean(),
|
||||
body('appId').trim(),
|
||||
body('sourceEnvironment').trim(),
|
||||
body('targetEnvironment').trim(),
|
||||
body('owner').trim(),
|
||||
body('path').trim(),
|
||||
body('region').trim(),
|
||||
validateRequest,
|
||||
integrationController.createIntegration
|
||||
);
|
||||
|
@ -57,6 +57,7 @@ router.post(
|
||||
location: 'body'
|
||||
}),
|
||||
body('workspaceId').exists().trim().notEmpty(),
|
||||
body('accessId').trim(),
|
||||
body('accessToken').exists().trim().notEmpty(),
|
||||
body('integration').exists().trim().notEmpty(),
|
||||
validateRequest,
|
||||
|
@ -156,4 +156,19 @@ router.get(
|
||||
organizationController.getOrganizationSubscriptions
|
||||
);
|
||||
|
||||
router.get(
|
||||
'/:organizationId/workspace-memberships',
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt']
|
||||
}),
|
||||
requireOrganizationAuth({
|
||||
acceptedRoles: [OWNER, ADMIN, MEMBER],
|
||||
acceptedStatuses: [ACCEPTED]
|
||||
}),
|
||||
param('organizationId').exists().trim(),
|
||||
validateRequest,
|
||||
organizationController.getOrganizationMembersAndTheirWorkspaces
|
||||
);
|
||||
|
||||
|
||||
export default router;
|
||||
|
@ -10,7 +10,7 @@ router.post(
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt']
|
||||
}),
|
||||
body('clientPublicKey').exists().trim().notEmpty(),
|
||||
body('clientPublicKey').exists().isString().trim().notEmpty(),
|
||||
validateRequest,
|
||||
passwordController.srp1
|
||||
);
|
||||
@ -22,11 +22,14 @@ router.post(
|
||||
acceptedAuthModes: ['jwt']
|
||||
}),
|
||||
body('clientProof').exists().trim().notEmpty(),
|
||||
body('encryptedPrivateKey').exists().trim().notEmpty().notEmpty(), // private key encrypted under new pwd
|
||||
body('iv').exists().trim().notEmpty(), // new iv for private key
|
||||
body('tag').exists().trim().notEmpty(), // new tag for private key
|
||||
body('salt').exists().trim().notEmpty(), // part of new pwd
|
||||
body('verifier').exists().trim().notEmpty(), // part of new pwd
|
||||
body('protectedKey').exists().isString().trim().notEmpty(),
|
||||
body('protectedKeyIV').exists().isString().trim().notEmpty(),
|
||||
body('protectedKeyTag').exists().isString().trim().notEmpty(),
|
||||
body('encryptedPrivateKey').exists().isString().trim().notEmpty(), // private key encrypted under new pwd
|
||||
body('encryptedPrivateKeyIV').exists().isString().trim().notEmpty(), // new iv for private key
|
||||
body('encryptedPrivateKeyTag').exists().isString().trim().notEmpty(), // new tag for private key
|
||||
body('salt').exists().isString().trim().notEmpty(), // part of new pwd
|
||||
body('verifier').exists().isString().trim().notEmpty(), // part of new pwd
|
||||
validateRequest,
|
||||
passwordController.changePassword
|
||||
);
|
||||
@ -34,7 +37,7 @@ router.post(
|
||||
router.post(
|
||||
'/email/password-reset',
|
||||
passwordLimiter,
|
||||
body('email').exists().trim().notEmpty(),
|
||||
body('email').exists().isString().trim().notEmpty().isEmail(),
|
||||
validateRequest,
|
||||
passwordController.emailPasswordReset
|
||||
);
|
||||
@ -42,8 +45,8 @@ router.post(
|
||||
router.post(
|
||||
'/email/password-reset-verify',
|
||||
passwordLimiter,
|
||||
body('email').exists().trim().notEmpty().isEmail(),
|
||||
body('code').exists().trim().notEmpty(),
|
||||
body('email').exists().isString().trim().notEmpty().isEmail(),
|
||||
body('code').exists().isString().trim().notEmpty(),
|
||||
validateRequest,
|
||||
passwordController.emailPasswordResetVerify
|
||||
);
|
||||
@ -61,12 +64,12 @@ router.post(
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt']
|
||||
}),
|
||||
body('clientProof').exists().trim().notEmpty(),
|
||||
body('encryptedPrivateKey').exists().trim().notEmpty(), // (backup) private key encrypted under a strong key
|
||||
body('iv').exists().trim().notEmpty(), // new iv for (backup) private key
|
||||
body('tag').exists().trim().notEmpty(), // new tag for (backup) private key
|
||||
body('salt').exists().trim().notEmpty(), // salt generated from strong key
|
||||
body('verifier').exists().trim().notEmpty(), // salt generated from strong key
|
||||
body('clientProof').exists().isString().trim().notEmpty(),
|
||||
body('encryptedPrivateKey').exists().isString().trim().notEmpty(), // (backup) private key encrypted under a strong key
|
||||
body('iv').exists().isString().trim().notEmpty(), // new iv for (backup) private key
|
||||
body('tag').exists().isString().trim().notEmpty(), // new tag for (backup) private key
|
||||
body('salt').exists().isString().trim().notEmpty(), // salt generated from strong key
|
||||
body('verifier').exists().isString().trim().notEmpty(), // salt generated from strong key
|
||||
validateRequest,
|
||||
passwordController.createBackupPrivateKey
|
||||
);
|
||||
@ -74,11 +77,14 @@ router.post(
|
||||
router.post(
|
||||
'/password-reset',
|
||||
requireSignupAuth,
|
||||
body('encryptedPrivateKey').exists().trim().notEmpty(), // private key encrypted under new pwd
|
||||
body('iv').exists().trim().notEmpty(), // new iv for private key
|
||||
body('tag').exists().trim().notEmpty(), // new tag for private key
|
||||
body('salt').exists().trim().notEmpty(), // part of new pwd
|
||||
body('verifier').exists().trim().notEmpty(), // part of new pwd
|
||||
body('protectedKey').exists().isString().trim().notEmpty(),
|
||||
body('protectedKeyIV').exists().isString().trim().notEmpty(),
|
||||
body('protectedKeyTag').exists().isString().trim().notEmpty(),
|
||||
body('encryptedPrivateKey').exists().isString().trim().notEmpty(), // private key encrypted under new pwd
|
||||
body('encryptedPrivateKeyIV').exists().isString().trim().notEmpty(), // new iv for private key
|
||||
body('encryptedPrivateKeyTag').exists().isString().trim().notEmpty(), // new tag for private key
|
||||
body('salt').exists().isString().trim().notEmpty(), // part of new pwd
|
||||
body('verifier').exists().isString().trim().notEmpty(), // part of new pwd
|
||||
validateRequest,
|
||||
passwordController.resetPassword
|
||||
);
|
||||
|
@ -1,7 +1,7 @@
|
||||
import express from 'express';
|
||||
const router = express.Router();
|
||||
import { body } from 'express-validator';
|
||||
import { requireSignupAuth, validateRequest } from '../../middleware';
|
||||
import { validateRequest } from '../../middleware';
|
||||
import { signupController } from '../../controllers/v1';
|
||||
import { authLimiter } from '../../helpers/rateLimiter';
|
||||
|
||||
@ -22,39 +22,4 @@ router.post(
|
||||
signupController.verifyEmailSignup
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/complete-account/signup',
|
||||
authLimiter,
|
||||
requireSignupAuth,
|
||||
body('email').exists().trim().notEmpty().isEmail(),
|
||||
body('firstName').exists().trim().notEmpty(),
|
||||
body('lastName').exists().trim().notEmpty(),
|
||||
body('publicKey').exists().trim().notEmpty(),
|
||||
body('encryptedPrivateKey').exists().trim().notEmpty(),
|
||||
body('iv').exists().trim().notEmpty(),
|
||||
body('tag').exists().trim().notEmpty(),
|
||||
body('salt').exists().trim().notEmpty(),
|
||||
body('verifier').exists().trim().notEmpty(),
|
||||
body('organizationName').exists().trim().notEmpty(),
|
||||
validateRequest,
|
||||
signupController.completeAccountSignup
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/complete-account/invite',
|
||||
authLimiter,
|
||||
requireSignupAuth,
|
||||
body('email').exists().trim().notEmpty().isEmail(),
|
||||
body('firstName').exists().trim().notEmpty(),
|
||||
body('lastName').exists().trim().notEmpty(),
|
||||
body('publicKey').exists().trim().notEmpty(),
|
||||
body('encryptedPrivateKey').exists().trim().notEmpty(),
|
||||
body('iv').exists().trim().notEmpty(),
|
||||
body('tag').exists().trim().notEmpty(),
|
||||
body('salt').exists().trim().notEmpty(),
|
||||
body('verifier').exists().trim().notEmpty(),
|
||||
validateRequest,
|
||||
signupController.completeAccountInvite
|
||||
);
|
||||
|
||||
export default router;
|
||||
|
backend/src/routes/v2/auth.ts (new file, 44 lines)
@@ -0,0 +1,44 @@
import express from 'express';
const router = express.Router();
import { body } from 'express-validator';
import { requireMfaAuth, validateRequest } from '../../middleware';
import { authController } from '../../controllers/v2';
import { authLimiter } from '../../helpers/rateLimiter';

router.post(
  '/login1',
  authLimiter,
  body('email').isString().trim().notEmpty(),
  body('clientPublicKey').isString().trim().notEmpty(),
  validateRequest,
  authController.login1
);

router.post(
  '/login2',
  authLimiter,
  body('email').isString().trim().notEmpty(),
  body('clientProof').isString().trim().notEmpty(),
  validateRequest,
  authController.login2
);

router.post(
  '/mfa/send',
  authLimiter,
  body('email').isString().trim().notEmpty(),
  validateRequest,
  authController.sendMfaToken
);

router.post(
  '/mfa/verify',
  authLimiter,
  requireMfaAuth,
  body('email').isString().trim().notEmpty(),
  body('mfaToken').isString().trim().notEmpty(),
  validateRequest,
  authController.verifyMfaToken
);

export default router;
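The file above adds the v2 auth router that backs the new SRP login and email MFA endpoints used by the CLI changes later in this diff. A minimal sketch of how such a router could be mounted on the Express app; the `/api/v2/auth` prefix and the `app.ts` wiring are assumptions for illustration, not taken from this diff.

```ts
// sketch: mounting the v2 auth router (prefix and file location assumed)
import express from 'express';
import { auth as authV2 } from './routes/v2';

const app = express();
app.use(express.json());
app.use('/api/v2/auth', authV2);

app.listen(4000);
```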
@ -1,3 +1,5 @@
|
||||
import auth from './auth';
|
||||
import signup from './signup';
|
||||
import users from './users';
|
||||
import organizations from './organizations';
|
||||
import workspace from './workspace';
|
||||
@ -9,6 +11,8 @@ import environment from "./environment"
|
||||
import tags from "./tags"
|
||||
|
||||
export {
|
||||
auth,
|
||||
signup,
|
||||
users,
|
||||
organizations,
|
||||
workspace,
|
||||
|
@@ -6,14 +6,52 @@ import {
  requireSecretsAuth,
  validateRequest
} from '../../middleware';
import { query, check, body } from 'express-validator';
import { query, body } from 'express-validator';
import { secretsController } from '../../controllers/v2';
import { validateSecrets } from '../../helpers/secret';
import {
  ADMIN,
  MEMBER,
  SECRET_PERSONAL,
  SECRET_SHARED
} from '../../variables';
import {
  BatchSecretRequest
} from '../../types/secret';

router.post(
  '/batch',
  requireAuth({
    acceptedAuthModes: ['jwt', 'apiKey']
  }),
  requireWorkspaceAuth({
    acceptedRoles: [ADMIN, MEMBER],
    location: 'body'
  }),
  body('workspaceId').exists().isString().trim(),
  body('environment').exists().isString().trim(),
  body('requests')
    .exists()
    .custom(async (requests: BatchSecretRequest[], { req }) => {
      if (Array.isArray(requests)) {
        const secretIds = requests
          .map((request) => request.secret._id)
          .filter((secretId) => secretId !== undefined)

        if (secretIds.length > 0) {
          const relevantSecrets = await validateSecrets({
            userId: req.user._id.toString(),
            secretIds
          });

          req.secrets = relevantSecrets;
        }
      }
      return true;
    }),
  validateRequest,
  secretsController.batchSecrets
);

router.post(
  '/',
@@ -74,6 +112,7 @@ router.get(
  '/',
  query('workspaceId').exists().trim(),
  query('environment').exists().trim(),
  query('tagSlugs'),
  validateRequest,
  requireAuth({
    acceptedAuthModes: ['jwt', 'apiKey', 'serviceToken']
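The new `/batch` route above validates `workspaceId`, `environment`, and a `requests` array, and pre-loads any referenced secrets onto `req.secrets` before the controller runs. A hedged sketch of calling it from a script; the base URL, auth header scheme, and path prefix are assumptions.

```ts
// sketch: submit a batch of secret operations (values are placeholders)
async function batchSecrets(token: string) {
  const res = await fetch('https://app.infisical.com/api/v2/secrets/batch', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${token}` // jwt or apiKey are accepted per requireAuth above; header scheme assumed
    },
    body: JSON.stringify({
      workspaceId: '<workspace-id>',
      environment: 'dev',
      requests: [] // BatchSecretRequest[]; see backend/src/types/secret below
    })
  });
  if (!res.ok) throw new Error(`batch request failed: ${res.status}`);
  return res.json();
}
```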
backend/src/routes/v2/signup.ts (new file, 49 lines)
@ -0,0 +1,49 @@
|
||||
import express from 'express';
|
||||
const router = express.Router();
|
||||
import { body } from 'express-validator';
|
||||
import { requireSignupAuth, validateRequest } from '../../middleware';
|
||||
import { signupController } from '../../controllers/v2';
|
||||
import { authLimiter } from '../../helpers/rateLimiter';
|
||||
|
||||
router.post(
|
||||
'/complete-account/signup',
|
||||
authLimiter,
|
||||
requireSignupAuth,
|
||||
body('email').exists().isString().trim().notEmpty().isEmail(),
|
||||
body('firstName').exists().isString().trim().notEmpty(),
|
||||
body('lastName').exists().isString().trim().notEmpty(),
|
||||
body('protectedKey').exists().isString().trim().notEmpty(),
|
||||
body('protectedKeyIV').exists().isString().trim().notEmpty(),
|
||||
body('protectedKeyTag').exists().isString().trim().notEmpty(),
|
||||
body('publicKey').exists().isString().trim().notEmpty(),
|
||||
body('encryptedPrivateKey').exists().isString().trim().notEmpty(),
|
||||
body('encryptedPrivateKeyIV').exists().isString().trim().notEmpty(),
|
||||
body('encryptedPrivateKeyTag').exists().isString().trim().notEmpty(),
|
||||
body('salt').exists().isString().trim().notEmpty(),
|
||||
body('verifier').exists().isString().trim().notEmpty(),
|
||||
body('organizationName').exists().isString().trim().notEmpty(),
|
||||
validateRequest,
|
||||
signupController.completeAccountSignup
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/complete-account/invite',
|
||||
authLimiter,
|
||||
requireSignupAuth,
|
||||
body('email').exists().isString().trim().notEmpty().isEmail(),
|
||||
body('firstName').exists().isString().trim().notEmpty(),
|
||||
body('lastName').exists().isString().trim().notEmpty(),
|
||||
body('protectedKey').exists().isString().trim().notEmpty(),
|
||||
body('protectedKeyIV').exists().isString().trim().notEmpty(),
|
||||
body('protectedKeyTag').exists().isString().trim().notEmpty(),
|
||||
body('publicKey').exists().trim().notEmpty(),
|
||||
body('encryptedPrivateKey').exists().isString().trim().notEmpty(),
|
||||
body('encryptedPrivateKeyIV').exists().isString().trim().notEmpty(),
|
||||
body('encryptedPrivateKeyTag').exists().isString().trim().notEmpty(),
|
||||
body('salt').exists().isString().trim().notEmpty(),
|
||||
body('verifier').exists().isString().trim().notEmpty(),
|
||||
validateRequest,
|
||||
signupController.completeAccountInvite
|
||||
);
|
||||
|
||||
export default router;
|
@@ -1,8 +1,10 @@
import express from 'express';
const router = express.Router();
import {
  requireAuth
  requireAuth,
  validateRequest
} from '../../middleware';
import { body } from 'express-validator';
import { usersController } from '../../controllers/v2';

router.get(
@@ -13,6 +15,16 @@ router.get(
  usersController.getMe
);

router.patch(
  '/me/mfa',
  requireAuth({
    acceptedAuthModes: ['jwt', 'apiKey']
  }),
  body('isMfaEnabled').exists().isBoolean(),
  validateRequest,
  usersController.updateMyMfaEnabled
);

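The PATCH `/me/mfa` route above lets an authenticated user toggle email MFA with a single boolean. A hedged client-side sketch; the base URL and bearer-token header are assumptions.

```ts
// sketch: toggle MFA for the current user (URL and auth scheme assumed)
async function setMfaEnabled(token: string, isMfaEnabled: boolean) {
  const res = await fetch('https://app.infisical.com/api/v2/users/me/mfa', {
    method: 'PATCH',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${token}`
    },
    body: JSON.stringify({ isMfaEnabled })
  });
  if (!res.ok) throw new Error(`failed to update MFA setting: ${res.status}`);
  return res.json();
}
```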
router.get(
  '/me/organizations',
  requireAuth({
@ -118,4 +118,18 @@ router.delete( // TODO - rewire dashboard to this route
|
||||
workspaceController.deleteWorkspaceMembership
|
||||
);
|
||||
|
||||
router.patch(
|
||||
'/:workspaceId/auto-capitalization',
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt']
|
||||
}),
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER]
|
||||
}),
|
||||
param('workspaceId').exists().trim(),
|
||||
body('autoCapitalization').exists().trim().notEmpty(),
|
||||
validateRequest,
|
||||
workspaceController.toggleAutoCapitalization
|
||||
);
|
||||
|
||||
export default router;
|
||||
|
@ -1,5 +1,3 @@
|
||||
import { Bot, IBot } from '../models';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { handleEventHelper } from '../helpers/event';
|
||||
|
||||
interface Event {
|
||||
|
@@ -112,26 +112,30 @@ class IntegrationService {
  }

  /**
   * Encrypt access token [accessToken] using the bot's copy
   * of the workspace key for workspace belonging to integration auth
   * Encrypt access token [accessToken] and (optionally) access id using the
   * bot's copy of the workspace key for workspace belonging to integration auth
   * with id [integrationAuthId]
   * @param {Object} obj
   * @param {String} obj.integrationAuthId - id of integration auth
   * @param {String} obj.accessId - access id
   * @param {String} obj.accessToken - access token
   * @param {Date} obj.accessExpiresAt - expiration date of access token
   * @returns {IntegrationAuth} - updated integration auth
   */
  static async setIntegrationAuthAccess({
    integrationAuthId,
    accessId,
    accessToken,
    accessExpiresAt
  }: {
    integrationAuthId: string;
    accessId: string | null;
    accessToken: string;
    accessExpiresAt: Date | undefined;
  }) {
    return await setIntegrationAuthAccessHelper({
      integrationAuthId,
      accessId,
      accessToken,
      accessExpiresAt
    });
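`setIntegrationAuthAccess` now takes an optional `accessId` alongside the access token, presumably for integrations that authenticate with an id/secret pair, such as the AWS ones added elsewhere in this diff. A hedged sketch of a caller; the surrounding controller code and import path are illustrative.

```ts
// sketch: persist credentials returned by an integration's token exchange
import { IntegrationService } from '../services';

async function saveIntegrationCredentials(integrationAuthId: string, accessToken: string) {
  return await IntegrationService.setIntegrationAuthAccess({
    integrationAuthId,
    accessId: null,              // e.g. an access key id for id/secret integrations; null otherwise
    accessToken,
    accessExpiresAt: undefined   // set when the provider reports an expiry
  });
}
```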
backend/src/services/TokenService.ts (new file, 69 lines)
@@ -0,0 +1,69 @@
import { Types } from 'mongoose';
import { createTokenHelper, validateTokenHelper } from '../helpers/token';

/**
 * Class to handle token actions
 * TODO: elaborate more on this class
 */
class TokenService {
  /**
   * Create a token [token] for type [type] with associated details
   * @param {Object} obj
   * @param {String} obj.type - type or context of token (e.g. emailConfirmation)
   * @param {String} obj.email - email associated with the token
   * @param {String} obj.phoneNumber - phone number associated with the token
   * @param {Types.ObjectId} obj.organizationId - id of organization associated with the token
   * @returns {String} token - the token to create
   */
  static async createToken({
    type,
    email,
    phoneNumber,
    organizationId
  }: {
    type: 'emailConfirmation' | 'emailMfa' | 'organizationInvitation' | 'passwordReset';
    email?: string;
    phoneNumber?: string;
    organizationId?: Types.ObjectId;
  }) {
    return await createTokenHelper({
      type,
      email,
      phoneNumber,
      organizationId
    });
  }

  /**
   * Validate whether or not token [token] and its associated details match a token in the DB
   * @param {Object} obj
   * @param {String} obj.type - type or context of token (e.g. emailConfirmation)
   * @param {String} obj.email - email associated with the token
   * @param {String} obj.phoneNumber - phone number associated with the token
   * @param {Types.ObjectId} obj.organizationId - id of organization associated with the token
   * @param {String} obj.token - the token to validate
   */
  static async validateToken({
    type,
    email,
    phoneNumber,
    organizationId,
    token
  }: {
    type: 'emailConfirmation' | 'emailMfa' | 'organizationInvitation' | 'passwordReset';
    email?: string;
    phoneNumber?: string;
    organizationId?: Types.ObjectId;
    token: string;
  }) {
    return await validateTokenHelper({
      type,
      email,
      phoneNumber,
      organizationId,
      token
    });
  }
}

export default TokenService;
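TokenService is a thin static wrapper over the token helpers and is what the new email-MFA and password-reset flows lean on. A hedged usage sketch; delivery of the code and the helpers' failure behaviour are not shown in this diff.

```ts
// sketch: issue and later check an email MFA code via TokenService
import TokenService from './services/TokenService';

async function sendMfaCode(email: string) {
  const code = await TokenService.createToken({ type: 'emailMfa', email });
  // deliver `code` to the user, e.g. with the emailMfa.handlebars template added below
  return code;
}

async function checkMfaCode(email: string, token: string) {
  // validateTokenHelper decides what happens on a mismatch or expiry (not shown in this diff)
  await TokenService.validateToken({ type: 'emailMfa', email, token });
}
```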
@ -3,11 +3,13 @@ import postHogClient from './PostHogClient';
|
||||
import BotService from './BotService';
|
||||
import EventService from './EventService';
|
||||
import IntegrationService from './IntegrationService';
|
||||
import TokenService from './TokenService';
|
||||
|
||||
export {
|
||||
DatabaseService,
|
||||
postHogClient,
|
||||
BotService,
|
||||
EventService,
|
||||
IntegrationService
|
||||
IntegrationService,
|
||||
TokenService
|
||||
}
|
@@ -1,6 +1,10 @@
import nodemailer from 'nodemailer';
import { SMTP_HOST, SMTP_PORT, SMTP_USERNAME, SMTP_PASSWORD, SMTP_SECURE } from '../config';
import { SMTP_HOST_SENDGRID, SMTP_HOST_MAILGUN } from '../variables';
import {
  SMTP_HOST_SENDGRID,
  SMTP_HOST_MAILGUN,
  SMTP_HOST_SOCKETLABS
} from '../variables';
import SMTPConnection from 'nodemailer/lib/smtp-connection';
import * as Sentry from '@sentry/node';

@@ -27,6 +31,12 @@ if (SMTP_SECURE) {
      ciphers: 'TLSv1.2'
    }
    break;
  case SMTP_HOST_SOCKETLABS:
    mailOpts.requireTLS = true;
    mailOpts.tls = {
      ciphers: 'TLSv1.2'
    }
    break;
  default:
    if (SMTP_HOST.includes('amazonaws.com')) {
      mailOpts.tls = {
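The switch above only adjusts TLS options per provider; the transport itself comes from the same SMTP config values. A minimal sketch of what the new SocketLabs branch amounts to in a standalone nodemailer setup; the port, the secure flag, and the credentials are placeholders read from env here rather than from the service's config module.

```ts
import nodemailer from 'nodemailer';

// sketch: a transport equivalent to the SocketLabs branch above
const transporter = nodemailer.createTransport({
  host: 'smtp.socketlabs.com',
  port: Number(process.env.SMTP_PORT ?? 587),
  secure: false,                 // placeholder; the service derives this from SMTP_SECURE
  requireTLS: true,
  tls: { ciphers: 'TLSv1.2' },
  auth: {
    user: process.env.SMTP_USERNAME,
    pass: process.env.SMTP_PASSWORD
  }
});

// usage: await transporter.sendMail({ from, to, subject, html });
```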
backend/src/templates/emailMfa.handlebars (new file, 19 lines)
@ -0,0 +1,19 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="x-ua-compatible" content="ie=edge">
|
||||
<title>MFA Code</title>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<h2>Infisical</h2>
|
||||
<h2>Sign in attempt requires further verification</h2>
|
||||
<p>Your MFA code is below — enter it where you started signing in to Infisical.</p>
|
||||
<h2>{{code}}</h2>
|
||||
<p>The MFA code will be valid for 2 minutes.</p>
|
||||
<p>Not you? Contact Infisical or your administrator immediately.</p>
|
||||
</body>
|
||||
|
||||
</html>
|
@ -1,15 +1,17 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="x-ua-compatible" content="ie=edge">
|
||||
<title>Email Verification</title>
|
||||
<title>Code</title>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<h2>Infisical</h2>
|
||||
<h2>Confirm your email address</h2>
|
||||
<p>Your confirmation code is below — enter it in the browser window where you've started signing up for Infisical.</p>
|
||||
<h2>{{code}}</h2>
|
||||
<h1>{{code}}</h1>
|
||||
<p>Questions about setting up Infisical? Email us at support@infisical.com</p>
|
||||
</body>
|
||||
|
||||
</html>
|
backend/src/templates/newDevice.handlebars (new file, 19 lines)
@ -0,0 +1,19 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="x-ua-compatible" content="ie=edge">
|
||||
<title>Successful login for {{email}} from new device</title>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<h2>Infisical</h2>
|
||||
<p>We're verifying a recent login for {{email}}:</p>
|
||||
<p><strong>Timestamp</strong>: {{timestamp}}</p>
|
||||
<p><strong>IP address</strong>: {{ip}}</p>
|
||||
<p><strong>User agent</strong>: {{userAgent}}</p>
|
||||
<p>If you believe that this login is suspicious, please contact Infisical or reset your password immediately.</p>
|
||||
</body>
|
||||
|
||||
</html>
|
@ -7,12 +7,10 @@
|
||||
<title>Organization Invitation</title>
|
||||
</head>
|
||||
<body>
|
||||
<h2>Infisical</h2>
|
||||
<h2>Join your team on Infisical</h2>
|
||||
<p>{{inviterFirstName}}({{inviterEmail}}) has invited you to their Infisical organization — {{organizationName}}</p>
|
||||
<h2>Join your organization on Infisical</h2>
|
||||
<p>{{inviterFirstName}} ({{inviterEmail}}) has invited you to their Infisical organization — {{organizationName}}</p>
|
||||
<a href="{{callback_url}}?token={{token}}&to={{email}}">Join now</a>
|
||||
<h3>What is Infisical?</h3>
|
||||
<p>Infisical is a simple end-to-end encrypted solution that enables teams to sync and manage their environment
|
||||
variables.</p>
|
||||
<p>Infisical is an easy-to-use end-to-end encrypted tool that enables developers to sync and manage their secrets and configs.</p>
|
||||
</body>
|
||||
</html>
|
@ -6,7 +6,6 @@
|
||||
<title>Account Recovery</title>
|
||||
</head>
|
||||
<body>
|
||||
<h2>Infisical</h2>
|
||||
<h2>Reset your password</h2>
|
||||
<p>Someone requested a password reset.</p>
|
||||
<a href="{{callback_url}}?token={{token}}&to={{email}}">Reset password</a>
|
||||
|
@ -6,11 +6,10 @@
|
||||
<title>Project Invitation</title>
|
||||
</head>
|
||||
<body>
|
||||
<h2>Infisical</h2>
|
||||
<h2>Join your team on Infisical</h2>
|
||||
<p>{{inviterFirstName}}({{inviterEmail}}) has invited you to their Infisical workspace — {{workspaceName}}</p>
|
||||
<p>{{inviterFirstName}} ({{inviterEmail}}) has invited you to their Infisical project — {{workspaceName}}</p>
|
||||
<a href="{{callback_url}}">Join now</a>
|
||||
<h3>What is Infisical?</h3>
|
||||
<p>Infisical is a simple end-to-end encrypted solution that enables teams to sync and manage their environment variables.</p>
|
||||
<p>Infisical is an easy-to-use end-to-end encrypted tool that enables developers to sync and manage their secrets and configs.</p>
|
||||
</body>
|
||||
</html>
|
backend/src/types/secret/index.d.ts (vendored, 38 lines changed)
@@ -1,5 +1,7 @@
import { Types } from 'mongoose';
import { Assign, Omit } from 'utility-types';
import { ISecret } from '../../models';
import { mongo } from 'mongoose';

// Everything is required, except the omitted types
export type CreateSecretRequestBody = Omit<ISecret, "user" | "version" | "environment" | "workspace">;
@@ -12,3 +14,39 @@ export type SanitizedSecretModify = Partial<Omit<ISecret, "user" | "version" | "

// Everything is required, except the omitted types
export type SanitizedSecretForCreate = Omit<ISecret, "version" | "_id">;

export interface BatchSecretRequest {
  id: string;
  method: 'POST' | 'PATCH' | 'DELETE';
  secret: Secret;
}

export interface BatchSecret {
  _id: string;
  type: 'shared' | 'personal',
  secretKeyCiphertext: string;
  secretKeyIV: string;
  secretKeyTag: string;
  secretValueCiphertext: string;
  secretValueIV: string;
  secretValueTag: string;
  secretCommentCiphertext: string;
  secretCommentIV: string;
  secretCommentTag: string;
  tags: string[];
}

export interface BatchSecret {
  _id: string;
  type: 'shared' | 'personal',
  secretKeyCiphertext: string;
  secretKeyIV: string;
  secretKeyTag: string;
  secretValueCiphertext: string;
  secretValueIV: string;
  secretValueTag: string;
  secretCommentCiphertext: string;
  secretCommentIV: string;
  secretCommentTag: string;
  tags: string[];
}
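For reference, a hedged example of one element of the `requests` array accepted by the `/batch` route above, shaped after `BatchSecretRequest`. Ciphertext values are placeholders, and the nested `secret` is typed loosely because the `Secret` type it references is not part of this diff.

```ts
import { BatchSecretRequest } from './types/secret';

// sketch: a single create operation inside a batch request
const createOp: BatchSecretRequest = {
  id: 'client-generated-id',
  method: 'POST',
  secret: {
    type: 'shared',
    secretKeyCiphertext: '<ciphertext>',
    secretKeyIV: '<iv>',
    secretKeyTag: '<tag>',
    secretValueCiphertext: '<ciphertext>',
    secretValueIV: '<iv>',
    secretValueTag: '<tag>',
    secretCommentCiphertext: '',
    secretCommentIV: '',
    secretCommentTag: '',
    tags: []
  } as any // `secret` is declared as `Secret`, which is defined outside this diff
};
```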
@ -3,16 +3,20 @@ import {
|
||||
ENV_TESTING,
|
||||
ENV_STAGING,
|
||||
ENV_PROD,
|
||||
ENV_SET
|
||||
} from './environment';
|
||||
ENV_SET,
|
||||
} from "./environment";
|
||||
import {
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_TRAVISCI,
|
||||
INTEGRATION_SET,
|
||||
INTEGRATION_OAUTH2,
|
||||
INTEGRATION_AZURE_TOKEN_URL,
|
||||
@ -25,17 +29,13 @@ import {
|
||||
INTEGRATION_NETLIFY_API_URL,
|
||||
INTEGRATION_RENDER_API_URL,
|
||||
INTEGRATION_FLYIO_API_URL,
|
||||
INTEGRATION_OPTIONS
|
||||
} from './integration';
|
||||
import {
|
||||
OWNER,
|
||||
ADMIN,
|
||||
MEMBER,
|
||||
INVITED,
|
||||
ACCEPTED,
|
||||
} from './organization';
|
||||
import { SECRET_SHARED, SECRET_PERSONAL } from './secret';
|
||||
import { EVENT_PUSH_SECRETS, EVENT_PULL_SECRETS } from './event';
|
||||
INTEGRATION_CIRCLECI_API_URL,
|
||||
INTEGRATION_TRAVISCI_API_URL,
|
||||
INTEGRATION_OPTIONS,
|
||||
} from "./integration";
|
||||
import { OWNER, ADMIN, MEMBER, INVITED, ACCEPTED } from "./organization";
|
||||
import { SECRET_SHARED, SECRET_PERSONAL } from "./secret";
|
||||
import { EVENT_PUSH_SECRETS, EVENT_PULL_SECRETS } from "./event";
|
||||
import {
|
||||
ACTION_LOGIN,
|
||||
ACTION_LOGOUT,
|
||||
@ -44,8 +44,21 @@ import {
|
||||
ACTION_DELETE_SECRETS,
|
||||
ACTION_READ_SECRETS
|
||||
} from './action';
|
||||
import { SMTP_HOST_SENDGRID, SMTP_HOST_MAILGUN } from './smtp';
|
||||
import {
|
||||
SMTP_HOST_SENDGRID,
|
||||
SMTP_HOST_MAILGUN,
|
||||
SMTP_HOST_SOCKETLABS
|
||||
} from './smtp';
|
||||
import { PLAN_STARTER, PLAN_PRO } from './stripe';
|
||||
import {
|
||||
MFA_METHOD_EMAIL
|
||||
} from './user';
|
||||
import {
|
||||
TOKEN_EMAIL_CONFIRMATION,
|
||||
TOKEN_EMAIL_MFA,
|
||||
TOKEN_EMAIL_ORG_INVITATION,
|
||||
TOKEN_EMAIL_PASSWORD_RESET
|
||||
} from './token';
|
||||
|
||||
export {
|
||||
OWNER,
|
||||
@ -61,12 +74,16 @@ export {
|
||||
ENV_PROD,
|
||||
ENV_SET,
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_TRAVISCI,
|
||||
INTEGRATION_SET,
|
||||
INTEGRATION_OAUTH2,
|
||||
INTEGRATION_AZURE_TOKEN_URL,
|
||||
@ -79,6 +96,8 @@ export {
|
||||
INTEGRATION_NETLIFY_API_URL,
|
||||
INTEGRATION_RENDER_API_URL,
|
||||
INTEGRATION_FLYIO_API_URL,
|
||||
INTEGRATION_CIRCLECI_API_URL,
|
||||
INTEGRATION_TRAVISCI_API_URL,
|
||||
EVENT_PUSH_SECRETS,
|
||||
EVENT_PULL_SECRETS,
|
||||
ACTION_LOGIN,
|
||||
@ -90,6 +109,12 @@ export {
|
||||
INTEGRATION_OPTIONS,
|
||||
SMTP_HOST_SENDGRID,
|
||||
SMTP_HOST_MAILGUN,
|
||||
SMTP_HOST_SOCKETLABS,
|
||||
PLAN_STARTER,
|
||||
PLAN_PRO,
|
||||
MFA_METHOD_EMAIL,
|
||||
TOKEN_EMAIL_CONFIRMATION,
|
||||
TOKEN_EMAIL_MFA,
|
||||
TOKEN_EMAIL_ORG_INVITATION,
|
||||
TOKEN_EMAIL_PASSWORD_RESET
|
||||
};
|
||||
|
@ -3,60 +3,58 @@ import {
|
||||
TENANT_ID_AZURE
|
||||
} from '../config';
|
||||
import {
|
||||
CLIENT_ID_HEROKU,
|
||||
CLIENT_ID_NETLIFY,
|
||||
CLIENT_ID_GITHUB,
|
||||
CLIENT_SLUG_VERCEL
|
||||
} from '../config';
|
||||
CLIENT_ID_HEROKU,
|
||||
CLIENT_ID_NETLIFY,
|
||||
CLIENT_ID_GITHUB,
|
||||
CLIENT_SLUG_VERCEL,
|
||||
} from "../config";
|
||||
|
||||
// integrations
|
||||
const INTEGRATION_AZURE_KEY_VAULT = 'azure-key-vault';
|
||||
const INTEGRATION_HEROKU = 'heroku';
|
||||
const INTEGRATION_VERCEL = 'vercel';
|
||||
const INTEGRATION_NETLIFY = 'netlify';
|
||||
const INTEGRATION_GITHUB = 'github';
|
||||
const INTEGRATION_RENDER = 'render';
|
||||
const INTEGRATION_FLYIO = 'flyio';
|
||||
const INTEGRATION_AWS_PARAMETER_STORE = 'aws-parameter-store';
|
||||
const INTEGRATION_AWS_SECRET_MANAGER = 'aws-secret-manager';
|
||||
const INTEGRATION_HEROKU = "heroku";
|
||||
const INTEGRATION_VERCEL = "vercel";
|
||||
const INTEGRATION_NETLIFY = "netlify";
|
||||
const INTEGRATION_GITHUB = "github";
|
||||
const INTEGRATION_RENDER = "render";
|
||||
const INTEGRATION_FLYIO = "flyio";
|
||||
const INTEGRATION_CIRCLECI = "circleci";
|
||||
const INTEGRATION_TRAVISCI = "travisci";
|
||||
const INTEGRATION_SET = new Set([
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_TRAVISCI,
|
||||
]);
|
||||
|
||||
// integration types
|
||||
const INTEGRATION_OAUTH2 = 'oauth2';
|
||||
const INTEGRATION_OAUTH2 = "oauth2";
|
||||
|
||||
// integration oauth endpoints
|
||||
const INTEGRATION_AZURE_TOKEN_URL = `https://login.microsoftonline.com/${TENANT_ID_AZURE}/oauth2/v2.0/token`;
|
||||
const INTEGRATION_HEROKU_TOKEN_URL = 'https://id.heroku.com/oauth/token';
|
||||
const INTEGRATION_VERCEL_TOKEN_URL =
|
||||
'https://api.vercel.com/v2/oauth/access_token';
|
||||
const INTEGRATION_NETLIFY_TOKEN_URL = 'https://api.netlify.com/oauth/token';
|
||||
"https://api.vercel.com/v2/oauth/access_token";
|
||||
const INTEGRATION_NETLIFY_TOKEN_URL = "https://api.netlify.com/oauth/token";
|
||||
const INTEGRATION_GITHUB_TOKEN_URL =
|
||||
'https://github.com/login/oauth/access_token';
|
||||
"https://github.com/login/oauth/access_token";
|
||||
|
||||
// integration apps endpoints
|
||||
const INTEGRATION_HEROKU_API_URL = 'https://api.heroku.com';
|
||||
const INTEGRATION_VERCEL_API_URL = 'https://api.vercel.com';
|
||||
const INTEGRATION_NETLIFY_API_URL = 'https://api.netlify.com';
|
||||
const INTEGRATION_RENDER_API_URL = 'https://api.render.com';
|
||||
const INTEGRATION_FLYIO_API_URL = 'https://api.fly.io/graphql';
|
||||
const INTEGRATION_HEROKU_API_URL = "https://api.heroku.com";
|
||||
const INTEGRATION_VERCEL_API_URL = "https://api.vercel.com";
|
||||
const INTEGRATION_NETLIFY_API_URL = "https://api.netlify.com";
|
||||
const INTEGRATION_RENDER_API_URL = "https://api.render.com";
|
||||
const INTEGRATION_FLYIO_API_URL = "https://api.fly.io/graphql";
|
||||
const INTEGRATION_CIRCLECI_API_URL = "https://circleci.com/api";
|
||||
const INTEGRATION_TRAVISCI_API_URL = "https://api.travis-ci.com";
|
||||
|
||||
const INTEGRATION_OPTIONS = [
|
||||
{
|
||||
name: 'Azure Key Vault',
|
||||
slug: 'azure-key-vault',
|
||||
image: 'Microsoft Azure.png',
|
||||
isAvailable: true,
|
||||
type: 'oauth',
|
||||
clientId: CLIENT_ID_AZURE,
|
||||
tenantId: TENANT_ID_AZURE,
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Heroku',
|
||||
slug: 'heroku',
|
||||
@ -113,38 +111,20 @@ const INTEGRATION_OPTIONS = [
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Google Cloud Platform',
|
||||
slug: 'gcp',
|
||||
image: 'Google Cloud Platform.png',
|
||||
isAvailable: false,
|
||||
type: '',
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Amazon Web Services',
|
||||
slug: 'aws',
|
||||
name: 'AWS Parameter Store',
|
||||
slug: 'aws-parameter-store',
|
||||
image: 'Amazon Web Services.png',
|
||||
isAvailable: false,
|
||||
type: '',
|
||||
isAvailable: true,
|
||||
type: 'custom',
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Microsoft Azure',
|
||||
slug: 'azure',
|
||||
image: 'Microsoft Azure.png',
|
||||
isAvailable: false,
|
||||
type: '',
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Travis CI',
|
||||
slug: 'travisci',
|
||||
image: 'Travis CI.png',
|
||||
isAvailable: false,
|
||||
type: '',
|
||||
name: 'AWS Secret Manager',
|
||||
slug: 'aws-secret-manager',
|
||||
image: 'Amazon Web Services.png',
|
||||
isAvailable: true,
|
||||
type: 'custom',
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
},
|
||||
@ -152,6 +132,34 @@ const INTEGRATION_OPTIONS = [
|
||||
name: 'Circle CI',
|
||||
slug: 'circleci',
|
||||
image: 'Circle CI.png',
|
||||
isAvailable: true,
|
||||
type: 'pat',
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Travis CI',
|
||||
slug: 'travisci',
|
||||
image: 'Travis CI.png',
|
||||
isAvailable: true,
|
||||
type: 'pat',
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Azure Key Vault',
|
||||
slug: 'azure-key-vault',
|
||||
image: 'Microsoft Azure.png',
|
||||
isAvailable: false,
|
||||
type: 'oauth',
|
||||
clientId: CLIENT_ID_AZURE,
|
||||
tenantId: TENANT_ID_AZURE,
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Google Cloud Platform',
|
||||
slug: 'gcp',
|
||||
image: 'Google Cloud Platform.png',
|
||||
isAvailable: false,
|
||||
type: '',
|
||||
clientId: '',
|
||||
@ -161,23 +169,29 @@ const INTEGRATION_OPTIONS = [
|
||||
|
||||
export {
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_SET,
|
||||
INTEGRATION_OAUTH2,
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_TRAVISCI,
|
||||
INTEGRATION_SET,
|
||||
INTEGRATION_OAUTH2,
|
||||
INTEGRATION_AZURE_TOKEN_URL,
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
INTEGRATION_VERCEL_TOKEN_URL,
|
||||
INTEGRATION_NETLIFY_TOKEN_URL,
|
||||
INTEGRATION_GITHUB_TOKEN_URL,
|
||||
INTEGRATION_HEROKU_API_URL,
|
||||
INTEGRATION_VERCEL_API_URL,
|
||||
INTEGRATION_NETLIFY_API_URL,
|
||||
INTEGRATION_RENDER_API_URL,
|
||||
INTEGRATION_FLYIO_API_URL,
|
||||
INTEGRATION_OPTIONS
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
INTEGRATION_VERCEL_TOKEN_URL,
|
||||
INTEGRATION_NETLIFY_TOKEN_URL,
|
||||
INTEGRATION_GITHUB_TOKEN_URL,
|
||||
INTEGRATION_HEROKU_API_URL,
|
||||
INTEGRATION_VERCEL_API_URL,
|
||||
INTEGRATION_NETLIFY_API_URL,
|
||||
INTEGRATION_RENDER_API_URL,
|
||||
INTEGRATION_FLYIO_API_URL,
|
||||
INTEGRATION_CIRCLECI_API_URL,
|
||||
INTEGRATION_TRAVISCI_API_URL,
|
||||
INTEGRATION_OPTIONS,
|
||||
};
|
||||
|
@ -1,24 +1,16 @@
|
||||
// membership roles
|
||||
const OWNER = 'owner';
|
||||
const ADMIN = 'admin';
|
||||
const MEMBER = 'member';
|
||||
const OWNER = "owner";
|
||||
const ADMIN = "admin";
|
||||
const MEMBER = "member";
|
||||
|
||||
// membership statuses
|
||||
const INVITED = 'invited';
|
||||
const INVITED = "invited";
|
||||
|
||||
// membership permissions ability
|
||||
const ABILITY_READ = 'read';
|
||||
const ABILITY_WRITE = 'write';
|
||||
const ABILITY_READ = "read";
|
||||
const ABILITY_WRITE = "write";
|
||||
|
||||
// -- organization
|
||||
const ACCEPTED = 'accepted';
|
||||
const ACCEPTED = "accepted";
|
||||
|
||||
export {
|
||||
OWNER,
|
||||
ADMIN,
|
||||
MEMBER,
|
||||
INVITED,
|
||||
ACCEPTED,
|
||||
ABILITY_READ,
|
||||
ABILITY_WRITE
|
||||
}
|
||||
export { OWNER, ADMIN, MEMBER, INVITED, ACCEPTED, ABILITY_READ, ABILITY_WRITE };
|
||||
|
@@ -1,7 +1,9 @@
const SMTP_HOST_SENDGRID = 'smtp.sendgrid.net';
const SMTP_HOST_MAILGUN = 'smtp.mailgun.org';
const SMTP_HOST_SOCKETLABS = 'smtp.socketlabs.com';

export {
  SMTP_HOST_SENDGRID,
  SMTP_HOST_MAILGUN
  SMTP_HOST_MAILGUN,
  SMTP_HOST_SOCKETLABS
}
backend/src/variables/token.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
const TOKEN_EMAIL_CONFIRMATION = 'emailConfirmation';
const TOKEN_EMAIL_MFA = 'emailMfa';
const TOKEN_EMAIL_ORG_INVITATION = 'organizationInvitation';
const TOKEN_EMAIL_PASSWORD_RESET = 'passwordReset';

export {
  TOKEN_EMAIL_CONFIRMATION,
  TOKEN_EMAIL_MFA,
  TOKEN_EMAIL_ORG_INVITATION,
  TOKEN_EMAIL_PASSWORD_RESET
}
backend/src/variables/user.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
const MFA_METHOD_EMAIL = 'email';

export {
  MFA_METHOD_EMAIL
}
cli/docker/Dockerfile (new file, 4 lines)
@@ -0,0 +1,4 @@
FROM alpine
RUN apk add --no-cache tini
COPY infisical /bin/infisical
ENTRYPOINT ["/sbin/tini", "--", "/bin/infisical"]
@ -7,8 +7,8 @@ require (
|
||||
github.com/muesli/mango-cobra v1.2.0
|
||||
github.com/muesli/roff v0.1.0
|
||||
github.com/spf13/cobra v1.6.1
|
||||
golang.org/x/crypto v0.3.0
|
||||
golang.org/x/term v0.3.0
|
||||
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d
|
||||
golang.org/x/term v0.5.0
|
||||
)
|
||||
|
||||
require (
|
||||
@ -31,8 +31,8 @@ require (
|
||||
github.com/oklog/ulid v1.3.1 // indirect
|
||||
github.com/rivo/uniseg v0.2.0 // indirect
|
||||
go.mongodb.org/mongo-driver v1.10.0 // indirect
|
||||
golang.org/x/net v0.2.0 // indirect
|
||||
golang.org/x/sys v0.3.0 // indirect
|
||||
golang.org/x/net v0.7.0 // indirect
|
||||
golang.org/x/sys v0.5.0 // indirect
|
||||
)
|
||||
|
||||
require (
|
||||
|
cli/go.sum (16 lines changed)
@ -103,13 +103,13 @@ github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgk
|
||||
github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA=
|
||||
go.mongodb.org/mongo-driver v1.10.0 h1:UtV6N5k14upNp4LTduX0QCufG124fSu25Wz9tu94GLg=
|
||||
go.mongodb.org/mongo-driver v1.10.0/go.mod h1:wsihk0Kdgv8Kqu1Anit4sfK+22vSFbUrAVEYRhCXrA8=
|
||||
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d h1:sK3txAijHtOK88l68nt020reeT1ZdKLIYetKl95FzVY=
|
||||
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||
golang.org/x/crypto v0.3.0 h1:a06MkbcxBrEFc0w0QIZWXrH/9cCX6KJyWbBOIwAn+7A=
|
||||
golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
|
||||
golang.org/x/net v0.0.0-20211029224645-99673261e6eb/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2 h1:CIJ76btIcR3eFI5EgSo6k1qKw9KJexJuRLI9G7Hp5wE=
|
||||
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.2.0 h1:sZfSu1wtKLGlWI4ZZayP0ck9Y73K1ynO6gqzTdBVdPU=
|
||||
golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
|
||||
golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g=
|
||||
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
@ -121,11 +121,11 @@ golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBc
|
||||
golang.org/x/sys v0.0.0-20210819135213-f52c844e1c1c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.3.0 h1:w8ZOecv6NaNa/zC8944JTU3vz4u6Lagfk4RPQxv92NQ=
|
||||
golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.3.0 h1:qoo4akIqOcDME5bhc/NgxUdovd6BSS2uMsVjB56q1xI=
|
||||
golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA=
|
||||
golang.org/x/term v0.5.0 h1:n2a8QNdAb0sZNpU9R1ALUXBbY+w51fCQDN+7EdxNBsY=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
|
@ -114,6 +114,7 @@ func CallGetSecretsV2(httpClient *resty.Client, request GetEncryptedSecretsV2Req
|
||||
SetHeader("User-Agent", USER_AGENT).
|
||||
SetQueryParam("environment", request.Environment).
|
||||
SetQueryParam("workspaceId", request.WorkspaceId).
|
||||
SetQueryParam("tagSlugs", request.TagSlugs).
|
||||
Get(fmt.Sprintf("%v/v2/secrets", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
@ -127,6 +128,68 @@ func CallGetSecretsV2(httpClient *resty.Client, request GetEncryptedSecretsV2Req
|
||||
return secretsResponse, nil
|
||||
}
|
||||
|
||||
func CallLogin1V2(httpClient *resty.Client, request GetLoginOneV2Request) (GetLoginOneV2Response, error) {
|
||||
var loginOneV2Response GetLoginOneV2Response
|
||||
response, err := httpClient.
|
||||
R().
|
||||
SetResult(&loginOneV2Response).
|
||||
SetHeader("User-Agent", USER_AGENT).
|
||||
SetBody(request).
|
||||
Post(fmt.Sprintf("%v/v2/auth/login1", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
return GetLoginOneV2Response{}, fmt.Errorf("CallLogin1V2: Unable to complete api request [err=%s]", err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return GetLoginOneV2Response{}, fmt.Errorf("CallLogin1V2: Unsuccessful response: [response=%s]", response)
|
||||
}
|
||||
|
||||
return loginOneV2Response, nil
|
||||
}
|
||||
|
||||
func CallVerifyMfaToken(httpClient *resty.Client, request VerifyMfaTokenRequest) (*VerifyMfaTokenResponse, *VerifyMfaTokenErrorResponse, error) {
|
||||
var verifyMfaTokenResponse VerifyMfaTokenResponse
|
||||
var responseError VerifyMfaTokenErrorResponse
|
||||
response, err := httpClient.
|
||||
R().
|
||||
SetResult(&verifyMfaTokenResponse).
|
||||
SetHeader("User-Agent", USER_AGENT).
|
||||
SetError(&responseError).
|
||||
SetBody(request).
|
||||
Post(fmt.Sprintf("%v/v2/auth/mfa/verify", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("CallVerifyMfaToken: Unable to complete api request [err=%s]", err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return nil, &responseError, nil
|
||||
}
|
||||
|
||||
return &verifyMfaTokenResponse, nil, nil
|
||||
}
|
||||
|
||||
func CallLogin2V2(httpClient *resty.Client, request GetLoginTwoV2Request) (GetLoginTwoV2Response, error) {
|
||||
var loginTwoV2Response GetLoginTwoV2Response
|
||||
response, err := httpClient.
|
||||
R().
|
||||
SetResult(&loginTwoV2Response).
|
||||
SetHeader("User-Agent", USER_AGENT).
|
||||
SetBody(request).
|
||||
Post(fmt.Sprintf("%v/v2/auth/login2", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
return GetLoginTwoV2Response{}, fmt.Errorf("CallLogin2V2: Unable to complete api request [err=%s]", err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return GetLoginTwoV2Response{}, fmt.Errorf("CallLogin2V2: Unsuccessful response: [response=%s]", response)
|
||||
}
|
||||
|
||||
return loginTwoV2Response, nil
|
||||
}
|
||||
|
||||
func CallGetAllWorkSpacesUserBelongsTo(httpClient *resty.Client) (GetWorkSpacesResponse, error) {
|
||||
var workSpacesResponse GetWorkSpacesResponse
|
||||
response, err := httpClient.
|
||||
@ -154,15 +217,33 @@ func CallIsAuthenticated(httpClient *resty.Client) bool {
|
||||
SetHeader("User-Agent", USER_AGENT).
|
||||
Post(fmt.Sprintf("%v/v1/auth/checkAuth", config.INFISICAL_URL))
|
||||
|
||||
log.Debugln(fmt.Errorf("CallIsAuthenticated: Unsuccessful response: [response=%v]", response))
|
||||
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
log.Debugln(fmt.Errorf("CallIsAuthenticated: Unsuccessful response: [response=%v]", response))
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func CallGetAccessibleEnvironments(httpClient *resty.Client, request GetAccessibleEnvironmentsRequest) (GetAccessibleEnvironmentsResponse, error) {
|
||||
var accessibleEnvironmentsResponse GetAccessibleEnvironmentsResponse
|
||||
response, err := httpClient.
|
||||
R().
|
||||
SetResult(&accessibleEnvironmentsResponse).
|
||||
SetHeader("User-Agent", USER_AGENT).
|
||||
Get(fmt.Sprintf("%v/v2/workspace/%s/environments", config.INFISICAL_URL, request.WorkspaceId))
|
||||
|
||||
if err != nil {
|
||||
return GetAccessibleEnvironmentsResponse{}, err
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return GetAccessibleEnvironmentsResponse{}, fmt.Errorf("CallGetAccessibleEnvironments: Unsuccessful response: [response=%v]", response)
|
||||
}
|
||||
|
||||
return accessibleEnvironmentsResponse, nil
|
||||
}
|
||||
|
@ -197,6 +197,7 @@ type GetSecretsByWorkspaceIdAndEnvironmentRequest struct {
|
||||
type GetEncryptedSecretsV2Request struct {
|
||||
Environment string `json:"environment"`
|
||||
WorkspaceId string `json:"workspaceId"`
|
||||
TagSlugs string `json:"tagSlugs"`
|
||||
}
|
||||
|
||||
type GetEncryptedSecretsV2Response struct {
|
||||
@ -250,3 +251,75 @@ type GetServiceTokenDetailsResponse struct {
|
||||
UpdatedAt time.Time `json:"updatedAt"`
|
||||
V int `json:"__v"`
|
||||
}
|
||||
|
||||
type GetAccessibleEnvironmentsRequest struct {
|
||||
WorkspaceId string `json:"workspaceId"`
|
||||
}
|
||||
|
||||
type GetAccessibleEnvironmentsResponse struct {
|
||||
AccessibleEnvironments []struct {
|
||||
Name string `json:"name"`
|
||||
Slug string `json:"slug"`
|
||||
IsWriteDenied bool `json:"isWriteDenied"`
|
||||
} `json:"accessibleEnvironments"`
|
||||
}
|
||||
|
||||
type GetLoginOneV2Request struct {
|
||||
Email string `json:"email"`
|
||||
ClientPublicKey string `json:"clientPublicKey"`
|
||||
}
|
||||
|
||||
type GetLoginOneV2Response struct {
|
||||
ServerPublicKey string `json:"serverPublicKey"`
|
||||
Salt string `json:"salt"`
|
||||
}
|
||||
|
||||
type GetLoginTwoV2Request struct {
|
||||
Email string `json:"email"`
|
||||
ClientProof string `json:"clientProof"`
|
||||
}
|
||||
|
||||
type GetLoginTwoV2Response struct {
|
||||
MfaEnabled bool `json:"mfaEnabled"`
|
||||
EncryptionVersion int `json:"encryptionVersion"`
|
||||
Token string `json:"token"`
|
||||
PublicKey string `json:"publicKey"`
|
||||
EncryptedPrivateKey string `json:"encryptedPrivateKey"`
|
||||
Iv string `json:"iv"`
|
||||
Tag string `json:"tag"`
|
||||
ProtectedKey string `json:"protectedKey"`
|
||||
ProtectedKeyIV string `json:"protectedKeyIV"`
|
||||
ProtectedKeyTag string `json:"protectedKeyTag"`
|
||||
}
|
||||
|
||||
type VerifyMfaTokenRequest struct {
|
||||
Email string `json:"email"`
|
||||
MFAToken string `json:"mfaToken"`
|
||||
}
|
||||
|
||||
type VerifyMfaTokenResponse struct {
|
||||
EncryptionVersion int `json:"encryptionVersion"`
|
||||
Token string `json:"token"`
|
||||
PublicKey string `json:"publicKey"`
|
||||
EncryptedPrivateKey string `json:"encryptedPrivateKey"`
|
||||
Iv string `json:"iv"`
|
||||
Tag string `json:"tag"`
|
||||
ProtectedKey string `json:"protectedKey"`
|
||||
ProtectedKeyIV string `json:"protectedKeyIV"`
|
||||
ProtectedKeyTag string `json:"protectedKeyTag"`
|
||||
}
|
||||
|
||||
type VerifyMfaTokenErrorResponse struct {
|
||||
Type string `json:"type"`
|
||||
Message string `json:"message"`
|
||||
Context struct {
|
||||
Code string `json:"code"`
|
||||
TriesLeft int `json:"triesLeft"`
|
||||
} `json:"context"`
|
||||
Level int `json:"level"`
|
||||
LevelName string `json:"level_name"`
|
||||
StatusCode int `json:"status_code"`
|
||||
DatetimeIso time.Time `json:"datetime_iso"`
|
||||
Application string `json:"application"`
|
||||
Extra []interface{} `json:"extra"`
|
||||
}
|
||||
|
cli/packages/cmd/cmd_test.go (new file, 49 lines)
@ -0,0 +1,49 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/Infisical/infisical-merge/packages/models"
|
||||
)
|
||||
|
||||
func TestFilterReservedEnvVars(t *testing.T) {
|
||||
|
||||
// some test env vars.
|
||||
// HOME and PATH are reserved key words and should be filtered out
|
||||
// XDG_SESSION_ID and LC_CTYPE are reserved key word prefixes and should be filtered out
|
||||
// The filter function only checks the keys of the env map, so we dont need to set any values
|
||||
env := map[string]models.SingleEnvironmentVariable{
|
||||
"test": {},
|
||||
"test2": {},
|
||||
"HOME": {},
|
||||
"PATH": {},
|
||||
"XDG_SESSION_ID": {},
|
||||
"LC_CTYPE": {},
|
||||
}
|
||||
|
||||
// check to see if there are any reserved key words in secrets to inject
|
||||
filterReservedEnvVars(env)
|
||||
|
||||
if len(env) != 2 {
|
||||
t.Errorf("Expected 2 secrets to be returned, got %d", len(env))
|
||||
}
|
||||
if _, ok := env["test"]; !ok {
|
||||
t.Errorf("Expected test to be returned")
|
||||
}
|
||||
if _, ok := env["test2"]; !ok {
|
||||
t.Errorf("Expected test2 to be returned")
|
||||
}
|
||||
if _, ok := env["HOME"]; ok {
|
||||
t.Errorf("Expected HOME to be filtered out")
|
||||
}
|
||||
if _, ok := env["PATH"]; ok {
|
||||
t.Errorf("Expected PATH to be filtered out")
|
||||
}
|
||||
if _, ok := env["XDG_SESSION_ID"]; ok {
|
||||
t.Errorf("Expected XDG_SESSION_ID to be filtered out")
|
||||
}
|
||||
if _, ok := env["LC_CTYPE"]; ok {
|
||||
t.Errorf("Expected LC_CTYPE to be filtered out")
|
||||
}
|
||||
|
||||
}
|
@ -36,9 +36,12 @@ var exportCmd = &cobra.Command{
|
||||
// util.RequireLocalWorkspaceFile()
|
||||
},
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
envName, err := cmd.Flags().GetString("env")
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
environmentName, _ := cmd.Flags().GetString("env")
|
||||
if !cmd.Flags().Changed("env") {
|
||||
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
|
||||
if environmentFromWorkspace != "" {
|
||||
environmentName = environmentFromWorkspace
|
||||
}
|
||||
}
|
||||
|
||||
shouldExpandSecrets, err := cmd.Flags().GetBool("expand")
|
||||
@ -61,7 +64,12 @@ var exportCmd = &cobra.Command{
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: envName, InfisicalToken: infisicalToken})
|
||||
tagSlugs, err := cmd.Flags().GetString("tags")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs})
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to fetch secrets")
|
||||
}
|
||||
@ -97,6 +105,7 @@ func init() {
|
||||
exportCmd.Flags().StringP("format", "f", "dotenv", "Set the format of the output file (dotenv, json, csv)")
|
||||
exportCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
|
||||
exportCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
|
||||
exportCmd.Flags().StringP("tags", "t", "", "filter secrets by tag slugs")
|
||||
}
|
||||
|
||||
// Format according to the format flag
|
||||
|
@ -56,7 +56,7 @@ var initCmd = &cobra.Command{
|
||||
workspaces := workspaceResponse.Workspaces
|
||||
if len(workspaces) == 0 {
|
||||
message := fmt.Sprintf("You don't have any projects created in Infisical. You must first create a project at %s", util.INFISICAL_TOKEN_NAME)
|
||||
util.PrintMessageAndExit(message)
|
||||
util.PrintErrorMessageAndExit(message)
|
||||
}
|
||||
|
||||
var workspaceNames []string
|
||||
@ -91,7 +91,7 @@ func writeWorkspaceFile(selectedWorkspace models.Workspace) error {
|
||||
WorkspaceId: selectedWorkspace.ID,
|
||||
}
|
||||
|
||||
marshalledWorkspaceFile, err := json.Marshal(workspaceFileToSave)
|
||||
marshalledWorkspaceFile, err := json.MarshalIndent(workspaceFileToSave, "", " ")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -13,7 +13,6 @@ import (
|
||||
"regexp"
|
||||
|
||||
"github.com/Infisical/infisical-merge/packages/api"
|
||||
"github.com/Infisical/infisical-merge/packages/config"
|
||||
"github.com/Infisical/infisical-merge/packages/crypto"
|
||||
"github.com/Infisical/infisical-merge/packages/models"
|
||||
"github.com/Infisical/infisical-merge/packages/srp"
|
||||
@ -23,8 +22,17 @@ import (
|
||||
"github.com/manifoldco/promptui"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
"golang.org/x/crypto/argon2"
|
||||
)
|
||||
|
||||
type params struct {
|
||||
memory uint32
|
||||
iterations uint32
|
||||
parallelism uint8
|
||||
saltLength uint32
|
||||
keyLength uint32
|
||||
}
|
||||
|
||||
// loginCmd represents the login command
|
||||
var loginCmd = &cobra.Command{
|
||||
Use: "login",
|
||||
@ -55,36 +63,158 @@ var loginCmd = &cobra.Command{
|
||||
util.HandleError(err, "Unable to parse email and password for authentication")
|
||||
}
|
||||
|
||||
userCredentials, err := getFreshUserCredentials(email, password)
|
||||
loginOneResponse, loginTwoResponse, err := getFreshUserCredentials(email, password)
|
||||
if err != nil {
|
||||
log.Infoln("Unable to authenticate with the provided credentials, please try again")
|
||||
log.Debugln(err)
|
||||
return
|
||||
}
|
||||
|
||||
encryptedPrivateKey, _ := base64.StdEncoding.DecodeString(userCredentials.EncryptedPrivateKey)
|
||||
tag, err := base64.StdEncoding.DecodeString(userCredentials.Tag)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
if loginTwoResponse.MfaEnabled {
|
||||
i := 1
|
||||
for i < 6 {
|
||||
mfaVerifyCode := askForMFACode()
|
||||
|
||||
httpClient := resty.New()
|
||||
httpClient.SetAuthToken(loginTwoResponse.Token)
|
||||
verifyMFAresponse, mfaErrorResponse, requestError := api.CallVerifyMfaToken(httpClient, api.VerifyMfaTokenRequest{
|
||||
Email: email,
|
||||
MFAToken: mfaVerifyCode,
|
||||
})
|
||||
|
||||
if requestError != nil {
|
||||
util.HandleError(err)
|
||||
break
|
||||
} else if mfaErrorResponse != nil {
|
||||
if mfaErrorResponse.Context.Code == "mfa_invalid" {
|
||||
msg := fmt.Sprintf("Incorrect, verification code. You have %v attempts left", 5-i)
|
||||
fmt.Println(msg)
|
||||
if i == 5 {
|
||||
util.PrintErrorMessageAndExit("No tries left, please try again in a bit")
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if mfaErrorResponse.Context.Code == "mfa_expired" {
|
||||
util.PrintErrorMessageAndExit("Your 2FA verification code has expired, please try logging in again")
|
||||
break
|
||||
}
|
||||
i++
|
||||
} else {
|
||||
loginTwoResponse.EncryptedPrivateKey = verifyMFAresponse.EncryptedPrivateKey
|
||||
loginTwoResponse.EncryptionVersion = verifyMFAresponse.EncryptionVersion
|
||||
loginTwoResponse.Iv = verifyMFAresponse.Iv
|
||||
loginTwoResponse.ProtectedKey = verifyMFAresponse.ProtectedKey
|
||||
loginTwoResponse.ProtectedKeyIV = verifyMFAresponse.ProtectedKeyIV
|
||||
loginTwoResponse.ProtectedKeyTag = verifyMFAresponse.ProtectedKeyTag
|
||||
loginTwoResponse.PublicKey = verifyMFAresponse.PublicKey
|
||||
loginTwoResponse.Tag = verifyMFAresponse.Tag
|
||||
loginTwoResponse.Token = verifyMFAresponse.Token
|
||||
loginTwoResponse.EncryptionVersion = verifyMFAresponse.EncryptionVersion
|
||||
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
IV, err := base64.StdEncoding.DecodeString(userCredentials.IV)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
var decryptedPrivateKey []byte
|
||||
|
||||
if loginTwoResponse.EncryptionVersion == 1 {
|
||||
log.Debug("Login version 1")
|
||||
encryptedPrivateKey, _ := base64.StdEncoding.DecodeString(loginTwoResponse.EncryptedPrivateKey)
|
||||
tag, err := base64.StdEncoding.DecodeString(loginTwoResponse.Tag)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
IV, err := base64.StdEncoding.DecodeString(loginTwoResponse.Iv)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
paddedPassword := fmt.Sprintf("%032s", password)
|
||||
key := []byte(paddedPassword)
|
||||
|
||||
computedDecryptedPrivateKey, err := crypto.DecryptSymmetric(key, encryptedPrivateKey, tag, IV)
|
||||
if err != nil || len(computedDecryptedPrivateKey) == 0 {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
decryptedPrivateKey = computedDecryptedPrivateKey
|
||||
|
||||
} else if loginTwoResponse.EncryptionVersion == 2 {
|
||||
log.Debug("Login version 2")
|
||||
protectedKey, err := base64.StdEncoding.DecodeString(loginTwoResponse.ProtectedKey)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
protectedKeyTag, err := base64.StdEncoding.DecodeString(loginTwoResponse.ProtectedKeyTag)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
protectedKeyIV, err := base64.StdEncoding.DecodeString(loginTwoResponse.ProtectedKeyIV)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
nonProtectedTag, err := base64.StdEncoding.DecodeString(loginTwoResponse.Tag)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
nonProtectedIv, err := base64.StdEncoding.DecodeString(loginTwoResponse.Iv)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
parameters := ¶ms{
|
||||
memory: 64 * 1024,
|
||||
iterations: 3,
|
||||
parallelism: 1,
|
||||
keyLength: 32,
|
||||
}
|
||||
|
||||
derivedKey, err := generateFromPassword(password, []byte(loginOneResponse.Salt), parameters)
|
||||
if err != nil {
|
||||
util.HandleError(fmt.Errorf("unable to generate argon hash from password [err=%s]", err))
|
||||
}
|
||||
|
||||
decryptedProtectedKey, err := crypto.DecryptSymmetric(derivedKey, protectedKey, protectedKeyTag, protectedKeyIV)
|
||||
if err != nil {
|
||||
util.HandleError(fmt.Errorf("unable to get decrypted protected key [err=%s]", err))
|
||||
}
|
||||
|
||||
encryptedPrivateKey, err := base64.StdEncoding.DecodeString(loginTwoResponse.EncryptedPrivateKey)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
decryptedProtectedKeyInHex, err := hex.DecodeString(string(decryptedProtectedKey))
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
computedDecryptedPrivateKey, err := crypto.DecryptSymmetric(decryptedProtectedKeyInHex, encryptedPrivateKey, nonProtectedTag, nonProtectedIv)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
decryptedPrivateKey = computedDecryptedPrivateKey
|
||||
} else {
|
||||
util.PrintErrorMessageAndExit("Insufficient details to decrypt private key")
|
||||
}
|
||||
|
||||
paddedPassword := fmt.Sprintf("%032s", password)
|
||||
key := []byte(paddedPassword)
|
||||
|
||||
decryptedPrivateKey, err := crypto.DecryptSymmetric(key, encryptedPrivateKey, tag, IV)
|
||||
if err != nil || len(decryptedPrivateKey) == 0 {
|
||||
util.HandleError(err)
|
||||
if string(decryptedPrivateKey) == "" || email == "" || loginTwoResponse.Token == "" {
|
||||
log.Debugf("[decryptedPrivateKey=%s] [email=%s] [loginTwoResponse.Token=%s]", string(decryptedPrivateKey), email, loginTwoResponse.Token)
|
||||
util.PrintErrorMessageAndExit("We were unable to fetch required details to complete your login. Run with -d to see more info")
|
||||
}
|
||||
|
||||
userCredentialsToBeStored := &models.UserCredentials{
|
||||
Email: email,
|
||||
PrivateKey: string(decryptedPrivateKey),
|
||||
JTWToken: userCredentials.JTWToken,
|
||||
JTWToken: loginTwoResponse.Token,
|
||||
}
|
||||
|
||||
err = util.StoreUserCredsInKeyRing(userCredentialsToBeStored)
|
||||
@ -104,7 +234,13 @@ var loginCmd = &cobra.Command{
|
||||
// clear backed up secrets from prev account
|
||||
util.DeleteBackupSecrets()
|
||||
|
||||
color.Green("Nice! You are logged in as: %v", email)
|
||||
white := color.New(color.FgWhite)
boldWhite := white.Add(color.Bold)
|
||||
boldWhite.Printf(">>>> Welcome to Infisical!")
|
||||
color.Green(" You are now logged in as %v <<<<", email)
|
||||
boldWhite.Println("\nQuick links")
|
||||
fmt.Println("- Learn to inject secrets into your application at https://infisical.com/docs/cli/usage")
|
||||
fmt.Println("- Stuck? Join our slack for quick support https://tinyurl.com/infisical-slack")
|
||||
|
||||
},
|
||||
}
|
||||
@ -155,7 +291,7 @@ func askForLoginCredentials() (email string, password string, err error) {
|
||||
return userEmail, userPassword, nil
|
||||
}
|
||||
|
||||
func getFreshUserCredentials(email string, password string) (*api.LoginTwoResponse, error) {
|
||||
func getFreshUserCredentials(email string, password string) (*api.GetLoginOneV2Response, *api.GetLoginTwoV2Response, error) {
|
||||
log.Debugln("getFreshUserCredentials:", "email", email, "password", password)
|
||||
httpClient := resty.New()
|
||||
httpClient.SetRetryCount(5)
|
||||
@ -166,36 +302,24 @@ func getFreshUserCredentials(email string, password string) (*api.LoginTwoRespon
|
||||
srpA := hex.EncodeToString(srpClient.ComputeA())
|
||||
|
||||
// ** Login one
|
||||
loginOneRequest := api.LoginOneRequest{
|
||||
loginOneResponseResult, err := api.CallLogin1V2(httpClient, api.GetLoginOneV2Request{
|
||||
Email: email,
|
||||
ClientPublicKey: srpA,
|
||||
}
|
||||
|
||||
var loginOneResponseResult api.LoginOneResponse
|
||||
|
||||
loginOneResponse, err := httpClient.
|
||||
R().
|
||||
SetBody(loginOneRequest).
|
||||
SetResult(&loginOneResponseResult).
|
||||
Post(fmt.Sprintf("%v/v1/auth/login1", config.INFISICAL_URL))
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if loginOneResponse.StatusCode() > 299 {
|
||||
return nil, fmt.Errorf("ops, unsuccessful response code. [response=%v]", loginOneResponse)
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
// **** Login 2
|
||||
serverPublicKey_bytearray, err := hex.DecodeString(loginOneResponseResult.ServerPublicKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
userSalt, err := hex.DecodeString(loginOneResponseResult.ServerSalt)
|
||||
userSalt, err := hex.DecodeString(loginOneResponseResult.Salt)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
srpClient.SetSalt(userSalt, []byte(email), []byte(password))
|
||||
@ -203,27 +327,16 @@ func getFreshUserCredentials(email string, password string) (*api.LoginTwoRespon
|
||||
|
||||
srpM1 := srpClient.ComputeM1()
|
||||
|
||||
LoginTwoRequest := api.LoginTwoRequest{
|
||||
loginTwoResponseResult, err := api.CallLogin2V2(httpClient, api.GetLoginTwoV2Request{
|
||||
Email: email,
|
||||
ClientProof: hex.EncodeToString(srpM1),
|
||||
}
|
||||
|
||||
var loginTwoResponseResult api.LoginTwoResponse
|
||||
loginTwoResponse, err := httpClient.
|
||||
R().
|
||||
SetBody(LoginTwoRequest).
|
||||
SetResult(&loginTwoResponseResult).
|
||||
Post(fmt.Sprintf("%v/v1/auth/login2", config.INFISICAL_URL))
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
if loginTwoResponse.StatusCode() > 299 {
|
||||
return nil, fmt.Errorf("ops, unsuccessful response code. [response=%v]", loginTwoResponse)
|
||||
}
|
||||
|
||||
return &loginTwoResponseResult, nil
|
||||
return &loginOneResponseResult, &loginTwoResponseResult, nil
|
||||
}
|
||||
|
||||
func shouldOverrideLoginPrompt(currentLoggedInUserEmail string) (bool, error) {
|
||||
@ -237,3 +350,21 @@ func shouldOverrideLoginPrompt(currentLoggedInUserEmail string) (bool, error) {
|
||||
}
|
||||
return result == "Yes", err
|
||||
}
|
||||
|
||||
func generateFromPassword(password string, salt []byte, p *params) (hash []byte, err error) {
	hash = argon2.IDKey([]byte(password), salt, p.iterations, p.memory, p.parallelism, p.keyLength)
	return hash, nil
}
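For context, here is a minimal, self-contained sketch of the version-2 key derivation used above. The password and salt values are made up; the real ones come from the prompt and from loginOneResponse.Salt, and the Argon2id parameters mirror the params literal in the login command.

package main

import (
	"fmt"

	"golang.org/x/crypto/argon2"
)

func main() {
	// Hypothetical inputs for illustration only.
	password := "correct horse battery staple"
	salt := []byte("demo-salt-value")

	// Same Argon2id parameters as the params literal above:
	// 64 MiB of memory, 3 iterations, parallelism 1, 32-byte output.
	derivedKey := argon2.IDKey([]byte(password), salt, 3, 64*1024, 1, 32)

	fmt.Printf("derived %d-byte key: %x\n", len(derivedKey), derivedKey)

	// In the login flow above, derivedKey then decrypts the protected key via
	// crypto.DecryptSymmetric(derivedKey, protectedKey, protectedKeyTag, protectedKeyIV),
	// and the hex-decoded result in turn decrypts the user's private key.
}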
|
||||
|
||||
func askForMFACode() string {
|
||||
mfaCodePromptUI := promptui.Prompt{
|
||||
Label: "Enter the 2FA verification code sent to your email",
|
||||
}
|
||||
|
||||
mfaVerifyCode, err := mfaCodePromptUI.Run()
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
return mfaVerifyCode
|
||||
}
|
||||
|
45	cli/packages/cmd/reset.go	Normal file
@@ -0,0 +1,45 @@
|
||||
/*
|
||||
Copyright (c) 2023 Infisical Inc.
|
||||
*/
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"os"
|
||||
|
||||
"github.com/Infisical/infisical-merge/packages/util"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var resetCmd = &cobra.Command{
|
||||
Use: "reset",
|
||||
Short: "Used to delete all Infisical-related data on your machine",
|
||||
DisableFlagsInUseLine: true,
|
||||
Example: "infisical reset",
|
||||
Args: cobra.NoArgs,
|
||||
PreRun: func(cmd *cobra.Command, args []string) {
|
||||
toggleDebug(cmd, args)
|
||||
},
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
// delete config
|
||||
_, pathToDir, err := util.GetFullConfigFilePath()
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
os.RemoveAll(pathToDir)
|
||||
|
||||
// delete keyring
|
||||
keyringInstance, err := util.GetKeyRing()
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
keyringInstance.Remove(util.KEYRING_SERVICE_NAME)
|
||||
|
||||
util.PrintSuccessMessage("Reset successful")
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(resetCmd)
|
||||
}
|
@ -54,9 +54,12 @@ var runCmd = &cobra.Command{
|
||||
return nil
|
||||
},
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
envName, err := cmd.Flags().GetString("env")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
environmentName, _ := cmd.Flags().GetString("env")
|
||||
if !cmd.Flags().Changed("env") {
|
||||
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
|
||||
if environmentFromWorkspace != "" {
|
||||
environmentName = environmentFromWorkspace
|
||||
}
|
||||
}
|
||||
|
||||
infisicalToken, err := cmd.Flags().GetString("token")
|
||||
@ -64,10 +67,6 @@ var runCmd = &cobra.Command{
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
// if !util.IsSecretEnvironmentValid(envName) {
|
||||
// util.PrintMessageAndExit("Invalid environment name passed. Environment names can only be prod, dev, test or staging")
|
||||
// }
|
||||
|
||||
secretOverriding, err := cmd.Flags().GetBool("secret-overriding")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
@ -78,7 +77,12 @@ var runCmd = &cobra.Command{
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: envName, InfisicalToken: infisicalToken})
|
||||
tagSlugs, err := cmd.Flags().GetString("tags")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs})
|
||||
|
||||
if err != nil {
|
||||
util.HandleError(err, "Could not fetch secrets", "If you are using a service token to fetch secrets, please ensure it is valid")
|
||||
@ -106,13 +110,7 @@ var runCmd = &cobra.Command{
|
||||
}
|
||||
|
||||
// check to see if there are any reserved key words in secrets to inject
|
||||
reservedEnvironmentVariables := []string{"HOME", "PATH", "PS1", "PS2"}
|
||||
for _, reservedEnvName := range reservedEnvironmentVariables {
|
||||
if _, ok := secretsByKey[reservedEnvName]; ok {
|
||||
delete(secretsByKey, reservedEnvName)
|
||||
util.PrintWarning(fmt.Sprintf("Infisical secret named [%v] has been removed because it is a reserved secret name", reservedEnvName))
|
||||
}
|
||||
}
|
||||
filterReservedEnvVars(secretsByKey)
|
||||
|
||||
// now add infisical secrets
|
||||
for k, v := range secretsByKey {
|
||||
@ -145,6 +143,37 @@ var runCmd = &cobra.Command{
|
||||
},
|
||||
}
|
||||
|
||||
var (
|
||||
reservedEnvVars = []string{
|
||||
"HOME", "PATH", "PS1", "PS2",
|
||||
"PWD", "EDITOR", "XAUTHORITY", "USER",
|
||||
"TERM", "TERMINFO", "SHELL", "MAIL",
|
||||
}
|
||||
|
||||
reservedEnvVarPrefixes = []string{
|
||||
"XDG_",
|
||||
"LC_",
|
||||
}
|
||||
)
|
||||
|
||||
func filterReservedEnvVars(env map[string]models.SingleEnvironmentVariable) {
	for _, reservedEnvName := range reservedEnvVars {
		if _, ok := env[reservedEnvName]; ok {
			delete(env, reservedEnvName)
			util.PrintWarning(fmt.Sprintf("Infisical secret named [%v] has been removed because it is a reserved secret name", reservedEnvName))
		}
	}

	for _, reservedEnvPrefix := range reservedEnvVarPrefixes {
		for envName := range env {
			if strings.HasPrefix(envName, reservedEnvPrefix) {
				delete(env, envName)
				util.PrintWarning(fmt.Sprintf("Infisical secret named [%v] has been removed because it contains a reserved prefix", envName))
			}
		}
	}
}
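A standalone sketch of the filtering behaviour, using a local envVar type as a stand-in for models.SingleEnvironmentVariable and made-up secret names and values:

package main

import (
	"fmt"
	"strings"
)

// envVar stands in for models.SingleEnvironmentVariable in this sketch.
type envVar struct{ Key, Value string }

var (
	reservedNames    = []string{"HOME", "PATH", "PS1", "PS2"}
	reservedPrefixes = []string{"XDG_", "LC_"}
)

func main() {
	secrets := map[string]envVar{
		"DATABASE_URL": {"DATABASE_URL", "postgres://user:pass@localhost/db"},
		"PATH":         {"PATH", "/tmp/bad"}, // reserved name, dropped
		"LC_ALL":       {"LC_ALL", "C"},      // reserved prefix LC_, dropped
	}

	// Same filtering rules as filterReservedEnvVars above.
	for _, name := range reservedNames {
		delete(secrets, name)
	}
	for _, prefix := range reservedPrefixes {
		for name := range secrets {
			if strings.HasPrefix(name, prefix) {
				delete(secrets, name)
			}
		}
	}

	fmt.Println(secrets) // only DATABASE_URL survives
}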
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(runCmd)
|
||||
runCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
|
||||
@ -152,15 +181,19 @@ func init() {
|
||||
runCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
|
||||
runCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
|
||||
runCmd.Flags().StringP("command", "c", "", "chained commands to execute (e.g. \"npm install && npm run dev; echo ...\")")
|
||||
runCmd.Flags().StringP("tags", "t", "", "filter secrets by tag slugs ")
|
||||
}
|
||||
|
||||
// Will execute a single command and pass in the given secrets into the process
|
||||
func executeSingleCommandWithEnvs(args []string, secretsCount int, env []string) error {
|
||||
command := args[0]
|
||||
argsForCommand := args[1:]
|
||||
shell := subShellCmd()
|
||||
color.Green("Injecting %v Infisical secrets into your application process", secretsCount)
|
||||
|
||||
cmd := exec.Command(command, argsForCommand...)
|
||||
args = append(args[:1], args[0:]...) // shift args to the right
|
||||
args[0] = shell[1]
|
||||
|
||||
cmd := exec.Command(shell[0], args...)
|
||||
|
||||
cmd.Stdin = os.Stdin
|
||||
cmd.Stdout = os.Stdout
|
||||
cmd.Stderr = os.Stderr
|
||||
@ -170,15 +203,7 @@ func executeSingleCommandWithEnvs(args []string, secretsCount int, env []string)
|
||||
}
|
||||
|
||||
func executeMultipleCommandWithEnvs(fullCommand string, secretsCount int, env []string) error {
|
||||
shell := [2]string{"sh", "-c"}
|
||||
if runtime.GOOS == "windows" {
|
||||
shell = [2]string{"cmd", "/C"}
|
||||
} else {
|
||||
currentShell := os.Getenv("SHELL")
|
||||
if currentShell != "" {
|
||||
shell[0] = currentShell
|
||||
}
|
||||
}
|
||||
shell := subShellCmd()
|
||||
|
||||
cmd := exec.Command(shell[0], shell[1], fullCommand)
|
||||
cmd.Stdin = os.Stdin
|
||||
@ -192,6 +217,23 @@ func executeMultipleCommandWithEnvs(fullCommand string, secretsCount int, env []
|
||||
return execCmd(cmd)
|
||||
}
|
||||
|
||||
func subShellCmd() [2]string {
	// default to sh -c
	shell := [...]string{"sh", "-c"}

	currentShell := os.Getenv("SHELL")
	if currentShell != "" {
		shell[0] = currentShell
	} else if runtime.GOOS == "windows" {
		// if the SHELL env var is not set and we're on Windows, use cmd.exe
		// The SHELL var should always be checked first, in case the user executes
		// infisical from something like Git Bash.
		return [...]string{"cmd", "/C"}
	}

	return shell
}
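A hedged, self-contained sketch of how such a sub-shell pair can be used to run a user command with extra variables injected into its environment; pickShell mirrors the logic above, and the echoed command and variable name are illustrative (Unix-style shell syntax assumed):

package main

import (
	"os"
	"os/exec"
	"runtime"
)

// pickShell mirrors subShellCmd: prefer $SHELL, fall back to cmd.exe on Windows, else sh -c.
func pickShell() [2]string {
	shell := [2]string{"sh", "-c"}
	if current := os.Getenv("SHELL"); current != "" {
		shell[0] = current
	} else if runtime.GOOS == "windows" {
		return [2]string{"cmd", "/C"}
	}
	return shell
}

func main() {
	shell := pickShell()

	cmd := exec.Command(shell[0], shell[1], "echo $GREETING") // illustrative command
	cmd.Env = append(os.Environ(), "GREETING=hello-from-injected-env")
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr

	if err := cmd.Run(); err != nil {
		os.Exit(1)
	}
}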
|
||||
|
||||
// Credit: inspired by AWS Vault
|
||||
func execCmd(cmd *exec.Cmd) error {
|
||||
sigChannel := make(chan os.Signal, 1)
|
||||
|
@ -19,7 +19,6 @@ import (
|
||||
"github.com/Infisical/infisical-merge/packages/util"
|
||||
"github.com/Infisical/infisical-merge/packages/visualize"
|
||||
"github.com/go-resty/resty/v2"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
@ -31,9 +30,12 @@ var secretsCmd = &cobra.Command{
|
||||
PreRun: toggleDebug,
|
||||
Args: cobra.NoArgs,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
environmentName, err := cmd.Flags().GetString("env")
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
environmentName, _ := cmd.Flags().GetString("env")
|
||||
if !cmd.Flags().Changed("env") {
|
||||
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
|
||||
if environmentFromWorkspace != "" {
|
||||
environmentName = environmentFromWorkspace
|
||||
}
|
||||
}
|
||||
|
||||
infisicalToken, err := cmd.Flags().GetString("token")
|
||||
@ -46,7 +48,12 @@ var secretsCmd = &cobra.Command{
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken})
|
||||
tagSlugs, err := cmd.Flags().GetString("tags")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs})
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
@ -87,13 +94,14 @@ var secretsSetCmd = &cobra.Command{
|
||||
PreRun: toggleDebug,
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
environmentName, err := cmd.Flags().GetString("env")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
util.RequireLocalWorkspaceFile()
|
||||
|
||||
if !util.IsSecretEnvironmentValid(environmentName) {
|
||||
util.PrintMessageAndExit("You have entered a invalid environment name", "Environment names can only be prod, dev, test or staging")
|
||||
environmentName, _ := cmd.Flags().GetString("env")
|
||||
if !cmd.Flags().Changed("env") {
|
||||
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
|
||||
if environmentFromWorkspace != "" {
|
||||
environmentName = environmentFromWorkspace
|
||||
}
|
||||
}
|
||||
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
@ -148,11 +156,11 @@ var secretsSetCmd = &cobra.Command{
|
||||
for _, arg := range args {
|
||||
splitKeyValueFromArg := strings.SplitN(arg, "=", 2)
|
||||
if splitKeyValueFromArg[0] == "" || splitKeyValueFromArg[1] == "" {
|
||||
util.PrintMessageAndExit("ensure that each secret has a none empty key and value. Modify the input and try again")
|
||||
util.PrintErrorMessageAndExit("ensure that each secret has a none empty key and value. Modify the input and try again")
|
||||
}
|
||||
|
||||
if unicode.IsNumber(rune(splitKeyValueFromArg[0][0])) {
|
||||
util.PrintMessageAndExit("keys of secrets cannot start with a number. Modify the key name(s) and try again")
|
||||
util.PrintErrorMessageAndExit("keys of secrets cannot start with a number. Modify the key name(s) and try again")
|
||||
}
|
||||
|
||||
// Key and value from argument
|
||||
@ -267,11 +275,12 @@ var secretsDeleteCmd = &cobra.Command{
|
||||
PreRun: toggleDebug,
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
environmentName, err := cmd.Flags().GetString("env")
|
||||
if err != nil {
|
||||
log.Errorln("Unable to parse the environment name flag")
|
||||
log.Debugln(err)
|
||||
return
|
||||
environmentName, _ := cmd.Flags().GetString("env")
|
||||
if !cmd.Flags().Changed("env") {
|
||||
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
|
||||
if environmentFromWorkspace != "" {
|
||||
environmentName = environmentFromWorkspace
|
||||
}
|
||||
}
|
||||
|
||||
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
|
||||
@ -303,7 +312,7 @@ var secretsDeleteCmd = &cobra.Command{
|
||||
|
||||
if len(invalidSecretNamesThatDoNotExist) != 0 {
|
||||
message := fmt.Sprintf("secret name(s) [%v] does not exist in your project. To see which secrets exist run [infisical secrets]", strings.Join(invalidSecretNamesThatDoNotExist, ", "))
|
||||
util.PrintMessageAndExit(message)
|
||||
util.PrintErrorMessageAndExit(message)
|
||||
}
|
||||
|
||||
request := api.BatchDeleteSecretsBySecretIdsRequest{
|
||||
@ -327,14 +336,12 @@ var secretsDeleteCmd = &cobra.Command{
|
||||
}
|
||||
|
||||
func getSecretsByNames(cmd *cobra.Command, args []string) {
|
||||
environmentName, err := cmd.Flags().GetString("env")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
workspaceFileExists := util.WorkspaceConfigFileExistsInCurrentPath()
|
||||
if !workspaceFileExists {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
environmentName, _ := cmd.Flags().GetString("env")
|
||||
if !cmd.Flags().Changed("env") {
|
||||
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
|
||||
if environmentFromWorkspace != "" {
|
||||
environmentName = environmentFromWorkspace
|
||||
}
|
||||
}
|
||||
|
||||
infisicalToken, err := cmd.Flags().GetString("token")
|
||||
@ -342,17 +349,19 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken})
|
||||
tagSlugs, err := cmd.Flags().GetString("tags")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs})
|
||||
if err != nil {
|
||||
util.HandleError(err, "To fetch all secrets")
|
||||
}
|
||||
|
||||
requestedSecrets := []models.SingleEnvironmentVariable{}
|
||||
|
||||
secretsMap := make(map[string]models.SingleEnvironmentVariable)
|
||||
for _, secret := range secrets {
|
||||
secretsMap[secret.Key] = secret
|
||||
}
|
||||
secretsMap := getSecretsByKeys(secrets)
|
||||
|
||||
for _, secretKeyFromArg := range args {
|
||||
if value, ok := secretsMap[strings.ToUpper(secretKeyFromArg)]; ok {
|
||||
@ -370,14 +379,12 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
|
||||
}
|
||||
|
||||
func generateExampleEnv(cmd *cobra.Command, args []string) {
|
||||
environmentName, err := cmd.Flags().GetString("env")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
workspaceFileExists := util.WorkspaceConfigFileExistsInCurrentPath()
|
||||
if !workspaceFileExists {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
environmentName, _ := cmd.Flags().GetString("env")
|
||||
if !cmd.Flags().Changed("env") {
|
||||
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
|
||||
if environmentFromWorkspace != "" {
|
||||
environmentName = environmentFromWorkspace
|
||||
}
|
||||
}
|
||||
|
||||
infisicalToken, err := cmd.Flags().GetString("token")
|
||||
@ -385,12 +392,22 @@ func generateExampleEnv(cmd *cobra.Command, args []string) {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken})
|
||||
tagSlugs, err := cmd.Flags().GetString("tags")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs})
|
||||
if err != nil {
|
||||
util.HandleError(err, "To fetch all secrets")
|
||||
}
|
||||
|
||||
tagsHashToSecretKey := make(map[string]int)
|
||||
slugsToFilerBy := make(map[string]int)
|
||||
|
||||
for _, slug := range strings.Split(tagSlugs, ",") {
|
||||
slugsToFilerBy[slug] = 1
|
||||
}
|
||||
|
||||
type TagsAndSecrets struct {
|
||||
Secrets []models.SingleEnvironmentVariable
|
||||
@ -407,6 +424,25 @@ func generateExampleEnv(cmd *cobra.Command, args []string) {
|
||||
return len(secrets[i].Tags) > len(secrets[j].Tags)
|
||||
})
|
||||
|
||||
for i, secret := range secrets {
|
||||
filteredTag := []struct {
|
||||
ID string "json:\"_id\""
|
||||
Name string "json:\"name\""
|
||||
Slug string "json:\"slug\""
|
||||
Workspace string "json:\"workspace\""
|
||||
}{}
|
||||
|
||||
for _, secretTag := range secret.Tags {
|
||||
_, exists := slugsToFilerBy[secretTag.Slug]
|
||||
if !exists {
|
||||
filteredTag = append(filteredTag, secretTag)
|
||||
}
|
||||
}
|
||||
|
||||
secret.Tags = filteredTag
|
||||
secrets[i] = secret
|
||||
}
|
||||
|
||||
for _, secret := range secrets {
|
||||
listOfTagSlugs := []string{}
|
||||
|
||||
@ -470,6 +506,8 @@ func generateExampleEnv(cmd *cobra.Command, args []string) {
|
||||
return len(listOfsecretDetails[i].Tags) < len(listOfsecretDetails[j].Tags)
|
||||
})
|
||||
|
||||
tableOfContents := []string{}
|
||||
fullyGeneratedDocuments := []string{}
|
||||
for _, secretDetails := range listOfsecretDetails {
|
||||
listOfKeyValue := []string{}
|
||||
|
||||
@ -510,11 +548,22 @@ func generateExampleEnv(cmd *cobra.Command, args []string) {
|
||||
heading := CenterString(strings.Join(listOfTagNames, " & "), 80)
|
||||
|
||||
if len(listOfTagNames) == 0 {
|
||||
fmt.Printf("\n%s \n", strings.Join(listOfKeyValue, "\n \n"))
|
||||
fullyGeneratedDocuments = append(fullyGeneratedDocuments, fmt.Sprintf("\n%s \n", strings.Join(listOfKeyValue, "\n")))
|
||||
} else {
|
||||
fmt.Printf("\n\n\n%s\n \n%s \n", heading, strings.Join(listOfKeyValue, "\n \n"))
|
||||
fullyGeneratedDocuments = append(fullyGeneratedDocuments, fmt.Sprintf("\n\n\n%s \n%s \n", heading, strings.Join(listOfKeyValue, "\n")))
|
||||
tableOfContents = append(tableOfContents, strings.ToUpper(strings.Join(listOfTagNames, " & ")))
|
||||
}
|
||||
}
|
||||
|
||||
dashedList := []string{}
|
||||
for _, item := range tableOfContents {
|
||||
dashedList = append(dashedList, fmt.Sprintf("# - %s \n", item))
|
||||
}
|
||||
if len(dashedList) > 0 {
|
||||
fmt.Println(CenterString("TABLE OF CONTENTS", 80))
|
||||
fmt.Println(strings.Join(dashedList, ""))
|
||||
}
|
||||
fmt.Println(strings.Join(fullyGeneratedDocuments, ""))
|
||||
}
|
||||
|
||||
func CenterString(s string, numStars int) string {
|
||||
@ -535,7 +584,7 @@ func addHash(input string) string {
|
||||
}
|
||||
|
||||
func getSecretsByKeys(secrets []models.SingleEnvironmentVariable) map[string]models.SingleEnvironmentVariable {
|
||||
secretMapByName := make(map[string]models.SingleEnvironmentVariable)
|
||||
secretMapByName := make(map[string]models.SingleEnvironmentVariable, len(secrets))
|
||||
|
||||
for _, secret := range secrets {
|
||||
secretMapByName[secret.Key] = secret
|
||||
@ -567,5 +616,6 @@ func init() {
|
||||
secretsCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
|
||||
secretsCmd.PersistentFlags().String("env", "dev", "Used to select the environment name on which actions should be taken on")
|
||||
secretsCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
|
||||
secretsCmd.PersistentFlags().StringP("tags", "t", "", "filter secrets by tag slugs")
|
||||
rootCmd.AddCommand(secretsCmd)
|
||||
}
|
||||
|
@ -39,7 +39,9 @@ type Workspace struct {
|
||||
}
|
||||
|
||||
type WorkspaceConfigFile struct {
|
||||
WorkspaceId string `json:"workspaceId"`
|
||||
WorkspaceId string `json:"workspaceId"`
|
||||
DefaultEnvironment string `json:"defaultEnvironment"`
|
||||
GitBranchToEnvironmentMapping map[string]string `json:"gitBranchToEnvironmentMapping"`
|
||||
}
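To make the new fields concrete, a short sketch of the JSON shape this struct serializes to; the workspace id and the branch and environment names below are made up for illustration.

package main

import (
	"encoding/json"
	"fmt"
)

type WorkspaceConfigFile struct {
	WorkspaceId                   string            `json:"workspaceId"`
	DefaultEnvironment            string            `json:"defaultEnvironment"`
	GitBranchToEnvironmentMapping map[string]string `json:"gitBranchToEnvironmentMapping"`
}

func main() {
	cfg := WorkspaceConfigFile{
		WorkspaceId:        "63ee5410a45f7a1ed39ba118", // made-up id
		DefaultEnvironment: "dev",
		GitBranchToEnvironmentMapping: map[string]string{
			"main":    "prod",
			"staging": "staging",
		},
	}

	out, _ := json.MarshalIndent(cfg, "", "  ")
	fmt.Println(string(out))
	// Output:
	// {
	//   "workspaceId": "63ee5410a45f7a1ed39ba118",
	//   "defaultEnvironment": "dev",
	//   "gitBranchToEnvironmentMapping": {
	//     "main": "prod",
	//     "staging": "staging"
	//   }
	// }
}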
|
||||
|
||||
type SymmetricEncryptionResult struct {
|
||||
@ -49,6 +51,8 @@ type SymmetricEncryptionResult struct {
|
||||
}
|
||||
|
||||
type GetAllSecretsParameters struct {
|
||||
Environment string
|
||||
InfisicalToken string
|
||||
Environment string
|
||||
EnvironmentPassedViaFlag bool
|
||||
InfisicalToken string
|
||||
TagSlugs string
|
||||
}
|
||||
|
@ -5,6 +5,7 @@ import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/Infisical/infisical-merge/packages/models"
|
||||
log "github.com/sirupsen/logrus"
|
||||
@ -73,7 +74,12 @@ func WorkspaceConfigFileExistsInCurrentPath() bool {
|
||||
}
|
||||
|
||||
func GetWorkSpaceFromFile() (models.WorkspaceConfigFile, error) {
|
||||
configFileAsBytes, err := os.ReadFile(INFISICAL_WORKSPACE_CONFIG_FILE_NAME)
|
||||
cfgFile, err := FindWorkspaceConfigFile()
|
||||
if err != nil {
|
||||
return models.WorkspaceConfigFile{}, err
|
||||
}
|
||||
|
||||
configFileAsBytes, err := os.ReadFile(cfgFile)
|
||||
if err != nil {
|
||||
return models.WorkspaceConfigFile{}, err
|
||||
}
|
||||
@ -87,6 +93,37 @@ func GetWorkSpaceFromFile() (models.WorkspaceConfigFile, error) {
|
||||
return workspaceConfigFile, nil
|
||||
}
|
||||
|
||||
// FindWorkspaceConfigFile searches for a .infisical.json file in the current directory and all parent directories.
func FindWorkspaceConfigFile() (string, error) {
	dir, err := os.Getwd()
	if err != nil {
		return "", err
	}

	for {
		path := filepath.Join(dir, INFISICAL_WORKSPACE_CONFIG_FILE_NAME)
		_, err := os.Stat(path)
		if err == nil {
			// file found
			log.Debugf("FindWorkspaceConfigFile: workspace file found at [path=%s]", path)

			return path, nil
		}

		// check if we have reached the root directory
		if dir == filepath.Dir(dir) {
			break
		}

		// move up one directory
		dir = filepath.Dir(dir)
	}

	// file not found
	return "", fmt.Errorf("file not found: %s", INFISICAL_WORKSPACE_CONFIG_FILE_NAME)
}
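The loop above terminates because filepath.Dir stops changing once it reaches the filesystem root; a tiny check of that property on a made-up Unix-style path:

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	dir := "/home/dev/project/api" // illustrative path
	for {
		fmt.Println(dir)
		parent := filepath.Dir(dir)
		if parent == dir { // same condition FindWorkspaceConfigFile uses to stop at the root
			break
		}
		dir = parent
	}
	// On a Unix-like system this prints:
	// /home/dev/project/api, /home/dev/project, /home/dev, /home, /
}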
|
||||
|
||||
func GetFullConfigFilePath() (fullPathToFile string, fullPathToDirectory string, err error) {
|
||||
homeDir, err := GetHomeDir()
|
||||
if err != nil {
|
||||
|
@ -1,10 +1,14 @@
|
||||
package util
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/sha256"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type DecodedSymmetricEncryptionDetails = struct {
|
||||
@ -65,29 +69,29 @@ func RequireLogin() {
|
||||
}
|
||||
|
||||
if !currentUserDetails.IsUserLoggedIn {
|
||||
PrintMessageAndExit("You must be logged in to run this command. To login, run [infisical login]")
|
||||
PrintErrorMessageAndExit("You must be logged in to run this command. To login, run [infisical login]")
|
||||
}
|
||||
|
||||
if currentUserDetails.LoginExpired {
|
||||
PrintMessageAndExit("Your login expired, please login in again. To login, run [infisical login]")
|
||||
PrintErrorMessageAndExit("Your login expired, please login in again. To login, run [infisical login]")
|
||||
}
|
||||
|
||||
if currentUserDetails.UserCredentials.Email == "" && currentUserDetails.UserCredentials.JTWToken == "" && currentUserDetails.UserCredentials.PrivateKey == "" {
|
||||
PrintMessageAndExit("One or more of your login details is empty. Please try logging in again via by running [infisical login]")
|
||||
PrintErrorMessageAndExit("One or more of your login details is empty. Please try logging in again via by running [infisical login]")
|
||||
}
|
||||
}
|
||||
|
||||
func RequireServiceToken() {
|
||||
serviceToken := os.Getenv(INFISICAL_TOKEN_NAME)
|
||||
if serviceToken == "" {
|
||||
PrintMessageAndExit("No service token is found in your terminal")
|
||||
PrintErrorMessageAndExit("No service token is found in your terminal")
|
||||
}
|
||||
}
|
||||
|
||||
func RequireLocalWorkspaceFile() {
|
||||
workspaceFileExists := WorkspaceConfigFileExistsInCurrentPath()
|
||||
if !workspaceFileExists {
|
||||
PrintMessageAndExit("It looks you have not yet connected this project to Infisical", "To do so, run [infisical init] then run your command again")
|
||||
workspaceFilePath, _ := FindWorkspaceConfigFile()
|
||||
if workspaceFilePath == "" {
|
||||
PrintErrorMessageAndExit("It looks you have not yet connected this project to Infisical", "To do so, run [infisical init] then run your command again")
|
||||
}
|
||||
|
||||
workspaceFile, err := GetWorkSpaceFromFile()
|
||||
@ -96,7 +100,7 @@ func RequireLocalWorkspaceFile() {
|
||||
}
|
||||
|
||||
if workspaceFile.WorkspaceId == "" {
|
||||
PrintMessageAndExit("Your project id is missing in your local config file. Please add it or run again [infisical init]")
|
||||
PrintErrorMessageAndExit("Your project id is missing in your local config file. Please add it or run again [infisical init]")
|
||||
}
|
||||
}
|
||||
|
||||
@ -110,3 +114,26 @@ func GetHashFromStringList(list []string) string {
|
||||
sum := sha256.Sum256(hash.Sum(nil))
|
||||
return fmt.Sprintf("%x", sum)
|
||||
}
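Only the tail of GetHashFromStringList is visible in this hunk. A hedged reconstruction of the full idea, assuming the hidden lines write each list entry into a single sha256 stream before the final double-hash shown above:

package main

import (
	"crypto/sha256"
	"fmt"
)

// hashStringList is a sketch, not the repository's exact implementation: feed every
// item into one sha256 hasher, then hash the digest once more, matching the visible
// `sha256.Sum256(hash.Sum(nil))` line.
func hashStringList(list []string) string {
	hash := sha256.New()
	for _, item := range list {
		hash.Write([]byte(item))
	}
	sum := sha256.Sum256(hash.Sum(nil))
	return fmt.Sprintf("%x", sum)
}

func main() {
	fmt.Println(hashStringList([]string{"dev", "backend", "billing"})) // illustrative tag slugs
}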
|
||||
|
||||
// execCmd is a struct that holds the command and arguments to be executed.
// By using this struct, we can easily mock the command and arguments.
type execCmd struct {
	cmd  string
	args []string
}

var getCurrentBranchCmd = execCmd{
	cmd:  "git",
	args: []string{"symbolic-ref", "--short", "HEAD"},
}

func getCurrentBranch() (string, error) {
	cmd := exec.Command(getCurrentBranchCmd.cmd, getCurrentBranchCmd.args...)
	var out bytes.Buffer
	cmd.Stdout = &out
	err := cmd.Run()
	if err != nil {
		return "", err
	}
	return path.Base(strings.TrimSpace(out.String())), nil
}
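Note that `git symbolic-ref --short HEAD` prints the current branch name, and the path.Base call keeps only the last path segment of it. A small illustration with made-up branch names:

package main

import (
	"fmt"
	"path"
	"strings"
)

func main() {
	// Simulated output of `git symbolic-ref --short HEAD` (illustrative branch names).
	for _, raw := range []string{"main\n", "feature/add-tags\n"} {
		branch := path.Base(strings.TrimSpace(raw))
		fmt.Println(branch) // prints "main", then "add-tags"
	}
}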
|
||||
|
@ -27,10 +27,14 @@ func PrintWarning(message string) {
|
||||
color.New(color.FgYellow).Fprintf(os.Stderr, "Warning: %v \n", message)
|
||||
}
|
||||
|
||||
func PrintMessageAndExit(messages ...string) {
|
||||
func PrintSuccessMessage(message string) {
|
||||
color.New(color.FgGreen).Println(message)
|
||||
}
|
||||
|
||||
func PrintErrorMessageAndExit(messages ...string) {
|
||||
if len(messages) > 0 {
|
||||
for _, message := range messages {
|
||||
fmt.Println(message)
|
||||
fmt.Fprintln(os.Stderr, message)
|
||||
}
|
||||
}
|
||||
|
||||
@ -38,5 +42,5 @@ func PrintMessageAndExit(messages ...string) {
|
||||
}
|
||||
|
||||
func printError(e error) {
|
||||
color.Red("Hmm, we ran into an error: %v", e)
|
||||
color.New(color.FgRed).Fprintf(os.Stderr, "Hmm, we ran into an error: %v", e)
|
||||
}
|
||||
|
@ -62,7 +62,7 @@ func GetPlainTextSecretsViaServiceToken(fullServiceToken string) ([]models.Singl
|
||||
return plainTextSecrets, nil
|
||||
}
|
||||
|
||||
func GetPlainTextSecretsViaJTW(JTWToken string, receiversPrivateKey string, workspaceId string, environmentName string) ([]models.SingleEnvironmentVariable, error) {
|
||||
func GetPlainTextSecretsViaJTW(JTWToken string, receiversPrivateKey string, workspaceId string, environmentName string, tagSlugs string) ([]models.SingleEnvironmentVariable, error) {
|
||||
httpClient := resty.New()
|
||||
httpClient.SetAuthToken(JTWToken).
|
||||
SetHeader("Accept", "application/json")
|
||||
@ -85,6 +85,7 @@ func GetPlainTextSecretsViaJTW(JTWToken string, receiversPrivateKey string, work
|
||||
encryptedSecrets, err := api.CallGetSecretsV2(httpClient, api.GetEncryptedSecretsV2Request{
|
||||
WorkspaceId: workspaceId,
|
||||
Environment: environmentName,
|
||||
TagSlugs: tagSlugs,
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
@ -130,7 +131,13 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters) ([]models
|
||||
return nil, err
|
||||
}
|
||||
|
||||
secretsToReturn, errorToReturn = GetPlainTextSecretsViaJTW(loggedInUserDetails.UserCredentials.JTWToken, loggedInUserDetails.UserCredentials.PrivateKey, workspaceFile.WorkspaceId, params.Environment)
|
||||
// Verify environment
|
||||
err = ValidateEnvironmentName(params.Environment, workspaceFile.WorkspaceId, loggedInUserDetails.UserCredentials)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to validate environment name because [err=%s]", err)
|
||||
}
|
||||
|
||||
secretsToReturn, errorToReturn = GetPlainTextSecretsViaJTW(loggedInUserDetails.UserCredentials.JTWToken, loggedInUserDetails.UserCredentials.PrivateKey, workspaceFile.WorkspaceId, params.Environment, params.TagSlugs)
|
||||
log.Debugf("GetAllEnvironmentVariables: Trying to fetch secrets JTW token [err=%s]", errorToReturn)
|
||||
|
||||
backupSecretsEncryptionKey := []byte(loggedInUserDetails.UserCredentials.PrivateKey)[0:32]
|
||||
@ -156,6 +163,33 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters) ([]models
|
||||
return secretsToReturn, errorToReturn
|
||||
}
|
||||
|
||||
func ValidateEnvironmentName(environmentName string, workspaceId string, userLoggedInDetails models.UserCredentials) error {
|
||||
httpClient := resty.New()
|
||||
httpClient.SetAuthToken(userLoggedInDetails.JTWToken).
|
||||
SetHeader("Accept", "application/json")
|
||||
|
||||
response, err := api.CallGetAccessibleEnvironments(httpClient, api.GetAccessibleEnvironmentsRequest{WorkspaceId: workspaceId})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
listOfEnvSlugs := []string{}
|
||||
mapOfEnvSlugs := make(map[string]interface{})
|
||||
|
||||
for _, environment := range response.AccessibleEnvironments {
|
||||
listOfEnvSlugs = append(listOfEnvSlugs, environment.Slug)
|
||||
mapOfEnvSlugs[environment.Slug] = environment
|
||||
}
|
||||
|
||||
_, exists := mapOfEnvSlugs[environmentName]
|
||||
if !exists {
|
||||
HandleError(fmt.Errorf("the environment [%s] does not exist in project with [id=%s]. Only [%s] are available", environmentName, workspaceId, strings.Join(listOfEnvSlugs, ",")))
|
||||
}
|
||||
|
||||
return nil
|
||||
|
||||
}
|
||||
|
||||
func getExpandedEnvVariable(secrets []models.SingleEnvironmentVariable, variableWeAreLookingFor string, hashMapOfCompleteVariables map[string]string, hashMapOfSelfRefs map[string]string) string {
|
||||
if value, found := hashMapOfCompleteVariables[variableWeAreLookingFor]; found {
|
||||
return value
|
||||
@ -447,3 +481,35 @@ func DeleteBackupSecrets() error {
|
||||
|
||||
return os.RemoveAll(fullPathToSecretsBackupFolder)
|
||||
}
|
||||
|
||||
func GetEnvFromWorkspaceFile() string {
	workspaceFile, err := GetWorkSpaceFromFile()
	if err != nil {
		log.Debugf("getEnvFromWorkspaceFile: [err=%s]", err)
		return ""
	}

	if env := GetEnvelopmentBasedOnGitBranch(workspaceFile); env != "" {
		return env
	}

	return workspaceFile.DefaultEnvironment
}

func GetEnvelopmentBasedOnGitBranch(workspaceFile models.WorkspaceConfigFile) string {
	branch, err := getCurrentBranch()
	if err != nil {
		log.Debugf("getEnvelopmentBasedOnGitBranch: [err=%s]", err)
	}

	envBasedOnGitBranch, ok := workspaceFile.GitBranchToEnvironmentMapping[branch]

	log.Debugf("GetEnvelopmentBasedOnGitBranch: [envBasedOnGitBranch=%s] [ok=%t]", envBasedOnGitBranch, ok)

	if err == nil && ok {
		return envBasedOnGitBranch
	} else {
		log.Debugf("getEnvelopmentBasedOnGitBranch: [err=%s]", err)
		return ""
	}
}
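Putting the pieces together, a hedged, self-contained sketch of the branch-to-environment resolution these two helpers implement; the local workspaceConfig type and the branch and environment names are illustrative:

package main

import "fmt"

type workspaceConfig struct {
	DefaultEnvironment            string
	GitBranchToEnvironmentMapping map[string]string
}

// resolveEnv mirrors GetEnvFromWorkspaceFile: try the branch mapping first, then fall
// back to the workspace's default environment.
func resolveEnv(cfg workspaceConfig, currentBranch string) string {
	if env, ok := cfg.GitBranchToEnvironmentMapping[currentBranch]; ok && env != "" {
		return env
	}
	return cfg.DefaultEnvironment
}

func main() {
	cfg := workspaceConfig{
		DefaultEnvironment:            "dev",
		GitBranchToEnvironmentMapping: map[string]string{"main": "prod"},
	}
	fmt.Println(resolveEnv(cfg, "main"))            // prod
	fmt.Println(resolveEnv(cfg, "feature/cleanup")) // dev
}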
|
||||
|
Some files were not shown because too many files have changed in this diff.