Compare commits
183 Commits
SHA1

df52c56e83
4276fb54cc
bb5a0db79c
b906048ea1
7ce9c816c5
3fef6e4849
e7ce1e36e7
734c915206
783174adc6
d769db7668
00e532fce4
7cf8cba54b
70b26811d9
e7aafecbc2
949fb052cd
fcb1f5a51b
e24f70b891
bd233ebe9b
f92269f2ec
2143db5eb5
0c72f50b5e
3c4c616242
153baad49f
75a2ab636c
05a77e612c
d02bc06dce
e1f88f1a7b
86a2647134
621b640af4
af64582efd
6ad70f24a2
89bc9a823c
40250b7ecf
2d6d32923d
7cb6aee3f7
469d042f4b
c38ccdb915
baaa92427f
1ff2c61b3a
0b356e0e83
eb55c053eb
07b307e4b1
5bee6a5e24
bdc99e34cc
cee10fb507
74e78bb967
ea5811c24c
d31b7ae4af
75eac1b972
c65ce14de3
f8c4ccd64c
43ce222725
c7ebeecb6b
243c6ca22e
66f1c57a2a
c0d1495761
e5f6ed3dc7
ab62d91b09
59beabb445
d5bc377e3d
2bdb20f42f
0062df58a2
b6bbfc08ad
5baccc73c9
20e7eae4fe
8432f71d58
604c22d64d
c1deb08df8
66f201746f
1c61ffbd36
e5ba8eb281
f542e07c33
1082d7f869
4a3adaa347
1659dab87d
d88599714f
71bf56a2b7
0fba78ad16
92560f5e1f
0d484b93eb
5f3b8c55b8
553416689c
b0744fd21d
be38844a5b
54e2b661bc
b81d8eba25
dbcd2b0988
1d11f11eaf
f2d7401d1d
91cb9750b4
3e0d4cb70a
dab677b360
625c0785b5
540a8b4201
11f86da1f6
ab5ffa9ee6
65bec23292
635ae941d7
a9753fb784
b587d9b35a
aa68bc05d9
66566a401f
5aa75ecd3f
0a77f9a0c8
b5d4cfed03
c57394bdab
da857f321b
754ea09400
f28a2ea151
c7dd028771
3c94bacda9
8e85847de3
0c10bbb569
b710944630
280f482fc8
e1ad8fbee8
56ca6039ba
fba54ae0c6
e243c72ca6
23ea6fd4f9
3f9f2ef238
77cb20f5c7
ddf630c269
39adb9a0c2
97fde96b7b
190391e493
d3fcb69c50
2db4a29ad7
4df82a6ff1
cdf73043e1
ca07d1c50e
868011479b
6f6df3e63a
23c740d225
702d4de3b5
445fa35ab5
9868476965
bfa6b955ca
13b1805d04
90f5934440
30b2b85446
0adc3d2027
e53fd110f6
edf0294d51
8850b44115
17f9e53779
a61233d2ba
2022988e77
409de81bd2
2b289ddf77
b066a55ead
8dfc0138f5
517f508e44
2f1a671121
2fb4b261a8
9c3c745fdf
6a75147719
295b363d8a
d96b5943b9
17406e413d
9b219f67b0
8fd2578a6d
cc809a6bc0
66659c8fc8
31293bbe06
1c3488f8db
20e536cec0
e8b498ca6d
b5bcd0a308
03c72ea00f
a486390015
8dc47110a0
52a6fe64a7
669861d7a8
e6539a5566
07c056523f
80d219c3e0
b0ffac2f00
5ba851adff
bb752863fa
cf5603c8e3
77b1011207
5cadb9e2f9
.github/pull_request_template.md (new file, 22 lines, vendored)

@@ -0,0 +1,22 @@
# Description 📣

*Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change.*

## Type ✨

- [ ] Bug fix
- [ ] New feature
- [ ] Breaking change
- [ ] Documentation

# Tests 🛠️

*Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration. You may want to add screenshots when relevant and possible*

```sh
# Here's some code block to paste some code snippets
```

---

- [ ] I have read the [contributing guide](https://infisical.com/docs/contributing/overview), agreed and acknowledged the [code of conduct](https://infisical.com/docs/contributing/code-of-conduct). 📝
.github/values.yaml (86 changed lines, vendored)

@@ -1,11 +1,5 @@
#####
# INFISICAL K8 DEFAULT VALUES FILE
# PLEASE REPLACE VALUES/EDIT AS REQUIRED
#####

nameOverride: ""

frontend:
enabled: true
name: frontend
podAnnotations: {}
deploymentAnnotations:

@@ -13,17 +7,18 @@ frontend:
replicaCount: 2
image:
repository: infisical/frontend
pullPolicy: Always
tag: "latest"
pullPolicy: Always
kubeSecretRef: managed-secret-frontend
service:
# type of the frontend service
type: ClusterIP
# define the nodePort if service type is NodePort
# nodePort:
annotations: {}
type: ClusterIP
nodePort: ""

frontendEnvironmentVariables: null

backend:
enabled: true
name: backend
podAnnotations: {}
deploymentAnnotations:

@@ -31,63 +26,46 @@ backend:
replicaCount: 2
image:
repository: infisical/backend
pullPolicy: Always
tag: "latest"
pullPolicy: Always
kubeSecretRef: managed-backend-secret
service:
annotations: {}
type: ClusterIP
nodePort: ""

backendEnvironmentVariables: null

## Mongo DB persistence
mongodb:
name: mongodb
podAnnotations: {}
image:
repository: mongo
pullPolicy: IfNotPresent
tag: "latest"
service:
annotations: {}
enabled: true
persistence:
enabled: false

# By default the backend will be connected to a Mongo instance in the cluster.
# However, it is recommended to add a managed document DB connection string because the DB instance in the cluster does not have persistence yet ( data will be deleted on next deploy).
# Learn about connection string type here https://www.mongodb.com/docs/manual/reference/connection-string/
mongodbConnection: {}
# externalMongoDBConnectionString: <>
## By default the backend will be connected to a Mongo instance within the cluster
## However, it is recommended to add a managed document DB connection string for production-use (DBaaS)
## Learn about connection string type here https://www.mongodb.com/docs/manual/reference/connection-string/
## e.g. "mongodb://<user>:<pass>@<host>:<port>/<database-name>"
mongodbConnection:
externalMongoDBConnectionString: ""

ingress:
enabled: true
annotations:
kubernetes.io/ingress.class: "nginx"
hostName: gamma.infisical.com # replace with your domain
frontend:
# cert-manager.io/issuer: letsencrypt-nginx
hostName: gamma.infisical.com ## <- Replace with your own domain
frontend:
path: /
pathType: Prefix
backend:
path: /api
pathType: Prefix
tls: []
tls:
[]
# - secretName: letsencrypt-nginx
# hosts:
# - infisical.local

## Complete Ingress example
# ingress:
# enabled: true
# annotations:
# kubernetes.io/ingress.class: "nginx"
# cert-manager.io/issuer: letsencrypt-nginx
# hostName: k8.infisical.com
# frontend:
# path: /
# pathType: Prefix
# backend:
# path: /api
# pathType: Prefix
# tls:
# - secretName: letsencrypt-nginx
# hosts:
# - k8.infisical.com

###
### YOU MUST FILL IN ALL SECRETS BELOW
###
backendEnvironmentVariables: {}

frontendEnvironmentVariables: {}
mailhog:
enabled: false
.github/workflows/release_build.yml (12 changed lines, vendored)

@@ -4,7 +4,7 @@ on:
push:
# run only against tags
tags:
- 'v*'
- "v*"

permissions:
contents: write

@@ -18,11 +18,16 @@ jobs:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3
with:
go-version: '>=1.19.3'
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: libssl1.1 => libssl1.0-dev for OSXCross

@@ -45,8 +50,7 @@ jobs:
AUR_KEY: ${{ secrets.AUR_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- name: Publish to CloudSmith
- name: Publish to CloudSmith
run: sh cli/upload_to_cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
@@ -68,10 +68,10 @@ archives:

release:
replace_existing_draft: true
mode: 'replace'
mode: "replace"

checksum:
name_template: 'checksums.txt'
name_template: "checksums.txt"

snapshot:
name_template: "{{ incpatch .Version }}-devel"

@@ -80,8 +80,8 @@ changelog:
sort: asc
filters:
exclude:
- '^docs:'
- '^test:'
- "^docs:"
- "^test:"

# publishers:
# - name: fury.io

@@ -109,30 +109,30 @@ brews:
man1.install "manpages/infisical.1.gz"

nfpms:
- id: infisical
package_name: infisical
builds:
- all-other-builds
vendor: Infisical, Inc
homepage: https://infisical.com/
maintainer: Infisical, Inc
description: The offical Infisical CLI
license: MIT
formats:
- rpm
- deb
- apk
- archlinux
bindir: /usr/bin
contents:
- src: ./completions/infisical.bash
dst: /etc/bash_completion.d/infisical
- src: ./completions/infisical.fish
dst: /usr/share/fish/vendor_completions.d/infisical.fish
- src: ./completions/infisical.zsh
dst: /usr/share/zsh/site-functions/_infisical
- src: ./manpages/infisical.1.gz
dst: /usr/share/man/man1/infisical.1.gz
- id: infisical
package_name: infisical
builds:
- all-other-builds
vendor: Infisical, Inc
homepage: https://infisical.com/
maintainer: Infisical, Inc
description: The offical Infisical CLI
license: MIT
formats:
- rpm
- deb
- apk
- archlinux
bindir: /usr/bin
contents:
- src: ./completions/infisical.bash
dst: /etc/bash_completion.d/infisical
- src: ./completions/infisical.fish
dst: /usr/share/fish/vendor_completions.d/infisical.fish
- src: ./completions/infisical.zsh
dst: /usr/share/zsh/site-functions/_infisical
- src: ./manpages/infisical.1.gz
dst: /usr/share/man/man1/infisical.1.gz

scoop:
bucket:

@@ -146,15 +146,14 @@ scoop:
license: MIT

aurs:
-
name: infisical-bin
- name: infisical-bin
homepage: "https://infisical.com"
description: "The official Infisical CLI"
maintainers:
- Infisical, Inc <support@infisical.com>
license: MIT
private_key: '{{ .Env.AUR_KEY }}'
git_url: 'ssh://aur@aur.archlinux.org/infisical-bin.git'
private_key: "{{ .Env.AUR_KEY }}"
git_url: "ssh://aur@aur.archlinux.org/infisical-bin.git"
package: |-
# bin
install -Dm755 "./infisical" "${pkgdir}/usr/bin/infisical"

@@ -169,19 +168,13 @@ aurs:
install -Dm644 "./completions/infisical.fish" "${pkgdir}/usr/share/fish/vendor_completions.d/infisical.fish"
# man pages
install -Dm644 "./manpages/infisical.1.gz" "${pkgdir}/usr/share/man/man1/infisical.1.gz"

# dockers:
# - dockerfile: goreleaser.dockerfile
# - dockerfile: cli/docker/Dockerfile
# goos: linux
# goarch: amd64
# ids:
# - infisical
# image_templates:
# - "infisical/cli:{{ .Version }}"
# - "infisical/cli:{{ .Major }}.{{ .Minor }}"
# - "infisical/cli:{{ .Major }}"
# - "infisical/cli:{{ .Version }}"
# - "infisical/cli:latest"
# build_flag_templates:
# - "--label=org.label-schema.schema-version=1.0"
# - "--label=org.label-schema.version={{.Version}}"
# - "--label=org.label-schema.name={{.ProjectName}}"
# - "--platform=linux/amd64"
64
backend/package-lock.json
generated
@ -19,6 +19,7 @@
|
||||
"await-to-js": "^3.0.0",
|
||||
"aws-sdk": "^2.1311.0",
|
||||
"axios": "^1.1.3",
|
||||
"axios-retry": "^3.4.0",
|
||||
"bcrypt": "^5.1.0",
|
||||
"bigint-conversion": "^2.2.2",
|
||||
"builder-pattern": "^2.2.0",
|
||||
@ -3473,6 +3474,17 @@
|
||||
"@babel/core": "^7.0.0-0"
|
||||
}
|
||||
},
|
||||
"node_modules/@babel/runtime": {
|
||||
"version": "7.21.0",
|
||||
"resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.21.0.tgz",
|
||||
"integrity": "sha512-xwII0//EObnq89Ji5AKYQaRYiW/nZ3llSv29d49IuxPhKbtJoLP+9QUUZ4nVragQVtaVGeZrpB+ZtG/Pdy/POw==",
|
||||
"dependencies": {
|
||||
"regenerator-runtime": "^0.13.11"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.9.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@babel/template": {
|
||||
"version": "7.18.10",
|
||||
"resolved": "https://registry.npmjs.org/@babel/template/-/template-7.18.10.tgz",
|
||||
@ -5317,6 +5329,15 @@
|
||||
"proxy-from-env": "^1.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/axios-retry": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/axios-retry/-/axios-retry-3.4.0.tgz",
|
||||
"integrity": "sha512-VdgaP+gHH4iQYCCNUWF2pcqeciVOdGrBBAYUfTY+wPcO5Ltvp/37MLFNCmJKo7Gj3SHvCSdL8ouI1qLYJN3liA==",
|
||||
"dependencies": {
|
||||
"@babel/runtime": "^7.15.4",
|
||||
"is-retry-allowed": "^2.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/babel-jest": {
|
||||
"version": "29.3.1",
|
||||
"resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.3.1.tgz",
|
||||
@ -7604,6 +7625,17 @@
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/is-retry-allowed": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/is-retry-allowed/-/is-retry-allowed-2.2.0.tgz",
|
||||
"integrity": "sha512-XVm7LOeLpTW4jV19QSH38vkswxoLud8sQ57YwJVTPWdiaI9I8keEhGFpBlslyVsgdQy4Opg8QOLb8YRgsyZiQg==",
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/is-stream": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
|
||||
@ -12179,6 +12211,11 @@
|
||||
"node": ">=8.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/regenerator-runtime": {
|
||||
"version": "0.13.11",
|
||||
"resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz",
|
||||
"integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg=="
|
||||
},
|
||||
"node_modules/regexpp": {
|
||||
"version": "3.2.0",
|
||||
"resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz",
|
||||
@ -16584,6 +16621,14 @@
|
||||
"@babel/helper-plugin-utils": "^7.19.0"
|
||||
}
|
||||
},
|
||||
"@babel/runtime": {
|
||||
"version": "7.21.0",
|
||||
"resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.21.0.tgz",
|
||||
"integrity": "sha512-xwII0//EObnq89Ji5AKYQaRYiW/nZ3llSv29d49IuxPhKbtJoLP+9QUUZ4nVragQVtaVGeZrpB+ZtG/Pdy/POw==",
|
||||
"requires": {
|
||||
"regenerator-runtime": "^0.13.11"
|
||||
}
|
||||
},
|
||||
"@babel/template": {
|
||||
"version": "7.18.10",
|
||||
"resolved": "https://registry.npmjs.org/@babel/template/-/template-7.18.10.tgz",
|
||||
@ -18075,6 +18120,15 @@
|
||||
"proxy-from-env": "^1.1.0"
|
||||
}
|
||||
},
|
||||
"axios-retry": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/axios-retry/-/axios-retry-3.4.0.tgz",
|
||||
"integrity": "sha512-VdgaP+gHH4iQYCCNUWF2pcqeciVOdGrBBAYUfTY+wPcO5Ltvp/37MLFNCmJKo7Gj3SHvCSdL8ouI1qLYJN3liA==",
|
||||
"requires": {
|
||||
"@babel/runtime": "^7.15.4",
|
||||
"is-retry-allowed": "^2.2.0"
|
||||
}
|
||||
},
|
||||
"babel-jest": {
|
||||
"version": "29.3.1",
|
||||
"resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.3.1.tgz",
|
||||
@ -19771,6 +19825,11 @@
|
||||
"resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz",
|
||||
"integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q=="
|
||||
},
|
||||
"is-retry-allowed": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/is-retry-allowed/-/is-retry-allowed-2.2.0.tgz",
|
||||
"integrity": "sha512-XVm7LOeLpTW4jV19QSH38vkswxoLud8sQ57YwJVTPWdiaI9I8keEhGFpBlslyVsgdQy4Opg8QOLb8YRgsyZiQg=="
|
||||
},
|
||||
"is-stream": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
|
||||
@ -23088,6 +23147,11 @@
|
||||
"picomatch": "^2.2.1"
|
||||
}
|
||||
},
|
||||
"regenerator-runtime": {
|
||||
"version": "0.13.11",
|
||||
"resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz",
|
||||
"integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg=="
|
||||
},
|
||||
"regexpp": {
|
||||
"version": "3.2.0",
|
||||
"resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz",
|
||||
@@ -10,6 +10,7 @@
"await-to-js": "^3.0.0",
"aws-sdk": "^2.1311.0",
"axios": "^1.1.3",
"axios-retry": "^3.4.0",
"bcrypt": "^5.1.0",
"bigint-conversion": "^2.2.2",
"builder-pattern": "^2.2.0",
@@ -1,7 +1,7 @@
// eslint-disable-next-line @typescript-eslint/no-var-requires
const { patchRouterParam } = require('./utils/patchAsyncRoutes');

import express, { Request, Response } from 'express';
import express from 'express';
import helmet from 'helmet';
import cors from 'cors';
import cookieParser from 'cookie-parser';

@@ -42,6 +42,8 @@ import {
integrationAuth as v1IntegrationAuthRouter
} from './routes/v1';
import {
signup as v2SignupRouter,
auth as v2AuthRouter,
users as v2UsersRouter,
organizations as v2OrganizationsRouter,
workspace as v2WorkspaceRouter,

@@ -110,6 +112,8 @@ app.use('/api/v1/integration', v1IntegrationRouter);
app.use('/api/v1/integration-auth', v1IntegrationAuthRouter);

// v2 routes
app.use('/api/v2/signup', v2SignupRouter);
app.use('/api/v2/auth', v2AuthRouter);
app.use('/api/v2/users', v2UsersRouter);
app.use('/api/v2/organizations', v2OrganizationsRouter);
app.use('/api/v2/workspace', v2EnvironmentRouter);
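The v2 routers mounted above are ordinary Express routers, presumably exported from a routes/v2 module. A minimal sketch of the shape such a module takes; the route path and handler below are illustrative and not lifted from this diff:

```ts
import express from 'express';

const router = express.Router();

// Illustrative endpoint only; the real v2 routes add validation middleware
// and delegate to the controllers shown later in this diff.
router.post('/email/verify', async (req, res) => {
  return res.status(200).send({ message: 'ok' });
});

export default router;
```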
@@ -1,9 +1,12 @@
const PORT = process.env.PORT || 4000;
const EMAIL_TOKEN_LIFETIME = process.env.EMAIL_TOKEN_LIFETIME! || '86400';
const EMAIL_TOKEN_LIFETIME = parseInt(process.env.EMAIL_TOKEN_LIFETIME! || '86400');
const INVITE_ONLY_SIGNUP = process.env.INVITE_ONLY_SIGNUP == undefined ? false : process.env.INVITE_ONLY_SIGNUP
const ENCRYPTION_KEY = process.env.ENCRYPTION_KEY!;
const SALT_ROUNDS = parseInt(process.env.SALT_ROUNDS!) || 10;
const JWT_AUTH_LIFETIME = process.env.JWT_AUTH_LIFETIME! || '10d';
const JWT_AUTH_SECRET = process.env.JWT_AUTH_SECRET!;
const JWT_MFA_LIFETIME = process.env.JWT_MFA_LIFETIME! || '5m';
const JWT_MFA_SECRET = process.env.JWT_MFA_SECRET!;
const JWT_REFRESH_LIFETIME = process.env.JWT_REFRESH_LIFETIME! || '90d';
const JWT_REFRESH_SECRET = process.env.JWT_REFRESH_SECRET!;
const JWT_SERVICE_SECRET = process.env.JWT_SERVICE_SECRET!;

@@ -24,7 +27,7 @@ const CLIENT_SECRET_HEROKU = process.env.CLIENT_SECRET_HEROKU!;
const CLIENT_SECRET_VERCEL = process.env.CLIENT_SECRET_VERCEL!;
const CLIENT_SECRET_NETLIFY = process.env.CLIENT_SECRET_NETLIFY!;
const CLIENT_SECRET_GITHUB = process.env.CLIENT_SECRET_GITHUB!;
const CLIENT_SLUG_VERCEL= process.env.CLIENT_SLUG_VERCEL!;
const CLIENT_SLUG_VERCEL = process.env.CLIENT_SLUG_VERCEL!;
const POSTHOG_HOST = process.env.POSTHOG_HOST! || 'https://app.posthog.com';
const POSTHOG_PROJECT_API_KEY =
process.env.POSTHOG_PROJECT_API_KEY! ||

@@ -50,10 +53,13 @@ const LICENSE_KEY = process.env.LICENSE_KEY!;
export {
PORT,
EMAIL_TOKEN_LIFETIME,
INVITE_ONLY_SIGNUP,
ENCRYPTION_KEY,
SALT_ROUNDS,
JWT_AUTH_LIFETIME,
JWT_AUTH_SECRET,
JWT_MFA_LIFETIME,
JWT_MFA_SECRET,
JWT_REFRESH_LIFETIME,
JWT_REFRESH_SECRET,
JWT_SERVICE_SECRET,
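For context, the new INVITE_ONLY_SIGNUP export is consumed in the signup controller further down in this diff; when the variable is defined, the constant holds its raw environment value. A condensed sketch of that check, with the surrounding handler abbreviated:

```ts
import { Request, Response } from 'express';
import { User } from '../../models';
import { INVITE_ONLY_SIGNUP } from '../../config';
import { BadRequestError } from '../../utils/errors';

export const beginEmailSignup = async (req: Request, res: Response) => {
  const email = req.body.email;

  if (INVITE_ONLY_SIGNUP) {
    // Only one user can create an account without being invited;
    // everyone else must come in through an organization invite.
    const userCount = await User.countDocuments({});
    if (userCount != 0) {
      throw BadRequestError({ message: "New user sign ups are not allowed at this time. You must be invited to sign up." });
    }
  }

  // ... the existing email verification flow for `email` continues here
};
```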
backend/src/config/request.ts (new file, 16 lines)

@@ -0,0 +1,16 @@
import axios from 'axios';
import axiosRetry from 'axios-retry';

const axiosInstance = axios.create();

// add retry functionality to the axios instance
axiosRetry(axiosInstance, {
  retries: 3,
  retryDelay: (retryCount) => retryCount * 1000, // delay between retries (in milliseconds)
  retryCondition: (error) => {
    // only retry if the error is a network error or a 5xx server error
    return axiosRetry.isNetworkError(error) || axiosRetry.isRetryableError(error);
  },
});

export default axiosInstance;
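The shared instance is meant to be imported in place of calling axios directly, so outbound requests pick up the retry behavior: up to 3 attempts, a linear 1 s / 2 s / 3 s back-off, and retries only for network failures or 5xx responses. A minimal usage sketch; the helper name, URL, and header values below are illustrative and not part of this diff:

```ts
import request from '../config/request';

// Hypothetical integration helper: any request that fails with a network
// error or a 5xx status is retried up to 3 times before the promise rejects.
export const getHerokuApps = async (accessToken: string) => {
  const { data } = await request.get('https://api.heroku.com/apps', {
    headers: {
      Accept: 'application/vnd.heroku+json; version=3',
      Authorization: `Bearer ${accessToken}`
    }
  });

  return data;
};
```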
@@ -5,7 +5,8 @@ import * as Sentry from '@sentry/node';
import * as bigintConversion from 'bigint-conversion';
const jsrp = require('jsrp');
import { User, LoginSRPDetail } from '../../models';
import { createToken, issueTokens, clearTokens } from '../../helpers/auth';
import { createToken, issueAuthTokens, clearTokens } from '../../helpers/auth';
import { checkUserDevice } from '../../helpers/user';
import {
ACTION_LOGIN,
ACTION_LOGOUT

@@ -111,7 +112,14 @@ export const login2 = async (req: Request, res: Response) => {
// compare server and client shared keys
if (server.checkClientProof(clientProof)) {
// issue tokens
const tokens = await issueTokens({ userId: user._id.toString() });

await checkUserDevice({
user,
ip: req.ip,
userAgent: req.headers['user-agent'] ?? ''
});

const tokens = await issueAuthTokens({ userId: user._id.toString() });

// store (refresh) token in httpOnly cookie
res.cookie('jid', tokens.refreshToken, {
@ -35,14 +35,11 @@ export const getIntegrationAuth = async (req: Request, res: Response) => {
|
||||
});
|
||||
}
|
||||
|
||||
export const getIntegrationOptions = async (
|
||||
req: Request,
|
||||
res: Response
|
||||
) => {
|
||||
return res.status(200).send({
|
||||
integrationOptions: INTEGRATION_OPTIONS
|
||||
});
|
||||
}
|
||||
export const getIntegrationOptions = async (req: Request, res: Response) => {
|
||||
return res.status(200).send({
|
||||
integrationOptions: INTEGRATION_OPTIONS,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Perform OAuth2 code-token exchange as part of integration [integration] for workspace with id [workspaceId]
|
||||
@ -90,8 +87,8 @@ export const oAuthExchange = async (
|
||||
* @param res
|
||||
*/
|
||||
export const saveIntegrationAccessToken = async (
|
||||
req: Request,
|
||||
res: Response
|
||||
req: Request,
|
||||
res: Response
|
||||
) => {
|
||||
// TODO: refactor
|
||||
// TODO: check if access token is valid for each integration
|
||||
@ -157,23 +154,23 @@ export const saveIntegrationAccessToken = async (
|
||||
* @returns
|
||||
*/
|
||||
export const getIntegrationAuthApps = async (req: Request, res: Response) => {
|
||||
let apps;
|
||||
try {
|
||||
apps = await getApps({
|
||||
integrationAuth: req.integrationAuth,
|
||||
accessToken: req.accessToken
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to get integration authorization applications'
|
||||
});
|
||||
}
|
||||
let apps;
|
||||
try {
|
||||
apps = await getApps({
|
||||
integrationAuth: req.integrationAuth,
|
||||
accessToken: req.accessToken,
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to get integration authorization applications",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
apps
|
||||
});
|
||||
return res.status(200).send({
|
||||
apps,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@ -183,21 +180,21 @@ export const getIntegrationAuthApps = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const deleteIntegrationAuth = async (req: Request, res: Response) => {
|
||||
let integrationAuth;
|
||||
try {
|
||||
integrationAuth = await revokeAccess({
|
||||
integrationAuth: req.integrationAuth,
|
||||
accessToken: req.accessToken
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to delete integration authorization'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integrationAuth
|
||||
});
|
||||
}
|
||||
let integrationAuth;
|
||||
try {
|
||||
integrationAuth = await revokeAccess({
|
||||
integrationAuth: req.integrationAuth,
|
||||
accessToken: req.accessToken,
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to delete integration authorization",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integrationAuth,
|
||||
});
|
||||
};
|
||||
|
@ -12,9 +12,9 @@ import { eventPushSecrets } from '../../events';
|
||||
|
||||
/**
|
||||
* Create/initialize an (empty) integration for integration authorization
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const createIntegration = async (req: Request, res: Response) => {
|
||||
let integration;
|
||||
@ -65,10 +65,10 @@ export const createIntegration = async (req: Request, res: Response) => {
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integration
|
||||
});
|
||||
}
|
||||
return res.status(200).send({
|
||||
integration,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Change environment or name of integration with id [integrationId]
|
||||
@ -77,57 +77,57 @@ export const createIntegration = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const updateIntegration = async (req: Request, res: Response) => {
|
||||
let integration;
|
||||
|
||||
// TODO: add integration-specific validation to ensure that each
|
||||
// integration has the correct fields populated in [Integration]
|
||||
|
||||
try {
|
||||
const {
|
||||
environment,
|
||||
isActive,
|
||||
app,
|
||||
appId,
|
||||
targetEnvironment,
|
||||
owner, // github-specific integration param
|
||||
} = req.body;
|
||||
|
||||
integration = await Integration.findOneAndUpdate(
|
||||
{
|
||||
_id: req.integration._id
|
||||
},
|
||||
{
|
||||
environment,
|
||||
isActive,
|
||||
app,
|
||||
appId,
|
||||
targetEnvironment,
|
||||
owner
|
||||
},
|
||||
{
|
||||
new: true
|
||||
}
|
||||
);
|
||||
|
||||
if (integration) {
|
||||
// trigger event - push secrets
|
||||
EventService.handleEvent({
|
||||
event: eventPushSecrets({
|
||||
workspaceId: integration.workspace.toString()
|
||||
})
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to update integration'
|
||||
});
|
||||
}
|
||||
let integration;
|
||||
|
||||
return res.status(200).send({
|
||||
integration
|
||||
});
|
||||
// TODO: add integration-specific validation to ensure that each
|
||||
// integration has the correct fields populated in [Integration]
|
||||
|
||||
try {
|
||||
const {
|
||||
environment,
|
||||
isActive,
|
||||
app,
|
||||
appId,
|
||||
targetEnvironment,
|
||||
owner, // github-specific integration param
|
||||
} = req.body;
|
||||
|
||||
integration = await Integration.findOneAndUpdate(
|
||||
{
|
||||
_id: req.integration._id,
|
||||
},
|
||||
{
|
||||
environment,
|
||||
isActive,
|
||||
app,
|
||||
appId,
|
||||
targetEnvironment,
|
||||
owner,
|
||||
},
|
||||
{
|
||||
new: true,
|
||||
}
|
||||
);
|
||||
|
||||
if (integration) {
|
||||
// trigger event - push secrets
|
||||
EventService.handleEvent({
|
||||
event: eventPushSecrets({
|
||||
workspaceId: integration.workspace.toString(),
|
||||
}),
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to update integration",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integration,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@ -138,24 +138,24 @@ export const updateIntegration = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const deleteIntegration = async (req: Request, res: Response) => {
|
||||
let integration;
|
||||
try {
|
||||
const { integrationId } = req.params;
|
||||
let integration;
|
||||
try {
|
||||
const { integrationId } = req.params;
|
||||
|
||||
integration = await Integration.findOneAndDelete({
|
||||
_id: integrationId
|
||||
});
|
||||
|
||||
if (!integration) throw new Error('Failed to find integration');
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to delete integration'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integration
|
||||
});
|
||||
integration = await Integration.findOneAndDelete({
|
||||
_id: integrationId,
|
||||
});
|
||||
|
||||
if (!integration) throw new Error("Failed to find integration");
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to delete integration",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integration,
|
||||
});
|
||||
};
|
||||
|
@ -1,14 +1,13 @@
|
||||
import { Request, Response } from 'express';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import crypto from 'crypto';
|
||||
import { SITE_URL, JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET } from '../../config';
|
||||
import { MembershipOrg, Organization, User, Token } from '../../models';
|
||||
import { MembershipOrg, Organization, User } from '../../models';
|
||||
import { deleteMembershipOrg as deleteMemberFromOrg } from '../../helpers/membershipOrg';
|
||||
import { checkEmailVerification } from '../../helpers/signup';
|
||||
import { createToken } from '../../helpers/auth';
|
||||
import { updateSubscriptionOrgQuantity } from '../../helpers/organization';
|
||||
import { sendMail } from '../../helpers/nodemailer';
|
||||
import { OWNER, ADMIN, MEMBER, ACCEPTED, INVITED } from '../../variables';
|
||||
import { TokenService } from '../../services';
|
||||
import { OWNER, ADMIN, MEMBER, ACCEPTED, INVITED, TOKEN_EMAIL_ORG_INVITATION } from '../../variables';
|
||||
|
||||
/**
|
||||
* Delete organization membership with id [membershipOrgId] from organization
|
||||
@ -113,14 +112,14 @@ export const inviteUserToOrganization = async (req: Request, res: Response) => {
|
||||
if (!membershipOrg) {
|
||||
throw new Error('Failed to validate organization membership');
|
||||
}
|
||||
|
||||
|
||||
invitee = await User.findOne({
|
||||
email: inviteeEmail
|
||||
}).select('+publicKey');
|
||||
|
||||
if (invitee) {
|
||||
// case: invitee is an existing user
|
||||
|
||||
|
||||
inviteeMembershipOrg = await MembershipOrg.findOne({
|
||||
user: invitee._id,
|
||||
organization: organizationId
|
||||
@ -163,17 +162,11 @@ export const inviteUserToOrganization = async (req: Request, res: Response) => {
|
||||
const organization = await Organization.findOne({ _id: organizationId });
|
||||
|
||||
if (organization) {
|
||||
const token = crypto.randomBytes(16).toString('hex');
|
||||
|
||||
await Token.findOneAndUpdate(
|
||||
{ email: inviteeEmail },
|
||||
{
|
||||
email: inviteeEmail,
|
||||
token,
|
||||
createdAt: new Date()
|
||||
},
|
||||
{ upsert: true, new: true }
|
||||
);
|
||||
const token = await TokenService.createToken({
|
||||
type: TOKEN_EMAIL_ORG_INVITATION,
|
||||
email: inviteeEmail,
|
||||
organizationId: organization._id
|
||||
});
|
||||
|
||||
await sendMail({
|
||||
template: 'organizationInvitation.handlebars',
|
||||
@ -225,10 +218,12 @@ export const verifyUserToOrganization = async (req: Request, res: Response) => {
|
||||
|
||||
if (!membershipOrg)
|
||||
throw new Error('Failed to find any invitations for email');
|
||||
|
||||
await checkEmailVerification({
|
||||
|
||||
await TokenService.validateToken({
|
||||
type: TOKEN_EMAIL_ORG_INVITATION,
|
||||
email,
|
||||
code
|
||||
organizationId: membershipOrg.organization,
|
||||
token: code
|
||||
});
|
||||
|
||||
if (user && user?.publicKey) {
|
||||
@ -241,7 +236,7 @@ export const verifyUserToOrganization = async (req: Request, res: Response) => {
|
||||
message: 'Successfully verified email',
|
||||
user,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (!user) {
|
||||
// initialize user account
|
||||
|
@ -14,11 +14,13 @@ import {
|
||||
MembershipOrg,
|
||||
Organization,
|
||||
Workspace,
|
||||
IncidentContactOrg
|
||||
IncidentContactOrg,
|
||||
IMembershipOrg
|
||||
} from '../../models';
|
||||
import { createOrganization as create } from '../../helpers/organization';
|
||||
import { addMembershipsOrg } from '../../helpers/membershipOrg';
|
||||
import { OWNER, ACCEPTED } from '../../variables';
|
||||
import _ from 'lodash';
|
||||
|
||||
export const getOrganizations = async (req: Request, res: Response) => {
|
||||
let organizations;
|
||||
@ -382,3 +384,44 @@ export const getOrganizationSubscriptions = async (
|
||||
subscriptions
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Given a org id, return the projects each member of the org belongs to
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const getOrganizationMembersAndTheirWorkspaces = async (
|
||||
req: Request,
|
||||
res: Response
|
||||
) => {
|
||||
const { organizationId } = req.params;
|
||||
|
||||
const workspacesSet = (
|
||||
await Workspace.find(
|
||||
{
|
||||
organization: organizationId
|
||||
},
|
||||
'_id'
|
||||
)
|
||||
).map((w) => w._id.toString());
|
||||
|
||||
const memberships = (
|
||||
await Membership.find({
|
||||
workspace: { $in: workspacesSet }
|
||||
}).populate('workspace')
|
||||
);
|
||||
const userToWorkspaceIds: any = {};
|
||||
|
||||
memberships.forEach(membership => {
|
||||
const user = membership.user.toString();
|
||||
if (userToWorkspaceIds[user]) {
|
||||
userToWorkspaceIds[user].push(membership.workspace);
|
||||
} else {
|
||||
userToWorkspaceIds[user] = [membership.workspace];
|
||||
}
|
||||
});
|
||||
|
||||
return res.json(userToWorkspaceIds);
|
||||
};
|
@ -1,14 +1,14 @@
|
||||
import { Request, Response } from 'express';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import crypto from 'crypto';
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const jsrp = require('jsrp');
|
||||
import * as bigintConversion from 'bigint-conversion';
|
||||
import { User, Token, BackupPrivateKey, LoginSRPDetail } from '../../models';
|
||||
import { checkEmailVerification } from '../../helpers/signup';
|
||||
import { User, BackupPrivateKey, LoginSRPDetail } from '../../models';
|
||||
import { createToken } from '../../helpers/auth';
|
||||
import { sendMail } from '../../helpers/nodemailer';
|
||||
import { TokenService } from '../../services';
|
||||
import { JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET, SITE_URL } from '../../config';
|
||||
import { TOKEN_EMAIL_PASSWORD_RESET } from '../../variables';
|
||||
import { BadRequestError } from '../../utils/errors';
|
||||
|
||||
/**
|
||||
@ -31,19 +31,12 @@ export const emailPasswordReset = async (req: Request, res: Response) => {
|
||||
error: 'Failed to send email verification for password reset'
|
||||
});
|
||||
}
|
||||
|
||||
const token = crypto.randomBytes(16).toString('hex');
|
||||
|
||||
await Token.findOneAndUpdate(
|
||||
{ email },
|
||||
{
|
||||
email,
|
||||
token,
|
||||
createdAt: new Date()
|
||||
},
|
||||
{ upsert: true, new: true }
|
||||
);
|
||||
|
||||
|
||||
const token = await TokenService.createToken({
|
||||
type: TOKEN_EMAIL_PASSWORD_RESET,
|
||||
email
|
||||
});
|
||||
|
||||
await sendMail({
|
||||
template: 'passwordReset.handlebars',
|
||||
subjectLine: 'Infisical password reset',
|
||||
@ -54,7 +47,6 @@ export const emailPasswordReset = async (req: Request, res: Response) => {
|
||||
callback_url: SITE_URL + '/password-reset'
|
||||
}
|
||||
});
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
@ -87,10 +79,11 @@ export const emailPasswordResetVerify = async (req: Request, res: Response) => {
|
||||
error: 'Failed email verification for password reset'
|
||||
});
|
||||
}
|
||||
|
||||
await checkEmailVerification({
|
||||
|
||||
await TokenService.validateToken({
|
||||
type: TOKEN_EMAIL_PASSWORD_RESET,
|
||||
email,
|
||||
code
|
||||
token: code
|
||||
});
|
||||
|
||||
// generate temporary password-reset token
|
||||
@ -173,8 +166,18 @@ export const srp1 = async (req: Request, res: Response) => {
|
||||
*/
|
||||
export const changePassword = async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { clientProof, encryptedPrivateKey, iv, tag, salt, verifier } =
|
||||
req.body;
|
||||
const {
|
||||
clientProof,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
encryptedPrivateKey,
|
||||
encryptedPrivateKeyIV,
|
||||
encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier
|
||||
} = req.body;
|
||||
|
||||
const user = await User.findOne({
|
||||
email: req.user.email
|
||||
}).select('+salt +verifier');
|
||||
@ -204,9 +207,13 @@ export const changePassword = async (req: Request, res: Response) => {
|
||||
await User.findByIdAndUpdate(
|
||||
req.user._id.toString(),
|
||||
{
|
||||
encryptionVersion: 2,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
encryptedPrivateKey,
|
||||
iv,
|
||||
tag,
|
||||
iv: encryptedPrivateKeyIV,
|
||||
tag: encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier
|
||||
},
|
||||
@ -340,9 +347,12 @@ export const getBackupPrivateKey = async (req: Request, res: Response) => {
|
||||
export const resetPassword = async (req: Request, res: Response) => {
|
||||
try {
|
||||
const {
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
encryptedPrivateKey,
|
||||
iv,
|
||||
tag,
|
||||
encryptedPrivateKeyIV,
|
||||
encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier,
|
||||
} = req.body;
|
||||
@ -350,9 +360,13 @@ export const resetPassword = async (req: Request, res: Response) => {
|
||||
await User.findByIdAndUpdate(
|
||||
req.user._id.toString(),
|
||||
{
|
||||
encryptionVersion: 2,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
encryptedPrivateKey,
|
||||
iv,
|
||||
tag,
|
||||
iv: encryptedPrivateKeyIV,
|
||||
tag: encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier
|
||||
},
|
||||
|
@ -1,16 +1,13 @@
|
||||
import { Request, Response } from 'express';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { NODE_ENV, JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET } from '../../config';
|
||||
import { User, MembershipOrg } from '../../models';
|
||||
import { completeAccount } from '../../helpers/user';
|
||||
import { User } from '../../models';
|
||||
import { JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET, INVITE_ONLY_SIGNUP } from '../../config';
|
||||
import {
|
||||
sendEmailVerification,
|
||||
checkEmailVerification,
|
||||
initializeDefaultOrg
|
||||
} from '../../helpers/signup';
|
||||
import { issueTokens, createToken } from '../../helpers/auth';
|
||||
import { INVITED, ACCEPTED } from '../../variables';
|
||||
import axios from 'axios';
|
||||
import { createToken } from '../../helpers/auth';
|
||||
import { BadRequestError } from '../../utils/errors';
|
||||
|
||||
/**
|
||||
* Signup step 1: Initialize account for user under email [email] and send a verification code
|
||||
@ -24,6 +21,14 @@ export const beginEmailSignup = async (req: Request, res: Response) => {
|
||||
try {
|
||||
email = req.body.email;
|
||||
|
||||
if (INVITE_ONLY_SIGNUP) {
|
||||
// Only one user can create an account without being invited. The rest need to be invited in order to make an account
|
||||
const userCount = await User.countDocuments({})
|
||||
if (userCount != 0) {
|
||||
throw BadRequestError({ message: "New user sign ups are not allowed at this time. You must be invited to sign up." })
|
||||
}
|
||||
}
|
||||
|
||||
const user = await User.findOne({ email }).select('+publicKey');
|
||||
if (user && user?.publicKey) {
|
||||
// case: user has already completed account
|
||||
@ -103,201 +108,3 @@ export const verifyEmailSignup = async (req: Request, res: Response) => {
|
||||
token
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Complete setting up user by adding their personal and auth information as part of the
|
||||
* signup flow
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const completeAccountSignup = async (req: Request, res: Response) => {
|
||||
let user, token, refreshToken;
|
||||
try {
|
||||
const {
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
publicKey,
|
||||
encryptedPrivateKey,
|
||||
iv,
|
||||
tag,
|
||||
salt,
|
||||
verifier,
|
||||
organizationName
|
||||
} = req.body;
|
||||
|
||||
// get user
|
||||
user = await User.findOne({ email });
|
||||
|
||||
if (!user || (user && user?.publicKey)) {
|
||||
// case 1: user doesn't exist.
|
||||
// case 2: user has already completed account
|
||||
return res.status(403).send({
|
||||
error: 'Failed to complete account for complete user'
|
||||
});
|
||||
}
|
||||
|
||||
// complete setting up user's account
|
||||
user = await completeAccount({
|
||||
userId: user._id.toString(),
|
||||
firstName,
|
||||
lastName,
|
||||
publicKey,
|
||||
encryptedPrivateKey,
|
||||
iv,
|
||||
tag,
|
||||
salt,
|
||||
verifier
|
||||
});
|
||||
|
||||
if (!user)
|
||||
throw new Error('Failed to complete account for non-existent user'); // ensure user is non-null
|
||||
|
||||
// initialize default organization and workspace
|
||||
await initializeDefaultOrg({
|
||||
organizationName,
|
||||
user
|
||||
});
|
||||
|
||||
// update organization membership statuses that are
|
||||
// invited to completed with user attached
|
||||
await MembershipOrg.updateMany(
|
||||
{
|
||||
inviteEmail: email,
|
||||
status: INVITED
|
||||
},
|
||||
{
|
||||
user,
|
||||
status: ACCEPTED
|
||||
}
|
||||
);
|
||||
|
||||
// issue tokens
|
||||
const tokens = await issueTokens({
|
||||
userId: user._id.toString()
|
||||
});
|
||||
|
||||
token = tokens.token;
|
||||
refreshToken = tokens.refreshToken;
|
||||
|
||||
// sending a welcome email to new users
|
||||
if (process.env.LOOPS_API_KEY) {
|
||||
await axios.post("https://app.loops.so/api/v1/events/send", {
|
||||
"email": email,
|
||||
"eventName": "Sign Up",
|
||||
"firstName": firstName,
|
||||
"lastName": lastName
|
||||
}, {
|
||||
headers: {
|
||||
"Accept": "application/json",
|
||||
"Authorization": "Bearer " + process.env.LOOPS_API_KEY
|
||||
},
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to complete account setup'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
message: 'Successfully set up account',
|
||||
user,
|
||||
token,
|
||||
refreshToken
|
||||
});
|
||||
};
|
||||
/**
|
||||
* Complete setting up user by adding their personal and auth information as part of the
|
||||
* invite flow
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const completeAccountInvite = async (req: Request, res: Response) => {
|
||||
let user, token, refreshToken;
|
||||
try {
|
||||
const {
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
publicKey,
|
||||
encryptedPrivateKey,
|
||||
iv,
|
||||
tag,
|
||||
salt,
|
||||
verifier
|
||||
} = req.body;
|
||||
|
||||
// get user
|
||||
user = await User.findOne({ email });
|
||||
|
||||
if (!user || (user && user?.publicKey)) {
|
||||
// case 1: user doesn't exist.
|
||||
// case 2: user has already completed account
|
||||
return res.status(403).send({
|
||||
error: 'Failed to complete account for complete user'
|
||||
});
|
||||
}
|
||||
|
||||
const membershipOrg = await MembershipOrg.findOne({
|
||||
inviteEmail: email,
|
||||
status: INVITED
|
||||
});
|
||||
|
||||
if (!membershipOrg) throw new Error('Failed to find invitations for email');
|
||||
|
||||
// complete setting up user's account
|
||||
user = await completeAccount({
|
||||
userId: user._id.toString(),
|
||||
firstName,
|
||||
lastName,
|
||||
publicKey,
|
||||
encryptedPrivateKey,
|
||||
iv,
|
||||
tag,
|
||||
salt,
|
||||
verifier
|
||||
});
|
||||
|
||||
if (!user)
|
||||
throw new Error('Failed to complete account for non-existent user');
|
||||
|
||||
// update organization membership statuses that are
|
||||
// invited to completed with user attached
|
||||
await MembershipOrg.updateMany(
|
||||
{
|
||||
inviteEmail: email,
|
||||
status: INVITED
|
||||
},
|
||||
{
|
||||
user,
|
||||
status: ACCEPTED
|
||||
}
|
||||
);
|
||||
|
||||
// issue tokens
|
||||
const tokens = await issueTokens({
|
||||
userId: user._id.toString()
|
||||
});
|
||||
|
||||
token = tokens.token;
|
||||
refreshToken = tokens.refreshToken;
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to complete account setup'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
message: 'Successfully set up account',
|
||||
user,
|
||||
token,
|
||||
refreshToken
|
||||
});
|
||||
};
|
||||
|
@ -1,21 +1,21 @@
|
||||
import { Request, Response } from 'express';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Request, Response } from "express";
|
||||
import * as Sentry from "@sentry/node";
|
||||
import {
|
||||
Workspace,
|
||||
Membership,
|
||||
MembershipOrg,
|
||||
Integration,
|
||||
IntegrationAuth,
|
||||
IUser,
|
||||
ServiceToken,
|
||||
ServiceTokenData
|
||||
} from '../../models';
|
||||
Workspace,
|
||||
Membership,
|
||||
MembershipOrg,
|
||||
Integration,
|
||||
IntegrationAuth,
|
||||
IUser,
|
||||
ServiceToken,
|
||||
ServiceTokenData,
|
||||
} from "../../models";
|
||||
import {
|
||||
createWorkspace as create,
|
||||
deleteWorkspace as deleteWork
|
||||
} from '../../helpers/workspace';
|
||||
import { addMemberships } from '../../helpers/membership';
|
||||
import { ADMIN } from '../../variables';
|
||||
createWorkspace as create,
|
||||
deleteWorkspace as deleteWork,
|
||||
} from "../../helpers/workspace";
|
||||
import { addMemberships } from "../../helpers/membership";
|
||||
import { ADMIN } from "../../variables";
|
||||
|
||||
/**
|
||||
* Return public keys of members of workspace with id [workspaceId]
|
||||
@ -24,32 +24,31 @@ import { ADMIN } from '../../variables';
|
||||
* @returns
|
||||
*/
|
||||
export const getWorkspacePublicKeys = async (req: Request, res: Response) => {
|
||||
let publicKeys;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
let publicKeys;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
|
||||
publicKeys = (
|
||||
await Membership.find({
|
||||
workspace: workspaceId
|
||||
}).populate<{ user: IUser }>('user', 'publicKey')
|
||||
)
|
||||
.map((member) => {
|
||||
return {
|
||||
publicKey: member.user.publicKey,
|
||||
userId: member.user._id
|
||||
};
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to get workspace member public keys'
|
||||
});
|
||||
}
|
||||
publicKeys = (
|
||||
await Membership.find({
|
||||
workspace: workspaceId,
|
||||
}).populate<{ user: IUser }>("user", "publicKey")
|
||||
).map((member) => {
|
||||
return {
|
||||
publicKey: member.user.publicKey,
|
||||
userId: member.user._id,
|
||||
};
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to get workspace member public keys",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
publicKeys
|
||||
});
|
||||
return res.status(200).send({
|
||||
publicKeys,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@ -59,24 +58,24 @@ export const getWorkspacePublicKeys = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const getWorkspaceMemberships = async (req: Request, res: Response) => {
|
||||
let users;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
let users;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
|
||||
users = await Membership.find({
|
||||
workspace: workspaceId
|
||||
}).populate('user', '+publicKey');
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to get workspace members'
|
||||
});
|
||||
}
|
||||
users = await Membership.find({
|
||||
workspace: workspaceId,
|
||||
}).populate("user", "+publicKey");
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to get workspace members",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
users
|
||||
});
|
||||
return res.status(200).send({
|
||||
users,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@ -86,24 +85,24 @@ export const getWorkspaceMemberships = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const getWorkspaces = async (req: Request, res: Response) => {
|
||||
let workspaces;
|
||||
try {
|
||||
workspaces = (
|
||||
await Membership.find({
|
||||
user: req.user._id
|
||||
}).populate('workspace')
|
||||
).map((m) => m.workspace);
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to get workspaces'
|
||||
});
|
||||
}
|
||||
let workspaces;
|
||||
try {
|
||||
workspaces = (
|
||||
await Membership.find({
|
||||
user: req.user._id,
|
||||
}).populate("workspace")
|
||||
).map((m) => m.workspace);
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to get workspaces",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
workspaces
|
||||
});
|
||||
return res.status(200).send({
|
||||
workspaces,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@ -113,24 +112,24 @@ export const getWorkspaces = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const getWorkspace = async (req: Request, res: Response) => {
|
||||
let workspace;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
let workspace;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
|
||||
workspace = await Workspace.findOne({
|
||||
_id: workspaceId
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to get workspace'
|
||||
});
|
||||
}
|
||||
workspace = await Workspace.findOne({
|
||||
_id: workspaceId,
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to get workspace",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
workspace
|
||||
});
|
||||
return res.status(200).send({
|
||||
workspace,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@ -141,46 +140,46 @@ export const getWorkspace = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const createWorkspace = async (req: Request, res: Response) => {
|
||||
let workspace;
|
||||
try {
|
||||
const { workspaceName, organizationId } = req.body;
|
||||
let workspace;
|
||||
try {
|
||||
const { workspaceName, organizationId } = req.body;
|
||||
|
||||
// validate organization membership
|
||||
const membershipOrg = await MembershipOrg.findOne({
|
||||
user: req.user._id,
|
||||
organization: organizationId
|
||||
});
|
||||
// validate organization membership
|
||||
const membershipOrg = await MembershipOrg.findOne({
|
||||
user: req.user._id,
|
||||
organization: organizationId,
|
||||
});
|
||||
|
||||
if (!membershipOrg) {
|
||||
throw new Error('Failed to validate organization membership');
|
||||
}
|
||||
if (!membershipOrg) {
|
||||
throw new Error("Failed to validate organization membership");
|
||||
}
|
||||
|
||||
if (workspaceName.length < 1) {
|
||||
throw new Error('Workspace names must be at least 1-character long');
|
||||
}
|
||||
if (workspaceName.length < 1) {
|
||||
throw new Error("Workspace names must be at least 1-character long");
|
||||
}
|
||||
|
||||
// create workspace and add user as member
|
||||
workspace = await create({
|
||||
name: workspaceName,
|
||||
organizationId
|
||||
});
|
||||
// create workspace and add user as member
|
||||
workspace = await create({
|
||||
name: workspaceName,
|
||||
organizationId,
|
||||
});
|
||||
|
||||
await addMemberships({
|
||||
userIds: [req.user._id],
|
||||
workspaceId: workspace._id.toString(),
|
||||
roles: [ADMIN]
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to create workspace'
|
||||
});
|
||||
}
|
||||
await addMemberships({
|
||||
userIds: [req.user._id],
|
||||
workspaceId: workspace._id.toString(),
|
||||
roles: [ADMIN],
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to create workspace",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
workspace
|
||||
});
|
||||
return res.status(200).send({
|
||||
workspace,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@ -190,24 +189,24 @@ export const createWorkspace = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const deleteWorkspace = async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
|
||||
// delete workspace
|
||||
await deleteWork({
|
||||
id: workspaceId
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to delete workspace'
|
||||
});
|
||||
}
|
||||
// delete workspace
|
||||
await deleteWork({
|
||||
id: workspaceId,
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to delete workspace",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
message: 'Successfully deleted workspace'
|
||||
});
|
||||
return res.status(200).send({
|
||||
message: "Successfully deleted workspace",
|
||||
});
|
||||
};
|
||||
|
||||
/**
@@ -217,34 +216,34 @@ export const deleteWorkspace = async (req: Request, res: Response) => {
 * @returns
 */
export const changeWorkspaceName = async (req: Request, res: Response) => {
  let workspace;
  try {
    const { workspaceId } = req.params;
    const { name } = req.body;

    workspace = await Workspace.findOneAndUpdate(
      {
        _id: workspaceId,
      },
      {
        name,
      },
      {
        new: true,
      }
    );
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: "Failed to change workspace name",
    });
  }

  return res.status(200).send({
    message: "Successfully changed workspace name",
    workspace,
  });
};

/**
@@ -254,24 +253,24 @@ export const changeWorkspaceName = async (req: Request, res: Response) => {
 * @returns
 */
export const getWorkspaceIntegrations = async (req: Request, res: Response) => {
  let integrations;
  try {
    const { workspaceId } = req.params;

    integrations = await Integration.find({
      workspace: workspaceId,
    });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: "Failed to get workspace integrations",
    });
  }

  return res.status(200).send({
    integrations,
  });
};

/**
@@ -281,56 +280,56 @@ export const getWorkspaceIntegrations = async (req: Request, res: Response) => {
 * @returns
 */
export const getWorkspaceIntegrationAuthorizations = async (
  req: Request,
  res: Response
) => {
  let authorizations;
  try {
    const { workspaceId } = req.params;

    authorizations = await IntegrationAuth.find({
      workspace: workspaceId,
    });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: "Failed to get workspace integration authorizations",
    });
  }

  return res.status(200).send({
    authorizations,
  });
};

/**
 * Return service tokens for workspace [workspaceId] belonging to user
 * @param req
 * @param res
 * @returns
 */
export const getWorkspaceServiceTokens = async (
  req: Request,
  res: Response
) => {
  let serviceTokens;
  try {
    const { workspaceId } = req.params;
    // ?? FIX.
    serviceTokens = await ServiceToken.find({
      user: req.user._id,
      workspace: workspaceId,
    });
  } catch (err) {
    Sentry.setUser({ email: req.user.email });
    Sentry.captureException(err);
    return res.status(400).send({
      message: "Failed to get workspace service tokens",
    });
  }

  return res.status(200).send({
    serviceTokens,
  });
};

351 backend/src/controllers/v2/authController.ts (new file)
@@ -0,0 +1,351 @@
|
||||
/* eslint-disable @typescript-eslint/no-var-requires */
|
||||
import { Request, Response } from 'express';
|
||||
import jwt from 'jsonwebtoken';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import * as bigintConversion from 'bigint-conversion';
|
||||
const jsrp = require('jsrp');
|
||||
import { User, LoginSRPDetail } from '../../models';
|
||||
import { issueAuthTokens, createToken } from '../../helpers/auth';
|
||||
import { checkUserDevice } from '../../helpers/user';
|
||||
import { sendMail } from '../../helpers/nodemailer';
|
||||
import { TokenService } from '../../services';
|
||||
import { EELogService } from '../../ee/services';
|
||||
import {
|
||||
NODE_ENV,
|
||||
JWT_MFA_LIFETIME,
|
||||
JWT_MFA_SECRET
|
||||
} from '../../config';
|
||||
import { BadRequestError, InternalServerError } from '../../utils/errors';
|
||||
import {
|
||||
TOKEN_EMAIL_MFA,
|
||||
ACTION_LOGIN
|
||||
} from '../../variables';
|
||||
import { getChannelFromUserAgent } from '../../utils/posthog'; // TODO: move this
|
||||
|
||||
declare module 'jsonwebtoken' {
|
||||
export interface UserIDJwtPayload extends jwt.JwtPayload {
|
||||
userId: string;
|
||||
}
|
||||
}
|
||||
|
||||
const clientPublicKeys: any = {};
|
||||
|
||||
/**
|
||||
* Log in user step 1: Return [salt] and [serverPublicKey] as part of step 1 of SRP protocol
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const login1 = async (req: Request, res: Response) => {
|
||||
try {
|
||||
const {
|
||||
email,
|
||||
clientPublicKey
|
||||
}: { email: string; clientPublicKey: string } = req.body;
|
||||
|
||||
const user = await User.findOne({
|
||||
email
|
||||
}).select('+salt +verifier');
|
||||
|
||||
if (!user) throw new Error('Failed to find user');
|
||||
|
||||
const server = new jsrp.server();
|
||||
server.init(
|
||||
{
|
||||
salt: user.salt,
|
||||
verifier: user.verifier
|
||||
},
|
||||
async () => {
|
||||
// generate server-side public key
|
||||
const serverPublicKey = server.getPublicKey();
|
||||
|
||||
await LoginSRPDetail.findOneAndReplace({ email: email }, {
|
||||
email: email,
|
||||
clientPublicKey: clientPublicKey,
|
||||
serverBInt: bigintConversion.bigintToBuf(server.bInt),
|
||||
}, { upsert: true, returnNewDocument: false });
|
||||
|
||||
return res.status(200).send({
|
||||
serverPublicKey,
|
||||
salt: user.salt
|
||||
});
|
||||
}
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to start authentication process'
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
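login1 above implements the server half of SRP step 1: it loads the stored salt and verifier, generates an ephemeral server key pair, persists the server secret (serverBInt) in LoginSRPDetail, and returns serverPublicKey plus salt. A client-side counterpart might look roughly like this sketch, assuming jsrp's client API and hypothetical endpoint paths:

```ts
// Client-side sketch of SRP steps 1 and 2; jsrp client API usage and routes are assumptions.
import axios from "axios";
const jsrp = require("jsrp");

function srpLogin(email: string, password: string) {
  const client = new jsrp.client();
  client.init({ username: email, password }, async () => {
    // step 1: exchange ephemeral public keys
    const clientPublicKey = client.getPublicKey();
    const step1 = await axios.post("/api/v2/auth/login1", { email, clientPublicKey });

    // step 2: derive the shared key and send a proof of the password
    client.setSalt(step1.data.salt);
    client.setServerPublicKey(step1.data.serverPublicKey);
    const clientProof = client.getProof();
    const step2 = await axios.post("/api/v2/auth/login2", { email, clientProof });
    console.log(step2.data); // tokens, or { mfaEnabled: true, token } when MFA is on
  });
}
```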
/**
|
||||
* Log in user step 2: complete step 2 of SRP protocol and return token and their (encrypted)
|
||||
* private key
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const login2 = async (req: Request, res: Response) => {
|
||||
try {
|
||||
|
||||
if (!req.headers['user-agent']) throw InternalServerError({ message: 'User-Agent header is required' });
|
||||
|
||||
const { email, clientProof } = req.body;
|
||||
const user = await User.findOne({
|
||||
email
|
||||
}).select('+salt +verifier +encryptionVersion +protectedKey +protectedKeyIV +protectedKeyTag +publicKey +encryptedPrivateKey +iv +tag');
|
||||
|
||||
if (!user) throw new Error('Failed to find user');
|
||||
|
||||
const loginSRPDetail = await LoginSRPDetail.findOneAndDelete({ email: email })
|
||||
|
||||
if (!loginSRPDetail) {
|
||||
return BadRequestError(Error("Failed to find login details for SRP"))
|
||||
}
|
||||
|
||||
const server = new jsrp.server();
|
||||
server.init(
|
||||
{
|
||||
salt: user.salt,
|
||||
verifier: user.verifier,
|
||||
b: loginSRPDetail.serverBInt
|
||||
},
|
||||
async () => {
|
||||
server.setClientPublicKey(loginSRPDetail.clientPublicKey);
|
||||
|
||||
// compare server and client shared keys
|
||||
if (server.checkClientProof(clientProof)) {
|
||||
|
||||
if (user.isMfaEnabled) {
|
||||
// case: user has MFA enabled
|
||||
|
||||
// generate temporary MFA token
|
||||
const token = createToken({
|
||||
payload: {
|
||||
userId: user._id.toString()
|
||||
},
|
||||
expiresIn: JWT_MFA_LIFETIME,
|
||||
secret: JWT_MFA_SECRET
|
||||
});
|
||||
|
||||
const code = await TokenService.createToken({
|
||||
type: TOKEN_EMAIL_MFA,
|
||||
email
|
||||
});
|
||||
|
||||
// send MFA code [code] to [email]
|
||||
await sendMail({
|
||||
template: 'emailMfa.handlebars',
|
||||
subjectLine: 'Infisical MFA code',
|
||||
recipients: [email],
|
||||
substitutions: {
|
||||
code
|
||||
}
|
||||
});
|
||||
|
||||
return res.status(200).send({
|
||||
mfaEnabled: true,
|
||||
token
|
||||
});
|
||||
}
|
||||
|
||||
await checkUserDevice({
|
||||
user,
|
||||
ip: req.ip,
|
||||
userAgent: req.headers['user-agent'] ?? ''
|
||||
});
|
||||
|
||||
// issue tokens
|
||||
const tokens = await issueAuthTokens({ userId: user._id.toString() });
|
||||
|
||||
// store (refresh) token in httpOnly cookie
|
||||
res.cookie('jid', tokens.refreshToken, {
|
||||
httpOnly: true,
|
||||
path: '/',
|
||||
sameSite: 'strict',
|
||||
secure: NODE_ENV === 'production' ? true : false
|
||||
});
|
||||
|
||||
// case: user does not have MFA enabled
|
||||
// return (access) token in response
|
||||
|
||||
interface ResponseData {
|
||||
mfaEnabled: boolean;
|
||||
encryptionVersion: any;
|
||||
protectedKey?: string;
|
||||
protectedKeyIV?: string;
|
||||
protectedKeyTag?: string;
|
||||
token: string;
|
||||
publicKey?: string;
|
||||
encryptedPrivateKey?: string;
|
||||
iv?: string;
|
||||
tag?: string;
|
||||
}
|
||||
|
||||
const response: ResponseData = {
|
||||
mfaEnabled: false,
|
||||
encryptionVersion: user.encryptionVersion,
|
||||
token: tokens.token,
|
||||
publicKey: user.publicKey,
|
||||
encryptedPrivateKey: user.encryptedPrivateKey,
|
||||
iv: user.iv,
|
||||
tag: user.tag
|
||||
}
|
||||
|
||||
if (
|
||||
user?.protectedKey &&
|
||||
user?.protectedKeyIV &&
|
||||
user?.protectedKeyTag
|
||||
) {
|
||||
response.protectedKey = user.protectedKey;
|
||||
response.protectedKeyIV = user.protectedKeyIV
|
||||
response.protectedKeyTag = user.protectedKeyTag;
|
||||
}
|
||||
|
||||
const loginAction = await EELogService.createAction({
|
||||
name: ACTION_LOGIN,
|
||||
userId: user._id
|
||||
});
|
||||
|
||||
loginAction && await EELogService.createLog({
|
||||
userId: user._id,
|
||||
actions: [loginAction],
|
||||
channel: getChannelFromUserAgent(req.headers['user-agent']),
|
||||
ipAddress: req.ip
|
||||
});
|
||||
|
||||
return res.status(200).send(response);
|
||||
}
|
||||
|
||||
return res.status(400).send({
|
||||
message: 'Failed to authenticate. Try again?'
|
||||
});
|
||||
}
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to authenticate. Try again?'
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Send MFA token to email [email]
|
||||
* @param req
|
||||
* @param res
|
||||
*/
|
||||
export const sendMfaToken = async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { email } = req.body;
|
||||
|
||||
const code = await TokenService.createToken({
|
||||
type: TOKEN_EMAIL_MFA,
|
||||
email
|
||||
});
|
||||
|
||||
// send MFA code [code] to [email]
|
||||
await sendMail({
|
||||
template: 'emailMfa.handlebars',
|
||||
subjectLine: 'Infisical MFA code',
|
||||
recipients: [email],
|
||||
substitutions: {
|
||||
code
|
||||
}
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to send MFA code'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
message: 'Successfully sent new MFA code'
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify MFA token [mfaToken] and issue JWT and refresh tokens if the
|
||||
* MFA token [mfaToken] is valid
|
||||
* @param req
|
||||
* @param res
|
||||
*/
|
||||
export const verifyMfaToken = async (req: Request, res: Response) => {
|
||||
const { email, mfaToken } = req.body;
|
||||
|
||||
await TokenService.validateToken({
|
||||
type: TOKEN_EMAIL_MFA,
|
||||
email,
|
||||
token: mfaToken
|
||||
});
|
||||
|
||||
const user = await User.findOne({
|
||||
email
|
||||
}).select('+salt +verifier +encryptionVersion +protectedKey +protectedKeyIV +protectedKeyTag +publicKey +encryptedPrivateKey +iv +tag');
|
||||
|
||||
if (!user) throw new Error('Failed to find user');
|
||||
|
||||
await checkUserDevice({
|
||||
user,
|
||||
ip: req.ip,
|
||||
userAgent: req.headers['user-agent'] ?? ''
|
||||
});
|
||||
|
||||
// issue tokens
|
||||
const tokens = await issueAuthTokens({ userId: user._id.toString() });
|
||||
|
||||
// store (refresh) token in httpOnly cookie
|
||||
res.cookie('jid', tokens.refreshToken, {
|
||||
httpOnly: true,
|
||||
path: '/',
|
||||
sameSite: 'strict',
|
||||
secure: NODE_ENV === 'production' ? true : false
|
||||
});
|
||||
|
||||
interface VerifyMfaTokenRes {
|
||||
encryptionVersion: number;
|
||||
protectedKey?: string;
|
||||
protectedKeyIV?: string;
|
||||
protectedKeyTag?: string;
|
||||
token: string;
|
||||
publicKey: string;
|
||||
encryptedPrivateKey: string;
|
||||
iv: string;
|
||||
tag: string;
|
||||
}
|
||||
|
||||
const resObj: VerifyMfaTokenRes = {
|
||||
encryptionVersion: user.encryptionVersion,
|
||||
token: tokens.token,
|
||||
publicKey: user.publicKey as string,
|
||||
encryptedPrivateKey: user.encryptedPrivateKey as string,
|
||||
iv: user.iv as string,
|
||||
tag: user.tag as string
|
||||
}
|
||||
|
||||
if (user?.protectedKey && user?.protectedKeyIV && user?.protectedKeyTag) {
|
||||
resObj.protectedKey = user.protectedKey;
|
||||
resObj.protectedKeyIV = user.protectedKeyIV;
|
||||
resObj.protectedKeyTag = user.protectedKeyTag;
|
||||
}
|
||||
|
||||
const loginAction = await EELogService.createAction({
|
||||
name: ACTION_LOGIN,
|
||||
userId: user._id
|
||||
});
|
||||
|
||||
loginAction && await EELogService.createLog({
|
||||
userId: user._id,
|
||||
actions: [loginAction],
|
||||
channel: getChannelFromUserAgent(req.headers['user-agent']),
|
||||
ipAddress: req.ip
|
||||
});
|
||||
|
||||
return res.status(200).send(resObj);
|
||||
}
|
||||
|
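Taken together, login2, sendMfaToken, and verifyMfaToken form the email-MFA branch of the login flow: when the SRP proof checks out and isMfaEnabled is set, login2 emails a 6-digit code and returns { mfaEnabled: true, token } instead of auth tokens. A rough client-side sketch of that branch; the endpoint paths are assumptions:

```ts
// Sketch of the email-MFA branch; routes are hypothetical.
import axios from "axios";

async function finishLogin(
  email: string,
  clientProof: string,
  promptForCode: () => Promise<string>
) {
  const login2 = await axios.post("/api/v2/auth/login2", { email, clientProof });
  if (!login2.data.mfaEnabled) return login2.data; // tokens + encrypted key material

  const mfaToken = await promptForCode();          // 6-digit code from the email
  const verified = await axios.post("/api/v2/auth/mfa/verify", { email, mfaToken });
  return verified.data;                            // same shape as the non-MFA response
}
```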
@ -246,13 +246,14 @@ export const getAllAccessibleEnvironmentsOfWorkspace = async (
|
||||
relatedWorkspace.environments.forEach(environment => {
|
||||
const isReadBlocked = _.some(deniedPermission, { environmentSlug: environment.slug, ability: ABILITY_READ })
|
||||
const isWriteBlocked = _.some(deniedPermission, { environmentSlug: environment.slug, ability: ABILITY_WRITE })
|
||||
if (isReadBlocked) {
|
||||
if (isReadBlocked && isWriteBlocked) {
|
||||
return
|
||||
} else {
|
||||
accessibleEnvironments.push({
|
||||
name: environment.name,
|
||||
slug: environment.slug,
|
||||
isWriteDenied: isWriteBlocked
|
||||
isWriteDenied: isWriteBlocked,
|
||||
isReadDenied: isReadBlocked
|
||||
})
|
||||
}
|
||||
})
|
||||
|
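The change above means an environment is hidden only when both read and write are denied; otherwise it is returned with explicit isReadDenied / isWriteDenied flags. A small illustration of that logic with a made-up deniedPermission list (the ability string values are assumed):

```ts
import _ from "lodash";

// Assumed values of ABILITY_READ / ABILITY_WRITE, for illustration only.
const ABILITY_READ = "read";
const ABILITY_WRITE = "write";

const deniedPermission = [
  { environmentSlug: "prod", ability: ABILITY_READ },
  { environmentSlug: "prod", ability: ABILITY_WRITE },
  { environmentSlug: "staging", ability: ABILITY_READ },
];

for (const slug of ["dev", "staging", "prod"]) {
  const isReadBlocked = _.some(deniedPermission, { environmentSlug: slug, ability: ABILITY_READ });
  const isWriteBlocked = _.some(deniedPermission, { environmentSlug: slug, ability: ABILITY_WRITE });
  if (isReadBlocked && isWriteBlocked) continue; // prod is omitted entirely
  console.log(slug, { isReadDenied: isReadBlocked, isWriteDenied: isWriteBlocked });
  // dev     -> { isReadDenied: false, isWriteDenied: false }
  // staging -> { isReadDenied: true,  isWriteDenied: false }  (write-only access)
}
```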
@ -1,3 +1,5 @@
|
||||
import * as authController from './authController';
|
||||
import * as signupController from './signupController';
|
||||
import * as usersController from './usersController';
|
||||
import * as organizationsController from './organizationsController';
|
||||
import * as workspaceController from './workspaceController';
|
||||
@ -9,6 +11,8 @@ import * as environmentController from './environmentController';
|
||||
import * as tagController from './tagController';
|
||||
|
||||
export {
|
||||
authController,
|
||||
signupController,
|
||||
usersController,
|
||||
organizationsController,
|
||||
workspaceController,
|
||||
|
@ -1,7 +1,8 @@
|
||||
import to from 'await-to-js';
|
||||
import { Types } from 'mongoose';
|
||||
import { Request, Response } from 'express';
|
||||
import { ISecret, Membership, Secret, Workspace } from '../../models';
|
||||
import { ISecret, Secret } from '../../models';
|
||||
import { IAction } from '../../ee/models';
|
||||
import {
|
||||
SECRET_PERSONAL,
|
||||
SECRET_SHARED,
|
||||
@ -17,7 +18,255 @@ import { EESecretService, EELogService } from '../../ee/services';
|
||||
import { postHogClient } from '../../services';
|
||||
import { getChannelFromUserAgent } from '../../utils/posthog';
|
||||
import { ABILITY_READ, ABILITY_WRITE } from '../../variables/organization';
|
||||
import { userHasWorkspaceAccess } from '../../ee/helpers/checkMembershipPermissions';
|
||||
import { userHasNoAbility, userHasWorkspaceAccess, userHasWriteOnlyAbility } from '../../ee/helpers/checkMembershipPermissions';
|
||||
import Tag from '../../models/tag';
|
||||
import _ from 'lodash';
|
||||
import {
|
||||
BatchSecretRequest,
|
||||
BatchSecret
|
||||
} from '../../types/secret';
|
||||
|
||||
/**
|
||||
 * Perform a batch of any specified CUD secret operations
|
||||
* @param req
|
||||
* @param res
|
||||
*/
|
||||
export const batchSecrets = async (req: Request, res: Response) => {
|
||||
const channel = getChannelFromUserAgent(req.headers['user-agent']);
|
||||
const {
|
||||
workspaceId,
|
||||
environment,
|
||||
requests
|
||||
}: {
|
||||
workspaceId: string;
|
||||
environment: string;
|
||||
requests: BatchSecretRequest[];
|
||||
} = req.body;
|
||||
|
||||
const createSecrets: BatchSecret[] = [];
|
||||
const updateSecrets: BatchSecret[] = [];
|
||||
const deleteSecrets: Types.ObjectId[] = [];
|
||||
const actions: IAction[] = [];
|
||||
|
||||
requests.forEach((request) => {
|
||||
switch (request.method) {
|
||||
case 'POST':
|
||||
createSecrets.push({
|
||||
...request.secret,
|
||||
version: 1,
|
||||
user: request.secret.type === SECRET_PERSONAL ? req.user : undefined,
|
||||
environment,
|
||||
workspace: new Types.ObjectId(workspaceId)
|
||||
});
|
||||
break;
|
||||
case 'PATCH':
|
||||
updateSecrets.push({
|
||||
...request.secret,
|
||||
_id: new Types.ObjectId(request.secret._id)
|
||||
});
|
||||
break;
|
||||
case 'DELETE':
|
||||
deleteSecrets.push(new Types.ObjectId(request.secret._id));
|
||||
break;
|
||||
}
|
||||
});
|
||||
|
||||
// handle create secrets
|
||||
let createdSecrets: ISecret[] = [];
|
||||
if (createSecrets.length > 0) {
|
||||
createdSecrets = await Secret.insertMany(createSecrets);
|
||||
// (EE) add secret versions for new secrets
|
||||
await EESecretService.addSecretVersions({
|
||||
secretVersions: createdSecrets.map((n: any) => {
|
||||
return ({
|
||||
...n._doc,
|
||||
_id: new Types.ObjectId(),
|
||||
secret: n._id,
|
||||
isDeleted: false
|
||||
});
|
||||
})
|
||||
});
|
||||
|
||||
const addAction = await EELogService.createAction({
|
||||
name: ACTION_ADD_SECRETS,
|
||||
userId: req.user._id,
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
secretIds: createdSecrets.map((n) => n._id)
|
||||
}) as IAction;
|
||||
actions.push(addAction);
|
||||
|
||||
if (postHogClient) {
|
||||
postHogClient.capture({
|
||||
event: 'secrets added',
|
||||
distinctId: req.user.email,
|
||||
properties: {
|
||||
numberOfSecrets: createdSecrets.length,
|
||||
environment,
|
||||
workspaceId,
|
||||
channel,
|
||||
userAgent: req.headers?.['user-agent']
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// handle update secrets
|
||||
let updatedSecrets: ISecret[] = [];
|
||||
if (updateSecrets.length > 0 && req.secrets) {
|
||||
// construct object containing all secrets
|
||||
let listedSecretsObj: {
|
||||
[key: string]: {
|
||||
version: number;
|
||||
type: string;
|
||||
}
|
||||
} = {};
|
||||
|
||||
listedSecretsObj = req.secrets.reduce((obj: any, secret: ISecret) => ({
|
||||
...obj,
|
||||
[secret._id.toString()]: secret
|
||||
}), {});
|
||||
|
||||
const updateOperations = updateSecrets.map((u) => ({
|
||||
updateOne: {
|
||||
filter: { _id: new Types.ObjectId(u._id) },
|
||||
update: {
|
||||
$inc: {
|
||||
version: 1
|
||||
},
|
||||
...u,
|
||||
_id: new Types.ObjectId(u._id)
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
||||
await Secret.bulkWrite(updateOperations);
|
||||
|
||||
const secretVersions = updateSecrets.map((u) => ({
|
||||
secret: new Types.ObjectId(u._id),
|
||||
version: listedSecretsObj[u._id.toString()].version,
|
||||
workspace: new Types.ObjectId(workspaceId),
|
||||
type: listedSecretsObj[u._id.toString()].type,
|
||||
environment,
|
||||
isDeleted: false,
|
||||
secretKeyCiphertext: u.secretKeyCiphertext,
|
||||
secretKeyIV: u.secretKeyIV,
|
||||
secretKeyTag: u.secretKeyTag,
|
||||
secretValueCiphertext: u.secretValueCiphertext,
|
||||
secretValueIV: u.secretValueIV,
|
||||
secretValueTag: u.secretValueTag,
|
||||
secretCommentCiphertext: u.secretCommentCiphertext,
|
||||
secretCommentIV: u.secretCommentIV,
|
||||
secretCommentTag: u.secretCommentTag,
|
||||
tags: u.tags
|
||||
}));
|
||||
|
||||
await EESecretService.addSecretVersions({
|
||||
secretVersions
|
||||
});
|
||||
|
||||
updatedSecrets = await Secret.find({
|
||||
_id: {
|
||||
$in: updateSecrets.map((u) => new Types.ObjectId(u._id))
|
||||
}
|
||||
});
|
||||
|
||||
const updateAction = await EELogService.createAction({
|
||||
name: ACTION_UPDATE_SECRETS,
|
||||
userId: req.user._id,
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
secretIds: updatedSecrets.map((u) => u._id)
|
||||
}) as IAction;
|
||||
actions.push(updateAction);
|
||||
|
||||
if (postHogClient) {
|
||||
postHogClient.capture({
|
||||
event: 'secrets modified',
|
||||
distinctId: req.user.email,
|
||||
properties: {
|
||||
numberOfSecrets: updateSecrets.length,
|
||||
environment,
|
||||
workspaceId,
|
||||
channel,
|
||||
userAgent: req.headers?.['user-agent']
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// handle delete secrets
|
||||
if (deleteSecrets.length > 0) {
|
||||
await Secret.deleteMany({
|
||||
_id: {
|
||||
$in: deleteSecrets
|
||||
}
|
||||
});
|
||||
|
||||
await EESecretService.markDeletedSecretVersions({
|
||||
secretIds: deleteSecrets
|
||||
});
|
||||
|
||||
const deleteAction = await EELogService.createAction({
|
||||
name: ACTION_DELETE_SECRETS,
|
||||
userId: req.user._id,
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
secretIds: deleteSecrets
|
||||
}) as IAction;
|
||||
actions.push(deleteAction);
|
||||
|
||||
if (postHogClient) {
|
||||
postHogClient.capture({
|
||||
event: 'secrets deleted',
|
||||
distinctId: req.user.email,
|
||||
properties: {
|
||||
numberOfSecrets: deleteSecrets.length,
|
||||
environment,
|
||||
workspaceId,
|
||||
channel: channel,
|
||||
userAgent: req.headers?.['user-agent']
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (actions.length > 0) {
|
||||
// (EE) create (audit) log
|
||||
await EELogService.createLog({
|
||||
userId: req.user._id.toString(),
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
actions,
|
||||
channel,
|
||||
ipAddress: req.ip
|
||||
});
|
||||
}
|
||||
|
||||
// trigger event - push secrets
|
||||
await EventService.handleEvent({
|
||||
event: eventPushSecrets({
|
||||
workspaceId
|
||||
})
|
||||
});
|
||||
|
||||
// (EE) take a secret snapshot
|
||||
await EESecretService.takeSecretSnapshot({
|
||||
workspaceId
|
||||
});
|
||||
|
||||
const resObj: { [key: string]: ISecret[] | string[] } = {}
|
||||
|
||||
if (createSecrets.length > 0) {
|
||||
resObj['createdSecrets'] = createdSecrets;
|
||||
}
|
||||
|
||||
if (updateSecrets.length > 0) {
|
||||
resObj['updatedSecrets'] = updatedSecrets;
|
||||
}
|
||||
|
||||
if (deleteSecrets.length > 0) {
|
||||
resObj['deletedSecrets'] = deleteSecrets.map((d) => d.toString());
|
||||
}
|
||||
|
||||
return res.status(200).send(resObj);
|
||||
}
|
||||
|
||||
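batchSecrets accepts a single payload describing create, update, and delete operations and fans them out to insertMany, bulkWrite, and deleteMany. A hypothetical request body is sketched below; every value is a placeholder and the "shared" type string is an assumption about the SECRET_SHARED constant:

```ts
const body = {
  workspaceId: "<workspace id>",
  environment: "dev",
  requests: [
    // create a new shared secret (ciphertext fields are placeholders)
    { method: "POST", secret: { type: "shared", secretKeyCiphertext: "...", secretKeyIV: "...", secretKeyTag: "...", secretValueCiphertext: "...", secretValueIV: "...", secretValueTag: "..." } },
    // update an existing secret's value
    { method: "PATCH", secret: { _id: "<secret id>", secretValueCiphertext: "...", secretValueIV: "...", secretValueTag: "..." } },
    // delete a secret by id
    { method: "DELETE", secret: { _id: "<secret id>" } },
  ],
};
```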
/**
|
||||
* Create secret(s) for workspace with id [workspaceId] and environment [environment]
|
||||
@ -164,11 +413,9 @@ export const createSecrets = async (req: Request, res: Response) => {
|
||||
secretKeyCiphertext,
|
||||
secretKeyIV,
|
||||
secretKeyTag,
|
||||
secretKeyHash,
|
||||
secretValueCiphertext,
|
||||
secretValueIV,
|
||||
secretValueTag,
|
||||
secretValueHash,
|
||||
secretCommentCiphertext,
|
||||
secretCommentIV,
|
||||
secretCommentTag,
|
||||
@ -185,11 +432,9 @@ export const createSecrets = async (req: Request, res: Response) => {
|
||||
secretKeyCiphertext,
|
||||
secretKeyIV,
|
||||
secretKeyTag,
|
||||
secretKeyHash,
|
||||
secretValueCiphertext,
|
||||
secretValueIV,
|
||||
secretValueTag,
|
||||
secretValueHash,
|
||||
secretCommentCiphertext,
|
||||
secretCommentIV,
|
||||
secretCommentTag,
|
||||
@ -284,8 +529,10 @@ export const getSecrets = async (req: Request, res: Response) => {
|
||||
}
|
||||
}
|
||||
*/
|
||||
const { workspaceId, environment } = req.query;
|
||||
|
||||
|
||||
const { workspaceId, environment, tagSlugs } = req.query;
|
||||
const tagNamesList = typeof tagSlugs === 'string' && tagSlugs !== '' ? tagSlugs.split(',') : [];
|
||||
let userId = "" // used for getting personal secrets for user
|
||||
let userEmail = "" // used for posthog
|
||||
if (req.user) {
|
||||
@ -298,16 +545,38 @@ export const getSecrets = async (req: Request, res: Response) => {
|
||||
userEmail = req.serviceTokenData.user.email;
|
||||
}
|
||||
|
||||
// none service token case as service tokens are already scoped
|
||||
// none service token case as service tokens are already scoped to env and project
|
||||
let hasWriteOnlyAccess
|
||||
if (!req.serviceTokenData) {
|
||||
const hasAccess = await userHasWorkspaceAccess(userId, workspaceId, environment, ABILITY_READ)
|
||||
if (!hasAccess) {
|
||||
hasWriteOnlyAccess = await userHasWriteOnlyAbility(userId, workspaceId, environment)
|
||||
const hasNoAccess = await userHasNoAbility(userId, workspaceId, environment)
|
||||
if (hasNoAccess) {
|
||||
throw UnauthorizedRequestError({ message: "You do not have the necessary permission(s) perform this action" })
|
||||
}
|
||||
}
|
||||
let secrets: any
|
||||
let secretQuery: any
|
||||
|
||||
const [err, secrets] = await to(Secret.find(
|
||||
{
|
||||
if (tagNamesList != undefined && tagNamesList.length != 0) {
|
||||
const workspaceFromDB = await Tag.find({ workspace: workspaceId })
|
||||
|
||||
const tagIds = _.map(tagNamesList, (tagName) => {
|
||||
const tag = _.find(workspaceFromDB, { slug: tagName });
|
||||
return tag ? tag.id : null;
|
||||
});
|
||||
|
||||
secretQuery = {
|
||||
workspace: workspaceId,
|
||||
environment,
|
||||
$or: [
|
||||
{ user: userId },
|
||||
{ user: { $exists: false } }
|
||||
],
|
||||
tags: { $in: tagIds },
|
||||
type: { $in: [SECRET_SHARED, SECRET_PERSONAL] }
|
||||
}
|
||||
} else {
|
||||
secretQuery = {
|
||||
workspace: workspaceId,
|
||||
environment,
|
||||
$or: [
|
||||
@ -316,9 +585,13 @@ export const getSecrets = async (req: Request, res: Response) => {
|
||||
],
|
||||
type: { $in: [SECRET_SHARED, SECRET_PERSONAL] }
|
||||
}
|
||||
).populate("tags").then())
|
||||
}
|
||||
|
||||
if (err) throw ValidationError({ message: 'Failed to get secrets', stack: err.stack });
|
||||
if (hasWriteOnlyAccess) {
|
||||
secrets = await Secret.find(secretQuery).select("secretKeyCiphertext secretKeyIV secretKeyTag")
|
||||
} else {
|
||||
secrets = await Secret.find(secretQuery).populate("tags")
|
||||
}
|
||||
|
||||
const channel = getChannelFromUserAgent(req.headers['user-agent'])
|
||||
|
||||
@ -356,6 +629,59 @@ export const getSecrets = async (req: Request, res: Response) => {
|
||||
});
|
||||
}
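With this change, getSecrets can also filter by tag: tagSlugs is a comma-separated list of slugs that is resolved to tag ids for the workspace, and callers with write-only access receive only the key ciphertext fields. A hypothetical query string; the route path is an assumption:

```ts
// Sketch only: the mount point of getSecrets is assumed.
const url =
  "/api/v2/secrets?" +
  new URLSearchParams({
    workspaceId: "<workspace id>",
    environment: "dev",
    tagSlugs: "frontend,billing", // resolved against the workspace's Tag documents
  }).toString();
```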
|
||||
|
||||
|
||||
export const getOnlySecretKeys = async (req: Request, res: Response) => {
|
||||
const { workspaceId, environment } = req.query;
|
||||
|
||||
let userId = "" // used for getting personal secrets for user
|
||||
let userEmail = "" // used for posthog
|
||||
if (req.user) {
|
||||
userId = req.user._id;
|
||||
userEmail = req.user.email;
|
||||
}
|
||||
|
||||
if (req.serviceTokenData) {
|
||||
userId = req.serviceTokenData.user._id
|
||||
userEmail = req.serviceTokenData.user.email;
|
||||
}
|
||||
|
||||
// none service token case as service tokens are already scoped
|
||||
if (!req.serviceTokenData) {
|
||||
const hasAccess = await userHasWorkspaceAccess(userId, workspaceId, environment, ABILITY_READ)
|
||||
if (!hasAccess) {
|
||||
throw UnauthorizedRequestError({ message: "You do not have the necessary permission(s) perform this action" })
|
||||
}
|
||||
}
|
||||
|
||||
const [err, secretKeys] = await to(Secret.find(
|
||||
{
|
||||
workspace: workspaceId,
|
||||
environment,
|
||||
$or: [
|
||||
{ user: userId },
|
||||
{ user: { $exists: false } }
|
||||
],
|
||||
type: { $in: [SECRET_SHARED, SECRET_PERSONAL] }
|
||||
}
|
||||
)
|
||||
.select("secretKeyIV secretKeyTag secretKeyCiphertext")
|
||||
.then())
|
||||
|
||||
if (err) throw ValidationError({ message: 'Failed to get secrets', stack: err.stack });
|
||||
|
||||
// readAction && await EELogService.createLog({
|
||||
// userId: new Types.ObjectId(userId),
|
||||
// workspaceId: new Types.ObjectId(workspaceId as string),
|
||||
// actions: [readAction],
|
||||
// channel,
|
||||
// ipAddress: req.ip
|
||||
// });
|
||||
|
||||
return res.status(200).send({
|
||||
secretKeys
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Update secret(s)
|
||||
* @param req
|
||||
|
250 backend/src/controllers/v2/signupController.ts (new file)
@@ -0,0 +1,250 @@
|
||||
import { Request, Response } from 'express';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { User, MembershipOrg } from '../../models';
|
||||
import { completeAccount } from '../../helpers/user';
|
||||
import {
|
||||
initializeDefaultOrg
|
||||
} from '../../helpers/signup';
|
||||
import { issueAuthTokens } from '../../helpers/auth';
|
||||
import { INVITED, ACCEPTED } from '../../variables';
|
||||
import { NODE_ENV } from '../../config';
|
||||
import axios from 'axios';
|
||||
|
||||
/**
|
||||
* Complete setting up user by adding their personal and auth information as part of the
|
||||
* signup flow
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const completeAccountSignup = async (req: Request, res: Response) => {
|
||||
let user, token, refreshToken;
|
||||
try {
|
||||
const {
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
publicKey,
|
||||
encryptedPrivateKey,
|
||||
encryptedPrivateKeyIV,
|
||||
encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier,
|
||||
organizationName
|
||||
}: {
|
||||
email: string;
|
||||
firstName: string;
|
||||
lastName: string;
|
||||
protectedKey: string;
|
||||
protectedKeyIV: string;
|
||||
protectedKeyTag: string;
|
||||
publicKey: string;
|
||||
encryptedPrivateKey: string;
|
||||
encryptedPrivateKeyIV: string;
|
||||
encryptedPrivateKeyTag: string;
|
||||
salt: string;
|
||||
verifier: string;
|
||||
organizationName: string;
|
||||
} = req.body;
|
||||
|
||||
// get user
|
||||
user = await User.findOne({ email });
|
||||
|
||||
if (!user || (user && user?.publicKey)) {
|
||||
// case 1: user doesn't exist.
|
||||
// case 2: user has already completed account
|
||||
return res.status(403).send({
|
||||
error: 'Failed to complete account for complete user'
|
||||
});
|
||||
}
|
||||
|
||||
// complete setting up user's account
|
||||
user = await completeAccount({
|
||||
userId: user._id.toString(),
|
||||
firstName,
|
||||
lastName,
|
||||
encryptionVersion: 2,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
publicKey,
|
||||
encryptedPrivateKey,
|
||||
encryptedPrivateKeyIV,
|
||||
encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier
|
||||
});
|
||||
|
||||
if (!user)
|
||||
throw new Error('Failed to complete account for non-existent user'); // ensure user is non-null
|
||||
|
||||
// initialize default organization and workspace
|
||||
await initializeDefaultOrg({
|
||||
organizationName,
|
||||
user
|
||||
});
|
||||
|
||||
// update organization membership statuses that are
|
||||
// invited to completed with user attached
|
||||
await MembershipOrg.updateMany(
|
||||
{
|
||||
inviteEmail: email,
|
||||
status: INVITED
|
||||
},
|
||||
{
|
||||
user,
|
||||
status: ACCEPTED
|
||||
}
|
||||
);
|
||||
|
||||
// issue tokens
|
||||
const tokens = await issueAuthTokens({
|
||||
userId: user._id.toString()
|
||||
});
|
||||
|
||||
token = tokens.token;
|
||||
|
||||
// sending a welcome email to new users
|
||||
if (process.env.LOOPS_API_KEY) {
|
||||
await axios.post("https://app.loops.so/api/v1/events/send", {
|
||||
"email": email,
|
||||
"eventName": "Sign Up",
|
||||
"firstName": firstName,
|
||||
"lastName": lastName
|
||||
}, {
|
||||
headers: {
|
||||
"Accept": "application/json",
|
||||
"Authorization": "Bearer " + process.env.LOOPS_API_KEY
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// store (refresh) token in httpOnly cookie
|
||||
res.cookie('jid', tokens.refreshToken, {
|
||||
httpOnly: true,
|
||||
path: '/',
|
||||
sameSite: 'strict',
|
||||
secure: NODE_ENV === 'production' ? true : false
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to complete account setup'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
message: 'Successfully set up account',
|
||||
user,
|
||||
token
|
||||
});
|
||||
};
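completeAccountSignup expects the client to have derived all key material and SRP parameters locally before calling it. A hypothetical payload shape, with placeholder values, matching the destructured type above:

```ts
// Placeholder values only; the field names come from the handler's request type.
const signupBody = {
  email: "jane@example.com",
  firstName: "Jane",
  lastName: "Doe",
  protectedKey: "...", protectedKeyIV: "...", protectedKeyTag: "...",
  publicKey: "...",
  encryptedPrivateKey: "...", encryptedPrivateKeyIV: "...", encryptedPrivateKeyTag: "...",
  salt: "...",     // SRP salt
  verifier: "...", // SRP verifier
  organizationName: "Acme Inc",
};
```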
|
||||
|
||||
/**
|
||||
* Complete setting up user by adding their personal and auth information as part of the
|
||||
* invite flow
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const completeAccountInvite = async (req: Request, res: Response) => {
|
||||
let user, token, refreshToken;
|
||||
try {
|
||||
const {
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
publicKey,
|
||||
encryptedPrivateKey,
|
||||
encryptedPrivateKeyIV,
|
||||
encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier
|
||||
} = req.body;
|
||||
|
||||
// get user
|
||||
user = await User.findOne({ email });
|
||||
|
||||
if (!user || (user && user?.publicKey)) {
|
||||
// case 1: user doesn't exist.
|
||||
// case 2: user has already completed account
|
||||
return res.status(403).send({
|
||||
error: 'Failed to complete account for complete user'
|
||||
});
|
||||
}
|
||||
|
||||
const membershipOrg = await MembershipOrg.findOne({
|
||||
inviteEmail: email,
|
||||
status: INVITED
|
||||
});
|
||||
|
||||
if (!membershipOrg) throw new Error('Failed to find invitations for email');
|
||||
|
||||
// complete setting up user's account
|
||||
user = await completeAccount({
|
||||
userId: user._id.toString(),
|
||||
firstName,
|
||||
lastName,
|
||||
encryptionVersion: 2,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
publicKey,
|
||||
encryptedPrivateKey,
|
||||
encryptedPrivateKeyIV,
|
||||
encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier
|
||||
});
|
||||
|
||||
if (!user)
|
||||
throw new Error('Failed to complete account for non-existent user');
|
||||
|
||||
// update organization membership statuses that are
|
||||
// invited to completed with user attached
|
||||
await MembershipOrg.updateMany(
|
||||
{
|
||||
inviteEmail: email,
|
||||
status: INVITED
|
||||
},
|
||||
{
|
||||
user,
|
||||
status: ACCEPTED
|
||||
}
|
||||
);
|
||||
|
||||
// issue tokens
|
||||
const tokens = await issueAuthTokens({
|
||||
userId: user._id.toString()
|
||||
});
|
||||
|
||||
token = tokens.token;
|
||||
|
||||
// store (refresh) token in httpOnly cookie
|
||||
res.cookie('jid', tokens.refreshToken, {
|
||||
httpOnly: true,
|
||||
path: '/',
|
||||
sameSite: 'strict',
|
||||
secure: NODE_ENV === 'production' ? true : false
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to complete account setup'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
message: 'Successfully set up account',
|
||||
user,
|
||||
token
|
||||
});
|
||||
};
|
@ -2,7 +2,7 @@ import { Request, Response } from 'express';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Types } from 'mongoose';
|
||||
import {
|
||||
Membership,
|
||||
Membership, Secret,
|
||||
} from '../../models';
|
||||
import Tag, { ITag } from '../../models/tag';
|
||||
import { Builder } from "builder-pattern"
|
||||
@ -54,6 +54,12 @@ export const deleteWorkspaceTag = async (req: Request, res: Response) => {
|
||||
|
||||
const result = await Tag.findByIdAndDelete(tagId);
|
||||
|
||||
// remove the tag from secrets
|
||||
await Secret.updateMany(
|
||||
{ tags: { $in: [tagId] } },
|
||||
{ $pull: { tags: tagId } }
|
||||
);
|
||||
|
||||
res.json(result);
|
||||
}
|
||||
|
||||
|
@ -55,6 +55,44 @@ export const getMe = async (req: Request, res: Response) => {
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the current user's MFA-enabled status [isMfaEnabled].
|
||||
 * Note: Infisical currently supports email-based 2FA only; this will expand to
|
||||
* include SMS and authenticator app modes of authentication in the future.
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const updateMyMfaEnabled = async (req: Request, res: Response) => {
|
||||
let user;
|
||||
try {
|
||||
const { isMfaEnabled }: { isMfaEnabled: boolean } = req.body;
|
||||
req.user.isMfaEnabled = isMfaEnabled;
|
||||
|
||||
if (isMfaEnabled) {
|
||||
// TODO: adapt this route/controller
|
||||
// to work for different forms of MFA
|
||||
req.user.mfaMethods = ['email'];
|
||||
} else {
|
||||
req.user.mfaMethods = [];
|
||||
}
|
||||
|
||||
await req.user.save();
|
||||
|
||||
user = req.user;
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to update current user's MFA status"
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
user
|
||||
});
|
||||
}
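updateMyMfaEnabled flips email-based MFA for the authenticated user and, when enabling, records 'email' as the only MFA method. A hypothetical client call; the route path is an assumption:

```ts
import axios from "axios";

async function setMfa(token: string, isMfaEnabled: boolean) {
  // Route path is an assumption; the body shape matches the handler above.
  return axios.patch(
    "/api/v2/users/me/mfa",
    { isMfaEnabled },
    { headers: { Authorization: `Bearer ${token}` } }
  );
}
```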
|
||||
|
||||
/**
|
||||
* Return organizations that the current user is part of.
|
||||
* @param req
|
||||
|
@ -158,11 +158,9 @@ export const rollbackSecretVersion = async (req: Request, res: Response) => {
|
||||
secretKeyCiphertext,
|
||||
secretKeyIV,
|
||||
secretKeyTag,
|
||||
secretKeyHash,
|
||||
secretValueCiphertext,
|
||||
secretValueIV,
|
||||
secretValueTag,
|
||||
secretValueHash
|
||||
} = oldSecretVersion;
|
||||
|
||||
// update secret
|
||||
@ -179,11 +177,9 @@ export const rollbackSecretVersion = async (req: Request, res: Response) => {
|
||||
secretKeyCiphertext,
|
||||
secretKeyIV,
|
||||
secretKeyTag,
|
||||
secretKeyHash,
|
||||
secretValueCiphertext,
|
||||
secretValueIV,
|
||||
secretValueTag,
|
||||
secretValueHash
|
||||
},
|
||||
{
|
||||
new: true
|
||||
@ -204,11 +200,9 @@ export const rollbackSecretVersion = async (req: Request, res: Response) => {
|
||||
secretKeyCiphertext,
|
||||
secretKeyIV,
|
||||
secretKeyTag,
|
||||
secretKeyHash,
|
||||
secretValueCiphertext,
|
||||
secretValueIV,
|
||||
secretValueTag,
|
||||
secretValueHash
|
||||
secretValueTag
|
||||
}).save();
|
||||
|
||||
// take secret snapshot
|
||||
|
@ -1,5 +1,6 @@
|
||||
import _ from "lodash";
|
||||
import { Membership } from "../../models";
|
||||
import { ABILITY_READ, ABILITY_WRITE } from "../../variables/organization";
|
||||
|
||||
export const userHasWorkspaceAccess = async (userId: any, workspaceId: any, environment: any, action: any) => {
|
||||
const membershipForWorkspace = await Membership.findOne({ workspace: workspaceId, user: userId })
|
||||
@ -15,4 +16,39 @@ export const userHasWorkspaceAccess = async (userId: any, workspaceId: any, envi
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
export const userHasWriteOnlyAbility = async (userId: any, workspaceId: any, environment: any) => {
|
||||
const membershipForWorkspace = await Membership.findOne({ workspace: workspaceId, user: userId })
|
||||
if (!membershipForWorkspace) {
|
||||
return false
|
||||
}
|
||||
|
||||
const deniedMembershipPermissions = membershipForWorkspace.deniedPermissions;
|
||||
const isWriteDisallowed = _.some(deniedMembershipPermissions, { environmentSlug: environment, ability: ABILITY_WRITE });
|
||||
const isReadDisallowed = _.some(deniedMembershipPermissions, { environmentSlug: environment, ability: ABILITY_READ });
|
||||
|
||||
// case: you have write only if read is blocked and write is not
|
||||
if (isReadDisallowed && !isWriteDisallowed) {
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
export const userHasNoAbility = async (userId: any, workspaceId: any, environment: any) => {
|
||||
const membershipForWorkspace = await Membership.findOne({ workspace: workspaceId, user: userId })
|
||||
if (!membershipForWorkspace) {
|
||||
return true
|
||||
}
|
||||
|
||||
const deniedMembershipPermissions = membershipForWorkspace.deniedPermissions;
|
||||
const isWriteDisallowed = _.some(deniedMembershipPermissions, { environmentSlug: environment, ability: ABILITY_WRITE });
|
||||
const isReadBlocked = _.some(deniedMembershipPermissions, { environmentSlug: environment, ability: ABILITY_READ });
|
||||
|
||||
if (isReadBlocked && isWriteDisallowed) {
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
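The three helpers partition a member's access to one environment by which abilities appear in deniedPermissions. A compact sketch of the resulting classification (ability labels assumed, not taken from this diff):

```ts
// Sketch: how the three checks classify a member's access to one environment.
type Ability = "read" | "write"; // assumed ability labels
const denied = (perms: Ability[], a: Ability) => perms.includes(a);

function classify(perms: Ability[]) {
  const readBlocked = denied(perms, "read");
  const writeBlocked = denied(perms, "write");
  if (readBlocked && writeBlocked) return "no access";   // userHasNoAbility
  if (readBlocked) return "write-only";                   // userHasWriteOnlyAbility
  return writeBlocked ? "read-only" : "full access";      // userHasWorkspaceAccess (read)
}

console.log(classify([]));                // full access
console.log(classify(["write"]));         // read-only
console.log(classify(["read"]));          // write-only
console.log(classify(["read", "write"])); // no access
```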
|
@ -5,22 +5,19 @@ import {
|
||||
} from '../../variables';
|
||||
|
||||
export interface ISecretVersion {
|
||||
_id: Types.ObjectId;
|
||||
secret: Types.ObjectId;
|
||||
version: number;
|
||||
workspace: Types.ObjectId; // new
|
||||
type: string; // new
|
||||
user: Types.ObjectId; // new
|
||||
user?: Types.ObjectId; // new
|
||||
environment: string; // new
|
||||
isDeleted: boolean;
|
||||
secretKeyCiphertext: string;
|
||||
secretKeyIV: string;
|
||||
secretKeyTag: string;
|
||||
secretKeyHash: string;
|
||||
secretValueCiphertext: string;
|
||||
secretValueIV: string;
|
||||
secretValueTag: string;
|
||||
secretValueHash: string;
|
||||
tags?: string[];
|
||||
}
|
||||
|
||||
@ -72,9 +69,6 @@ const secretVersionSchema = new Schema<ISecretVersion>(
|
||||
type: String, // symmetric
|
||||
required: true
|
||||
},
|
||||
secretKeyHash: {
|
||||
type: String
|
||||
},
|
||||
secretValueCiphertext: {
|
||||
type: String,
|
||||
required: true
|
||||
@ -87,9 +81,6 @@ const secretVersionSchema = new Schema<ISecretVersion>(
|
||||
type: String, // symmetric
|
||||
required: true
|
||||
},
|
||||
secretValueHash: {
|
||||
type: String
|
||||
},
|
||||
tags: {
|
||||
ref: 'Tag',
|
||||
type: [Schema.Types.ObjectId],
|
||||
|
@ -211,7 +211,7 @@ const getAuthAPIKeyPayload = async ({
|
||||
* @return {String} obj.token - issued JWT token
|
||||
* @return {String} obj.refreshToken - issued refresh token
|
||||
*/
|
||||
const issueTokens = async ({ userId }: { userId: string }) => {
|
||||
const issueAuthTokens = async ({ userId }: { userId: string }) => {
|
||||
let token: string;
|
||||
let refreshToken: string;
|
||||
try {
|
||||
@ -298,6 +298,6 @@ export {
|
||||
getAuthSTDPayload,
|
||||
getAuthAPIKeyPayload,
|
||||
createToken,
|
||||
issueTokens,
|
||||
issueAuthTokens,
|
||||
clearTokens
|
||||
};
|
||||
|
@ -1,12 +1,11 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import crypto from 'crypto';
|
||||
import { Token, IToken, IUser } from '../models';
|
||||
import { IUser } from '../models';
|
||||
import { createOrganization } from './organization';
|
||||
import { addMembershipsOrg } from './membershipOrg';
|
||||
import { createWorkspace } from './workspace';
|
||||
import { addMemberships } from './membership';
|
||||
import { OWNER, ADMIN, ACCEPTED } from '../variables';
|
||||
import { OWNER, ACCEPTED } from '../variables';
|
||||
import { sendMail } from '../helpers/nodemailer';
|
||||
import { TokenService } from '../services';
|
||||
import { TOKEN_EMAIL_CONFIRMATION } from '../variables';
|
||||
|
||||
/**
|
||||
* Send magic link to verify email to [email]
|
||||
@ -14,21 +13,13 @@ import { sendMail } from '../helpers/nodemailer';
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.email - email
|
||||
* @returns {Boolean} success - whether or not operation was successful
|
||||
*
|
||||
*/
|
||||
const sendEmailVerification = async ({ email }: { email: string }) => {
|
||||
try {
|
||||
const token = String(crypto.randomInt(Math.pow(10, 5), Math.pow(10, 6) - 1));
|
||||
|
||||
await Token.findOneAndUpdate(
|
||||
{ email },
|
||||
{
|
||||
email,
|
||||
token,
|
||||
createdAt: new Date()
|
||||
},
|
||||
{ upsert: true, new: true }
|
||||
);
|
||||
const token = await TokenService.createToken({
|
||||
type: TOKEN_EMAIL_CONFIRMATION,
|
||||
email
|
||||
});
|
||||
|
||||
// send mail
|
||||
await sendMail({
|
||||
@ -62,12 +53,11 @@ const checkEmailVerification = async ({
|
||||
code: string;
|
||||
}) => {
|
||||
try {
|
||||
const token = await Token.findOneAndDelete({
|
||||
await TokenService.validateToken({
|
||||
type: TOKEN_EMAIL_CONFIRMATION,
|
||||
email,
|
||||
token: code
|
||||
});
|
||||
|
||||
if (!token) throw new Error('Failed to find email verification token');
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
@ -103,18 +93,6 @@ const initializeDefaultOrg = async ({
|
||||
roles: [OWNER],
|
||||
statuses: [ACCEPTED]
|
||||
});
|
||||
|
||||
// initialize a default workspace inside the new organization
|
||||
const workspace = await createWorkspace({
|
||||
name: `Example Project`,
|
||||
organizationId: organization._id.toString()
|
||||
});
|
||||
|
||||
await addMemberships({
|
||||
userIds: [user._id.toString()],
|
||||
workspaceId: workspace._id.toString(),
|
||||
roles: [ADMIN]
|
||||
});
|
||||
} catch (err) {
|
||||
throw new Error(`Failed to initialize default organization and workspace [err=${err}]`);
|
||||
}
|
||||
|
217 backend/src/helpers/token.ts (new file)
@@ -0,0 +1,217 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Types } from 'mongoose';
|
||||
import { TokenData } from '../models';
|
||||
import crypto from 'crypto';
|
||||
import bcrypt from 'bcrypt';
|
||||
import {
|
||||
TOKEN_EMAIL_CONFIRMATION,
|
||||
TOKEN_EMAIL_MFA,
|
||||
TOKEN_EMAIL_ORG_INVITATION,
|
||||
TOKEN_EMAIL_PASSWORD_RESET
|
||||
} from '../variables';
|
||||
import {
|
||||
SALT_ROUNDS
|
||||
} from '../config';
|
||||
import { UnauthorizedRequestError } from '../utils/errors';
|
||||
|
||||
/**
|
||||
* Create and store a token in the database for purpose [type]
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.type
|
||||
* @param {String} obj.email
|
||||
* @param {String} obj.phoneNumber
|
||||
* @param {Types.ObjectId} obj.organizationId
|
||||
* @returns {String} token - the created token
|
||||
*/
|
||||
const createTokenHelper = async ({
|
||||
type,
|
||||
email,
|
||||
phoneNumber,
|
||||
organizationId
|
||||
}: {
|
||||
type: 'emailConfirmation' | 'emailMfa' | 'organizationInvitation' | 'passwordReset';
|
||||
email?: string;
|
||||
phoneNumber?: string;
|
||||
organizationId?: Types.ObjectId
|
||||
}) => {
|
||||
let token, expiresAt, triesLeft;
|
||||
try {
|
||||
// generate random token based on specified token use-case
|
||||
// type [type]
|
||||
switch (type) {
|
||||
case TOKEN_EMAIL_CONFIRMATION:
|
||||
// generate random 6-digit code
|
||||
token = String(crypto.randomInt(Math.pow(10, 5), Math.pow(10, 6) - 1));
|
||||
expiresAt = new Date((new Date()).getTime() + 86400000);
|
||||
break;
|
||||
case TOKEN_EMAIL_MFA:
|
||||
// generate random 6-digit code
|
||||
token = String(crypto.randomInt(Math.pow(10, 5), Math.pow(10, 6) - 1));
|
||||
triesLeft = 5;
|
||||
expiresAt = new Date((new Date()).getTime() + 300000);
|
||||
break;
|
||||
case TOKEN_EMAIL_ORG_INVITATION:
|
||||
// generate random hex
|
||||
token = crypto.randomBytes(16).toString('hex');
|
||||
expiresAt = new Date((new Date()).getTime() + 259200000);
|
||||
break;
|
||||
case TOKEN_EMAIL_PASSWORD_RESET:
|
||||
// generate random hex
|
||||
token = crypto.randomBytes(16).toString('hex');
|
||||
expiresAt = new Date((new Date()).getTime() + 86400000);
|
||||
break;
|
||||
default:
|
||||
token = crypto.randomBytes(16).toString('hex');
|
||||
expiresAt = new Date();
|
||||
break;
|
||||
}
|
||||
|
||||
interface TokenDataQuery {
|
||||
type: string;
|
||||
email?: string;
|
||||
phoneNumber?: string;
|
||||
organization?: Types.ObjectId;
|
||||
}
|
||||
|
||||
interface TokenDataUpdate {
|
||||
type: string;
|
||||
email?: string;
|
||||
phoneNumber?: string;
|
||||
organization?: Types.ObjectId;
|
||||
tokenHash: string;
|
||||
triesLeft?: number;
|
||||
expiresAt: Date;
|
||||
}
|
||||
|
||||
const query: TokenDataQuery = { type };
|
||||
const update: TokenDataUpdate = {
|
||||
type,
|
||||
tokenHash: await bcrypt.hash(token, SALT_ROUNDS),
|
||||
expiresAt
|
||||
}
|
||||
|
||||
if (email) {
|
||||
query.email = email;
|
||||
update.email = email;
|
||||
}
|
||||
if (phoneNumber) {
|
||||
query.phoneNumber = phoneNumber;
|
||||
update.phoneNumber = phoneNumber;
|
||||
}
|
||||
if (organizationId) {
|
||||
query.organization = organizationId
|
||||
update.organization = organizationId
|
||||
}
|
||||
|
||||
if (triesLeft) {
|
||||
update.triesLeft = triesLeft;
|
||||
}
|
||||
|
||||
await TokenData.findOneAndUpdate(
|
||||
query,
|
||||
update,
|
||||
{
|
||||
new: true,
|
||||
upsert: true
|
||||
}
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error(
|
||||
"Failed to create token"
|
||||
);
|
||||
}
|
||||
|
||||
return token;
|
||||
}
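The expiresAt offsets in the switch above are raw millisecond counts; spelled out, they correspond to the following windows (a readability sketch, not part of the diff):

```ts
// Millisecond constants matching the hard-coded offsets in createTokenHelper.
const MS = { minute: 60_000, hour: 3_600_000, day: 86_400_000 };

const EXPIRY_MS = {
  emailConfirmation: 1 * MS.day,      //  86_400_000 ms = 24 h
  emailMfa: 5 * MS.minute,            //     300_000 ms =  5 min (with 5 tries)
  organizationInvitation: 3 * MS.day, // 259_200_000 ms = 72 h
  passwordReset: 1 * MS.day,          //  86_400_000 ms = 24 h
};
```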
|
||||
|
||||
/**
|
||||
 * Validate whether [token] is the correct token for purpose [type] (scoped by email, phone number, or organization if provided)
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.email - email associated with the token
|
||||
* @param {String} obj.token - value of the token
|
||||
*/
|
||||
const validateTokenHelper = async ({
|
||||
type,
|
||||
email,
|
||||
phoneNumber,
|
||||
organizationId,
|
||||
token
|
||||
}: {
|
||||
type: 'emailConfirmation' | 'emailMfa' | 'organizationInvitation' | 'passwordReset';
|
||||
email?: string;
|
||||
phoneNumber?: string;
|
||||
organizationId?: Types.ObjectId;
|
||||
token: string;
|
||||
}) => {
|
||||
interface Query {
|
||||
type: string;
|
||||
email?: string;
|
||||
phoneNumber?: string;
|
||||
organization?: Types.ObjectId;
|
||||
}
|
||||
|
||||
const query: Query = { type };
|
||||
|
||||
if (email) { query.email = email; }
|
||||
if (phoneNumber) { query.phoneNumber = phoneNumber; }
|
||||
if (organizationId) { query.organization = organizationId; }
|
||||
|
||||
const tokenData = await TokenData.findOne(query).select('+tokenHash');
|
||||
|
||||
if (!tokenData) throw new Error('Failed to find token to validate');
|
||||
|
||||
if (tokenData.expiresAt < new Date()) {
|
||||
// case: token expired
|
||||
await TokenData.findByIdAndDelete(tokenData._id);
|
||||
throw UnauthorizedRequestError({
|
||||
message: 'MFA session expired. Please log in again',
|
||||
context: {
|
||||
code: 'mfa_expired'
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
const isValid = await bcrypt.compare(token, tokenData.tokenHash);
|
||||
if (!isValid) {
|
||||
// case: token is not valid
|
||||
if (tokenData?.triesLeft !== undefined) {
|
||||
// case: token has a try-limit
|
||||
if (tokenData.triesLeft === 1) {
|
||||
// case: token is out of tries
|
||||
await TokenData.findByIdAndDelete(tokenData._id);
|
||||
} else {
|
||||
// case: token has more than 1 try left
|
||||
await TokenData.findByIdAndUpdate(tokenData._id, {
|
||||
triesLeft: tokenData.triesLeft - 1
|
||||
}, {
|
||||
new: true
|
||||
});
|
||||
}
|
||||
|
||||
throw UnauthorizedRequestError({
|
||||
message: 'MFA code is invalid',
|
||||
context: {
|
||||
code: 'mfa_invalid',
|
||||
triesLeft: tokenData.triesLeft - 1
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
throw UnauthorizedRequestError({
|
||||
message: 'MFA code is invalid',
|
||||
context: {
|
||||
code: 'mfa_invalid'
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// case: token is valid
|
||||
await TokenData.findByIdAndDelete(tokenData._id);
|
||||
}
|
||||
|
||||
export {
|
||||
createTokenHelper,
|
||||
validateTokenHelper
|
||||
}
|
@ -1,5 +1,6 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { User, IUser } from '../models';
|
||||
import { IUser, User } from '../models';
|
||||
import { sendMail } from './nodemailer';
|
||||
|
||||
/**
|
||||
* Initialize a user under email [email]
|
||||
@ -28,10 +29,14 @@ const setupAccount = async ({ email }: { email: string }) => {
|
||||
* @param {String} obj.userId - id of user to finish setting up
|
||||
* @param {String} obj.firstName - first name of user
|
||||
* @param {String} obj.lastName - last name of user
|
||||
* @param {Number} obj.encryptionVersion - version of auth encryption scheme used
|
||||
* @param {String} obj.protectedKey - protected key in encryption version 2
|
||||
* @param {String} obj.protectedKeyIV - IV of protected key in encryption version 2
|
||||
* @param {String} obj.protectedKeyTag - tag of protected key in encryption version 2
|
||||
* @param {String} obj.publicKey - publickey of user
|
||||
* @param {String} obj.encryptedPrivateKey - (encrypted) private key of user
|
||||
* @param {String} obj.iv - iv for (encrypted) private key of user
|
||||
* @param {String} obj.tag - tag for (encrypted) private key of user
|
||||
* @param {String} obj.encryptedPrivateKeyIV - iv for (encrypted) private key of user
|
||||
* @param {String} obj.encryptedPrivateKeyTag - tag for (encrypted) private key of user
|
||||
* @param {String} obj.salt - salt for auth SRP
|
||||
* @param {String} obj.verifier - verifier for auth SRP
|
||||
* @returns {Object} user - the completed user
|
||||
@ -40,20 +45,28 @@ const completeAccount = async ({
|
||||
userId,
|
||||
firstName,
|
||||
lastName,
|
||||
encryptionVersion,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
publicKey,
|
||||
encryptedPrivateKey,
|
||||
iv,
|
||||
tag,
|
||||
encryptedPrivateKeyIV,
|
||||
encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier
|
||||
}: {
|
||||
userId: string;
|
||||
firstName: string;
|
||||
lastName: string;
|
||||
encryptionVersion: number;
|
||||
protectedKey: string;
|
||||
protectedKeyIV: string;
|
||||
protectedKeyTag: string;
|
||||
publicKey: string;
|
||||
encryptedPrivateKey: string;
|
||||
iv: string;
|
||||
tag: string;
|
||||
encryptedPrivateKeyIV: string;
|
||||
encryptedPrivateKeyTag: string;
|
||||
salt: string;
|
||||
verifier: string;
|
||||
}) => {
|
||||
@ -67,10 +80,14 @@ const completeAccount = async ({
|
||||
{
|
||||
firstName,
|
||||
lastName,
|
||||
encryptionVersion,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
publicKey,
|
||||
encryptedPrivateKey,
|
||||
iv,
|
||||
tag,
|
||||
iv: encryptedPrivateKeyIV,
|
||||
tag: encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier
|
||||
},
|
||||
@ -85,4 +102,48 @@ const completeAccount = async ({
|
||||
return user;
|
||||
};
|
||||
|
||||
export { setupAccount, completeAccount };
|
||||
/**
|
||||
* Check if device with ip [ip] and user-agent [userAgent] has been seen for user [user].
|
||||
* If the device is unseen, then notify the user of the new device
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.ip - login ip address
|
||||
* @param {String} obj.userAgent - login user-agent
|
||||
*/
|
||||
const checkUserDevice = async ({
|
||||
user,
|
||||
ip,
|
||||
userAgent
|
||||
}: {
|
||||
user: IUser;
|
||||
ip: string;
|
||||
userAgent: string;
|
||||
}) => {
|
||||
const isDeviceSeen = user.devices.some((device) => device.ip === ip && device.userAgent === userAgent);
|
||||
|
||||
if (!isDeviceSeen) {
|
||||
// case: unseen login ip detected for user
|
||||
// -> notify user about the sign-in from new ip
|
||||
|
||||
user.devices = user.devices.concat([{
|
||||
ip: String(ip),
|
||||
userAgent
|
||||
}]);
|
||||
|
||||
await user.save();
|
||||
|
||||
// notify the user of the sign-in from this new device via email
|
||||
await sendMail({
|
||||
template: 'newDevice.handlebars',
|
||||
subjectLine: `Successful login from new device`,
|
||||
recipients: [user.email],
|
||||
substitutions: {
|
||||
email: user.email,
|
||||
timestamp: new Date().toString(),
|
||||
ip,
|
||||
userAgent
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export { setupAccount, completeAccount, checkUserDevice };
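For context, a minimal sketch of how checkUserDevice might be called from a login handler is below. The handler itself and the '../helpers/user' import path are assumptions for illustration, not code from this diff.

```ts
// Hypothetical login handler; only the checkUserDevice call mirrors the helper above.
import { Request, Response } from 'express';
import { User } from '../models';
import { checkUserDevice } from '../helpers/user'; // assumed path

const loginHandler = async (req: Request, res: Response) => {
  const user = await User.findOne({ email: req.body.email });
  if (!user) return res.status(401).send({ error: 'Invalid credentials' });

  // records the device and emails the user if this ip + user-agent pair is unseen
  await checkUserDevice({
    user,
    ip: req.ip ?? '',
    userAgent: req.headers['user-agent'] ?? ''
  });

  return res.status(200).send({ message: 'Logged in' });
};
```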
|
||||
|
@ -1,7 +1,7 @@
|
||||
import axios from 'axios';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Octokit } from '@octokit/rest';
|
||||
import { IIntegrationAuth } from '../models';
|
||||
import axios from "axios";
|
||||
import * as Sentry from "@sentry/node";
|
||||
import { Octokit } from "@octokit/rest";
|
||||
import { IIntegrationAuth } from "../models";
|
||||
import {
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
@ -12,12 +12,14 @@ import {
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_HEROKU_API_URL,
|
||||
INTEGRATION_VERCEL_API_URL,
|
||||
INTEGRATION_NETLIFY_API_URL,
|
||||
INTEGRATION_RENDER_API_URL,
|
||||
INTEGRATION_FLYIO_API_URL
|
||||
} from '../variables';
|
||||
INTEGRATION_FLYIO_API_URL,
|
||||
INTEGRATION_CIRCLECI_API_URL,
|
||||
} from "../variables";
|
||||
|
||||
/**
|
||||
* Return list of names of apps for integration named [integration]
|
||||
@ -29,7 +31,7 @@ import {
|
||||
*/
|
||||
const getApps = async ({
|
||||
integrationAuth,
|
||||
accessToken
|
||||
accessToken,
|
||||
}: {
|
||||
integrationAuth: IIntegrationAuth;
|
||||
accessToken: string;
|
||||
@ -54,40 +56,45 @@ const getApps = async ({
|
||||
break;
|
||||
case INTEGRATION_HEROKU:
|
||||
apps = await getAppsHeroku({
|
||||
accessToken
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_VERCEL:
|
||||
apps = await getAppsVercel({
|
||||
integrationAuth,
|
||||
accessToken
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_NETLIFY:
|
||||
apps = await getAppsNetlify({
|
||||
accessToken
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_GITHUB:
|
||||
apps = await getAppsGithub({
|
||||
accessToken
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_RENDER:
|
||||
apps = await getAppsRender({
|
||||
accessToken
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_FLYIO:
|
||||
apps = await getAppsFlyio({
|
||||
accessToken
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_CIRCLECI:
|
||||
apps = await getAppsCircleCI({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get integration apps');
|
||||
throw new Error("Failed to get integration apps");
|
||||
}
|
||||
|
||||
return apps;
|
||||
@ -106,19 +113,19 @@ const getAppsHeroku = async ({ accessToken }: { accessToken: string }) => {
|
||||
const res = (
|
||||
await axios.get(`${INTEGRATION_HEROKU_API_URL}/apps`, {
|
||||
headers: {
|
||||
Accept: 'application/vnd.heroku+json; version=3',
|
||||
Authorization: `Bearer ${accessToken}`
|
||||
}
|
||||
Accept: "application/vnd.heroku+json; version=3",
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
).data;
|
||||
|
||||
apps = res.map((a: any) => ({
|
||||
name: a.name
|
||||
name: a.name,
|
||||
}));
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get Heroku integration apps');
|
||||
throw new Error("Failed to get Heroku integration apps");
|
||||
}
|
||||
|
||||
return apps;
|
||||
@ -131,10 +138,10 @@ const getAppsHeroku = async ({ accessToken }: { accessToken: string }) => {
|
||||
* @returns {Object[]} apps - names of Vercel apps
|
||||
* @returns {String} apps.name - name of Vercel app
|
||||
*/
|
||||
const getAppsVercel = async ({
|
||||
const getAppsVercel = async ({
|
||||
integrationAuth,
|
||||
accessToken
|
||||
}: {
|
||||
accessToken,
|
||||
}: {
|
||||
integrationAuth: IIntegrationAuth;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
@ -146,21 +153,23 @@ const getAppsVercel = async ({
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
'Accept-Encoding': 'application/json'
|
||||
},
|
||||
...( integrationAuth?.teamId ? {
|
||||
params: {
|
||||
teamId: integrationAuth.teamId
|
||||
}
|
||||
} : {})
|
||||
...(integrationAuth?.teamId
|
||||
? {
|
||||
params: {
|
||||
teamId: integrationAuth.teamId,
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
})
|
||||
).data;
|
||||
|
||||
apps = res.projects.map((a: any) => ({
|
||||
name: a.name
|
||||
name: a.name,
|
||||
}));
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get Vercel integration apps');
|
||||
throw new Error("Failed to get Vercel integration apps");
|
||||
}
|
||||
|
||||
return apps;
|
||||
@ -173,11 +182,7 @@ const getAppsVercel = async ({
|
||||
* @returns {Object[]} apps - names of Netlify sites
|
||||
* @returns {String} apps.name - name of Netlify site
|
||||
*/
|
||||
const getAppsNetlify = async ({
|
||||
accessToken
|
||||
}: {
|
||||
accessToken: string;
|
||||
}) => {
|
||||
const getAppsNetlify = async ({ accessToken }: { accessToken: string }) => {
|
||||
let apps;
|
||||
try {
|
||||
const res = (
|
||||
@ -191,12 +196,12 @@ const getAppsNetlify = async ({
|
||||
|
||||
apps = res.map((a: any) => ({
|
||||
name: a.name,
|
||||
appId: a.site_id
|
||||
appId: a.site_id,
|
||||
}));
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get Netlify integration apps');
|
||||
throw new Error("Failed to get Netlify integration apps");
|
||||
}
|
||||
|
||||
return apps;
|
||||
@ -209,35 +214,32 @@ const getAppsNetlify = async ({
|
||||
* @returns {Object[]} apps - names of Netlify sites
|
||||
* @returns {String} apps.name - name of Netlify site
|
||||
*/
|
||||
const getAppsGithub = async ({
|
||||
accessToken
|
||||
}: {
|
||||
accessToken: string;
|
||||
}) => {
|
||||
const getAppsGithub = async ({ accessToken }: { accessToken: string }) => {
|
||||
let apps;
|
||||
try {
|
||||
const octokit = new Octokit({
|
||||
auth: accessToken
|
||||
auth: accessToken,
|
||||
});
|
||||
|
||||
const repos = (await octokit.request(
|
||||
'GET /user/repos{?visibility,affiliation,type,sort,direction,per_page,page,since,before}',
|
||||
{
|
||||
per_page: 100
|
||||
}
|
||||
)).data;
|
||||
const repos = (
|
||||
await octokit.request(
|
||||
"GET /user/repos{?visibility,affiliation,type,sort,direction,per_page,page,since,before}",
|
||||
{
|
||||
per_page: 100,
|
||||
}
|
||||
)
|
||||
).data;
|
||||
|
||||
apps = repos
|
||||
.filter((a:any) => a.permissions.admin === true)
|
||||
.filter((a: any) => a.permissions.admin === true)
|
||||
.map((a: any) => ({
|
||||
name: a.name,
|
||||
owner: a.owner.login
|
||||
})
|
||||
);
|
||||
name: a.name,
|
||||
owner: a.owner.login,
|
||||
}));
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get Github repos');
|
||||
throw new Error("Failed to get Github repos");
|
||||
}
|
||||
|
||||
return apps;
|
||||
@ -251,11 +253,7 @@ const getAppsGithub = async ({
|
||||
* @returns {String} apps.name - name of Render service
|
||||
* @returns {String} apps.appId - id of Render service
|
||||
*/
|
||||
const getAppsRender = async ({
|
||||
accessToken
|
||||
}: {
|
||||
accessToken: string;
|
||||
}) => {
|
||||
const getAppsRender = async ({ accessToken }: { accessToken: string }) => {
|
||||
let apps: any;
|
||||
try {
|
||||
const res = (
|
||||
@ -263,8 +261,8 @@ const getAppsRender = async ({
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
Accept: 'application/json',
|
||||
'Accept-Encoding': 'application/json'
|
||||
}
|
||||
'Accept-Encoding': 'application/json',
|
||||
},
|
||||
})
|
||||
).data;
|
||||
|
||||
@ -277,11 +275,11 @@ const getAppsRender = async ({
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get Render services');
|
||||
throw new Error("Failed to get Render services");
|
||||
}
|
||||
|
||||
|
||||
return apps;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Return list of apps for Fly.io integration
|
||||
@ -290,11 +288,7 @@ const getAppsRender = async ({
|
||||
* @returns {Object[]} apps - names and ids of Fly.io apps
|
||||
* @returns {String} apps.name - name of Fly.io apps
|
||||
*/
|
||||
const getAppsFlyio = async ({
|
||||
accessToken
|
||||
}: {
|
||||
accessToken: string;
|
||||
}) => {
|
||||
const getAppsFlyio = async ({ accessToken }: { accessToken: string }) => {
|
||||
let apps;
|
||||
try {
|
||||
const query = `
|
||||
@ -308,34 +302,71 @@ const getAppsFlyio = async ({
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const res = (await axios({
|
||||
url: INTEGRATION_FLYIO_API_URL,
|
||||
method: 'post',
|
||||
headers: {
|
||||
'Authorization': 'Bearer ' + accessToken,
|
||||
|
||||
const res = (
|
||||
await axios({
|
||||
url: INTEGRATION_FLYIO_API_URL,
|
||||
method: "post",
|
||||
headers: {
|
||||
Authorization: "Bearer " + accessToken,
|
||||
'Accept': 'application/json',
|
||||
'Accept-Encoding': 'application/json'
|
||||
},
|
||||
data: {
|
||||
query,
|
||||
variables: {
|
||||
role: null
|
||||
}
|
||||
}
|
||||
})).data.data.apps.nodes;
|
||||
|
||||
apps = res
|
||||
.map((a: any) => ({
|
||||
name: a.name
|
||||
}));
|
||||
'Accept-Encoding': 'application/json',
|
||||
},
|
||||
data: {
|
||||
query,
|
||||
variables: {
|
||||
role: null,
|
||||
},
|
||||
},
|
||||
})
|
||||
).data.data.apps.nodes;
|
||||
|
||||
apps = res.map((a: any) => ({
|
||||
name: a.name,
|
||||
}));
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get Fly.io apps');
|
||||
throw new Error("Failed to get Fly.io apps");
|
||||
}
|
||||
|
||||
return apps;
|
||||
};
|
||||
|
||||
/**
|
||||
* Return list of projects for CircleCI integration
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.accessToken - access token for CircleCI API
|
||||
* @returns {Object[]} apps - names of CircleCI projects
|
||||
* @returns {String} apps.name - name of CircleCI project
|
||||
*/
|
||||
const getAppsCircleCI = async ({ accessToken }: { accessToken: string }) => {
|
||||
let apps: any;
|
||||
try {
|
||||
const res = (
|
||||
await axios.get(
|
||||
`${INTEGRATION_CIRCLECI_API_URL}/v1.1/projects`,
|
||||
{
|
||||
headers: {
|
||||
"Circle-Token": accessToken,
|
||||
"Accept-Encoding": "application/json",
|
||||
},
|
||||
}
|
||||
)
|
||||
).data
|
||||
|
||||
apps = res?.map((a: any) => {
|
||||
return {
|
||||
name: a?.reponame
|
||||
}
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error("Failed to get CircleCI projects");
|
||||
}
|
||||
|
||||
return apps;
|
||||
}
|
||||
};
|
||||
|
||||
export { getApps };
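As a quick illustration, getApps can be driven from an integration-auth record plus a decrypted access token. The caller below and its import paths are assumptions, not code from this diff; the token is assumed to be decrypted elsewhere.

```ts
// Illustrative caller only; paths are assumed.
import { IntegrationAuth } from '../models';
import { getApps } from '../integrations/apps'; // assumed path

const listIntegrationApps = async (integrationAuthId: string, accessToken: string) => {
  const integrationAuth = await IntegrationAuth.findById(integrationAuthId);
  if (!integrationAuth) throw new Error('Integration auth not found');

  // getApps dispatches to the provider-specific fetcher (Heroku, Vercel, ..., CircleCI)
  return await getApps({ integrationAuth, accessToken });
};
```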
|
||||
|
@ -138,7 +138,7 @@ const exchangeCodeAzure = async ({
|
||||
try {
|
||||
res = (await axios.post(
|
||||
INTEGRATION_AZURE_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
new URLSearchParams({
|
||||
grant_type: 'authorization_code',
|
||||
code: code,
|
||||
scope: 'https://vault.azure.net/.default openid offline_access',
|
||||
@ -147,16 +147,16 @@ const exchangeCodeAzure = async ({
|
||||
redirect_uri: `${SITE_URL}/integrations/azure-key-vault/oauth2/callback`
|
||||
} as any)
|
||||
)).data;
|
||||
|
||||
|
||||
accessExpiresAt.setSeconds(
|
||||
accessExpiresAt.getSeconds() + res.expires_in
|
||||
accessExpiresAt.getSeconds() + res.expires_in
|
||||
);
|
||||
} catch (err: any) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed OAuth2 code-token exchange with Azure');
|
||||
}
|
||||
|
||||
|
||||
return ({
|
||||
accessToken: res.access_token,
|
||||
refreshToken: res.refresh_token,
|
||||
@ -175,36 +175,36 @@ const exchangeCodeAzure = async ({
|
||||
* @returns {Date} obj2.accessExpiresAt - date of expiration for access token
|
||||
*/
|
||||
const exchangeCodeHeroku = async ({
|
||||
code
|
||||
code
|
||||
}: {
|
||||
code: string;
|
||||
code: string;
|
||||
}) => {
|
||||
let res: ExchangeCodeHerokuResponse;
|
||||
const accessExpiresAt = new Date();
|
||||
try {
|
||||
res = (await axios.post(
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: 'authorization_code',
|
||||
code: code,
|
||||
client_secret: CLIENT_SECRET_HEROKU
|
||||
} as any)
|
||||
)).data;
|
||||
|
||||
accessExpiresAt.setSeconds(
|
||||
accessExpiresAt.getSeconds() + res.expires_in
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed OAuth2 code-token exchange with Heroku');
|
||||
}
|
||||
|
||||
return ({
|
||||
accessToken: res.access_token,
|
||||
refreshToken: res.refresh_token,
|
||||
accessExpiresAt
|
||||
});
|
||||
let res: ExchangeCodeHerokuResponse;
|
||||
const accessExpiresAt = new Date();
|
||||
try {
|
||||
res = (await axios.post(
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: 'authorization_code',
|
||||
code: code,
|
||||
client_secret: CLIENT_SECRET_HEROKU
|
||||
} as any)
|
||||
)).data;
|
||||
|
||||
accessExpiresAt.setSeconds(
|
||||
accessExpiresAt.getSeconds() + res.expires_in
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed OAuth2 code-token exchange with Heroku');
|
||||
}
|
||||
|
||||
return ({
|
||||
accessToken: res.access_token,
|
||||
refreshToken: res.refresh_token,
|
||||
accessExpiresAt
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
@ -234,7 +234,7 @@ const exchangeCodeVercel = async ({ code }: { code: string }) => {
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed OAuth2 code-token exchange with Vercel');
|
||||
throw new Error(`Failed OAuth2 code-token exchange with Vercel [err=${err}]`);
|
||||
}
|
||||
|
||||
return {
|
||||
|
@ -1,4 +1,5 @@
|
||||
import requireAuth from './requireAuth';
|
||||
import requireMfaAuth from './requireMfaAuth';
|
||||
import requireBotAuth from './requireBotAuth';
|
||||
import requireSignupAuth from './requireSignupAuth';
|
||||
import requireWorkspaceAuth from './requireWorkspaceAuth';
|
||||
@ -15,6 +16,7 @@ import validateRequest from './validateRequest';
|
||||
|
||||
export {
|
||||
requireAuth,
|
||||
requireMfaAuth,
|
||||
requireBotAuth,
|
||||
requireSignupAuth,
|
||||
requireWorkspaceAuth,
|
||||
|
@ -1,11 +1,12 @@
|
||||
import { ErrorRequestHandler } from "express";
|
||||
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { InternalServerError } from "../utils/errors";
|
||||
import { InternalServerError, UnauthorizedRequestError } from "../utils/errors";
|
||||
import { getLogger } from "../utils/logger";
|
||||
import RequestError, { LogLevel } from "../utils/requestError";
|
||||
import { NODE_ENV } from "../config";
|
||||
|
||||
import { TokenExpiredError } from 'jsonwebtoken';
|
||||
|
||||
export const requestErrorHandler: ErrorRequestHandler = (error: RequestError | Error, req, res, next) => {
|
||||
if (res.headersSent) return next();
|
||||
|
43
backend/src/middleware/requireMfaAuth.ts
Normal file
@ -0,0 +1,43 @@
|
||||
import jwt from 'jsonwebtoken';
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
import { User } from '../models';
|
||||
import { JWT_MFA_SECRET } from '../config';
|
||||
import { BadRequestError, UnauthorizedRequestError } from '../utils/errors';
|
||||
|
||||
declare module 'jsonwebtoken' {
|
||||
export interface UserIDJwtPayload extends jwt.JwtPayload {
|
||||
userId: string;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate if (MFA) JWT temporary token on request is valid (e.g. not expired)
|
||||
* and if there is an associated user.
|
||||
*/
|
||||
const requireMfaAuth = async (
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
) => {
|
||||
// JWT (temporary) authentication middleware for complete signup
|
||||
const [ AUTH_TOKEN_TYPE, AUTH_TOKEN_VALUE ] = <[string, string]>req.headers['authorization']?.split(' ', 2) ?? [null, null]
|
||||
if(AUTH_TOKEN_TYPE === null) return next(BadRequestError({message: `Missing Authorization Header in the request header.`}))
|
||||
if(AUTH_TOKEN_TYPE.toLowerCase() !== 'bearer') return next(BadRequestError({message: `The provided authentication type '${AUTH_TOKEN_TYPE}' is not supported.`}))
|
||||
if(AUTH_TOKEN_VALUE === null) return next(BadRequestError({message: 'Missing Authorization Body in the request header'}))
|
||||
|
||||
const decodedToken = <jwt.UserIDJwtPayload>(
|
||||
jwt.verify(AUTH_TOKEN_VALUE, JWT_MFA_SECRET)
|
||||
);
|
||||
|
||||
const user = await User.findOne({
|
||||
_id: decodedToken.userId
|
||||
}).select('+publicKey');
|
||||
|
||||
if (!user)
|
||||
return next(UnauthorizedRequestError({message: 'Unable to authenticate for User account completion. Try logging in again'}))
|
||||
|
||||
req.user = user;
|
||||
return next();
|
||||
};
|
||||
|
||||
export default requireMfaAuth;
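requireMfaAuth expects a short-lived JWT signed with JWT_MFA_SECRET whose payload carries userId. A minimal sketch of issuing such a token follows; the 5-minute lifetime is an assumption, not taken from this diff.

```ts
import jwt from 'jsonwebtoken';
import { JWT_MFA_SECRET } from '../config';

// Issue the temporary token that requireMfaAuth later verifies on /mfa/verify.
// The expiry here is illustrative only.
const createMfaTempToken = (userId: string) =>
  jwt.sign({ userId }, JWT_MFA_SECRET, { expiresIn: '5m' });

// The client then sends it as: Authorization: Bearer <token>
```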
|
@ -10,7 +10,7 @@ import MembershipOrg, { IMembershipOrg } from './membershipOrg';
|
||||
import Organization, { IOrganization } from './organization';
|
||||
import Secret, { ISecret } from './secret';
|
||||
import ServiceToken, { IServiceToken } from './serviceToken';
|
||||
import Token, { IToken } from './token';
|
||||
import TokenData, { ITokenData } from './tokenData';
|
||||
import User, { IUser } from './user';
|
||||
import UserAction, { IUserAction } from './userAction';
|
||||
import Workspace, { IWorkspace } from './workspace';
|
||||
@ -43,8 +43,8 @@ export {
|
||||
ISecret,
|
||||
ServiceToken,
|
||||
IServiceToken,
|
||||
Token,
|
||||
IToken,
|
||||
TokenData,
|
||||
ITokenData,
|
||||
User,
|
||||
IUser,
|
||||
UserAction,
|
||||
|
@ -1,4 +1,4 @@
|
||||
import { Schema, model, Types } from 'mongoose';
|
||||
import { Schema, model, Types } from "mongoose";
|
||||
import {
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
@ -8,8 +8,9 @@ import {
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO
|
||||
} from '../variables';
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
} from "../variables";
|
||||
|
||||
export interface IIntegration {
|
||||
_id: Types.ObjectId;
|
||||
@ -31,44 +32,47 @@ export interface IIntegration {
|
||||
| 'netlify'
|
||||
| 'github'
|
||||
| 'render'
|
||||
| 'flyio';
|
||||
| 'flyio'
|
||||
| 'circleci';
|
||||
integrationAuth: Types.ObjectId;
|
||||
}
|
||||
|
||||
const integrationSchema = new Schema<IIntegration>(
|
||||
{
|
||||
workspace: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'Workspace',
|
||||
required: true
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: "Workspace",
|
||||
required: true,
|
||||
},
|
||||
environment: {
|
||||
type: String,
|
||||
required: true
|
||||
required: true,
|
||||
},
|
||||
isActive: {
|
||||
type: Boolean,
|
||||
required: true
|
||||
required: true,
|
||||
},
|
||||
app: {
|
||||
// name of app in provider
|
||||
type: String,
|
||||
default: null
|
||||
default: null,
|
||||
},
|
||||
appId: { // (new)
|
||||
appId: {
|
||||
// (new)
|
||||
// id of app in provider
|
||||
type: String,
|
||||
default: null
|
||||
default: null,
|
||||
},
|
||||
targetEnvironment: { // (new)
|
||||
// target environment
|
||||
targetEnvironment: {
|
||||
// (new)
|
||||
// target environment
|
||||
type: String,
|
||||
default: null
|
||||
default: null,
|
||||
},
|
||||
owner: {
|
||||
// github-specific repo owner-login
|
||||
type: String,
|
||||
default: null
|
||||
default: null,
|
||||
},
|
||||
path: {
|
||||
// aws-parameter-store-specific path
|
||||
@ -91,21 +95,22 @@ const integrationSchema = new Schema<IIntegration>(
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
],
|
||||
required: true
|
||||
required: true,
|
||||
},
|
||||
integrationAuth: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'IntegrationAuth',
|
||||
required: true
|
||||
}
|
||||
ref: "IntegrationAuth",
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true
|
||||
timestamps: true,
|
||||
}
|
||||
);
|
||||
|
||||
const Integration = model<IIntegration>('Integration', integrationSchema);
|
||||
const Integration = model<IIntegration>("Integration", integrationSchema);
|
||||
|
||||
export default Integration;
|
||||
|
@ -1,4 +1,4 @@
|
||||
import { Schema, model, Types } from 'mongoose';
|
||||
import { Schema, model, Types } from "mongoose";
|
||||
import {
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
@ -8,24 +8,16 @@ import {
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO
|
||||
} from '../variables';
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
} from "../variables";
|
||||
|
||||
export interface IIntegrationAuth {
|
||||
_id: Types.ObjectId;
|
||||
workspace: Types.ObjectId;
|
||||
integration:
|
||||
| 'azure-key-vault'
|
||||
| 'aws-parameter-store'
|
||||
| 'aws-secret-manager'
|
||||
| 'heroku'
|
||||
| 'vercel'
|
||||
| 'netlify'
|
||||
| 'github'
|
||||
| 'render'
|
||||
| 'flyio';
|
||||
teamId: string; // TODO: deprecate (vercel) -> move to accessId
|
||||
accountId: string; // TODO: deprecate (netlify) -> move to accessId
|
||||
integration: 'heroku' | 'vercel' | 'netlify' | 'github' | 'render' | 'flyio' | 'azure-key-vault' | 'circleci' | 'aws-parameter-store' | 'aws-secret-manager';
|
||||
teamId: string;
|
||||
accountId: string;
|
||||
refreshCiphertext?: string;
|
||||
refreshIV?: string;
|
||||
refreshTag?: string;
|
||||
@ -41,9 +33,9 @@ export interface IIntegrationAuth {
|
||||
const integrationAuthSchema = new Schema<IIntegrationAuth>(
|
||||
{
|
||||
workspace: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'Workspace',
|
||||
required: true
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: "Workspace",
|
||||
required: true,
|
||||
},
|
||||
integration: {
|
||||
type: String,
|
||||
@ -56,29 +48,30 @@ const integrationAuthSchema = new Schema<IIntegrationAuth>(
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
],
|
||||
required: true
|
||||
required: true,
|
||||
},
|
||||
teamId: {
|
||||
// vercel-specific integration param
|
||||
type: String
|
||||
type: String,
|
||||
},
|
||||
accountId: {
|
||||
// netlify-specific integration param
|
||||
type: String
|
||||
type: String,
|
||||
},
|
||||
refreshCiphertext: {
|
||||
type: String,
|
||||
select: false
|
||||
select: false,
|
||||
},
|
||||
refreshIV: {
|
||||
type: String,
|
||||
select: false
|
||||
select: false,
|
||||
},
|
||||
refreshTag: {
|
||||
type: String,
|
||||
select: false
|
||||
select: false,
|
||||
},
|
||||
accessIdCiphertext: {
|
||||
type: String,
|
||||
@ -94,28 +87,28 @@ const integrationAuthSchema = new Schema<IIntegrationAuth>(
|
||||
},
|
||||
accessCiphertext: {
|
||||
type: String,
|
||||
select: false
|
||||
select: false,
|
||||
},
|
||||
accessIV: {
|
||||
type: String,
|
||||
select: false
|
||||
select: false,
|
||||
},
|
||||
accessTag: {
|
||||
type: String,
|
||||
select: false
|
||||
select: false,
|
||||
},
|
||||
accessExpiresAt: {
|
||||
type: Date,
|
||||
select: false
|
||||
}
|
||||
select: false,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true
|
||||
timestamps: true,
|
||||
}
|
||||
);
|
||||
|
||||
const IntegrationAuth = model<IIntegrationAuth>(
|
||||
'IntegrationAuth',
|
||||
"IntegrationAuth",
|
||||
integrationAuthSchema
|
||||
);
|
||||
|
||||
|
@ -109,6 +109,9 @@ const secretSchema = new Schema<ISecret>(
|
||||
}
|
||||
);
|
||||
|
||||
|
||||
secretSchema.index({ tags: 1 }, { background: true })
|
||||
|
||||
const Secret = model<ISecret>('Secret', secretSchema);
|
||||
|
||||
export default Secret;
|
||||
|
83
backend/src/models/secretApprovalRequest.ts
Normal file
@ -0,0 +1,83 @@
|
||||
import mongoose, { Schema, model } from 'mongoose';
|
||||
import Secret, { ISecret } from './secret';
|
||||
|
||||
interface ISecretApprovalRequest {
|
||||
secret: mongoose.Types.ObjectId;
|
||||
requestedChanges: ISecret;
|
||||
requestedBy: mongoose.Types.ObjectId;
|
||||
approvers: IApprover[];
|
||||
status: ApprovalStatus;
|
||||
timestamp: Date;
|
||||
requestType: RequestType;
|
||||
requestId: string;
|
||||
}
|
||||
|
||||
interface IApprover {
|
||||
userId: mongoose.Types.ObjectId;
|
||||
status: ApprovalStatus;
|
||||
}
|
||||
|
||||
export enum ApprovalStatus {
|
||||
PENDING = 'pending',
|
||||
APPROVED = 'approved',
|
||||
REJECTED = 'rejected'
|
||||
}
|
||||
|
||||
export enum RequestType {
|
||||
UPDATE = 'update',
|
||||
DELETE = 'delete',
|
||||
CREATE = 'create'
|
||||
}
|
||||
|
||||
const approverSchema = new mongoose.Schema({
|
||||
user: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: 'User',
|
||||
required: true
|
||||
},
|
||||
status: {
|
||||
type: String,
|
||||
enum: [ApprovalStatus],
|
||||
default: ApprovalStatus.PENDING
|
||||
}
|
||||
});
|
||||
|
||||
const secretApprovalRequestSchema = new Schema<ISecretApprovalRequest>(
|
||||
{
|
||||
secret: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: 'Secret'
|
||||
},
|
||||
requestedChanges: Secret,
|
||||
requestedBy: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: 'User'
|
||||
},
|
||||
approvers: [approverSchema],
|
||||
status: {
|
||||
type: String,
|
||||
enum: ApprovalStatus,
|
||||
default: ApprovalStatus.PENDING
|
||||
},
|
||||
timestamp: {
|
||||
type: Date,
|
||||
default: Date.now
|
||||
},
|
||||
requestType: {
|
||||
type: String,
|
||||
enum: RequestType,
|
||||
required: true
|
||||
},
|
||||
requestId: {
|
||||
type: String,
|
||||
required: false
|
||||
}
|
||||
},
|
||||
{
|
||||
timestamps: true
|
||||
}
|
||||
);
|
||||
|
||||
const SecretApprovalRequest = model<ISecretApprovalRequest>('SecretApprovalRequest', secretApprovalRequestSchema);
|
||||
|
||||
export default SecretApprovalRequest;
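A minimal sketch of recording a pending change with this model follows; the helper name and field values are illustrative only.

```ts
import { Types } from 'mongoose';
import SecretApprovalRequest, {
  ApprovalStatus,
  RequestType
} from '../models/secretApprovalRequest'; // assumed path

// Illustrative only: open a pending 'update' request against an existing secret.
// requestedChanges (the proposed secret fields) is omitted here for brevity.
const requestSecretUpdate = async (
  secretId: Types.ObjectId,
  requestedBy: Types.ObjectId,
  approverIds: Types.ObjectId[]
) =>
  await new SecretApprovalRequest({
    secret: secretId,
    requestedBy,
    approvers: approverIds.map((user) => ({ user, status: ApprovalStatus.PENDING })),
    status: ApprovalStatus.PENDING,
    requestType: RequestType.UPDATE
  }).save();
```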
|
@ -5,6 +5,7 @@ export interface IToken {
|
||||
email: string;
|
||||
token: string;
|
||||
createdAt: Date;
|
||||
ttl: number;
|
||||
}
|
||||
|
||||
const tokenSchema = new Schema<IToken>({
|
||||
@ -19,14 +20,13 @@ const tokenSchema = new Schema<IToken>({
|
||||
createdAt: {
|
||||
type: Date,
|
||||
default: Date.now
|
||||
},
|
||||
ttl: {
|
||||
type: Number,
|
||||
}
|
||||
});
|
||||
|
||||
tokenSchema.index({
|
||||
createdAt: 1
|
||||
}, {
|
||||
expireAfterSeconds: parseInt(EMAIL_TOKEN_LIFETIME)
|
||||
});
|
||||
tokenSchema.index({ email: 1 });
|
||||
|
||||
const Token = model<IToken>('Token', tokenSchema);
|
||||
|
||||
|
61
backend/src/models/tokenData.ts
Normal file
@ -0,0 +1,61 @@
|
||||
import { Schema, Types, model } from 'mongoose';
|
||||
|
||||
export interface ITokenData {
|
||||
type: string;
|
||||
email?: string;
|
||||
phoneNumber?: string;
|
||||
organization?: Types.ObjectId;
|
||||
tokenHash: string;
|
||||
triesLeft?: number;
|
||||
expiresAt: Date;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
}
|
||||
|
||||
const tokenDataSchema = new Schema<ITokenData>({
|
||||
type: {
|
||||
type: String,
|
||||
enum: [
|
||||
'emailConfirmation',
|
||||
'emailMfa',
|
||||
'organizationInvitation',
|
||||
'passwordReset'
|
||||
],
|
||||
required: true
|
||||
},
|
||||
email: {
|
||||
type: String
|
||||
},
|
||||
phoneNumber: {
|
||||
type: String
|
||||
},
|
||||
organization: { // organizationInvitation-specific field
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'Organization'
|
||||
},
|
||||
tokenHash: {
|
||||
type: String,
|
||||
select: false,
|
||||
required: true
|
||||
},
|
||||
triesLeft: {
|
||||
type: Number
|
||||
},
|
||||
expiresAt: {
|
||||
type: Date,
|
||||
expires: 0,
|
||||
required: true
|
||||
}
|
||||
}, {
|
||||
timestamps: true
|
||||
});
|
||||
|
||||
tokenDataSchema.index({
|
||||
expiresAt: 1
|
||||
}, {
|
||||
expireAfterSeconds: 0
|
||||
});
|
||||
|
||||
const TokenData = model<ITokenData>('TokenData', tokenDataSchema);
|
||||
|
||||
export default TokenData;
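Because of the expireAfterSeconds: 0 index on expiresAt, MongoDB purges these documents automatically once they expire. Below is a sketch of storing a hashed email-MFA token; the SHA-256 hashing and the 2-minute lifetime are assumptions for illustration, not necessarily what the token helpers use.

```ts
import crypto from 'crypto';
import TokenData from '../models/tokenData'; // assumed path

// Illustrative only: persist a hashed MFA code that auto-expires after 2 minutes.
const saveEmailMfaToken = async (email: string, token: string) => {
  const tokenHash = crypto.createHash('sha256').update(token).digest('hex');

  return await new TokenData({
    type: 'emailMfa',
    email,
    tokenHash,
    triesLeft: 5,
    expiresAt: new Date(Date.now() + 2 * 60 * 1000)
  }).save();
};
```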
|
@ -1,10 +1,14 @@
|
||||
import { Schema, model, Types } from 'mongoose';
|
||||
import { Schema, model, Types, Document } from 'mongoose';
|
||||
|
||||
export interface IUser {
|
||||
export interface IUser extends Document {
|
||||
_id: Types.ObjectId;
|
||||
email: string;
|
||||
firstName?: string;
|
||||
lastName?: string;
|
||||
encryptionVersion: number;
|
||||
protectedKey: string;
|
||||
protectedKeyIV: string;
|
||||
protectedKeyTag: string;
|
||||
publicKey?: string;
|
||||
encryptedPrivateKey?: string;
|
||||
iv?: string;
|
||||
@ -12,7 +16,12 @@ export interface IUser {
|
||||
salt?: string;
|
||||
verifier?: string;
|
||||
refreshVersion?: number;
|
||||
seenIps: [string];
|
||||
isMfaEnabled: boolean;
|
||||
mfaMethods: string[];
|
||||
devices: {
|
||||
ip: string;
|
||||
userAgent: string;
|
||||
}[];
|
||||
}
|
||||
|
||||
const userSchema = new Schema<IUser>(
|
||||
@ -27,6 +36,23 @@ const userSchema = new Schema<IUser>(
|
||||
lastName: {
|
||||
type: String
|
||||
},
|
||||
encryptionVersion: {
|
||||
type: Number,
|
||||
select: false,
|
||||
default: 1 // to resolve backward-compatibility issues
|
||||
},
|
||||
protectedKey: { // introduced as part of encryption version 2
|
||||
type: String,
|
||||
select: false
|
||||
},
|
||||
protectedKeyIV: { // introduced as part of encryption version 2
|
||||
type: String,
|
||||
select: false
|
||||
},
|
||||
protectedKeyTag: { // introduced as part of encryption version 2
|
||||
type: String,
|
||||
select: false
|
||||
},
|
||||
publicKey: {
|
||||
type: String,
|
||||
select: false
|
||||
@ -35,11 +61,11 @@ const userSchema = new Schema<IUser>(
|
||||
type: String,
|
||||
select: false
|
||||
},
|
||||
iv: {
|
||||
iv: { // iv of [encryptedPrivateKey]
|
||||
type: String,
|
||||
select: false
|
||||
},
|
||||
tag: {
|
||||
tag: { // tag of [encryptedPrivateKey]
|
||||
type: String,
|
||||
select: false
|
||||
},
|
||||
@ -56,8 +82,21 @@ const userSchema = new Schema<IUser>(
|
||||
default: 0,
|
||||
select: false
|
||||
},
|
||||
seenIps: [String]
|
||||
},
|
||||
isMfaEnabled: {
|
||||
type: Boolean,
|
||||
default: false
|
||||
},
|
||||
mfaMethods: [{
|
||||
type: String
|
||||
}],
|
||||
devices: {
|
||||
type: [{
|
||||
ip: String,
|
||||
userAgent: String
|
||||
}],
|
||||
default: []
|
||||
}
|
||||
},
|
||||
{
|
||||
timestamps: true
|
||||
}
|
||||
|
@ -7,7 +7,7 @@ import { authLimiter } from '../../helpers/rateLimiter';
|
||||
|
||||
router.post('/token', validateRequest, authController.getNewToken);
|
||||
|
||||
router.post(
|
||||
router.post( // deprecated (moved to api/v2/auth/login1)
|
||||
'/login1',
|
||||
authLimiter,
|
||||
body('email').exists().trim().notEmpty(),
|
||||
@ -16,7 +16,7 @@ router.post(
|
||||
authController.login1
|
||||
);
|
||||
|
||||
router.post(
|
||||
router.post( // deprecated (moved to api/v2/auth/login2)
|
||||
'/login2',
|
||||
authLimiter,
|
||||
body('email').exists().trim().notEmpty(),
|
||||
|
@ -31,7 +31,7 @@ router.patch(
|
||||
requireBotAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER]
|
||||
}),
|
||||
body('isActive').isBoolean(),
|
||||
body('isActive').exists().isBoolean(),
|
||||
body('botKey'),
|
||||
validateRequest,
|
||||
botController.setBotActiveState
|
||||
|
@ -156,4 +156,19 @@ router.get(
|
||||
organizationController.getOrganizationSubscriptions
|
||||
);
|
||||
|
||||
router.get(
|
||||
'/:organizationId/workspace-memberships',
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt']
|
||||
}),
|
||||
requireOrganizationAuth({
|
||||
acceptedRoles: [OWNER, ADMIN, MEMBER],
|
||||
acceptedStatuses: [ACCEPTED]
|
||||
}),
|
||||
param('organizationId').exists().trim(),
|
||||
validateRequest,
|
||||
organizationController.getOrganizationMembersAndTheirWorkspaces
|
||||
);
|
||||
|
||||
|
||||
export default router;
|
||||
|
@ -10,7 +10,7 @@ router.post(
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt']
|
||||
}),
|
||||
body('clientPublicKey').exists().trim().notEmpty(),
|
||||
body('clientPublicKey').exists().isString().trim().notEmpty(),
|
||||
validateRequest,
|
||||
passwordController.srp1
|
||||
);
|
||||
@ -22,11 +22,14 @@ router.post(
|
||||
acceptedAuthModes: ['jwt']
|
||||
}),
|
||||
body('clientProof').exists().trim().notEmpty(),
|
||||
body('encryptedPrivateKey').exists().trim().notEmpty().notEmpty(), // private key encrypted under new pwd
|
||||
body('iv').exists().trim().notEmpty(), // new iv for private key
|
||||
body('tag').exists().trim().notEmpty(), // new tag for private key
|
||||
body('salt').exists().trim().notEmpty(), // part of new pwd
|
||||
body('verifier').exists().trim().notEmpty(), // part of new pwd
|
||||
body('protectedKey').exists().isString().trim().notEmpty(),
|
||||
body('protectedKeyIV').exists().isString().trim().notEmpty(),
|
||||
body('protectedKeyTag').exists().isString().trim().notEmpty(),
|
||||
body('encryptedPrivateKey').exists().isString().trim().notEmpty(), // private key encrypted under new pwd
|
||||
body('encryptedPrivateKeyIV').exists().isString().trim().notEmpty(), // new iv for private key
|
||||
body('encryptedPrivateKeyTag').exists().isString().trim().notEmpty(), // new tag for private key
|
||||
body('salt').exists().isString().trim().notEmpty(), // part of new pwd
|
||||
body('verifier').exists().isString().trim().notEmpty(), // part of new pwd
|
||||
validateRequest,
|
||||
passwordController.changePassword
|
||||
);
|
||||
@ -34,7 +37,7 @@ router.post(
|
||||
router.post(
|
||||
'/email/password-reset',
|
||||
passwordLimiter,
|
||||
body('email').exists().trim().notEmpty(),
|
||||
body('email').exists().isString().trim().notEmpty().isEmail(),
|
||||
validateRequest,
|
||||
passwordController.emailPasswordReset
|
||||
);
|
||||
@ -42,8 +45,8 @@ router.post(
|
||||
router.post(
|
||||
'/email/password-reset-verify',
|
||||
passwordLimiter,
|
||||
body('email').exists().trim().notEmpty().isEmail(),
|
||||
body('code').exists().trim().notEmpty(),
|
||||
body('email').exists().isString().trim().notEmpty().isEmail(),
|
||||
body('code').exists().isString().trim().notEmpty(),
|
||||
validateRequest,
|
||||
passwordController.emailPasswordResetVerify
|
||||
);
|
||||
@ -61,12 +64,12 @@ router.post(
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt']
|
||||
}),
|
||||
body('clientProof').exists().trim().notEmpty(),
|
||||
body('encryptedPrivateKey').exists().trim().notEmpty(), // (backup) private key encrypted under a strong key
|
||||
body('iv').exists().trim().notEmpty(), // new iv for (backup) private key
|
||||
body('tag').exists().trim().notEmpty(), // new tag for (backup) private key
|
||||
body('salt').exists().trim().notEmpty(), // salt generated from strong key
|
||||
body('verifier').exists().trim().notEmpty(), // salt generated from strong key
|
||||
body('clientProof').exists().isString().trim().notEmpty(),
|
||||
body('encryptedPrivateKey').exists().isString().trim().notEmpty(), // (backup) private key encrypted under a strong key
|
||||
body('iv').exists().isString().trim().notEmpty(), // new iv for (backup) private key
|
||||
body('tag').exists().isString().trim().notEmpty(), // new tag for (backup) private key
|
||||
body('salt').exists().isString().trim().notEmpty(), // salt generated from strong key
|
||||
body('verifier').exists().isString().trim().notEmpty(), // verifier generated from strong key
|
||||
validateRequest,
|
||||
passwordController.createBackupPrivateKey
|
||||
);
|
||||
@ -74,11 +77,14 @@ router.post(
|
||||
router.post(
|
||||
'/password-reset',
|
||||
requireSignupAuth,
|
||||
body('encryptedPrivateKey').exists().trim().notEmpty(), // private key encrypted under new pwd
|
||||
body('iv').exists().trim().notEmpty(), // new iv for private key
|
||||
body('tag').exists().trim().notEmpty(), // new tag for private key
|
||||
body('salt').exists().trim().notEmpty(), // part of new pwd
|
||||
body('verifier').exists().trim().notEmpty(), // part of new pwd
|
||||
body('protectedKey').exists().isString().trim().notEmpty(),
|
||||
body('protectedKeyIV').exists().isString().trim().notEmpty(),
|
||||
body('protectedKeyTag').exists().isString().trim().notEmpty(),
|
||||
body('encryptedPrivateKey').exists().isString().trim().notEmpty(), // private key encrypted under new pwd
|
||||
body('encryptedPrivateKeyIV').exists().isString().trim().notEmpty(), // new iv for private key
|
||||
body('encryptedPrivateKeyTag').exists().isString().trim().notEmpty(), // new tag for private key
|
||||
body('salt').exists().isString().trim().notEmpty(), // part of new pwd
|
||||
body('verifier').exists().isString().trim().notEmpty(), // part of new pwd
|
||||
validateRequest,
|
||||
passwordController.resetPassword
|
||||
);
|
||||
|
@ -1,7 +1,7 @@
|
||||
import express from 'express';
|
||||
const router = express.Router();
|
||||
import { body } from 'express-validator';
|
||||
import { requireSignupAuth, validateRequest } from '../../middleware';
|
||||
import { validateRequest } from '../../middleware';
|
||||
import { signupController } from '../../controllers/v1';
|
||||
import { authLimiter } from '../../helpers/rateLimiter';
|
||||
|
||||
@ -22,39 +22,4 @@ router.post(
|
||||
signupController.verifyEmailSignup
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/complete-account/signup',
|
||||
authLimiter,
|
||||
requireSignupAuth,
|
||||
body('email').exists().trim().notEmpty().isEmail(),
|
||||
body('firstName').exists().trim().notEmpty(),
|
||||
body('lastName').exists().trim().notEmpty(),
|
||||
body('publicKey').exists().trim().notEmpty(),
|
||||
body('encryptedPrivateKey').exists().trim().notEmpty(),
|
||||
body('iv').exists().trim().notEmpty(),
|
||||
body('tag').exists().trim().notEmpty(),
|
||||
body('salt').exists().trim().notEmpty(),
|
||||
body('verifier').exists().trim().notEmpty(),
|
||||
body('organizationName').exists().trim().notEmpty(),
|
||||
validateRequest,
|
||||
signupController.completeAccountSignup
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/complete-account/invite',
|
||||
authLimiter,
|
||||
requireSignupAuth,
|
||||
body('email').exists().trim().notEmpty().isEmail(),
|
||||
body('firstName').exists().trim().notEmpty(),
|
||||
body('lastName').exists().trim().notEmpty(),
|
||||
body('publicKey').exists().trim().notEmpty(),
|
||||
body('encryptedPrivateKey').exists().trim().notEmpty(),
|
||||
body('iv').exists().trim().notEmpty(),
|
||||
body('tag').exists().trim().notEmpty(),
|
||||
body('salt').exists().trim().notEmpty(),
|
||||
body('verifier').exists().trim().notEmpty(),
|
||||
validateRequest,
|
||||
signupController.completeAccountInvite
|
||||
);
|
||||
|
||||
export default router;
|
||||
|
44
backend/src/routes/v2/auth.ts
Normal file
@ -0,0 +1,44 @@
|
||||
import express from 'express';
|
||||
const router = express.Router();
|
||||
import { body } from 'express-validator';
|
||||
import { requireMfaAuth, validateRequest } from '../../middleware';
|
||||
import { authController } from '../../controllers/v2';
|
||||
import { authLimiter } from '../../helpers/rateLimiter';
|
||||
|
||||
router.post(
|
||||
'/login1',
|
||||
authLimiter,
|
||||
body('email').isString().trim().notEmpty(),
|
||||
body('clientPublicKey').isString().trim().notEmpty(),
|
||||
validateRequest,
|
||||
authController.login1
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/login2',
|
||||
authLimiter,
|
||||
body('email').isString().trim().notEmpty(),
|
||||
body('clientProof').isString().trim().notEmpty(),
|
||||
validateRequest,
|
||||
authController.login2
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/mfa/send',
|
||||
authLimiter,
|
||||
body('email').isString().trim().notEmpty(),
|
||||
validateRequest,
|
||||
authController.sendMfaToken
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/mfa/verify',
|
||||
authLimiter,
|
||||
requireMfaAuth,
|
||||
body('email').isString().trim().notEmpty(),
|
||||
body('mfaToken').isString().trim().notEmpty(),
|
||||
validateRequest,
|
||||
authController.verifyMfaToken
|
||||
);
|
||||
|
||||
export default router;
|
@ -1,3 +1,5 @@
|
||||
import auth from './auth';
|
||||
import signup from './signup';
|
||||
import users from './users';
|
||||
import organizations from './organizations';
|
||||
import workspace from './workspace';
|
||||
@ -9,6 +11,8 @@ import environment from "./environment"
|
||||
import tags from "./tags"
|
||||
|
||||
export {
|
||||
auth,
|
||||
signup,
|
||||
users,
|
||||
organizations,
|
||||
workspace,
|
||||
|
@ -6,14 +6,52 @@ import {
|
||||
requireSecretsAuth,
|
||||
validateRequest
|
||||
} from '../../middleware';
|
||||
import { query, check, body } from 'express-validator';
|
||||
import { query, body } from 'express-validator';
|
||||
import { secretsController } from '../../controllers/v2';
|
||||
import { validateSecrets } from '../../helpers/secret';
|
||||
import {
|
||||
ADMIN,
|
||||
MEMBER,
|
||||
SECRET_PERSONAL,
|
||||
SECRET_SHARED
|
||||
} from '../../variables';
|
||||
import {
|
||||
BatchSecretRequest
|
||||
} from '../../types/secret';
|
||||
|
||||
router.post(
|
||||
'/batch',
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt', 'apiKey']
|
||||
}),
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
location: 'body'
|
||||
}),
|
||||
body('workspaceId').exists().isString().trim(),
|
||||
body('environment').exists().isString().trim(),
|
||||
body('requests')
|
||||
.exists()
|
||||
.custom(async (requests: BatchSecretRequest[], { req }) => {
|
||||
if (Array.isArray(requests)) {
|
||||
const secretIds = requests
|
||||
.map((request) => request.secret._id)
|
||||
.filter((secretId) => secretId !== undefined)
|
||||
|
||||
if (secretIds.length > 0) {
|
||||
const relevantSecrets = await validateSecrets({
|
||||
userId: req.user._id.toString(),
|
||||
secretIds
|
||||
});
|
||||
|
||||
req.secrets = relevantSecrets;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}),
|
||||
validateRequest,
|
||||
secretsController.batchSecrets
|
||||
);
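For illustration, the kind of body the /batch validator above accepts looks roughly like this; every id and ciphertext value below is a placeholder, and the shapes follow BatchSecretRequest / BatchSecret from the types added later in this diff.

```ts
// Placeholder values only.
const exampleBatchBody = {
  workspaceId: '63f0b1a2c4d5e6f7a8b9c0d1',
  environment: 'dev',
  requests: [
    {
      id: 'request-1',
      method: 'PATCH',
      secret: {
        _id: '63f0b1a2c4d5e6f7a8b9c0d2',
        type: 'shared',
        secretKeyCiphertext: 'placeholder',
        secretKeyIV: 'placeholder',
        secretKeyTag: 'placeholder',
        secretValueCiphertext: 'placeholder',
        secretValueIV: 'placeholder',
        secretValueTag: 'placeholder',
        secretCommentCiphertext: 'placeholder',
        secretCommentIV: 'placeholder',
        secretCommentTag: 'placeholder',
        tags: [] // tag ids, if any
      }
    }
  ]
};
```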
|
||||
|
||||
router.post(
|
||||
'/',
|
||||
@ -74,6 +112,7 @@ router.get(
|
||||
'/',
|
||||
query('workspaceId').exists().trim(),
|
||||
query('environment').exists().trim(),
|
||||
query('tagSlugs'),
|
||||
validateRequest,
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt', 'apiKey', 'serviceToken']
|
||||
|
49
backend/src/routes/v2/signup.ts
Normal file
@ -0,0 +1,49 @@
|
||||
import express from 'express';
|
||||
const router = express.Router();
|
||||
import { body } from 'express-validator';
|
||||
import { requireSignupAuth, validateRequest } from '../../middleware';
|
||||
import { signupController } from '../../controllers/v2';
|
||||
import { authLimiter } from '../../helpers/rateLimiter';
|
||||
|
||||
router.post(
|
||||
'/complete-account/signup',
|
||||
authLimiter,
|
||||
requireSignupAuth,
|
||||
body('email').exists().isString().trim().notEmpty().isEmail(),
|
||||
body('firstName').exists().isString().trim().notEmpty(),
|
||||
body('lastName').exists().isString().trim().notEmpty(),
|
||||
body('protectedKey').exists().isString().trim().notEmpty(),
|
||||
body('protectedKeyIV').exists().isString().trim().notEmpty(),
|
||||
body('protectedKeyTag').exists().isString().trim().notEmpty(),
|
||||
body('publicKey').exists().isString().trim().notEmpty(),
|
||||
body('encryptedPrivateKey').exists().isString().trim().notEmpty(),
|
||||
body('encryptedPrivateKeyIV').exists().isString().trim().notEmpty(),
|
||||
body('encryptedPrivateKeyTag').exists().isString().trim().notEmpty(),
|
||||
body('salt').exists().isString().trim().notEmpty(),
|
||||
body('verifier').exists().isString().trim().notEmpty(),
|
||||
body('organizationName').exists().isString().trim().notEmpty(),
|
||||
validateRequest,
|
||||
signupController.completeAccountSignup
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/complete-account/invite',
|
||||
authLimiter,
|
||||
requireSignupAuth,
|
||||
body('email').exists().isString().trim().notEmpty().isEmail(),
|
||||
body('firstName').exists().isString().trim().notEmpty(),
|
||||
body('lastName').exists().isString().trim().notEmpty(),
|
||||
body('protectedKey').exists().isString().trim().notEmpty(),
|
||||
body('protectedKeyIV').exists().isString().trim().notEmpty(),
|
||||
body('protectedKeyTag').exists().isString().trim().notEmpty(),
|
||||
body('publicKey').exists().trim().notEmpty(),
|
||||
body('encryptedPrivateKey').exists().isString().trim().notEmpty(),
|
||||
body('encryptedPrivateKeyIV').exists().isString().trim().notEmpty(),
|
||||
body('encryptedPrivateKeyTag').exists().isString().trim().notEmpty(),
|
||||
body('salt').exists().isString().trim().notEmpty(),
|
||||
body('verifier').exists().isString().trim().notEmpty(),
|
||||
validateRequest,
|
||||
signupController.completeAccountInvite
|
||||
);
|
||||
|
||||
export default router;
|
@ -1,8 +1,10 @@
|
||||
import express from 'express';
|
||||
const router = express.Router();
|
||||
import {
|
||||
requireAuth
|
||||
requireAuth,
|
||||
validateRequest
|
||||
} from '../../middleware';
|
||||
import { body } from 'express-validator';
|
||||
import { usersController } from '../../controllers/v2';
|
||||
|
||||
router.get(
|
||||
@ -13,6 +15,16 @@ router.get(
|
||||
usersController.getMe
|
||||
);
|
||||
|
||||
router.patch(
|
||||
'/me/mfa',
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt', 'apiKey']
|
||||
}),
|
||||
body('isMfaEnabled').exists().isBoolean(),
|
||||
validateRequest,
|
||||
usersController.updateMyMfaEnabled
|
||||
);
|
||||
|
||||
router.get(
|
||||
'/me/organizations',
|
||||
requireAuth({
|
||||
|
@ -118,7 +118,6 @@ router.delete( // TODO - rewire dashboard to this route
|
||||
workspaceController.deleteWorkspaceMembership
|
||||
);
|
||||
|
||||
|
||||
router.patch(
|
||||
'/:workspaceId/auto-capitalization',
|
||||
requireAuth({
|
||||
|
@ -1,5 +1,3 @@
|
||||
import { Bot, IBot } from '../models';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { handleEventHelper } from '../helpers/event';
|
||||
|
||||
interface Event {
|
||||
|
69
backend/src/services/TokenService.ts
Normal file
@ -0,0 +1,69 @@
|
||||
import { Types } from 'mongoose';
|
||||
import { createTokenHelper, validateTokenHelper } from '../helpers/token';
|
||||
|
||||
/**
|
||||
* Class to handle token actions
|
||||
* TODO: elaborate more on this class
|
||||
*/
|
||||
class TokenService {
|
||||
/**
|
||||
* Create a token [token] for type [type] with associated details
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.type - type or context of token (e.g. emailConfirmation)
|
||||
* @param {String} obj.email - email associated with the token
|
||||
* @param {String} obj.phoneNumber - phone number associated with the token
|
||||
* @param {Types.ObjectId} obj.organizationId - id of organization associated with the token
|
||||
* @returns {String} token - the newly created token
|
||||
*/
|
||||
static async createToken({
|
||||
type,
|
||||
email,
|
||||
phoneNumber,
|
||||
organizationId
|
||||
}: {
|
||||
type: 'emailConfirmation' | 'emailMfa' | 'organizationInvitation' | 'passwordReset';
|
||||
email?: string;
|
||||
phoneNumber?: string;
|
||||
organizationId?: Types.ObjectId;
|
||||
}) {
|
||||
return await createTokenHelper({
|
||||
type,
|
||||
email,
|
||||
phoneNumber,
|
||||
organizationId
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate whether or not token [token] and its associated details match a token in the DB
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.type - type or context of token (e.g. emailConfirmation)
|
||||
* @param {String} obj.email - email associated with the token
|
||||
* @param {String} obj.phoneNumber - phone number associated with the token
|
||||
* @param {Types.ObjectId} obj.organizationId - id of organization associated with the token
|
||||
* @param {String} obj.token - the token to validate
|
||||
*/
|
||||
static async validateToken({
|
||||
type,
|
||||
email,
|
||||
phoneNumber,
|
||||
organizationId,
|
||||
token
|
||||
}: {
|
||||
type: 'emailConfirmation' | 'emailMfa' | 'organizationInvitation' | 'passwordReset';
|
||||
email?: string;
|
||||
phoneNumber?: string;
|
||||
organizationId?: Types.ObjectId;
|
||||
token: string;
|
||||
}) {
|
||||
return await validateTokenHelper({
|
||||
type,
|
||||
email,
|
||||
phoneNumber,
|
||||
organizationId,
|
||||
token
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export default TokenService;
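A minimal sketch of the email-MFA flow these two methods enable is below. The import paths and subject line are assumptions, and the real controllers may differ; it simply pairs createToken with validateToken and reuses the sendMail signature shown earlier in this diff.

```ts
import { TokenService } from '../services'; // assumed path
import { sendMail } from '../helpers/nodemailer'; // assumed path

// Illustrative only: issue a code, email it, and verify it later.
const sendMfaCode = async (email: string) => {
  const code = await TokenService.createToken({ type: 'emailMfa', email });

  await sendMail({
    template: 'emailMfa.handlebars',
    subjectLine: 'Infisical MFA code',
    recipients: [email],
    substitutions: { code }
  });
};

const verifyMfaCode = async (email: string, mfaToken: string) =>
  await TokenService.validateToken({ type: 'emailMfa', email, token: mfaToken });
```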
|
@ -3,11 +3,13 @@ import postHogClient from './PostHogClient';
|
||||
import BotService from './BotService';
|
||||
import EventService from './EventService';
|
||||
import IntegrationService from './IntegrationService';
|
||||
import TokenService from './TokenService';
|
||||
|
||||
export {
|
||||
DatabaseService,
|
||||
postHogClient,
|
||||
BotService,
|
||||
EventService,
|
||||
IntegrationService
|
||||
IntegrationService,
|
||||
TokenService
|
||||
}
|
@ -1,6 +1,10 @@
|
||||
import nodemailer from 'nodemailer';
|
||||
import { SMTP_HOST, SMTP_PORT, SMTP_USERNAME, SMTP_PASSWORD, SMTP_SECURE } from '../config';
|
||||
import { SMTP_HOST_SENDGRID, SMTP_HOST_MAILGUN } from '../variables';
|
||||
import {
|
||||
SMTP_HOST_SENDGRID,
|
||||
SMTP_HOST_MAILGUN,
|
||||
SMTP_HOST_SOCKETLABS
|
||||
} from '../variables';
|
||||
import SMTPConnection from 'nodemailer/lib/smtp-connection';
|
||||
import * as Sentry from '@sentry/node';
|
||||
|
||||
@ -27,6 +31,12 @@ if (SMTP_SECURE) {
|
||||
ciphers: 'TLSv1.2'
|
||||
}
|
||||
break;
|
||||
case SMTP_HOST_SOCKETLABS:
|
||||
mailOpts.requireTLS = true;
|
||||
mailOpts.tls = {
|
||||
ciphers: 'TLSv1.2'
|
||||
}
|
||||
break;
|
||||
default:
|
||||
if (SMTP_HOST.includes('amazonaws.com')) {
|
||||
mailOpts.tls = {
|
||||
|
19
backend/src/templates/emailMfa.handlebars
Normal file
@ -0,0 +1,19 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="x-ua-compatible" content="ie=edge">
|
||||
<title>MFA Code</title>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<h2>Infisical</h2>
|
||||
<h2>Sign in attempt requires further verification</h2>
|
||||
<p>Your MFA code is below — enter it where you started signing in to Infisical.</p>
|
||||
<h2>{{code}}</h2>
|
||||
<p>The MFA code will be valid for 2 minutes.</p>
|
||||
<p>Not you? Contact Infisical or your administrator immediately.</p>
|
||||
</body>
|
||||
|
||||
</html>
|
@ -1,14 +1,17 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="x-ua-compatible" content="ie=edge">
|
||||
<title>Email Verification</title>
|
||||
<title>Code</title>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<h2>Confirm your email address</h2>
|
||||
<p>Your confirmation code is below — enter it in the browser window where you've started signing up for Infisical.</p>
|
||||
<h1>{{code}}</h1>
|
||||
<p>Questions about setting up Infisical? Email us at support@infisical.com</p>
|
||||
</body>
|
||||
|
||||
</html>
|
19
backend/src/templates/newDevice.handlebars
Normal file
@ -0,0 +1,19 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="x-ua-compatible" content="ie=edge">
|
||||
<title>Successful login for {{email}} from new device</title>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<h2>Infisical</h2>
|
||||
<p>We're verifying a recent login for {{email}}:</p>
|
||||
<p><strong>Timestamp</strong>: {{timestamp}}</p>
|
||||
<p><strong>IP address</strong>: {{ip}}</p>
|
||||
<p><strong>User agent</strong>: {{userAgent}}</p>
|
||||
<p>If you believe that this login is suspicious, please contact Infisical or reset your password immediately.</p>
|
||||
</body>
|
||||
|
||||
</html>
|
@ -8,7 +8,7 @@
|
||||
</head>
|
||||
<body>
|
||||
<h2>Join your organization on Infisical</h2>
|
||||
<p>{{inviterFirstName}}({{inviterEmail}}) has invited you to their Infisical organization — {{organizationName}}</p>
|
||||
<p>{{inviterFirstName}} ({{inviterEmail}}) has invited you to their Infisical organization — {{organizationName}}</p>
|
||||
<a href="{{callback_url}}?token={{token}}&to={{email}}">Join now</a>
|
||||
<h3>What is Infisical?</h3>
|
||||
<p>Infisical is an easy-to-use end-to-end encrypted tool that enables developers to sync and manage their secrets and configs.</p>
|
||||
|
@ -7,7 +7,7 @@
|
||||
</head>
|
||||
<body>
|
||||
<h2>Join your team on Infisical</h2>
|
||||
<p>{{inviterFirstName}}({{inviterEmail}}) has invited you to their Infisical project — {{workspaceName}}</p>
|
||||
<p>{{inviterFirstName}} ({{inviterEmail}}) has invited you to their Infisical project — {{workspaceName}}</p>
|
||||
<a href="{{callback_url}}">Join now</a>
|
||||
<h3>What is Infisical?</h3>
|
||||
<p>Infisical is an easy-to-use end-to-end encrypted tool that enables developers to sync and manage their secrets and configs.</p>
|
||||
|
38
backend/src/types/secret/index.d.ts
vendored
@ -1,5 +1,7 @@
|
||||
import { Types } from 'mongoose';
|
||||
import { Assign, Omit } from 'utility-types';
|
||||
import { ISecret } from '../../models';
|
||||
import { mongo } from 'mongoose';
|
||||
|
||||
// Everything is required, except the omitted types
|
||||
export type CreateSecretRequestBody = Omit<ISecret, "user" | "version" | "environment" | "workspace">;
|
||||
@ -12,3 +14,39 @@ export type SanitizedSecretModify = Partial<Omit<ISecret, "user" | "version" | "
|
||||
|
||||
// Everything is required, except the omitted types
|
||||
export type SanitizedSecretForCreate = Omit<ISecret, "version" | "_id">;
|
||||
|
||||
export interface BatchSecretRequest {
|
||||
id: string;
|
||||
method: 'POST' | 'PATCH' | 'DELETE';
|
||||
secret: Secret;
|
||||
}
|
||||
|
||||
export interface BatchSecret {
|
||||
_id: string;
|
||||
type: 'shared' | 'personal',
|
||||
secretKeyCiphertext: string;
|
||||
secretKeyIV: string;
|
||||
secretKeyTag: string;
|
||||
secretValueCiphertext: string;
|
||||
secretValueIV: string;
|
||||
secretValueTag: string;
|
||||
secretCommentCiphertext: string;
|
||||
secretCommentIV: string;
|
||||
secretCommentTag: string;
|
||||
tags: string[];
|
||||
}
|
||||
|
||||
export interface BatchSecret {
|
||||
_id: string;
|
||||
type: 'shared' | 'personal',
|
||||
secretKeyCiphertext: string;
|
||||
secretKeyIV: string;
|
||||
secretKeyTag: string;
|
||||
secretValueCiphertext: string;
|
||||
secretValueIV: string;
|
||||
secretValueTag: string;
|
||||
secretCommentCiphertext: string;
|
||||
secretCommentIV: string;
|
||||
secretCommentTag: string;
|
||||
tags: string[];
|
||||
}
|
@ -3,8 +3,8 @@ import {
|
||||
ENV_TESTING,
|
||||
ENV_STAGING,
|
||||
ENV_PROD,
|
||||
ENV_SET
|
||||
} from './environment';
|
||||
ENV_SET,
|
||||
} from "./environment";
|
||||
import {
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
@ -15,6 +15,7 @@ import {
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_SET,
|
||||
INTEGRATION_OAUTH2,
|
||||
INTEGRATION_AZURE_TOKEN_URL,
|
||||
@ -27,17 +28,12 @@ import {
|
||||
INTEGRATION_NETLIFY_API_URL,
|
||||
INTEGRATION_RENDER_API_URL,
|
||||
INTEGRATION_FLYIO_API_URL,
|
||||
INTEGRATION_OPTIONS
|
||||
} from './integration';
|
||||
import {
|
||||
OWNER,
|
||||
ADMIN,
|
||||
MEMBER,
|
||||
INVITED,
|
||||
ACCEPTED,
|
||||
} from './organization';
|
||||
import { SECRET_SHARED, SECRET_PERSONAL } from './secret';
|
||||
import { EVENT_PUSH_SECRETS, EVENT_PULL_SECRETS } from './event';
|
||||
INTEGRATION_CIRCLECI_API_URL,
|
||||
INTEGRATION_OPTIONS,
|
||||
} from "./integration";
|
||||
import { OWNER, ADMIN, MEMBER, INVITED, ACCEPTED } from "./organization";
|
||||
import { SECRET_SHARED, SECRET_PERSONAL } from "./secret";
|
||||
import { EVENT_PUSH_SECRETS, EVENT_PULL_SECRETS } from "./event";
|
||||
import {
|
||||
ACTION_LOGIN,
|
||||
ACTION_LOGOUT,
|
||||
@ -46,8 +42,21 @@ import {
|
||||
ACTION_DELETE_SECRETS,
|
||||
ACTION_READ_SECRETS
|
||||
} from './action';
|
||||
import { SMTP_HOST_SENDGRID, SMTP_HOST_MAILGUN } from './smtp';
|
||||
import {
|
||||
SMTP_HOST_SENDGRID,
|
||||
SMTP_HOST_MAILGUN,
|
||||
SMTP_HOST_SOCKETLABS
|
||||
} from './smtp';
|
||||
import { PLAN_STARTER, PLAN_PRO } from './stripe';
|
||||
import {
|
||||
MFA_METHOD_EMAIL
|
||||
} from './user';
|
||||
import {
|
||||
TOKEN_EMAIL_CONFIRMATION,
|
||||
TOKEN_EMAIL_MFA,
|
||||
TOKEN_EMAIL_ORG_INVITATION,
|
||||
TOKEN_EMAIL_PASSWORD_RESET
|
||||
} from './token';
|
||||
|
||||
export {
|
||||
OWNER,
|
||||
@ -71,6 +80,7 @@ export {
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_SET,
|
||||
INTEGRATION_OAUTH2,
|
||||
INTEGRATION_AZURE_TOKEN_URL,
|
||||
@ -83,6 +93,7 @@ export {
|
||||
INTEGRATION_NETLIFY_API_URL,
|
||||
INTEGRATION_RENDER_API_URL,
|
||||
INTEGRATION_FLYIO_API_URL,
|
||||
INTEGRATION_CIRCLECI_API_URL,
|
||||
EVENT_PUSH_SECRETS,
|
||||
EVENT_PULL_SECRETS,
|
||||
ACTION_LOGIN,
|
||||
@ -94,6 +105,12 @@ export {
|
||||
INTEGRATION_OPTIONS,
|
||||
SMTP_HOST_SENDGRID,
|
||||
SMTP_HOST_MAILGUN,
|
||||
SMTP_HOST_SOCKETLABS,
|
||||
PLAN_STARTER,
|
||||
PLAN_PRO,
|
||||
MFA_METHOD_EMAIL,
|
||||
TOKEN_EMAIL_CONFIRMATION,
|
||||
TOKEN_EMAIL_MFA,
|
||||
TOKEN_EMAIL_ORG_INVITATION,
|
||||
TOKEN_EMAIL_PASSWORD_RESET
|
||||
};
|
||||
|
@ -3,50 +3,53 @@ import {
|
||||
TENANT_ID_AZURE
|
||||
} from '../config';
|
||||
import {
|
||||
CLIENT_ID_HEROKU,
|
||||
CLIENT_ID_NETLIFY,
|
||||
CLIENT_ID_GITHUB,
|
||||
CLIENT_SLUG_VERCEL
|
||||
} from '../config';
|
||||
CLIENT_ID_HEROKU,
|
||||
CLIENT_ID_NETLIFY,
|
||||
CLIENT_ID_GITHUB,
|
||||
CLIENT_SLUG_VERCEL,
|
||||
} from "../config";
|
||||
|
||||
// integrations
|
||||
const INTEGRATION_AZURE_KEY_VAULT = 'azure-key-vault';
|
||||
const INTEGRATION_AWS_PARAMETER_STORE = 'aws-parameter-store';
|
||||
const INTEGRATION_AWS_SECRET_MANAGER = 'aws-secret-manager';
|
||||
const INTEGRATION_HEROKU = 'heroku';
|
||||
const INTEGRATION_VERCEL = 'vercel';
|
||||
const INTEGRATION_NETLIFY = 'netlify';
|
||||
const INTEGRATION_GITHUB = 'github';
|
||||
const INTEGRATION_RENDER = 'render';
|
||||
const INTEGRATION_FLYIO = 'flyio';
|
||||
const INTEGRATION_HEROKU = "heroku";
|
||||
const INTEGRATION_VERCEL = "vercel";
|
||||
const INTEGRATION_NETLIFY = "netlify";
|
||||
const INTEGRATION_GITHUB = "github";
|
||||
const INTEGRATION_RENDER = "render";
|
||||
const INTEGRATION_FLYIO = "flyio";
|
||||
const INTEGRATION_CIRCLECI = "circleci";
|
||||
const INTEGRATION_SET = new Set([
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
]);
|
||||
|
||||
// integration types
|
||||
const INTEGRATION_OAUTH2 = 'oauth2';
|
||||
const INTEGRATION_OAUTH2 = "oauth2";
|
||||
|
||||
// integration oauth endpoints
|
||||
const INTEGRATION_AZURE_TOKEN_URL = `https://login.microsoftonline.com/${TENANT_ID_AZURE}/oauth2/v2.0/token`;
|
||||
const INTEGRATION_HEROKU_TOKEN_URL = 'https://id.heroku.com/oauth/token';
|
||||
const INTEGRATION_VERCEL_TOKEN_URL =
|
||||
'https://api.vercel.com/v2/oauth/access_token';
|
||||
const INTEGRATION_NETLIFY_TOKEN_URL = 'https://api.netlify.com/oauth/token';
|
||||
"https://api.vercel.com/v2/oauth/access_token";
|
||||
const INTEGRATION_NETLIFY_TOKEN_URL = "https://api.netlify.com/oauth/token";
|
||||
const INTEGRATION_GITHUB_TOKEN_URL =
|
||||
'https://github.com/login/oauth/access_token';
|
||||
"https://github.com/login/oauth/access_token";
|
||||
|
||||
// integration apps endpoints
|
||||
const INTEGRATION_HEROKU_API_URL = 'https://api.heroku.com';
|
||||
const INTEGRATION_VERCEL_API_URL = 'https://api.vercel.com';
|
||||
const INTEGRATION_NETLIFY_API_URL = 'https://api.netlify.com';
|
||||
const INTEGRATION_RENDER_API_URL = 'https://api.render.com';
|
||||
const INTEGRATION_FLYIO_API_URL = 'https://api.fly.io/graphql';
|
||||
const INTEGRATION_HEROKU_API_URL = "https://api.heroku.com";
|
||||
const INTEGRATION_VERCEL_API_URL = "https://api.vercel.com";
|
||||
const INTEGRATION_NETLIFY_API_URL = "https://api.netlify.com";
|
||||
const INTEGRATION_RENDER_API_URL = "https://api.render.com";
|
||||
const INTEGRATION_FLYIO_API_URL = "https://api.fly.io/graphql";
|
||||
const INTEGRATION_CIRCLECI_API_URL = "https://circleci.com/api";
|
||||
|
||||
const INTEGRATION_OPTIONS = [
|
||||
{
|
||||
@ -122,6 +125,15 @@ const INTEGRATION_OPTIONS = [
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Circle CI',
|
||||
slug: 'circleci',
|
||||
image: 'Circle CI.png',
|
||||
isAvailable: true,
|
||||
type: 'pat',
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Azure Key Vault',
|
||||
slug: 'azure-key-vault',
|
||||
@ -149,15 +161,6 @@ const INTEGRATION_OPTIONS = [
|
||||
type: '',
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Circle CI',
|
||||
slug: 'circleci',
|
||||
image: 'Circle CI.png',
|
||||
isAvailable: false,
|
||||
type: '',
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
}
|
||||
]
|
||||
|
||||
@ -165,23 +168,25 @@ export {
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_SET,
|
||||
INTEGRATION_OAUTH2,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_SET,
|
||||
INTEGRATION_OAUTH2,
|
||||
INTEGRATION_AZURE_TOKEN_URL,
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
INTEGRATION_VERCEL_TOKEN_URL,
|
||||
INTEGRATION_NETLIFY_TOKEN_URL,
|
||||
INTEGRATION_GITHUB_TOKEN_URL,
|
||||
INTEGRATION_HEROKU_API_URL,
|
||||
INTEGRATION_VERCEL_API_URL,
|
||||
INTEGRATION_NETLIFY_API_URL,
|
||||
INTEGRATION_RENDER_API_URL,
|
||||
INTEGRATION_FLYIO_API_URL,
|
||||
INTEGRATION_OPTIONS
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
INTEGRATION_VERCEL_TOKEN_URL,
|
||||
INTEGRATION_NETLIFY_TOKEN_URL,
|
||||
INTEGRATION_GITHUB_TOKEN_URL,
|
||||
INTEGRATION_HEROKU_API_URL,
|
||||
INTEGRATION_VERCEL_API_URL,
|
||||
INTEGRATION_NETLIFY_API_URL,
|
||||
INTEGRATION_RENDER_API_URL,
|
||||
INTEGRATION_FLYIO_API_URL,
|
||||
INTEGRATION_CIRCLECI_API_URL,
|
||||
INTEGRATION_OPTIONS,
|
||||
};
|
||||
|
@ -1,24 +1,16 @@
// membership roles
const OWNER = 'owner';
const ADMIN = 'admin';
const MEMBER = 'member';
const OWNER = "owner";
const ADMIN = "admin";
const MEMBER = "member";

// membership statuses
const INVITED = 'invited';
const INVITED = "invited";

// membership permissions ability
const ABILITY_READ = 'read';
const ABILITY_WRITE = 'write';
const ABILITY_READ = "read";
const ABILITY_WRITE = "write";

// -- organization
const ACCEPTED = 'accepted';
const ACCEPTED = "accepted";

export {
  OWNER,
  ADMIN,
  MEMBER,
  INVITED,
  ACCEPTED,
  ABILITY_READ,
  ABILITY_WRITE
}
export { OWNER, ADMIN, MEMBER, INVITED, ACCEPTED, ABILITY_READ, ABILITY_WRITE };
@ -1,7 +1,9 @@
const SMTP_HOST_SENDGRID = 'smtp.sendgrid.net';
const SMTP_HOST_MAILGUN = 'smtp.mailgun.org';
const SMTP_HOST_SOCKETLABS = 'smtp.socketlabs.com';

export {
  SMTP_HOST_SENDGRID,
  SMTP_HOST_MAILGUN
  SMTP_HOST_MAILGUN,
  SMTP_HOST_SOCKETLABS
}
11
backend/src/variables/token.ts
Normal file
@ -0,0 +1,11 @@
const TOKEN_EMAIL_CONFIRMATION = 'emailConfirmation';
const TOKEN_EMAIL_MFA = 'emailMfa';
const TOKEN_EMAIL_ORG_INVITATION = 'organizationInvitation';
const TOKEN_EMAIL_PASSWORD_RESET = 'passwordReset';

export {
  TOKEN_EMAIL_CONFIRMATION,
  TOKEN_EMAIL_MFA,
  TOKEN_EMAIL_ORG_INVITATION,
  TOKEN_EMAIL_PASSWORD_RESET
}
5
backend/src/variables/user.ts
Normal file
@ -0,0 +1,5 @@
const MFA_METHOD_EMAIL = 'email';

export {
  MFA_METHOD_EMAIL
}
4
cli/docker/Dockerfile
Normal file
@ -0,0 +1,4 @@
FROM alpine
RUN apk add --no-cache tini
COPY infisical /bin/infisical
ENTRYPOINT ["/sbin/tini", "--", "/bin/infisical"]
@ -7,8 +7,8 @@ require (
    github.com/muesli/mango-cobra v1.2.0
    github.com/muesli/roff v0.1.0
    github.com/spf13/cobra v1.6.1
    golang.org/x/crypto v0.3.0
    golang.org/x/term v0.3.0
    golang.org/x/crypto v0.6.0
    golang.org/x/term v0.5.0
)

require (
@ -31,8 +31,8 @@ require (
    github.com/oklog/ulid v1.3.1 // indirect
    github.com/rivo/uniseg v0.2.0 // indirect
    go.mongodb.org/mongo-driver v1.10.0 // indirect
    golang.org/x/net v0.2.0 // indirect
    golang.org/x/sys v0.3.0 // indirect
    golang.org/x/net v0.6.0 // indirect
    golang.org/x/sys v0.5.0 // indirect
)

require (
@ -106,10 +106,14 @@ go.mongodb.org/mongo-driver v1.10.0/go.mod h1:wsihk0Kdgv8Kqu1Anit4sfK+22vSFbUrAV
|
||||
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||
golang.org/x/crypto v0.3.0 h1:a06MkbcxBrEFc0w0QIZWXrH/9cCX6KJyWbBOIwAn+7A=
|
||||
golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
|
||||
golang.org/x/crypto v0.6.0 h1:qfktjS5LUO+fFKeJXZ+ikTRijMmljikvG68fpMMruSc=
|
||||
golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58=
|
||||
golang.org/x/net v0.0.0-20211029224645-99673261e6eb/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.2.0 h1:sZfSu1wtKLGlWI4ZZayP0ck9Y73K1ynO6gqzTdBVdPU=
|
||||
golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
|
||||
golang.org/x/net v0.6.0 h1:L4ZwwTvKW9gr0ZMS1yrHD9GZhIuVjOBBnaKH+SPQK0Q=
|
||||
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
@ -123,9 +127,13 @@ golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBc
|
||||
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.3.0 h1:w8ZOecv6NaNa/zC8944JTU3vz4u6Lagfk4RPQxv92NQ=
|
||||
golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.3.0 h1:qoo4akIqOcDME5bhc/NgxUdovd6BSS2uMsVjB56q1xI=
|
||||
golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA=
|
||||
golang.org/x/term v0.5.0 h1:n2a8QNdAb0sZNpU9R1ALUXBbY+w51fCQDN+7EdxNBsY=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
|
@ -114,6 +114,7 @@ func CallGetSecretsV2(httpClient *resty.Client, request GetEncryptedSecretsV2Req
|
||||
SetHeader("User-Agent", USER_AGENT).
|
||||
SetQueryParam("environment", request.Environment).
|
||||
SetQueryParam("workspaceId", request.WorkspaceId).
|
||||
SetQueryParam("tagSlugs", request.TagSlugs).
|
||||
Get(fmt.Sprintf("%v/v2/secrets", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
@ -127,6 +128,68 @@ func CallGetSecretsV2(httpClient *resty.Client, request GetEncryptedSecretsV2Req
|
||||
return secretsResponse, nil
|
||||
}
|
||||
|
||||
func CallLogin1V2(httpClient *resty.Client, request GetLoginOneV2Request) (GetLoginOneV2Response, error) {
|
||||
var loginOneV2Response GetLoginOneV2Response
|
||||
response, err := httpClient.
|
||||
R().
|
||||
SetResult(&loginOneV2Response).
|
||||
SetHeader("User-Agent", USER_AGENT).
|
||||
SetBody(request).
|
||||
Post(fmt.Sprintf("%v/v2/auth/login1", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
return GetLoginOneV2Response{}, fmt.Errorf("CallLogin1V2: Unable to complete api request [err=%s]", err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return GetLoginOneV2Response{}, fmt.Errorf("CallLogin1V2: Unsuccessful response: [response=%s]", response)
|
||||
}
|
||||
|
||||
return loginOneV2Response, nil
|
||||
}
|
||||
|
||||
func CallVerifyMfaToken(httpClient *resty.Client, request VerifyMfaTokenRequest) (*VerifyMfaTokenResponse, *VerifyMfaTokenErrorResponse, error) {
|
||||
var verifyMfaTokenResponse VerifyMfaTokenResponse
|
||||
var responseError VerifyMfaTokenErrorResponse
|
||||
response, err := httpClient.
|
||||
R().
|
||||
SetResult(&verifyMfaTokenResponse).
|
||||
SetHeader("User-Agent", USER_AGENT).
|
||||
SetError(&responseError).
|
||||
SetBody(request).
|
||||
Post(fmt.Sprintf("%v/v2/auth/mfa/verify", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("CallVerifyMfaToken: Unable to complete api request [err=%s]", err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return nil, &responseError, nil
|
||||
}
|
||||
|
||||
return &verifyMfaTokenResponse, nil, nil
|
||||
}
|
||||
|
||||
func CallLogin2V2(httpClient *resty.Client, request GetLoginTwoV2Request) (GetLoginTwoV2Response, error) {
|
||||
var loginTwoV2Response GetLoginTwoV2Response
|
||||
response, err := httpClient.
|
||||
R().
|
||||
SetResult(&loginTwoV2Response).
|
||||
SetHeader("User-Agent", USER_AGENT).
|
||||
SetBody(request).
|
||||
Post(fmt.Sprintf("%v/v2/auth/login2", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
return GetLoginTwoV2Response{}, fmt.Errorf("CallLogin2V2: Unable to complete api request [err=%s]", err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return GetLoginTwoV2Response{}, fmt.Errorf("CallLogin2V2: Unsuccessful response: [response=%s]", response)
|
||||
}
|
||||
|
||||
return loginTwoV2Response, nil
|
||||
}
|
||||
|
||||
func CallGetAllWorkSpacesUserBelongsTo(httpClient *resty.Client) (GetWorkSpacesResponse, error) {
|
||||
var workSpacesResponse GetWorkSpacesResponse
|
||||
response, err := httpClient.
|
||||
@ -154,13 +217,12 @@ func CallIsAuthenticated(httpClient *resty.Client) bool {
|
||||
SetHeader("User-Agent", USER_AGENT).
|
||||
Post(fmt.Sprintf("%v/v1/auth/checkAuth", config.INFISICAL_URL))
|
||||
|
||||
log.Debugln(fmt.Errorf("CallIsAuthenticated: Unsuccessful response: [response=%v]", response))
|
||||
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
log.Debugln(fmt.Errorf("CallIsAuthenticated: Unsuccessful response: [response=%v]", response))
|
||||
return false
|
||||
}
|
||||
|
||||
@ -175,8 +237,6 @@ func CallGetAccessibleEnvironments(httpClient *resty.Client, request GetAccessib
|
||||
SetHeader("User-Agent", USER_AGENT).
|
||||
Get(fmt.Sprintf("%v/v2/workspace/%s/environments", config.INFISICAL_URL, request.WorkspaceId))
|
||||
|
||||
log.Debugln(fmt.Errorf("CallGetAccessibleEnvironments: Unsuccessful response: [response=%v]", response))
|
||||
|
||||
if err != nil {
|
||||
return GetAccessibleEnvironmentsResponse{}, err
|
||||
}
|
||||
|
@ -197,6 +197,7 @@ type GetSecretsByWorkspaceIdAndEnvironmentRequest struct {
|
||||
type GetEncryptedSecretsV2Request struct {
|
||||
Environment string `json:"environment"`
|
||||
WorkspaceId string `json:"workspaceId"`
|
||||
TagSlugs string `json:"tagSlugs"`
|
||||
}
|
||||
|
||||
type GetEncryptedSecretsV2Response struct {
|
||||
@ -262,3 +263,63 @@ type GetAccessibleEnvironmentsResponse struct {
|
||||
IsWriteDenied bool `json:"isWriteDenied"`
|
||||
} `json:"accessibleEnvironments"`
|
||||
}
|
||||
|
||||
type GetLoginOneV2Request struct {
|
||||
Email string `json:"email"`
|
||||
ClientPublicKey string `json:"clientPublicKey"`
|
||||
}
|
||||
|
||||
type GetLoginOneV2Response struct {
|
||||
ServerPublicKey string `json:"serverPublicKey"`
|
||||
Salt string `json:"salt"`
|
||||
}
|
||||
|
||||
type GetLoginTwoV2Request struct {
|
||||
Email string `json:"email"`
|
||||
ClientProof string `json:"clientProof"`
|
||||
}
|
||||
|
||||
type GetLoginTwoV2Response struct {
|
||||
MfaEnabled bool `json:"mfaEnabled"`
|
||||
EncryptionVersion int `json:"encryptionVersion"`
|
||||
Token string `json:"token"`
|
||||
PublicKey string `json:"publicKey"`
|
||||
EncryptedPrivateKey string `json:"encryptedPrivateKey"`
|
||||
Iv string `json:"iv"`
|
||||
Tag string `json:"tag"`
|
||||
ProtectedKey string `json:"protectedKey"`
|
||||
ProtectedKeyIV string `json:"protectedKeyIV"`
|
||||
ProtectedKeyTag string `json:"protectedKeyTag"`
|
||||
}
|
||||
|
||||
type VerifyMfaTokenRequest struct {
|
||||
Email string `json:"email"`
|
||||
MFAToken string `json:"mfaToken"`
|
||||
}
|
||||
|
||||
type VerifyMfaTokenResponse struct {
|
||||
EncryptionVersion int `json:"encryptionVersion"`
|
||||
Token string `json:"token"`
|
||||
PublicKey string `json:"publicKey"`
|
||||
EncryptedPrivateKey string `json:"encryptedPrivateKey"`
|
||||
Iv string `json:"iv"`
|
||||
Tag string `json:"tag"`
|
||||
ProtectedKey string `json:"protectedKey"`
|
||||
ProtectedKeyIV string `json:"protectedKeyIV"`
|
||||
ProtectedKeyTag string `json:"protectedKeyTag"`
|
||||
}
|
||||
|
||||
type VerifyMfaTokenErrorResponse struct {
|
||||
Type string `json:"type"`
|
||||
Message string `json:"message"`
|
||||
Context struct {
|
||||
Code string `json:"code"`
|
||||
TriesLeft int `json:"triesLeft"`
|
||||
} `json:"context"`
|
||||
Level int `json:"level"`
|
||||
LevelName string `json:"level_name"`
|
||||
StatusCode int `json:"status_code"`
|
||||
DatetimeIso time.Time `json:"datetime_iso"`
|
||||
Application string `json:"application"`
|
||||
Extra []interface{} `json:"extra"`
|
||||
}
|
||||
|
@ -36,9 +36,12 @@ var exportCmd = &cobra.Command{
|
||||
// util.RequireLocalWorkspaceFile()
|
||||
},
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
envName, err := cmd.Flags().GetString("env")
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
environmentName, _ := cmd.Flags().GetString("env")
|
||||
if !cmd.Flags().Changed("env") {
|
||||
environmentFromWorkspace := util.GetEnvelopmentBasedOnGitBranch()
|
||||
if environmentFromWorkspace != "" {
|
||||
environmentName = environmentFromWorkspace
|
||||
}
|
||||
}
|
||||
|
||||
shouldExpandSecrets, err := cmd.Flags().GetBool("expand")
|
||||
@ -61,7 +64,12 @@ var exportCmd = &cobra.Command{
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: envName, InfisicalToken: infisicalToken})
|
||||
tagSlugs, err := cmd.Flags().GetString("tags")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs})
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to fetch secrets")
|
||||
}
|
||||
@ -97,6 +105,7 @@ func init() {
|
||||
exportCmd.Flags().StringP("format", "f", "dotenv", "Set the format of the output file (dotenv, json, csv)")
|
||||
exportCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
|
||||
exportCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
|
||||
exportCmd.Flags().StringP("tags", "t", "", "filter secrets by tag slugs")
|
||||
}
|
||||
|
||||
// Format according to the format flag
|
||||
|
@ -56,7 +56,7 @@ var initCmd = &cobra.Command{
    workspaces := workspaceResponse.Workspaces
    if len(workspaces) == 0 {
        message := fmt.Sprintf("You don't have any projects created in Infisical. You must first create a project at %s", util.INFISICAL_TOKEN_NAME)
        util.PrintMessageAndExit(message)
        util.PrintErrorMessageAndExit(message)
    }

    var workspaceNames []string
||||
|
@ -13,7 +13,6 @@ import (
|
||||
"regexp"
|
||||
|
||||
"github.com/Infisical/infisical-merge/packages/api"
|
||||
"github.com/Infisical/infisical-merge/packages/config"
|
||||
"github.com/Infisical/infisical-merge/packages/crypto"
|
||||
"github.com/Infisical/infisical-merge/packages/models"
|
||||
"github.com/Infisical/infisical-merge/packages/srp"
|
||||
@ -23,8 +22,17 @@ import (
|
||||
"github.com/manifoldco/promptui"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
"golang.org/x/crypto/argon2"
|
||||
)
|
||||
|
||||
type params struct {
|
||||
memory uint32
|
||||
iterations uint32
|
||||
parallelism uint8
|
||||
saltLength uint32
|
||||
keyLength uint32
|
||||
}
|
||||
|
||||
// loginCmd represents the login command
|
||||
var loginCmd = &cobra.Command{
|
||||
Use: "login",
|
||||
@ -55,36 +63,158 @@ var loginCmd = &cobra.Command{
|
||||
util.HandleError(err, "Unable to parse email and password for authentication")
|
||||
}
|
||||
|
||||
userCredentials, err := getFreshUserCredentials(email, password)
|
||||
loginOneResponse, loginTwoResponse, err := getFreshUserCredentials(email, password)
|
||||
if err != nil {
|
||||
log.Infoln("Unable to authenticate with the provided credentials, please try again")
|
||||
log.Debugln(err)
|
||||
return
|
||||
}
|
||||
|
||||
encryptedPrivateKey, _ := base64.StdEncoding.DecodeString(userCredentials.EncryptedPrivateKey)
|
||||
tag, err := base64.StdEncoding.DecodeString(userCredentials.Tag)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
if loginTwoResponse.MfaEnabled {
|
||||
i := 1
|
||||
for i < 6 {
|
||||
mfaVerifyCode := askForMFACode()
|
||||
|
||||
httpClient := resty.New()
|
||||
httpClient.SetAuthToken(loginTwoResponse.Token)
|
||||
verifyMFAresponse, mfaErrorResponse, requestError := api.CallVerifyMfaToken(httpClient, api.VerifyMfaTokenRequest{
|
||||
Email: email,
|
||||
MFAToken: mfaVerifyCode,
|
||||
})
|
||||
|
||||
if requestError != nil {
|
||||
util.HandleError(err)
|
||||
break
|
||||
} else if mfaErrorResponse != nil {
|
||||
if mfaErrorResponse.Context.Code == "mfa_invalid" {
|
||||
msg := fmt.Sprintf("Incorrect, verification code. You have %v attempts left", 5-i)
|
||||
fmt.Println(msg)
|
||||
if i == 5 {
|
||||
util.PrintErrorMessageAndExit("No tries left, please try again in a bit")
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if mfaErrorResponse.Context.Code == "mfa_expired" {
|
||||
util.PrintErrorMessageAndExit("Your 2FA verification code has expired, please try logging in again")
|
||||
break
|
||||
}
|
||||
i++
|
||||
} else {
|
||||
loginTwoResponse.EncryptedPrivateKey = verifyMFAresponse.EncryptedPrivateKey
|
||||
loginTwoResponse.EncryptionVersion = verifyMFAresponse.EncryptionVersion
|
||||
loginTwoResponse.Iv = verifyMFAresponse.Iv
|
||||
loginTwoResponse.ProtectedKey = verifyMFAresponse.ProtectedKey
|
||||
loginTwoResponse.ProtectedKeyIV = verifyMFAresponse.ProtectedKeyIV
|
||||
loginTwoResponse.ProtectedKeyTag = verifyMFAresponse.ProtectedKeyTag
|
||||
loginTwoResponse.PublicKey = verifyMFAresponse.PublicKey
|
||||
loginTwoResponse.Tag = verifyMFAresponse.Tag
|
||||
loginTwoResponse.Token = verifyMFAresponse.Token
|
||||
loginTwoResponse.EncryptionVersion = verifyMFAresponse.EncryptionVersion
|
||||
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
IV, err := base64.StdEncoding.DecodeString(userCredentials.IV)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
var decryptedPrivateKey []byte
|
||||
|
||||
if loginTwoResponse.EncryptionVersion == 1 {
|
||||
log.Debug("Login version 1")
|
||||
encryptedPrivateKey, _ := base64.StdEncoding.DecodeString(loginTwoResponse.EncryptedPrivateKey)
|
||||
tag, err := base64.StdEncoding.DecodeString(loginTwoResponse.Tag)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
IV, err := base64.StdEncoding.DecodeString(loginTwoResponse.Iv)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
paddedPassword := fmt.Sprintf("%032s", password)
|
||||
key := []byte(paddedPassword)
|
||||
|
||||
computedDecryptedPrivateKey, err := crypto.DecryptSymmetric(key, encryptedPrivateKey, tag, IV)
|
||||
if err != nil || len(computedDecryptedPrivateKey) == 0 {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
decryptedPrivateKey = computedDecryptedPrivateKey
|
||||
|
||||
} else if loginTwoResponse.EncryptionVersion == 2 {
|
||||
log.Debug("Login version 2")
|
||||
protectedKey, err := base64.StdEncoding.DecodeString(loginTwoResponse.ProtectedKey)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
protectedKeyTag, err := base64.StdEncoding.DecodeString(loginTwoResponse.ProtectedKeyTag)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
protectedKeyIV, err := base64.StdEncoding.DecodeString(loginTwoResponse.ProtectedKeyIV)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
nonProtectedTag, err := base64.StdEncoding.DecodeString(loginTwoResponse.Tag)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
nonProtectedIv, err := base64.StdEncoding.DecodeString(loginTwoResponse.Iv)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
parameters := ¶ms{
|
||||
memory: 64 * 1024,
|
||||
iterations: 3,
|
||||
parallelism: 1,
|
||||
keyLength: 32,
|
||||
}
|
||||
|
||||
derivedKey, err := generateFromPassword(password, []byte(loginOneResponse.Salt), parameters)
|
||||
if err != nil {
|
||||
util.HandleError(fmt.Errorf("unable to generate argon hash from password [err=%s]", err))
|
||||
}
|
||||
|
||||
decryptedProtectedKey, err := crypto.DecryptSymmetric(derivedKey, protectedKey, protectedKeyTag, protectedKeyIV)
|
||||
if err != nil {
|
||||
util.HandleError(fmt.Errorf("unable to get decrypted protected key [err=%s]", err))
|
||||
}
|
||||
|
||||
encryptedPrivateKey, err := base64.StdEncoding.DecodeString(loginTwoResponse.EncryptedPrivateKey)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
decryptedProtectedKeyInHex, err := hex.DecodeString(string(decryptedProtectedKey))
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
computedDecryptedPrivateKey, err := crypto.DecryptSymmetric(decryptedProtectedKeyInHex, encryptedPrivateKey, nonProtectedTag, nonProtectedIv)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
decryptedPrivateKey = computedDecryptedPrivateKey
|
||||
} else {
|
||||
util.PrintErrorMessageAndExit("Insufficient details to decrypt private key")
|
||||
}
|
||||
|
||||
paddedPassword := fmt.Sprintf("%032s", password)
|
||||
key := []byte(paddedPassword)
|
||||
|
||||
decryptedPrivateKey, err := crypto.DecryptSymmetric(key, encryptedPrivateKey, tag, IV)
|
||||
if err != nil || len(decryptedPrivateKey) == 0 {
|
||||
util.HandleError(err)
|
||||
if string(decryptedPrivateKey) == "" || email == "" || loginTwoResponse.Token == "" {
|
||||
log.Debugf("[decryptedPrivateKey=%s] [email=%s] [loginTwoResponse.Token=%s]", string(decryptedPrivateKey), email, loginTwoResponse.Token)
|
||||
util.PrintErrorMessageAndExit("We were unable to fetch required details to complete your login. Run with -d to see more info")
|
||||
}
|
||||
|
||||
userCredentialsToBeStored := &models.UserCredentials{
|
||||
Email: email,
|
||||
PrivateKey: string(decryptedPrivateKey),
|
||||
JTWToken: userCredentials.JTWToken,
|
||||
JTWToken: loginTwoResponse.Token,
|
||||
}
|
||||
|
||||
err = util.StoreUserCredsInKeyRing(userCredentialsToBeStored)
|
||||
@ -155,7 +285,7 @@ func askForLoginCredentials() (email string, password string, err error) {
|
||||
return userEmail, userPassword, nil
|
||||
}
|
||||
|
||||
func getFreshUserCredentials(email string, password string) (*api.LoginTwoResponse, error) {
|
||||
func getFreshUserCredentials(email string, password string) (*api.GetLoginOneV2Response, *api.GetLoginTwoV2Response, error) {
|
||||
log.Debugln("getFreshUserCredentials:", "email", email, "password", password)
|
||||
httpClient := resty.New()
|
||||
httpClient.SetRetryCount(5)
|
||||
@ -166,36 +296,24 @@ func getFreshUserCredentials(email string, password string) (*api.LoginTwoRespon
|
||||
srpA := hex.EncodeToString(srpClient.ComputeA())
|
||||
|
||||
// ** Login one
|
||||
loginOneRequest := api.LoginOneRequest{
|
||||
loginOneResponseResult, err := api.CallLogin1V2(httpClient, api.GetLoginOneV2Request{
|
||||
Email: email,
|
||||
ClientPublicKey: srpA,
|
||||
}
|
||||
|
||||
var loginOneResponseResult api.LoginOneResponse
|
||||
|
||||
loginOneResponse, err := httpClient.
|
||||
R().
|
||||
SetBody(loginOneRequest).
|
||||
SetResult(&loginOneResponseResult).
|
||||
Post(fmt.Sprintf("%v/v1/auth/login1", config.INFISICAL_URL))
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if loginOneResponse.StatusCode() > 299 {
|
||||
return nil, fmt.Errorf("ops, unsuccessful response code. [response=%v]", loginOneResponse)
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
// **** Login 2
|
||||
serverPublicKey_bytearray, err := hex.DecodeString(loginOneResponseResult.ServerPublicKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
userSalt, err := hex.DecodeString(loginOneResponseResult.ServerSalt)
|
||||
userSalt, err := hex.DecodeString(loginOneResponseResult.Salt)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
srpClient.SetSalt(userSalt, []byte(email), []byte(password))
|
||||
@ -203,27 +321,16 @@ func getFreshUserCredentials(email string, password string) (*api.LoginTwoRespon
|
||||
|
||||
srpM1 := srpClient.ComputeM1()
|
||||
|
||||
LoginTwoRequest := api.LoginTwoRequest{
|
||||
loginTwoResponseResult, err := api.CallLogin2V2(httpClient, api.GetLoginTwoV2Request{
|
||||
Email: email,
|
||||
ClientProof: hex.EncodeToString(srpM1),
|
||||
}
|
||||
|
||||
var loginTwoResponseResult api.LoginTwoResponse
|
||||
loginTwoResponse, err := httpClient.
|
||||
R().
|
||||
SetBody(LoginTwoRequest).
|
||||
SetResult(&loginTwoResponseResult).
|
||||
Post(fmt.Sprintf("%v/v1/auth/login2", config.INFISICAL_URL))
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
if loginTwoResponse.StatusCode() > 299 {
|
||||
return nil, fmt.Errorf("ops, unsuccessful response code. [response=%v]", loginTwoResponse)
|
||||
}
|
||||
|
||||
return &loginTwoResponseResult, nil
|
||||
return &loginOneResponseResult, &loginTwoResponseResult, nil
|
||||
}
|
||||
|
||||
func shouldOverrideLoginPrompt(currentLoggedInUserEmail string) (bool, error) {
|
||||
@ -237,3 +344,21 @@ func shouldOverrideLoginPrompt(currentLoggedInUserEmail string) (bool, error) {
|
||||
}
|
||||
return result == "Yes", err
|
||||
}
|
||||
|
||||
func generateFromPassword(password string, salt []byte, p *params) (hash []byte, err error) {
|
||||
hash = argon2.IDKey([]byte(password), salt, p.iterations, p.memory, p.parallelism, p.keyLength)
|
||||
return hash, nil
|
||||
}
|
||||
|
||||
func askForMFACode() string {
|
||||
mfaCodePromptUI := promptui.Prompt{
|
||||
Label: "Enter the 2FA verification code sent to your email",
|
||||
}
|
||||
|
||||
mfaVerifyCode, err := mfaCodePromptUI.Run()
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
return mfaVerifyCode
|
||||
}
|
||||
|
@ -4,7 +4,6 @@ Copyright (c) 2023 Infisical Inc.
package cmd

import (
    "fmt"
    "os"

    "github.com/Infisical/infisical-merge/packages/util"
@ -27,8 +26,6 @@ var resetCmd = &cobra.Command{
        util.HandleError(err)
    }

    fmt.Println(pathToDir)

    os.RemoveAll(pathToDir)

    // delete keyring
||||
|
@ -54,9 +54,12 @@ var runCmd = &cobra.Command{
|
||||
return nil
|
||||
},
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
envName, err := cmd.Flags().GetString("env")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
environmentName, _ := cmd.Flags().GetString("env")
|
||||
if !cmd.Flags().Changed("env") {
|
||||
environmentFromWorkspace := util.GetEnvelopmentBasedOnGitBranch()
|
||||
if environmentFromWorkspace != "" {
|
||||
environmentName = environmentFromWorkspace
|
||||
}
|
||||
}
|
||||
|
||||
infisicalToken, err := cmd.Flags().GetString("token")
|
||||
@ -74,7 +77,12 @@ var runCmd = &cobra.Command{
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: envName, InfisicalToken: infisicalToken})
|
||||
tagSlugs, err := cmd.Flags().GetString("tags")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs})
|
||||
|
||||
if err != nil {
|
||||
util.HandleError(err, "Could not fetch secrets", "If you are using a service token to fetch secrets, please ensure it is valid")
|
||||
@ -148,6 +156,7 @@ func init() {
|
||||
runCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
|
||||
runCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
|
||||
runCmd.Flags().StringP("command", "c", "", "chained commands to execute (e.g. \"npm install && npm run dev; echo ...\")")
|
||||
runCmd.Flags().StringP("tags", "t", "", "filter secrets by tag slugs ")
|
||||
}
|
||||
|
||||
// Will execute a single command and pass in the given secrets into the process
|
||||
|
@ -19,7 +19,6 @@ import (
|
||||
"github.com/Infisical/infisical-merge/packages/util"
|
||||
"github.com/Infisical/infisical-merge/packages/visualize"
|
||||
"github.com/go-resty/resty/v2"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
@ -31,9 +30,12 @@ var secretsCmd = &cobra.Command{
|
||||
PreRun: toggleDebug,
|
||||
Args: cobra.NoArgs,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
environmentName, err := cmd.Flags().GetString("env")
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
environmentName, _ := cmd.Flags().GetString("env")
|
||||
if !cmd.Flags().Changed("env") {
|
||||
environmentFromWorkspace := util.GetEnvelopmentBasedOnGitBranch()
|
||||
if environmentFromWorkspace != "" {
|
||||
environmentName = environmentFromWorkspace
|
||||
}
|
||||
}
|
||||
|
||||
infisicalToken, err := cmd.Flags().GetString("token")
|
||||
@ -46,7 +48,12 @@ var secretsCmd = &cobra.Command{
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken})
|
||||
tagSlugs, err := cmd.Flags().GetString("tags")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs})
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
@ -87,13 +94,14 @@ var secretsSetCmd = &cobra.Command{
|
||||
PreRun: toggleDebug,
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
environmentName, err := cmd.Flags().GetString("env")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
util.RequireLocalWorkspaceFile()
|
||||
|
||||
if !util.IsSecretEnvironmentValid(environmentName) {
|
||||
util.PrintMessageAndExit("You have entered a invalid environment name", "Environment names can only be prod, dev, test or staging")
|
||||
environmentName, _ := cmd.Flags().GetString("env")
|
||||
if !cmd.Flags().Changed("env") {
|
||||
environmentFromWorkspace := util.GetEnvelopmentBasedOnGitBranch()
|
||||
if environmentFromWorkspace != "" {
|
||||
environmentName = environmentFromWorkspace
|
||||
}
|
||||
}
|
||||
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
@ -148,11 +156,11 @@ var secretsSetCmd = &cobra.Command{
|
||||
for _, arg := range args {
|
||||
splitKeyValueFromArg := strings.SplitN(arg, "=", 2)
|
||||
if splitKeyValueFromArg[0] == "" || splitKeyValueFromArg[1] == "" {
|
||||
util.PrintMessageAndExit("ensure that each secret has a none empty key and value. Modify the input and try again")
|
||||
util.PrintErrorMessageAndExit("ensure that each secret has a none empty key and value. Modify the input and try again")
|
||||
}
|
||||
|
||||
if unicode.IsNumber(rune(splitKeyValueFromArg[0][0])) {
|
||||
util.PrintMessageAndExit("keys of secrets cannot start with a number. Modify the key name(s) and try again")
|
||||
util.PrintErrorMessageAndExit("keys of secrets cannot start with a number. Modify the key name(s) and try again")
|
||||
}
|
||||
|
||||
// Key and value from argument
|
||||
@ -267,11 +275,12 @@ var secretsDeleteCmd = &cobra.Command{
|
||||
PreRun: toggleDebug,
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
environmentName, err := cmd.Flags().GetString("env")
|
||||
if err != nil {
|
||||
log.Errorln("Unable to parse the environment name flag")
|
||||
log.Debugln(err)
|
||||
return
|
||||
environmentName, _ := cmd.Flags().GetString("env")
|
||||
if !cmd.Flags().Changed("env") {
|
||||
environmentFromWorkspace := util.GetEnvelopmentBasedOnGitBranch()
|
||||
if environmentFromWorkspace != "" {
|
||||
environmentName = environmentFromWorkspace
|
||||
}
|
||||
}
|
||||
|
||||
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
|
||||
@ -303,7 +312,7 @@ var secretsDeleteCmd = &cobra.Command{
|
||||
|
||||
if len(invalidSecretNamesThatDoNotExist) != 0 {
|
||||
message := fmt.Sprintf("secret name(s) [%v] does not exist in your project. To see which secrets exist run [infisical secrets]", strings.Join(invalidSecretNamesThatDoNotExist, ", "))
|
||||
util.PrintMessageAndExit(message)
|
||||
util.PrintErrorMessageAndExit(message)
|
||||
}
|
||||
|
||||
request := api.BatchDeleteSecretsBySecretIdsRequest{
|
||||
@ -327,14 +336,12 @@ var secretsDeleteCmd = &cobra.Command{
|
||||
}
|
||||
|
||||
func getSecretsByNames(cmd *cobra.Command, args []string) {
|
||||
environmentName, err := cmd.Flags().GetString("env")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
workspaceFileExists := util.WorkspaceConfigFileExistsInCurrentPath()
|
||||
if !workspaceFileExists {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
environmentName, _ := cmd.Flags().GetString("env")
|
||||
if !cmd.Flags().Changed("env") {
|
||||
environmentFromWorkspace := util.GetEnvelopmentBasedOnGitBranch()
|
||||
if environmentFromWorkspace != "" {
|
||||
environmentName = environmentFromWorkspace
|
||||
}
|
||||
}
|
||||
|
||||
infisicalToken, err := cmd.Flags().GetString("token")
|
||||
@ -342,7 +349,12 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken})
|
||||
tagSlugs, err := cmd.Flags().GetString("tags")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs})
|
||||
if err != nil {
|
||||
util.HandleError(err, "To fetch all secrets")
|
||||
}
|
||||
@ -370,14 +382,12 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
|
||||
}
|
||||
|
||||
func generateExampleEnv(cmd *cobra.Command, args []string) {
|
||||
environmentName, err := cmd.Flags().GetString("env")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
workspaceFileExists := util.WorkspaceConfigFileExistsInCurrentPath()
|
||||
if !workspaceFileExists {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
environmentName, _ := cmd.Flags().GetString("env")
|
||||
if !cmd.Flags().Changed("env") {
|
||||
environmentFromWorkspace := util.GetEnvelopmentBasedOnGitBranch()
|
||||
if environmentFromWorkspace != "" {
|
||||
environmentName = environmentFromWorkspace
|
||||
}
|
||||
}
|
||||
|
||||
infisicalToken, err := cmd.Flags().GetString("token")
|
||||
@ -385,12 +395,22 @@ func generateExampleEnv(cmd *cobra.Command, args []string) {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken})
|
||||
tagSlugs, err := cmd.Flags().GetString("tags")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs})
|
||||
if err != nil {
|
||||
util.HandleError(err, "To fetch all secrets")
|
||||
}
|
||||
|
||||
tagsHashToSecretKey := make(map[string]int)
|
||||
slugsToFilerBy := make(map[string]int)
|
||||
|
||||
for _, slug := range strings.Split(tagSlugs, ",") {
|
||||
slugsToFilerBy[slug] = 1
|
||||
}
|
||||
|
||||
type TagsAndSecrets struct {
|
||||
Secrets []models.SingleEnvironmentVariable
|
||||
@ -407,6 +427,25 @@ func generateExampleEnv(cmd *cobra.Command, args []string) {
|
||||
return len(secrets[i].Tags) > len(secrets[j].Tags)
|
||||
})
|
||||
|
||||
for i, secret := range secrets {
|
||||
filteredTag := []struct {
|
||||
ID string "json:\"_id\""
|
||||
Name string "json:\"name\""
|
||||
Slug string "json:\"slug\""
|
||||
Workspace string "json:\"workspace\""
|
||||
}{}
|
||||
|
||||
for _, secretTag := range secret.Tags {
|
||||
_, exists := slugsToFilerBy[secretTag.Slug]
|
||||
if !exists {
|
||||
filteredTag = append(filteredTag, secretTag)
|
||||
}
|
||||
}
|
||||
|
||||
secret.Tags = filteredTag
|
||||
secrets[i] = secret
|
||||
}
|
||||
|
||||
for _, secret := range secrets {
|
||||
listOfTagSlugs := []string{}
|
||||
|
||||
@ -470,6 +509,8 @@ func generateExampleEnv(cmd *cobra.Command, args []string) {
|
||||
return len(listOfsecretDetails[i].Tags) < len(listOfsecretDetails[j].Tags)
|
||||
})
|
||||
|
||||
tableOfContents := []string{}
|
||||
fullyGeneratedDocuments := []string{}
|
||||
for _, secretDetails := range listOfsecretDetails {
|
||||
listOfKeyValue := []string{}
|
||||
|
||||
@ -510,11 +551,22 @@ func generateExampleEnv(cmd *cobra.Command, args []string) {
|
||||
heading := CenterString(strings.Join(listOfTagNames, " & "), 80)
|
||||
|
||||
if len(listOfTagNames) == 0 {
|
||||
fmt.Printf("\n%s \n", strings.Join(listOfKeyValue, "\n \n"))
|
||||
fullyGeneratedDocuments = append(fullyGeneratedDocuments, fmt.Sprintf("\n%s \n", strings.Join(listOfKeyValue, "\n")))
|
||||
} else {
|
||||
fmt.Printf("\n\n\n%s \n%s \n", heading, strings.Join(listOfKeyValue, "\n \n"))
|
||||
fullyGeneratedDocuments = append(fullyGeneratedDocuments, fmt.Sprintf("\n\n\n%s \n%s \n", heading, strings.Join(listOfKeyValue, "\n")))
|
||||
tableOfContents = append(tableOfContents, strings.ToUpper(strings.Join(listOfTagNames, " & ")))
|
||||
}
|
||||
}
|
||||
|
||||
dashedList := []string{}
|
||||
for _, item := range tableOfContents {
|
||||
dashedList = append(dashedList, fmt.Sprintf("# - %s \n", item))
|
||||
}
|
||||
if len(dashedList) > 0 {
|
||||
fmt.Println(CenterString("TABLE OF CONTENTS", 80))
|
||||
fmt.Println(strings.Join(dashedList, ""))
|
||||
}
|
||||
fmt.Println(strings.Join(fullyGeneratedDocuments, ""))
|
||||
}
|
||||
|
||||
func CenterString(s string, numStars int) string {
|
||||
@ -567,5 +619,6 @@ func init() {
|
||||
secretsCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
|
||||
secretsCmd.PersistentFlags().String("env", "dev", "Used to select the environment name on which actions should be taken on")
|
||||
secretsCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
|
||||
secretsCmd.PersistentFlags().StringP("tags", "t", "", "filter secrets by tag slugs")
|
||||
rootCmd.AddCommand(secretsCmd)
|
||||
}
|
||||
|
@ -39,7 +39,9 @@ type Workspace struct {
}

type WorkspaceConfigFile struct {
    WorkspaceId string `json:"workspaceId"`
    WorkspaceId                   string            `json:"workspaceId"`
    DefaultEnvironment            string            `json:"defaultEnvironment"`
    GitBranchToEnvironmentMapping map[string]string `json:"gitBranchToEnvironmentMapping"`
}

type SymmetricEncryptionResult struct {
@ -49,6 +51,8 @@ type SymmetricEncryptionResult struct {
}

type GetAllSecretsParameters struct {
    Environment    string
    InfisicalToken string
    Environment              string
    EnvironmentPassedViaFlag bool
    InfisicalToken           string
    TagSlugs                 string
}
||||
|
@ -1,10 +1,14 @@
|
||||
package util
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/sha256"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type DecodedSymmetricEncryptionDetails = struct {
|
||||
@ -65,29 +69,29 @@ func RequireLogin() {
|
||||
}
|
||||
|
||||
if !currentUserDetails.IsUserLoggedIn {
|
||||
PrintMessageAndExit("You must be logged in to run this command. To login, run [infisical login]")
|
||||
PrintErrorMessageAndExit("You must be logged in to run this command. To login, run [infisical login]")
|
||||
}
|
||||
|
||||
if currentUserDetails.LoginExpired {
|
||||
PrintMessageAndExit("Your login expired, please login in again. To login, run [infisical login]")
|
||||
PrintErrorMessageAndExit("Your login expired, please login in again. To login, run [infisical login]")
|
||||
}
|
||||
|
||||
if currentUserDetails.UserCredentials.Email == "" && currentUserDetails.UserCredentials.JTWToken == "" && currentUserDetails.UserCredentials.PrivateKey == "" {
|
||||
PrintMessageAndExit("One or more of your login details is empty. Please try logging in again via by running [infisical login]")
|
||||
PrintErrorMessageAndExit("One or more of your login details is empty. Please try logging in again via by running [infisical login]")
|
||||
}
|
||||
}
|
||||
|
||||
func RequireServiceToken() {
|
||||
serviceToken := os.Getenv(INFISICAL_TOKEN_NAME)
|
||||
if serviceToken == "" {
|
||||
PrintMessageAndExit("No service token is found in your terminal")
|
||||
PrintErrorMessageAndExit("No service token is found in your terminal")
|
||||
}
|
||||
}
|
||||
|
||||
func RequireLocalWorkspaceFile() {
|
||||
workspaceFileExists := WorkspaceConfigFileExistsInCurrentPath()
|
||||
if !workspaceFileExists {
|
||||
PrintMessageAndExit("It looks you have not yet connected this project to Infisical", "To do so, run [infisical init] then run your command again")
|
||||
PrintErrorMessageAndExit("It looks you have not yet connected this project to Infisical", "To do so, run [infisical init] then run your command again")
|
||||
}
|
||||
|
||||
workspaceFile, err := GetWorkSpaceFromFile()
|
||||
@ -96,7 +100,7 @@ func RequireLocalWorkspaceFile() {
|
||||
}
|
||||
|
||||
if workspaceFile.WorkspaceId == "" {
|
||||
PrintMessageAndExit("Your project id is missing in your local config file. Please add it or run again [infisical init]")
|
||||
PrintErrorMessageAndExit("Your project id is missing in your local config file. Please add it or run again [infisical init]")
|
||||
}
|
||||
}
|
||||
|
||||
@ -110,3 +114,14 @@ func GetHashFromStringList(list []string) string {
|
||||
sum := sha256.Sum256(hash.Sum(nil))
|
||||
return fmt.Sprintf("%x", sum)
|
||||
}
|
||||
|
||||
func getCurrentBranch() (string, error) {
|
||||
cmd := exec.Command("git", "symbolic-ref", "--short", "HEAD")
|
||||
var out bytes.Buffer
|
||||
cmd.Stdout = &out
|
||||
err := cmd.Run()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return path.Base(strings.TrimSpace(out.String())), nil
|
||||
}
|
||||
|
@ -31,10 +31,10 @@ func PrintSuccessMessage(message string) {
    color.New(color.FgGreen).Println(message)
}

func PrintMessageAndExit(messages ...string) {
func PrintErrorMessageAndExit(messages ...string) {
    if len(messages) > 0 {
        for _, message := range messages {
            fmt.Println(message)
            fmt.Fprintln(os.Stderr, message)
        }
    }

@ -42,5 +42,5 @@ func PrintMessageAndExit(messages ...string) {
}

func printError(e error) {
    color.Red("Hmm, we ran into an error: %v", e)
    color.New(color.FgRed).Fprintf(os.Stderr, "Hmm, we ran into an error: %v", e)
}
||||
|
@ -62,7 +62,7 @@ func GetPlainTextSecretsViaServiceToken(fullServiceToken string) ([]models.Singl
|
||||
return plainTextSecrets, nil
|
||||
}
|
||||
|
||||
func GetPlainTextSecretsViaJTW(JTWToken string, receiversPrivateKey string, workspaceId string, environmentName string) ([]models.SingleEnvironmentVariable, error) {
|
||||
func GetPlainTextSecretsViaJTW(JTWToken string, receiversPrivateKey string, workspaceId string, environmentName string, tagSlugs string) ([]models.SingleEnvironmentVariable, error) {
|
||||
httpClient := resty.New()
|
||||
httpClient.SetAuthToken(JTWToken).
|
||||
SetHeader("Accept", "application/json")
|
||||
@ -85,6 +85,7 @@ func GetPlainTextSecretsViaJTW(JTWToken string, receiversPrivateKey string, work
|
||||
encryptedSecrets, err := api.CallGetSecretsV2(httpClient, api.GetEncryptedSecretsV2Request{
|
||||
WorkspaceId: workspaceId,
|
||||
Environment: environmentName,
|
||||
TagSlugs: tagSlugs,
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
@ -136,7 +137,7 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters) ([]models
|
||||
return nil, fmt.Errorf("unable to validate environment name because [err=%s]", err)
|
||||
}
|
||||
|
||||
secretsToReturn, errorToReturn = GetPlainTextSecretsViaJTW(loggedInUserDetails.UserCredentials.JTWToken, loggedInUserDetails.UserCredentials.PrivateKey, workspaceFile.WorkspaceId, params.Environment)
|
||||
secretsToReturn, errorToReturn = GetPlainTextSecretsViaJTW(loggedInUserDetails.UserCredentials.JTWToken, loggedInUserDetails.UserCredentials.PrivateKey, workspaceFile.WorkspaceId, params.Environment, params.TagSlugs)
|
||||
log.Debugf("GetAllEnvironmentVariables: Trying to fetch secrets JTW token [err=%s]", errorToReturn)
|
||||
|
||||
backupSecretsEncryptionKey := []byte(loggedInUserDetails.UserCredentials.PrivateKey)[0:32]
|
||||
@ -480,3 +481,27 @@ func DeleteBackupSecrets() error {
|
||||
|
||||
return os.RemoveAll(fullPathToSecretsBackupFolder)
|
||||
}
|
||||
|
||||
func GetEnvelopmentBasedOnGitBranch() string {
|
||||
branch, err := getCurrentBranch()
|
||||
if err != nil {
|
||||
log.Debugf("getEnvelopmentBasedOnGitBranch: [err=%s]", err)
|
||||
}
|
||||
|
||||
workspaceFile, err := GetWorkSpaceFromFile()
|
||||
if err != nil {
|
||||
log.Debugf("getEnvelopmentBasedOnGitBranch: [err=%s]", err)
|
||||
return ""
|
||||
}
|
||||
|
||||
envBasedOnGitBranch, ok := workspaceFile.GitBranchToEnvironmentMapping[branch]
|
||||
|
||||
log.Debugf("GetEnvelopmentBasedOnGitBranch: [envBasedOnGitBranch=%s] [ok=%s]", envBasedOnGitBranch, ok)
|
||||
|
||||
if err == nil && ok {
|
||||
return envBasedOnGitBranch
|
||||
} else {
|
||||
log.Debugf("getEnvelopmentBasedOnGitBranch: [err=%s]", err)
|
||||
return ""
|
||||
}
|
||||
}
|
||||
|
11
docs/cli/commands/reset.mdx
Normal file
@ -0,0 +1,11 @@
---
title: "infisical reset"
description: "Reset Infisical"
---

```bash
infisical reset
```

## Description
This command provides a way to clear all Infisical-generated configuration data, effectively resetting the software to its default settings. This can be an effective way to address any persistent issues that arise while using the CLI.
@ -18,4 +18,8 @@ If you are still experiencing trouble, please seek support.
<Accordion title="Can I fetch secrets with Infisical if I am offline?">
Yes. If you have previously retrieved secrets for a specific project and environment (such as dev, staging, or prod), the `run`/`secret` command will utilize the saved secrets, even when offline, on subsequent fetch attempts.

</Accordion>

<Accordion title="Can I upload the .infisical.json file that was generated?">
Yes. This is simply a configuration file and contains no sensitive data.
</Accordion>
26
docs/cli/project-config.mdx
Normal file
@ -0,0 +1,26 @@
---
title: "Project config file"
description: "Project config file & customization options"
---

To link your local project on your machine with an Infisical project, we suggest using the infisical init CLI command. This will generate a `.infisical.json` file in the root directory of your project.

The `.infisical.json` file specifies various parameters, such as the Infisical project to retrieve secrets from, along with other configuration options. Furthermore, you can define additional properties in the file to further tailor your local development experience.

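For example, a minimal sketch of that first step (this assumes the CLI is already installed and that you run it from the root of your project):

```bash
# Link the current directory to an Infisical project;
# this generates the .infisical.json config file shown below.
infisical init
```
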
## Set Infisical environment based on GitHub branch
When fetching your secrets from Infisical, you can switch between environments by using the `--env` flag. However, in certain cases, you may prefer the environment to be automatically mapped based on the current GitHub branch you are working on.
To achieve this, simply add the `gitBranchToEnvironmentMapping` property to your configuration file, as shown below.

```json .infisical.json
{
  "workspaceId": "63ee5410a45f7a1ed39ba118",
  "gitBranchToEnvironmentMapping": {
    "branchName": "dev",
    "anotherBranchName": "staging"
  }
}
```

### How it works
After configuring this property, every time you use the CLI with the specified configuration file, it will automatically verify if there is a corresponding environment mapping for the current Github branch you are on.
If it exists, the CLI will use that environment to retrieve secrets. You can override this behavior by explicitly using the `--env` flag while interacting with the CLI.
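For instance, an illustrative sketch (the npm script is assumed; any command works), using the mapping above while the branch `anotherBranchName` is checked out:

```bash
# Secrets are pulled from the "staging" environment via the branch mapping
infisical run -- npm run dev

# The --env flag takes precedence over the branch mapping
infisical run --env=dev -- npm run dev
```
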
@ -5,8 +5,6 @@ description: "How to sync your secrets among various 3rd-party services with Inf

Integrations allow environment variables to be synced across your entire infrastructure from local development to CI/CD and production.

We're still relatively early with integrations. 6+ integrations are already avaiable but expect more coming very soon.

<Card title="View integrations" icon="link" href="/integrations/overview">
  View all available integrations and their guides
</Card>
18
docs/getting-started/dashboard/mfa.mdx
Normal file
@ -0,0 +1,18 @@
---
title: "MFA"
description: "Secure your Infisical account with MFA"
---

MFA requires users to provide multiple forms of identification to access their account. Currently, this means logging in with your password and a 6-digit code sent to your email.

## Email 2FA

Check the box in Personal Settings > Two-factor Authentication to enable email-based 2FA.

![Email-based MFA](../../images/mfa-email.png)

<Note>
  Infisical currently supports email-based 2FA. We're actively working on
  building support for other forms of identification via SMS and Authenticator
  App.
</Note>
BIN
docs/images/email-socketlabs-credentials.png
Normal file
Size: 315 KiB
BIN
docs/images/email-socketlabs-dashboard.png
Normal file
Size: 468 KiB
BIN
docs/images/email-socketlabs-domains.png
Normal file
Size: 332 KiB
BIN
docs/images/integrations-circleci-auth.png
Normal file
Size: 162 KiB
BIN
docs/images/integrations-circleci-create.png
Normal file
Size: 180 KiB
BIN
docs/images/integrations-circleci-token.png
Normal file
Size: 176 KiB
BIN
docs/images/integrations-circleci.png
Normal file
Size: 339 KiB