Mirror of https://github.com/Infisical/infisical.git (synced 2025-03-20 22:32:28 +00:00)

Compare commits (302 commits)
SHA1 | Author | Date | |
---|---|---|---|
da857f321b | |||
c7dd028771 | |||
3c94bacda9 | |||
8e85847de3 | |||
0c10bbb569 | |||
fba54ae0c6 | |||
e243c72ca6 | |||
23ea6fd4f9 | |||
3f9f2ef238 | |||
77cb20f5c7 | |||
ddf630c269 | |||
39adb9a0c2 | |||
97fde96b7b | |||
190391e493 | |||
6f6df3e63a | |||
23c740d225 | |||
702d4de3b5 | |||
445fa35ab5 | |||
9868476965 | |||
bfa6b955ca | |||
90f5934440 | |||
0adc3d2027 | |||
edf0294d51 | |||
8850b44115 | |||
17f9e53779 | |||
a61233d2ba | |||
2022988e77 | |||
409de81bd2 | |||
2b289ddf77 | |||
b066a55ead | |||
8dfc0138f5 | |||
517f508e44 | |||
2f1a671121 | |||
2fb4b261a8 | |||
9c3c745fdf | |||
6a75147719 | |||
295b363d8a | |||
d96b5943b9 | |||
8fd2578a6d | |||
cc809a6bc0 | |||
66659c8fc8 | |||
31293bbe06 | |||
1c3488f8db | |||
20e536cec0 | |||
e8b498ca6d | |||
b82f8606a8 | |||
ab27fbccf7 | |||
d50de9366b | |||
4c56bca4e7 | |||
a60774a3f4 | |||
03426ee7f2 | |||
428022d1a2 | |||
b5bcd0a308 | |||
03c72ea00f | |||
a486390015 | |||
8dc47110a0 | |||
52a6fe64a7 | |||
081ef94399 | |||
eebde3ad12 | |||
6ab6147ac8 | |||
dd7e8d254b | |||
2765f7e488 | |||
2d3a276dc2 | |||
55eddee6ce | |||
ab751d0db3 | |||
b2bd0ba340 | |||
224fa25fdf | |||
e6539a5566 | |||
6115a311ad | |||
a685ac3e73 | |||
9a22975732 | |||
cd0b2e3a26 | |||
80a3c196ae | |||
b0c541f8dc | |||
6188b04544 | |||
8ba4f964d4 | |||
0d2caddb12 | |||
4570c35658 | |||
72f7d81b80 | |||
231fa61805 | |||
9f74affd3a | |||
f58e1e1d6c | |||
074cf695b2 | |||
07c056523f | |||
65eb037020 | |||
c84add0a2a | |||
ace0e9c56f | |||
498705f330 | |||
7892624709 | |||
d8889beaf7 | |||
6e67304e92 | |||
8b23e89a64 | |||
7611b999fe | |||
aba8feb985 | |||
747cc1134c | |||
db05412865 | |||
679b1d9c23 | |||
a37cf91702 | |||
80d219c3e0 | |||
5ea5887146 | |||
13838861fb | |||
09c60322db | |||
68bf0b9efe | |||
3ec68daf2e | |||
9fafe02e16 | |||
56da34d343 | |||
086dd621b5 | |||
56a14925da | |||
c13cb23942 | |||
31df4a26fa | |||
9f9273bb02 | |||
86fd876850 | |||
b56d9287e4 | |||
a35e235744 | |||
77a44b4490 | |||
594f846943 | |||
8ae43cdcf6 | |||
1d72d310e5 | |||
b0ffac2f00 | |||
5ba851adff | |||
e72e6cf2b7 | |||
0ac40acc40 | |||
56710657bd | |||
92f4979715 | |||
1e9118df33 | |||
e16c0e53ff | |||
0d57a26925 | |||
1bd180596e | |||
fca003dfd7 | |||
f1ef23874c | |||
16883cf168 | |||
1781b71399 | |||
fb62fa4d32 | |||
ed148a542d | |||
a4f7843727 | |||
48cd84ce77 | |||
3859a7e09b | |||
76d0127029 | |||
a94cd8c85c | |||
ee555f3f15 | |||
bd230a8b7d | |||
a4926d8833 | |||
7560d2f673 | |||
44b2bc1795 | |||
3ccc6e5d5c | |||
ccb579ecfd | |||
29f5e8aa78 | |||
d64357af61 | |||
37c91ae652 | |||
3a4cfa0834 | |||
cef45c2155 | |||
5143fc6eee | |||
186382619c | |||
91e70c5476 | |||
216ace9f61 | |||
6b99582a66 | |||
ea0fe1b92e | |||
72810acf2e | |||
a013768313 | |||
a660261678 | |||
7d181f334c | |||
46ab27af1a | |||
25bb966a32 | |||
c086579260 | |||
3d14bc9a00 | |||
75cd7a0f15 | |||
4722bb8fcd | |||
f2175b948c | |||
6f3d102ecb | |||
54fa39f347 | |||
52697dea97 | |||
c99b207e9e | |||
4886537a56 | |||
ca688764a3 | |||
71cf54c28b | |||
1878bed10a | |||
87fd5e33f1 | |||
ffda30bd65 | |||
716795532e | |||
f9ff99748b | |||
723fa153be | |||
1871d1a842 | |||
2c4e066f64 | |||
b371dad506 | |||
a6d4431940 | |||
871d80aad5 | |||
6711979445 | |||
cb080b356c | |||
9950c5e02d | |||
22a11be4e0 | |||
6e01c80282 | |||
4e14f84df9 | |||
55522404b4 | |||
4ef8c273f7 | |||
61c17ccc5e | |||
2832476c2b | |||
c0fc74b62a | |||
54caaffe3a | |||
55f0a491cb | |||
a940fa210a | |||
5162ba9b91 | |||
3b6022de64 | |||
bf743f5f72 | |||
3e177539d5 | |||
5743dd3a8c | |||
9f8ad95a59 | |||
3c05a4cebd | |||
bc955a9afd | |||
ec8d86e662 | |||
bc70bedb78 | |||
7a4b77ce59 | |||
8600cee54c | |||
fe9573ea3c | |||
61db6c54c2 | |||
65093c73c5 | |||
9986521e41 | |||
655f015109 | |||
3cea59ce5d | |||
a184192452 | |||
2dbcab32d5 | |||
13aeeb4731 | |||
233a468127 | |||
8a9e05b08f | |||
3ef2ac8a77 | |||
fdac590a02 | |||
dd960aa5f0 | |||
0bd9a848c4 | |||
1b86c58f91 | |||
d5166d343d | |||
b315cf6022 | |||
37de32ec90 | |||
6eb81802c3 | |||
e6068a6f7f | |||
c059c088d1 | |||
b530847edc | |||
c87c2dadd7 | |||
7b1ff04436 | |||
83aa440b62 | |||
a555ef836b | |||
528601e442 | |||
13acb19e9f | |||
079063157f | |||
e38933c0b3 | |||
d09b406c4e | |||
a5eba8e722 | |||
7acb4cc22a | |||
b95ab6c6a1 | |||
038445e13e | |||
07e9dd5a39 | |||
6ec520d358 | |||
06bfd2429b | |||
099c4836e6 | |||
ddf8ceb45d | |||
8a49e0817a | |||
88908297f5 | |||
cf0e111c09 | |||
ae0ee727fa | |||
be2945c445 | |||
237a10da1e | |||
1baf14084d | |||
a6387e7552 | |||
a6f480d3f8 | |||
0413059fbe | |||
65f049f6ac | |||
62f886a3b3 | |||
271ca148e3 | |||
8aa294309f | |||
ca3233110b | |||
1e4f6a4b9d | |||
a73fc6de19 | |||
0bb750488b | |||
32f98f83c5 | |||
6943785ce5 | |||
86558a8221 | |||
f2c35a302d | |||
0794b6132a | |||
062c287e75 | |||
e67d68a7b9 | |||
054acc689a | |||
9b95d18b85 | |||
7f9bc77253 | |||
b92907aca6 | |||
c4ee03c73b | |||
89ba80740b | |||
606a5e5317 | |||
f859bf528e | |||
ad504fa84e | |||
e7ac74c5a0 | |||
b80504ae00 | |||
68f1887d66 | |||
201c8352e3 | |||
a0f0ffe566 | |||
4b4e8e2bfc | |||
4db4c172c1 | |||
08c54a910f | |||
00fee63ff3 | |||
6b80cd6590 | |||
840efbdc2f | |||
59ab4bf7f9 | |||
d4bc92bd5b | |||
7efdbeb787 | |||
43daff29dc |
@@ -64,7 +64,7 @@ POSTHOG_PROJECT_API_KEY=
STRIPE_SECRET_KEY=
STRIPE_PUBLISHABLE_KEY=
STRIPE_WEBHOOK_SECRET=
STRIPE_PRODUCT_CARD_AUTH=
STRIPE_PRODUCT_PRO=
STRIPE_PRODUCT_STARTER=
STRIPE_PRODUCT_TEAM=
STRIPE_PRODUCT_PRO=
NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=

@@ -1,3 +1,4 @@
node_modules
built
healthcheck.js
tailwind.config.js

83 .github/values.yaml (vendored)
@ -1,36 +1,93 @@
|
||||
#####
|
||||
# INFISICAL K8 DEFAULT VALUES FILE
|
||||
# PLEASE REPLACE VALUES/EDIT AS REQUIRED
|
||||
#####
|
||||
|
||||
nameOverride: ""
|
||||
|
||||
frontend:
|
||||
replicaCount: 1
|
||||
name: frontend
|
||||
podAnnotations: {}
|
||||
deploymentAnnotations:
|
||||
secrets.infisical.com/auto-reload: "true"
|
||||
replicaCount: 2
|
||||
image:
|
||||
repository:
|
||||
pullPolicy: Always
|
||||
repository: infisical/frontend
|
||||
pullPolicy: Always
|
||||
tag: "latest"
|
||||
kubeSecretRef: managed-secret-frontend
|
||||
service:
|
||||
# type of the frontend service
|
||||
type: ClusterIP
|
||||
# define the nodePort if service type is NodePort
|
||||
# nodePort:
|
||||
annotations: {}
|
||||
|
||||
backend:
|
||||
replicaCount: 1
|
||||
name: backend
|
||||
podAnnotations: {}
|
||||
deploymentAnnotations:
|
||||
secrets.infisical.com/auto-reload: "true"
|
||||
replicaCount: 2
|
||||
image:
|
||||
repository:
|
||||
repository: infisical/backend
|
||||
pullPolicy: Always
|
||||
tag: "latest"
|
||||
kubeSecretRef: managed-backend-secret
|
||||
service:
|
||||
annotations: {}
|
||||
|
||||
mongodb:
|
||||
name: mongodb
|
||||
podAnnotations: {}
|
||||
image:
|
||||
repository: mongo
|
||||
pullPolicy: IfNotPresent
|
||||
tag: "latest"
|
||||
service:
|
||||
annotations: {}
|
||||
|
||||
# By default the backend will be connected to a Mongo instance in the cluster.
|
||||
# However, it is recommended to add a managed document DB connection string because the DB instance in the cluster does not have persistence yet ( data will be deleted on next deploy).
|
||||
# Learn about connection string type here https://www.mongodb.com/docs/manual/reference/connection-string/
|
||||
mongodbConnection: {}
|
||||
# externalMongoDBConnectionString: <>
|
||||
|
||||
ingress:
|
||||
enabled: true
|
||||
annotations:
|
||||
kubernetes.io/ingress.class: "nginx"
|
||||
cert-manager.io/cluster-issuer: "letsencrypt-prod"
|
||||
hostName: gamma.infisical.com
|
||||
hostName: gamma.infisical.com # replace with your domain
|
||||
frontend:
|
||||
path: /
|
||||
pathType: Prefix
|
||||
backend:
|
||||
path: /api
|
||||
pathType: Prefix
|
||||
tls:
|
||||
- secretName: echo-tls
|
||||
hosts:
|
||||
- gamma.infisical.com
|
||||
tls: []
|
||||
|
||||
backendEnvironmentVariables:
|
||||
|
||||
frontendEnvironmentVariables:
|
||||
## Complete Ingress example
|
||||
# ingress:
|
||||
# enabled: true
|
||||
# annotations:
|
||||
# kubernetes.io/ingress.class: "nginx"
|
||||
# cert-manager.io/issuer: letsencrypt-nginx
|
||||
# hostName: k8.infisical.com
|
||||
# frontend:
|
||||
# path: /
|
||||
# pathType: Prefix
|
||||
# backend:
|
||||
# path: /api
|
||||
# pathType: Prefix
|
||||
# tls:
|
||||
# - secretName: letsencrypt-nginx
|
||||
# hosts:
|
||||
# - k8.infisical.com
|
||||
|
||||
###
|
||||
### YOU MUST FILL IN ALL SECRETS BELOW
|
||||
###
|
||||
backendEnvironmentVariables: {}
|
||||
|
||||
frontendEnvironmentVariables: {}
|
||||
|
17 .github/workflows/release_build.yml (vendored)
@@ -4,7 +4,7 @@ on:
push:
# run only against tags
tags:
- 'v*'
- "v*"

permissions:
contents: write
@@ -18,10 +18,16 @@ jobs:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3
with:
go-version: '>=1.19.3'
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: libssl1.1 => libssl1.0-dev for OSXCross
@@ -33,19 +39,18 @@ jobs:
run: |
mkdir ../../osxcross
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v2
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser
version: latest
args: release --rm-dist
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- name: Publish to CloudSmith
- name: Publish to CloudSmith
run: sh cli/upload_to_cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

|
@ -14,6 +14,9 @@ before:
|
||||
builds:
|
||||
- id: darwin-build
|
||||
binary: infisical
|
||||
ldflags: -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
|
||||
flags:
|
||||
- -trimpath
|
||||
env:
|
||||
- CGO_ENABLED=1
|
||||
- CC=/home/runner/work/osxcross/target/bin/o64-clang
|
||||
@ -24,10 +27,14 @@ builds:
|
||||
- goos: darwin
|
||||
goarch: "386"
|
||||
dir: ./cli
|
||||
|
||||
- id: all-other-builds
|
||||
env:
|
||||
- CGO_ENABLED=0
|
||||
binary: infisical
|
||||
ldflags: -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
|
||||
flags:
|
||||
- -trimpath
|
||||
goos:
|
||||
- freebsd
|
||||
- linux
|
||||
@ -61,18 +68,20 @@ archives:
|
||||
|
||||
release:
|
||||
replace_existing_draft: true
|
||||
mode: 'replace'
|
||||
mode: "replace"
|
||||
|
||||
checksum:
|
||||
name_template: 'checksums.txt'
|
||||
name_template: "checksums.txt"
|
||||
|
||||
snapshot:
|
||||
name_template: "{{ incpatch .Version }}"
|
||||
name_template: "{{ incpatch .Version }}-devel"
|
||||
|
||||
changelog:
|
||||
sort: asc
|
||||
filters:
|
||||
exclude:
|
||||
- '^docs:'
|
||||
- '^test:'
|
||||
- "^docs:"
|
||||
- "^test:"
|
||||
|
||||
# publishers:
|
||||
# - name: fury.io
|
||||
@ -80,6 +89,7 @@ changelog:
|
||||
# - infisical
|
||||
# dir: "{{ dir .ArtifactPath }}"
|
||||
# cmd: curl -F package=@{{ .ArtifactName }} https://{{ .Env.FURY_TOKEN }}@push.fury.io/infisical/
|
||||
|
||||
brews:
|
||||
- name: infisical
|
||||
tap:
|
||||
@ -91,31 +101,39 @@ brews:
|
||||
folder: Formula
|
||||
homepage: "https://infisical.com"
|
||||
description: "The official Infisical CLI"
|
||||
install: |-
|
||||
bin.install "infisical"
|
||||
bash_completion.install "completions/infisical.bash" => "infisical"
|
||||
zsh_completion.install "completions/infisical.zsh" => "_infisical"
|
||||
fish_completion.install "completions/infisical.fish"
|
||||
man1.install "manpages/infisical.1.gz"
|
||||
|
||||
nfpms:
|
||||
- id: infisical
|
||||
package_name: infisical
|
||||
builds:
|
||||
- all-other-builds
|
||||
vendor: Infisical, Inc
|
||||
homepage: https://infisical.com/
|
||||
maintainer: Infisical, Inc
|
||||
description: The offical Infisical CLI
|
||||
license: MIT
|
||||
formats:
|
||||
- rpm
|
||||
- deb
|
||||
- apk
|
||||
- archlinux
|
||||
bindir: /usr/bin
|
||||
contents:
|
||||
- src: ./completions/infisical.bash
|
||||
dst: /etc/bash_completion.d/infisical
|
||||
- src: ./completions/infisical.fish
|
||||
dst: /usr/share/fish/vendor_completions.d/infisical.fish
|
||||
- src: ./completions/infisical.zsh
|
||||
dst: /usr/share/zsh/site-functions/_infisical
|
||||
- src: ./manpages/infisical.1.gz
|
||||
dst: /usr/share/man/man1/infisical.1.gz
|
||||
- id: infisical
|
||||
package_name: infisical
|
||||
builds:
|
||||
- all-other-builds
|
||||
vendor: Infisical, Inc
|
||||
homepage: https://infisical.com/
|
||||
maintainer: Infisical, Inc
|
||||
description: The offical Infisical CLI
|
||||
license: MIT
|
||||
formats:
|
||||
- rpm
|
||||
- deb
|
||||
- apk
|
||||
- archlinux
|
||||
bindir: /usr/bin
|
||||
contents:
|
||||
- src: ./completions/infisical.bash
|
||||
dst: /etc/bash_completion.d/infisical
|
||||
- src: ./completions/infisical.fish
|
||||
dst: /usr/share/fish/vendor_completions.d/infisical.fish
|
||||
- src: ./completions/infisical.zsh
|
||||
dst: /usr/share/zsh/site-functions/_infisical
|
||||
- src: ./manpages/infisical.1.gz
|
||||
dst: /usr/share/man/man1/infisical.1.gz
|
||||
|
||||
scoop:
|
||||
bucket:
|
||||
owner: Infisical
|
||||
@ -126,16 +144,16 @@ scoop:
|
||||
homepage: "https://infisical.com"
|
||||
description: "The official Infisical CLI"
|
||||
license: MIT
|
||||
|
||||
aurs:
|
||||
-
|
||||
name: infisical-bin
|
||||
- name: infisical-bin
|
||||
homepage: "https://infisical.com"
|
||||
description: "The official Infisical CLI"
|
||||
maintainers:
|
||||
- Infisical, Inc <support@infisical.com>
|
||||
license: MIT
|
||||
private_key: '{{ .Env.AUR_KEY }}'
|
||||
git_url: 'ssh://aur@aur.archlinux.org/infisical-bin.git'
|
||||
private_key: "{{ .Env.AUR_KEY }}"
|
||||
git_url: "ssh://aur@aur.archlinux.org/infisical-bin.git"
|
||||
package: |-
|
||||
# bin
|
||||
install -Dm755 "./infisical" "${pkgdir}/usr/bin/infisical"
|
||||
@ -150,19 +168,13 @@ aurs:
|
||||
install -Dm644 "./completions/infisical.fish" "${pkgdir}/usr/share/fish/vendor_completions.d/infisical.fish"
|
||||
# man pages
|
||||
install -Dm644 "./manpages/infisical.1.gz" "${pkgdir}/usr/share/man/man1/infisical.1.gz"
|
||||
|
||||
# dockers:
|
||||
# - dockerfile: goreleaser.dockerfile
|
||||
# - dockerfile: cli/docker/Dockerfile
|
||||
# goos: linux
|
||||
# goarch: amd64
|
||||
# ids:
|
||||
# - infisical
|
||||
# image_templates:
|
||||
# - "infisical/cli:{{ .Version }}"
|
||||
# - "infisical/cli:{{ .Major }}.{{ .Minor }}"
|
||||
# - "infisical/cli:{{ .Major }}"
|
||||
# - "infisical/cli:{{ .Version }}"
|
||||
# - "infisical/cli:latest"
|
||||
# build_flag_templates:
|
||||
# - "--label=org.label-schema.schema-version=1.0"
|
||||
# - "--label=org.label-schema.version={{.Version}}"
|
||||
# - "--label=org.label-schema.name={{.ProjectName}}"
|
||||
# - "--platform=linux/amd64"
|
2 Makefile
@@ -8,7 +8,7 @@ up-dev:
docker-compose -f docker-compose.dev.yml up --build

i-dev:
infisical export && infisical export > .env && docker-compose -f docker-compose.dev.yml up --build
infisical run -- docker-compose -f docker-compose.dev.yml up --build

up-prod:
docker-compose -f docker-compose.yml up --build

77 README.md
File diff suppressed because one or more lines are too long
18 backend/environment.d.ts (vendored)
@@ -3,8 +3,10 @@ export {};
declare global {
  namespace NodeJS {
    interface ProcessEnv {
      PORT: string;
      EMAIL_TOKEN_LIFETIME: string;
      ENCRYPTION_KEY: string;
      SALT_ROUNDS: string;
      JWT_AUTH_LIFETIME: string;
      JWT_AUTH_SECRET: string;
      JWT_REFRESH_LIFETIME: string;
@@ -19,23 +21,31 @@ declare global {
      CLIENT_ID_HEROKU: string;
      CLIENT_ID_VERCEL: string;
      CLIENT_ID_NETLIFY: string;
      CLIENT_ID_GITHUB: string;
      CLIENT_SECRET_HEROKU: string;
      CLIENT_SECRET_VERCEL: string;
      CLIENT_SECRET_NETLIFY: string;
      CLIENT_SECRET_GITHUB: string;
      CLIENT_SLUG_VERCEL: string;
      POSTHOG_HOST: string;
      POSTHOG_PROJECT_API_KEY: string;
      SENTRY_DSN: string;
      SITE_URL: string;
      SMTP_HOST: string;
      SMTP_NAME: string;
      SMTP_PASSWORD: string;
      SMTP_SECURE: string;
      SMTP_PORT: string;
      SMTP_USERNAME: string;
      STRIPE_PRODUCT_CARD_AUTH: string;
      STRIPE_PRODUCT_PRO: string;
      SMTP_PASSWORD: string;
      SMTP_FROM_ADDRESS: string;
      SMTP_FROM_NAME: string;
      STRIPE_PRODUCT_STARTER: string;
      STRIPE_PRODUCT_TEAM: string;
      STRIPE_PRODUCT_PRO: string;
      STRIPE_PUBLISHABLE_KEY: string;
      STRIPE_SECRET_KEY: string;
      STRIPE_WEBHOOK_SECRET: string;
      TELEMETRY_ENABLED: string;
      LICENSE_KEY: string;
    }
  }
}

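With this ProcessEnv augmentation, the newly added SMTP and Stripe variables are typed as string wherever process.env is read. A minimal illustration of the effect (not part of the diff):

// --- illustrative sketch, not part of the diff ---
// The augmented ProcessEnv above makes these reads `string` rather than
// `string | undefined`, so no extra narrowing is needed at call sites.
const fromAddress: string = process.env.SMTP_FROM_ADDRESS;
const starterProduct: string = process.env.STRIPE_PRODUCT_STARTER;
// Note: the declaration only changes compile-time types; a variable missing at
// runtime is still undefined, so a startup check remains useful.
// --- end sketch ---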
3574 backend/package-lock.json (generated)
File diff suppressed because it is too large
@ -1,4 +1,48 @@
|
||||
{
|
||||
"dependencies": {
|
||||
"@aws-sdk/client-secrets-manager": "^3.267.0",
|
||||
"@godaddy/terminus": "^4.11.2",
|
||||
"@octokit/rest": "^19.0.5",
|
||||
"@sentry/node": "^7.14.0",
|
||||
"@sentry/tracing": "^7.19.0",
|
||||
"@types/crypto-js": "^4.1.1",
|
||||
"@types/libsodium-wrappers": "^0.7.10",
|
||||
"await-to-js": "^3.0.0",
|
||||
"aws-sdk": "^2.1311.0",
|
||||
"axios": "^1.1.3",
|
||||
"bcrypt": "^5.1.0",
|
||||
"bigint-conversion": "^2.2.2",
|
||||
"builder-pattern": "^2.2.0",
|
||||
"cookie-parser": "^1.4.6",
|
||||
"cors": "^2.8.5",
|
||||
"crypto-js": "^4.1.1",
|
||||
"dotenv": "^16.0.1",
|
||||
"express": "^4.18.1",
|
||||
"express-rate-limit": "^6.7.0",
|
||||
"express-validator": "^6.14.2",
|
||||
"handlebars": "^4.7.7",
|
||||
"helmet": "^5.1.1",
|
||||
"js-yaml": "^4.1.0",
|
||||
"jsonwebtoken": "^9.0.0",
|
||||
"jsrp": "^0.2.4",
|
||||
"libsodium-wrappers": "^0.7.10",
|
||||
"lodash": "^4.17.21",
|
||||
"mongoose": "^6.7.2",
|
||||
"nodemailer": "^6.8.0",
|
||||
"posthog-node": "^2.2.2",
|
||||
"query-string": "^7.1.3",
|
||||
"request-ip": "^3.3.0",
|
||||
"rimraf": "^3.0.2",
|
||||
"stripe": "^10.7.0",
|
||||
"swagger-autogen": "^2.22.0",
|
||||
"swagger-ui-express": "^4.6.0",
|
||||
"tweetnacl": "^1.0.3",
|
||||
"tweetnacl-util": "^0.15.1",
|
||||
"typescript": "^4.9.3",
|
||||
"utility-types": "^3.10.0",
|
||||
"winston": "^3.8.2",
|
||||
"winston-loki": "^6.0.6"
|
||||
},
|
||||
"name": "infisical-api",
|
||||
"version": "1.0.0",
|
||||
"main": "src/index.js",
|
||||
@ -36,6 +80,7 @@
|
||||
"@types/express": "^4.17.14",
|
||||
"@types/jest": "^29.2.4",
|
||||
"@types/jsonwebtoken": "^8.5.9",
|
||||
"@types/lodash": "^4.14.191",
|
||||
"@types/node": "^18.11.3",
|
||||
"@types/nodemailer": "^6.4.6",
|
||||
"@types/supertest": "^2.0.12",
|
||||
@ -73,45 +118,5 @@
|
||||
"suiteNameTemplate": "{filepath}",
|
||||
"classNameTemplate": "{classname}",
|
||||
"titleTemplate": "{title}"
|
||||
},
|
||||
"dependencies": {
|
||||
"@godaddy/terminus": "^4.11.2",
|
||||
"@octokit/rest": "^19.0.5",
|
||||
"@sentry/node": "^7.14.0",
|
||||
"@sentry/tracing": "^7.19.0",
|
||||
"@types/crypto-js": "^4.1.1",
|
||||
"@types/libsodium-wrappers": "^0.7.10",
|
||||
"await-to-js": "^3.0.0",
|
||||
"axios": "^1.1.3",
|
||||
"bcrypt": "^5.1.0",
|
||||
"bigint-conversion": "^2.2.2",
|
||||
"cookie-parser": "^1.4.6",
|
||||
"cors": "^2.8.5",
|
||||
"crypto-js": "^4.1.1",
|
||||
"dotenv": "^16.0.1",
|
||||
"express": "^4.18.1",
|
||||
"express-rate-limit": "^6.7.0",
|
||||
"express-validator": "^6.14.2",
|
||||
"handlebars": "^4.7.7",
|
||||
"helmet": "^5.1.1",
|
||||
"js-yaml": "^4.1.0",
|
||||
"jsonwebtoken": "^9.0.0",
|
||||
"jsrp": "^0.2.4",
|
||||
"libsodium-wrappers": "^0.7.10",
|
||||
"mongoose": "^6.7.2",
|
||||
"nodemailer": "^6.8.0",
|
||||
"posthog-node": "^2.2.2",
|
||||
"query-string": "^7.1.3",
|
||||
"request-ip": "^3.3.0",
|
||||
"rimraf": "^3.0.2",
|
||||
"stripe": "^10.7.0",
|
||||
"swagger-autogen": "^2.22.0",
|
||||
"swagger-ui-express": "^4.6.0",
|
||||
"tweetnacl": "^1.0.3",
|
||||
"tweetnacl-util": "^0.15.1",
|
||||
"typescript": "^4.9.3",
|
||||
"utility-types": "^3.10.0",
|
||||
"winston": "^3.8.2",
|
||||
"winston-loki": "^6.0.6"
|
||||
}
|
||||
}
|
||||
|
@@ -50,6 +50,7 @@ import {
  serviceTokenData as v2ServiceTokenDataRouter,
  apiKeyData as v2APIKeyDataRouter,
  environment as v2EnvironmentRouter,
  tags as v2TagsRouter,
} from './routes/v2';

import { healthCheck } from './routes/status';
@@ -112,6 +113,7 @@ app.use('/api/v1/integration-auth', v1IntegrationAuthRouter);
app.use('/api/v2/users', v2UsersRouter);
app.use('/api/v2/organizations', v2OrganizationsRouter);
app.use('/api/v2/workspace', v2EnvironmentRouter);
app.use('/api/v2/workspace', v2TagsRouter);
app.use('/api/v2/workspace', v2WorkspaceRouter);
app.use('/api/v2/secret', v2SecretRouter); // deprecated
app.use('/api/v2/secrets', v2SecretsRouter);

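The app.ts change mounts a new tags router on the same /api/v2/workspace base path as the environment and workspace routers; Express simply tries each mounted router in registration order. A hypothetical sketch of such a sub-router, with an illustrative route path and handler that are not taken from the repo:

// --- illustrative sketch, not part of the diff; path and handler are invented ---
import express, { Request, Response } from 'express';

const router = express.Router();

// Mounted under /api/v2/workspace, this handler would answer
// GET /api/v2/workspace/:workspaceId/tags.
router.get('/:workspaceId/tags', (req: Request, res: Response) => {
  // The real controller would look up tags for req.params.workspaceId.
  return res.status(200).send({ workspaceTags: [] });
});

export default router;
// --- end sketch ---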
@@ -1,5 +1,6 @@
const PORT = process.env.PORT || 4000;
const EMAIL_TOKEN_LIFETIME = process.env.EMAIL_TOKEN_LIFETIME! || '86400';
const EMAIL_TOKEN_LIFETIME = parseInt(process.env.EMAIL_TOKEN_LIFETIME! || '86400');
const INVITE_ONLY_SIGNUP = process.env.INVITE_ONLY_SIGNUP == undefined ? false : process.env.INVITE_ONLY_SIGNUP
const ENCRYPTION_KEY = process.env.ENCRYPTION_KEY!;
const SALT_ROUNDS = parseInt(process.env.SALT_ROUNDS!) || 10;
const JWT_AUTH_LIFETIME = process.env.JWT_AUTH_LIFETIME! || '10d';
@@ -13,15 +14,18 @@ const MONGO_URL = process.env.MONGO_URL!;
const NODE_ENV = process.env.NODE_ENV! || 'production';
const VERBOSE_ERROR_OUTPUT = process.env.VERBOSE_ERROR_OUTPUT! === 'true' && true;
const LOKI_HOST = process.env.LOKI_HOST || undefined;
const CLIENT_SECRET_HEROKU = process.env.CLIENT_SECRET_HEROKU!;
const CLIENT_ID_AZURE = process.env.CLIENT_ID_AZURE!;
const TENANT_ID_AZURE = process.env.TENANT_ID_AZURE!;
const CLIENT_ID_HEROKU = process.env.CLIENT_ID_HEROKU!;
const CLIENT_ID_VERCEL = process.env.CLIENT_ID_VERCEL!;
const CLIENT_ID_NETLIFY = process.env.CLIENT_ID_NETLIFY!;
const CLIENT_ID_GITHUB = process.env.CLIENT_ID_GITHUB!;
const CLIENT_SECRET_AZURE = process.env.CLIENT_SECRET_AZURE!;
const CLIENT_SECRET_HEROKU = process.env.CLIENT_SECRET_HEROKU!;
const CLIENT_SECRET_VERCEL = process.env.CLIENT_SECRET_VERCEL!;
const CLIENT_SECRET_NETLIFY = process.env.CLIENT_SECRET_NETLIFY!;
const CLIENT_SECRET_GITHUB = process.env.CLIENT_SECRET_GITHUB!;
const CLIENT_SLUG_VERCEL= process.env.CLIENT_SLUG_VERCEL!;
const CLIENT_SLUG_VERCEL = process.env.CLIENT_SLUG_VERCEL!;
const POSTHOG_HOST = process.env.POSTHOG_HOST! || 'https://app.posthog.com';
const POSTHOG_PROJECT_API_KEY =
  process.env.POSTHOG_PROJECT_API_KEY! ||
@@ -35,9 +39,9 @@ const SMTP_USERNAME = process.env.SMTP_USERNAME!;
const SMTP_PASSWORD = process.env.SMTP_PASSWORD!;
const SMTP_FROM_ADDRESS = process.env.SMTP_FROM_ADDRESS!;
const SMTP_FROM_NAME = process.env.SMTP_FROM_NAME! || 'Infisical';
const STRIPE_PRODUCT_CARD_AUTH = process.env.STRIPE_PRODUCT_CARD_AUTH!;
const STRIPE_PRODUCT_PRO = process.env.STRIPE_PRODUCT_PRO!;
const STRIPE_PRODUCT_STARTER = process.env.STRIPE_PRODUCT_STARTER!;
const STRIPE_PRODUCT_PRO = process.env.STRIPE_PRODUCT_PRO!;
const STRIPE_PRODUCT_TEAM = process.env.STRIPE_PRODUCT_TEAM!;
const STRIPE_PUBLISHABLE_KEY = process.env.STRIPE_PUBLISHABLE_KEY!;
const STRIPE_SECRET_KEY = process.env.STRIPE_SECRET_KEY!;
const STRIPE_WEBHOOK_SECRET = process.env.STRIPE_WEBHOOK_SECRET!;
@@ -47,6 +51,7 @@ const LICENSE_KEY = process.env.LICENSE_KEY!;
export {
  PORT,
  EMAIL_TOKEN_LIFETIME,
  INVITE_ONLY_SIGNUP,
  ENCRYPTION_KEY,
  SALT_ROUNDS,
  JWT_AUTH_LIFETIME,
@@ -60,10 +65,13 @@ export {
  NODE_ENV,
  VERBOSE_ERROR_OUTPUT,
  LOKI_HOST,
  CLIENT_ID_AZURE,
  TENANT_ID_AZURE,
  CLIENT_ID_HEROKU,
  CLIENT_ID_VERCEL,
  CLIENT_ID_NETLIFY,
  CLIENT_ID_GITHUB,
  CLIENT_SECRET_AZURE,
  CLIENT_SECRET_HEROKU,
  CLIENT_SECRET_VERCEL,
  CLIENT_SECRET_NETLIFY,
@@ -80,9 +88,9 @@ export {
  SMTP_PASSWORD,
  SMTP_FROM_ADDRESS,
  SMTP_FROM_NAME,
  STRIPE_PRODUCT_CARD_AUTH,
  STRIPE_PRODUCT_PRO,
  STRIPE_PRODUCT_STARTER,
  STRIPE_PRODUCT_TEAM,
  STRIPE_PRODUCT_PRO,
  STRIPE_PUBLISHABLE_KEY,
  STRIPE_SECRET_KEY,
  STRIPE_WEBHOOK_SECRET,

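The switch to parseInt for EMAIL_TOKEN_LIFETIME matters because the value is later added to a Unix timestamp when token TTLs are computed (see the ttl: Math.floor(+new Date() / 1000) + EMAIL_TOKEN_LIFETIME lines in the controllers below); with the old string value, + would concatenate rather than add. A small illustration (timestamps invented):

// --- illustrative sketch, not part of the diff ---
const asString = '86400';                 // old behaviour: env value kept as a string
const asNumber = parseInt(asString);      // new behaviour: 86400

const nowInSeconds = 1700000000;          // e.g. Math.floor(Date.now() / 1000)

// String operand: '+' concatenates, yielding the bogus value "170000000086400".
const brokenTtl = nowInSeconds + (asString as unknown as number);

// Numeric operand: '+' adds, yielding the intended expiry one day out.
const ttl = nowInSeconds + asNumber;      // 1700086400
// --- end sketch ---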
@ -4,14 +4,21 @@ import jwt from 'jsonwebtoken';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import * as bigintConversion from 'bigint-conversion';
|
||||
const jsrp = require('jsrp');
|
||||
import { User } from '../../models';
|
||||
import { User, LoginSRPDetail } from '../../models';
|
||||
import { createToken, issueTokens, clearTokens } from '../../helpers/auth';
|
||||
import {
|
||||
ACTION_LOGIN,
|
||||
ACTION_LOGOUT
|
||||
} from '../../variables';
|
||||
import {
|
||||
NODE_ENV,
|
||||
JWT_AUTH_LIFETIME,
|
||||
JWT_AUTH_SECRET,
|
||||
JWT_REFRESH_SECRET
|
||||
} from '../../config';
|
||||
import { BadRequestError } from '../../utils/errors';
|
||||
import { EELogService } from '../../ee/services';
|
||||
import { getChannelFromUserAgent } from '../../utils/posthog'; // TODO: move this
|
||||
|
||||
declare module 'jsonwebtoken' {
|
||||
export interface UserIDJwtPayload extends jwt.JwtPayload {
|
||||
@ -19,8 +26,6 @@ declare module 'jsonwebtoken' {
|
||||
}
|
||||
}
|
||||
|
||||
const clientPublicKeys: any = {};
|
||||
|
||||
/**
|
||||
* Log in user step 1: Return [salt] and [serverPublicKey] as part of step 1 of SRP protocol
|
||||
* @param req
|
||||
@ -46,13 +51,15 @@ export const login1 = async (req: Request, res: Response) => {
|
||||
salt: user.salt,
|
||||
verifier: user.verifier
|
||||
},
|
||||
() => {
|
||||
async () => {
|
||||
// generate server-side public key
|
||||
const serverPublicKey = server.getPublicKey();
|
||||
clientPublicKeys[email] = {
|
||||
clientPublicKey,
|
||||
serverBInt: bigintConversion.bigintToBuf(server.bInt)
|
||||
};
|
||||
|
||||
await LoginSRPDetail.findOneAndReplace({ email: email }, {
|
||||
email: email,
|
||||
clientPublicKey: clientPublicKey,
|
||||
serverBInt: bigintConversion.bigintToBuf(server.bInt),
|
||||
}, { upsert: true, returnNewDocument: false })
|
||||
|
||||
return res.status(200).send({
|
||||
serverPublicKey,
|
||||
@ -85,15 +92,21 @@ export const login2 = async (req: Request, res: Response) => {
|
||||
|
||||
if (!user) throw new Error('Failed to find user');
|
||||
|
||||
const loginSRPDetailFromDB = await LoginSRPDetail.findOneAndDelete({ email: email })
|
||||
|
||||
if (!loginSRPDetailFromDB) {
|
||||
return BadRequestError(Error("It looks like some details from the first login are not found. Please try login one again"))
|
||||
}
|
||||
|
||||
const server = new jsrp.server();
|
||||
server.init(
|
||||
{
|
||||
salt: user.salt,
|
||||
verifier: user.verifier,
|
||||
b: clientPublicKeys[email].serverBInt
|
||||
b: loginSRPDetailFromDB.serverBInt
|
||||
},
|
||||
async () => {
|
||||
server.setClientPublicKey(clientPublicKeys[email].clientPublicKey);
|
||||
server.setClientPublicKey(loginSRPDetailFromDB.clientPublicKey);
|
||||
|
||||
// compare server and client shared keys
|
||||
if (server.checkClientProof(clientProof)) {
|
||||
@ -108,6 +121,18 @@ export const login2 = async (req: Request, res: Response) => {
|
||||
secure: NODE_ENV === 'production' ? true : false
|
||||
});
|
||||
|
||||
const loginAction = await EELogService.createAction({
|
||||
name: ACTION_LOGIN,
|
||||
userId: user._id
|
||||
});
|
||||
|
||||
loginAction && await EELogService.createLog({
|
||||
userId: user._id,
|
||||
actions: [loginAction],
|
||||
channel: getChannelFromUserAgent(req.headers['user-agent']),
|
||||
ipAddress: req.ip
|
||||
});
|
||||
|
||||
// return (access) token in response
|
||||
return res.status(200).send({
|
||||
token: tokens.token,
|
||||
@ -151,6 +176,19 @@ export const logout = async (req: Request, res: Response) => {
|
||||
sameSite: 'strict',
|
||||
secure: NODE_ENV === 'production' ? true : false
|
||||
});
|
||||
|
||||
const logoutAction = await EELogService.createAction({
|
||||
name: ACTION_LOGOUT,
|
||||
userId: req.user._id
|
||||
});
|
||||
|
||||
logoutAction && await EELogService.createLog({
|
||||
userId: req.user._id,
|
||||
actions: [logoutAction],
|
||||
channel: getChannelFromUserAgent(req.headers['user-agent']),
|
||||
ipAddress: req.ip
|
||||
});
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
|
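The login flow above now stores the SRP handshake state in a LoginSRPDetail document instead of the in-memory clientPublicKeys map, so step 1 and step 2 can be served by different backend instances. The model itself is not part of this diff; a minimal sketch inferred only from the fields used here (email, clientPublicKey, serverBInt), with assumed types and options:

// --- hypothetical sketch of the LoginSRPDetail model, not part of the diff ---
import { Schema, model, Types } from 'mongoose';

interface ILoginSRPDetail {
  _id: Types.ObjectId;
  email: string;
  clientPublicKey: string;
  serverBInt: Buffer; // bigintConversion.bigintToBuf(server.bInt) is stored here
}

const loginSRPDetailSchema = new Schema<ILoginSRPDetail>({
  email: { type: String, required: true, unique: true },
  clientPublicKey: { type: String, required: true },
  serverBInt: { type: Buffer }
});

const LoginSRPDetail = model<ILoginSRPDetail>('LoginSRPDetail', loginSRPDetailSchema);

export default LoginSRPDetail;
// --- end sketch ---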
@ -10,15 +10,37 @@ import { INTEGRATION_SET, INTEGRATION_OPTIONS } from '../../variables';
|
||||
import { IntegrationService } from '../../services';
|
||||
import { getApps, revokeAccess } from '../../integrations';
|
||||
|
||||
export const getIntegrationOptions = async (
|
||||
req: Request,
|
||||
res: Response
|
||||
) => {
|
||||
/***
|
||||
* Return integration authorization with id [integrationAuthId]
|
||||
*/
|
||||
export const getIntegrationAuth = async (req: Request, res: Response) => {
|
||||
let integrationAuth;
|
||||
try {
|
||||
const { integrationAuthId } = req.params;
|
||||
integrationAuth = await IntegrationAuth.findById(integrationAuthId);
|
||||
|
||||
if (!integrationAuth) return res.status(400).send({
|
||||
message: 'Failed to find integration authorization'
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to get integration authorization'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integrationOptions: INTEGRATION_OPTIONS
|
||||
integrationAuth
|
||||
});
|
||||
}
|
||||
|
||||
export const getIntegrationOptions = async (req: Request, res: Response) => {
|
||||
return res.status(200).send({
|
||||
integrationOptions: INTEGRATION_OPTIONS,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Perform OAuth2 code-token exchange as part of integration [integration] for workspace with id [workspaceId]
|
||||
* @param req
|
||||
@ -31,7 +53,6 @@ export const oAuthExchange = async (
|
||||
) => {
|
||||
try {
|
||||
const { workspaceId, code, integration } = req.body;
|
||||
|
||||
if (!INTEGRATION_SET.has(integration))
|
||||
throw new Error('Failed to validate integration');
|
||||
|
||||
@ -40,12 +61,16 @@ export const oAuthExchange = async (
|
||||
throw new Error("Failed to get environments")
|
||||
}
|
||||
|
||||
await IntegrationService.handleOAuthExchange({
|
||||
const integrationAuth = await IntegrationService.handleOAuthExchange({
|
||||
workspaceId,
|
||||
integration,
|
||||
code,
|
||||
environment: environments[0].slug,
|
||||
});
|
||||
|
||||
return res.status(200).send({
|
||||
integrationAuth
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
@ -53,34 +78,42 @@ export const oAuthExchange = async (
|
||||
message: 'Failed to get OAuth2 code-token exchange'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
message: 'Successfully enabled integration authorization'
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Save integration access token as part of integration [integration] for workspace with id [workspaceId]
|
||||
* Save integration access token and (optionally) access id as part of integration
|
||||
* [integration] for workspace with id [workspaceId]
|
||||
* @param req
|
||||
* @param res
|
||||
*/
|
||||
export const saveIntegrationAccessToken = async (
|
||||
req: Request,
|
||||
res: Response
|
||||
req: Request,
|
||||
res: Response
|
||||
) => {
|
||||
// TODO: refactor
|
||||
// TODO: check if access token is valid for each integration
|
||||
|
||||
let integrationAuth;
|
||||
try {
|
||||
const {
|
||||
workspaceId,
|
||||
accessId,
|
||||
accessToken,
|
||||
integration
|
||||
}: {
|
||||
workspaceId: string;
|
||||
accessId: string | null;
|
||||
accessToken: string;
|
||||
integration: string;
|
||||
} = req.body;
|
||||
|
||||
const bot = await Bot.findOne({
|
||||
workspace: new Types.ObjectId(workspaceId),
|
||||
isActive: true
|
||||
});
|
||||
|
||||
if (!bot) throw new Error('Bot must be enabled to save integration access token');
|
||||
|
||||
integrationAuth = await IntegrationAuth.findOneAndUpdate({
|
||||
workspace: new Types.ObjectId(workspaceId),
|
||||
integration
|
||||
@ -91,17 +124,11 @@ export const saveIntegrationAccessToken = async (
|
||||
new: true,
|
||||
upsert: true
|
||||
});
|
||||
|
||||
const bot = await Bot.findOne({
|
||||
workspace: new Types.ObjectId(workspaceId),
|
||||
isActive: true
|
||||
});
|
||||
|
||||
if (!bot) throw new Error('Bot must be enabled to save integration access token');
|
||||
|
||||
// encrypt and save integration access token
|
||||
// encrypt and save integration access details
|
||||
integrationAuth = await IntegrationService.setIntegrationAuthAccess({
|
||||
integrationAuthId: integrationAuth._id.toString(),
|
||||
accessId,
|
||||
accessToken,
|
||||
accessExpiresAt: undefined
|
||||
});
|
||||
@ -127,23 +154,23 @@ export const saveIntegrationAccessToken = async (
|
||||
* @returns
|
||||
*/
|
||||
export const getIntegrationAuthApps = async (req: Request, res: Response) => {
|
||||
let apps;
|
||||
try {
|
||||
apps = await getApps({
|
||||
integrationAuth: req.integrationAuth,
|
||||
accessToken: req.accessToken
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to get integration authorization applications'
|
||||
});
|
||||
}
|
||||
let apps;
|
||||
try {
|
||||
apps = await getApps({
|
||||
integrationAuth: req.integrationAuth,
|
||||
accessToken: req.accessToken,
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to get integration authorization applications",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
apps
|
||||
});
|
||||
return res.status(200).send({
|
||||
apps,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@ -153,21 +180,21 @@ export const getIntegrationAuthApps = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const deleteIntegrationAuth = async (req: Request, res: Response) => {
|
||||
let integrationAuth;
|
||||
try {
|
||||
integrationAuth = await revokeAccess({
|
||||
integrationAuth: req.integrationAuth,
|
||||
accessToken: req.accessToken
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to delete integration authorization'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integrationAuth
|
||||
});
|
||||
}
|
||||
let integrationAuth;
|
||||
try {
|
||||
integrationAuth = await revokeAccess({
|
||||
integrationAuth: req.integrationAuth,
|
||||
accessToken: req.accessToken,
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to delete integration authorization",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integrationAuth,
|
||||
});
|
||||
};
|
||||
|
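saveIntegrationAccessToken above now accepts an optional accessId alongside accessToken. A rough illustration of the request body shape it destructures, with invented example values:

// --- illustrative sketch, not part of the diff; example values are made up ---
const exampleBody: {
  workspaceId: string;
  accessId: string | null;
  accessToken: string;
  integration: string;
} = {
  workspaceId: '63f1c2d4e5a6b7c8d9e0f1a2',
  accessId: null, // only some integrations need a separate access id
  accessToken: '<integration access token>',
  integration: 'render'
};
// --- end sketch ---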
@ -1,4 +1,5 @@
|
||||
import { Request, Response } from 'express';
|
||||
import { Types } from 'mongoose';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import {
|
||||
Integration,
|
||||
@ -11,73 +12,41 @@ import { eventPushSecrets } from '../../events';
|
||||
|
||||
/**
|
||||
* Create/initialize an (empty) integration for integration authorization
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const createIntegration = async (req: Request, res: Response) => {
|
||||
let integration;
|
||||
try {
|
||||
// initialize new integration after saving integration access token
|
||||
integration = await new Integration({
|
||||
workspace: req.integrationAuth.workspace._id,
|
||||
isActive: false,
|
||||
app: null,
|
||||
environment: req.integrationAuth.workspace?.environments[0].slug,
|
||||
integration: req.integrationAuth.integration,
|
||||
integrationAuth: req.integrationAuth._id
|
||||
}).save();
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to create integration'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integration
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Change environment or name of integration with id [integrationId]
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const updateIntegration = async (req: Request, res: Response) => {
|
||||
export const createIntegration = async (req: Request, res: Response) => {
|
||||
let integration;
|
||||
|
||||
// TODO: add integration-specific validation to ensure that each
|
||||
// integration has the correct fields populated in [Integration]
|
||||
|
||||
try {
|
||||
const {
|
||||
environment,
|
||||
isActive,
|
||||
app,
|
||||
const {
|
||||
integrationAuthId,
|
||||
app,
|
||||
appId,
|
||||
isActive,
|
||||
sourceEnvironment,
|
||||
targetEnvironment,
|
||||
owner, // github-specific integration param
|
||||
owner,
|
||||
path,
|
||||
region
|
||||
} = req.body;
|
||||
|
||||
integration = await Integration.findOneAndUpdate(
|
||||
{
|
||||
_id: req.integration._id
|
||||
},
|
||||
{
|
||||
environment,
|
||||
isActive,
|
||||
app,
|
||||
appId,
|
||||
targetEnvironment,
|
||||
owner
|
||||
},
|
||||
{
|
||||
new: true
|
||||
}
|
||||
);
|
||||
// TODO: validate [sourceEnvironment] and [targetEnvironment]
|
||||
|
||||
// initialize new integration after saving integration access token
|
||||
integration = await new Integration({
|
||||
workspace: req.integrationAuth.workspace._id,
|
||||
environment: sourceEnvironment,
|
||||
isActive,
|
||||
app,
|
||||
appId,
|
||||
targetEnvironment,
|
||||
owner,
|
||||
path,
|
||||
region,
|
||||
integration: req.integrationAuth.integration,
|
||||
integrationAuth: new Types.ObjectId(integrationAuthId)
|
||||
}).save();
|
||||
|
||||
if (integration) {
|
||||
// trigger event - push secrets
|
||||
@ -87,17 +56,78 @@ export const updateIntegration = async (req: Request, res: Response) => {
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to update integration'
|
||||
message: 'Failed to create integration'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integration
|
||||
});
|
||||
return res.status(200).send({
|
||||
integration,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Change environment or name of integration with id [integrationId]
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const updateIntegration = async (req: Request, res: Response) => {
|
||||
let integration;
|
||||
|
||||
// TODO: add integration-specific validation to ensure that each
|
||||
// integration has the correct fields populated in [Integration]
|
||||
|
||||
try {
|
||||
const {
|
||||
environment,
|
||||
isActive,
|
||||
app,
|
||||
appId,
|
||||
targetEnvironment,
|
||||
owner, // github-specific integration param
|
||||
} = req.body;
|
||||
|
||||
integration = await Integration.findOneAndUpdate(
|
||||
{
|
||||
_id: req.integration._id,
|
||||
},
|
||||
{
|
||||
environment,
|
||||
isActive,
|
||||
app,
|
||||
appId,
|
||||
targetEnvironment,
|
||||
owner,
|
||||
},
|
||||
{
|
||||
new: true,
|
||||
}
|
||||
);
|
||||
|
||||
if (integration) {
|
||||
// trigger event - push secrets
|
||||
EventService.handleEvent({
|
||||
event: eventPushSecrets({
|
||||
workspaceId: integration.workspace.toString(),
|
||||
}),
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to update integration",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integration,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@ -108,24 +138,24 @@ export const updateIntegration = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const deleteIntegration = async (req: Request, res: Response) => {
|
||||
let integration;
|
||||
try {
|
||||
const { integrationId } = req.params;
|
||||
let integration;
|
||||
try {
|
||||
const { integrationId } = req.params;
|
||||
|
||||
integration = await Integration.findOneAndDelete({
|
||||
_id: integrationId
|
||||
});
|
||||
|
||||
if (!integration) throw new Error('Failed to find integration');
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to delete integration'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integration
|
||||
});
|
||||
integration = await Integration.findOneAndDelete({
|
||||
_id: integrationId,
|
||||
});
|
||||
|
||||
if (!integration) throw new Error("Failed to find integration");
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: "Failed to delete integration",
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integration,
|
||||
});
|
||||
};
|
||||
|
@ -1,6 +1,6 @@
|
||||
import { Request, Response } from 'express';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Membership, MembershipOrg, User, Key } from '../../models';
|
||||
import { Membership, MembershipOrg, User, Key, IMembership, Workspace } from '../../models';
|
||||
import {
|
||||
findMembership,
|
||||
deleteMembership as deleteMember
|
||||
@ -230,4 +230,4 @@ export const inviteUserToWorkspace = async (req: Request, res: Response) => {
|
||||
invitee,
|
||||
latestKey
|
||||
});
|
||||
};
|
||||
};
|
@ -1,7 +1,7 @@
|
||||
import { Request, Response } from 'express';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import crypto from 'crypto';
|
||||
import { SITE_URL, JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET } from '../../config';
|
||||
import { SITE_URL, JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET, EMAIL_TOKEN_LIFETIME } from '../../config';
|
||||
import { MembershipOrg, Organization, User, Token } from '../../models';
|
||||
import { deleteMembershipOrg as deleteMemberFromOrg } from '../../helpers/membershipOrg';
|
||||
import { checkEmailVerification } from '../../helpers/signup';
|
||||
@ -77,8 +77,6 @@ export const changeMembershipOrgRole = async (req: Request, res: Response) => {
|
||||
// change role for (target) organization membership with id
|
||||
// [membershipOrgId]
|
||||
|
||||
// TODO
|
||||
|
||||
let membershipToChangeRole;
|
||||
// try {
|
||||
// } catch (err) {
|
||||
@ -115,14 +113,14 @@ export const inviteUserToOrganization = async (req: Request, res: Response) => {
|
||||
if (!membershipOrg) {
|
||||
throw new Error('Failed to validate organization membership');
|
||||
}
|
||||
|
||||
|
||||
invitee = await User.findOne({
|
||||
email: inviteeEmail
|
||||
}).select('+publicKey');
|
||||
|
||||
if (invitee) {
|
||||
// case: invitee is an existing user
|
||||
|
||||
|
||||
inviteeMembershipOrg = await MembershipOrg.findOne({
|
||||
user: invitee._id,
|
||||
organization: organizationId
|
||||
@ -172,7 +170,8 @@ export const inviteUserToOrganization = async (req: Request, res: Response) => {
|
||||
{
|
||||
email: inviteeEmail,
|
||||
token,
|
||||
createdAt: new Date()
|
||||
createdAt: new Date(),
|
||||
ttl: Math.floor(+new Date() / 1000) + EMAIL_TOKEN_LIFETIME // time in seconds, i.e unix
|
||||
},
|
||||
{ upsert: true, new: true }
|
||||
);
|
||||
@ -243,7 +242,7 @@ export const verifyUserToOrganization = async (req: Request, res: Response) => {
|
||||
message: 'Successfully verified email',
|
||||
user,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (!user) {
|
||||
// initialize user account
|
||||
|
@ -2,10 +2,7 @@ import { Request, Response } from 'express';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import {
|
||||
SITE_URL,
|
||||
STRIPE_SECRET_KEY,
|
||||
STRIPE_PRODUCT_STARTER,
|
||||
STRIPE_PRODUCT_PRO,
|
||||
STRIPE_PRODUCT_CARD_AUTH
|
||||
STRIPE_SECRET_KEY
|
||||
} from '../../config';
|
||||
import Stripe from 'stripe';
|
||||
|
||||
@ -17,17 +14,13 @@ import {
|
||||
MembershipOrg,
|
||||
Organization,
|
||||
Workspace,
|
||||
IncidentContactOrg
|
||||
IncidentContactOrg,
|
||||
IMembershipOrg
|
||||
} from '../../models';
|
||||
import { createOrganization as create } from '../../helpers/organization';
|
||||
import { addMembershipsOrg } from '../../helpers/membershipOrg';
|
||||
import { OWNER, ACCEPTED } from '../../variables';
|
||||
|
||||
const productToPriceMap = {
|
||||
starter: STRIPE_PRODUCT_STARTER,
|
||||
pro: STRIPE_PRODUCT_PRO,
|
||||
cardAuth: STRIPE_PRODUCT_CARD_AUTH
|
||||
};
|
||||
import _ from 'lodash';
|
||||
|
||||
export const getOrganizations = async (req: Request, res: Response) => {
|
||||
let organizations;
|
||||
@ -340,7 +333,6 @@ export const createOrganizationPortalSession = async (
|
||||
|
||||
if (paymentMethods.data.length < 1) {
|
||||
// case: no payment method on file
|
||||
productToPriceMap['cardAuth'];
|
||||
session = await stripe.checkout.sessions.create({
|
||||
customer: req.membershipOrg.organization.customerId,
|
||||
mode: 'setup',
|
||||
@ -392,3 +384,44 @@ export const getOrganizationSubscriptions = async (
|
||||
subscriptions
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Given a org id, return the projects each member of the org belongs to
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const getOrganizationMembersAndTheirWorkspaces = async (
|
||||
req: Request,
|
||||
res: Response
|
||||
) => {
|
||||
const { organizationId } = req.params;
|
||||
|
||||
const workspacesSet = (
|
||||
await Workspace.find(
|
||||
{
|
||||
organization: organizationId
|
||||
},
|
||||
'_id'
|
||||
)
|
||||
).map((w) => w._id.toString());
|
||||
|
||||
const memberships = (
|
||||
await Membership.find({
|
||||
workspace: { $in: workspacesSet }
|
||||
}).populate('workspace')
|
||||
);
|
||||
const userToWorkspaceIds: any = {};
|
||||
|
||||
memberships.forEach(membership => {
|
||||
const user = membership.user.toString();
|
||||
if (userToWorkspaceIds[user]) {
|
||||
userToWorkspaceIds[user].push(membership.workspace);
|
||||
} else {
|
||||
userToWorkspaceIds[user] = [membership.workspace];
|
||||
}
|
||||
});
|
||||
|
||||
return res.json(userToWorkspaceIds);
|
||||
};
|
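getOrganizationMembersAndTheirWorkspaces above responds with a plain object keyed by user id, each value being that user's populated workspace documents. A rough illustration of the response shape, with invented ids and fields:

// --- illustrative sketch, not part of the diff; ids and workspace fields are made up ---
const exampleResponse: Record<string, { _id: string; name: string }[]> = {
  '63f1c2d4e5a6b7c8d9e0f1a2': [
    { _id: '63f1c2d4e5a6b7c8d9e0f1b3', name: 'backend-project' },
    { _id: '63f1c2d4e5a6b7c8d9e0f1c4', name: 'frontend-project' }
  ],
  '63f1c2d4e5a6b7c8d9e0f1d5': [
    { _id: '63f1c2d4e5a6b7c8d9e0f1b3', name: 'backend-project' }
  ]
};
// --- end sketch ---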
@ -4,13 +4,12 @@ import crypto from 'crypto';
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const jsrp = require('jsrp');
|
||||
import * as bigintConversion from 'bigint-conversion';
|
||||
import { User, Token, BackupPrivateKey } from '../../models';
|
||||
import { User, Token, BackupPrivateKey, LoginSRPDetail } from '../../models';
|
||||
import { checkEmailVerification } from '../../helpers/signup';
|
||||
import { createToken } from '../../helpers/auth';
|
||||
import { sendMail } from '../../helpers/nodemailer';
|
||||
import { JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET, SITE_URL } from '../../config';
|
||||
|
||||
const clientPublicKeys: any = {};
|
||||
import { EMAIL_TOKEN_LIFETIME, JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET, SITE_URL } from '../../config';
|
||||
import { BadRequestError } from '../../utils/errors';
|
||||
|
||||
/**
|
||||
* Password reset step 1: Send email verification link to email [email]
|
||||
@ -32,7 +31,7 @@ export const emailPasswordReset = async (req: Request, res: Response) => {
|
||||
error: 'Failed to send email verification for password reset'
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
const token = crypto.randomBytes(16).toString('hex');
|
||||
|
||||
await Token.findOneAndUpdate(
|
||||
@ -40,11 +39,12 @@ export const emailPasswordReset = async (req: Request, res: Response) => {
|
||||
{
|
||||
email,
|
||||
token,
|
||||
createdAt: new Date()
|
||||
createdAt: new Date(),
|
||||
ttl: Math.floor(+new Date() / 1000) + EMAIL_TOKEN_LIFETIME // time in seconds, i.e unix
|
||||
},
|
||||
{ upsert: true, new: true }
|
||||
);

await sendMail({
template: 'passwordReset.handlebars',
subjectLine: 'Infisical password reset',

@@ -55,15 +55,15 @@ export const emailPasswordReset = async (req: Request, res: Response) => {
callback_url: SITE_URL + '/password-reset'
}
});

} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to send email for account recovery'
});
});
}

return res.status(200).send({
message: `Sent an email for account recovery to ${email}`
});

@@ -79,7 +79,7 @@ export const emailPasswordResetVerify = async (req: Request, res: Response) => {
let user, token;
try {
const { email, code } = req.body;

user = await User.findOne({ email }).select('+publicKey');
if (!user || !user?.publicKey) {
// case: user doesn't exist with email [email] or

@@ -93,7 +93,7 @@ export const emailPasswordResetVerify = async (req: Request, res: Response) => {
email,
code
});

// generate temporary password-reset token
token = createToken({
payload: {

@@ -107,7 +107,7 @@ export const emailPasswordResetVerify = async (req: Request, res: Response) => {
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed email verification for password reset'
});
});
}

return res.status(200).send({

@@ -130,7 +130,7 @@ export const srp1 = async (req: Request, res: Response) => {
const user = await User.findOne({
email: req.user.email
}).select('+salt +verifier');

if (!user) throw new Error('Failed to find user');

const server = new jsrp.server();

@@ -139,13 +139,15 @@ export const srp1 = async (req: Request, res: Response) => {
salt: user.salt,
verifier: user.verifier
},
() => {
async () => {
// generate server-side public key
const serverPublicKey = server.getPublicKey();
clientPublicKeys[req.user.email] = {
clientPublicKey,
serverBInt: bigintConversion.bigintToBuf(server.bInt)
};

await LoginSRPDetail.findOneAndReplace({ email: req.user.email }, {
email: req.user.email,
clientPublicKey: clientPublicKey,
serverBInt: bigintConversion.bigintToBuf(server.bInt),
}, { upsert: true, returnNewDocument: false })

return res.status(200).send({
serverPublicKey,

@@ -180,17 +182,21 @@ export const changePassword = async (req: Request, res: Response) => {

if (!user) throw new Error('Failed to find user');

const loginSRPDetailFromDB = await LoginSRPDetail.findOneAndDelete({ email: req.user.email })

if (!loginSRPDetailFromDB) {
return BadRequestError(Error("It looks like some details from the first login are not found. Please try login one again"))
}

const server = new jsrp.server();
server.init(
{
salt: user.salt,
verifier: user.verifier,
b: clientPublicKeys[req.user.email].serverBInt
b: loginSRPDetailFromDB.serverBInt
},
async () => {
server.setClientPublicKey(
clientPublicKeys[req.user.email].clientPublicKey
);
server.setClientPublicKey(loginSRPDetailFromDB.clientPublicKey);

// compare server and client shared keys
if (server.checkClientProof(clientProof)) {

@@ -249,16 +255,22 @@ export const createBackupPrivateKey = async (req: Request, res: Response) => {

if (!user) throw new Error('Failed to find user');

const loginSRPDetailFromDB = await LoginSRPDetail.findOneAndDelete({ email: req.user.email })

if (!loginSRPDetailFromDB) {
return BadRequestError(Error("It looks like some details from the first login are not found. Please try login one again"))
}

const server = new jsrp.server();
server.init(
{
salt: user.salt,
verifier: user.verifier,
b: clientPublicKeys[req.user.email].serverBInt
b: loginSRPDetailFromDB.serverBInt
},
async () => {
server.setClientPublicKey(
clientPublicKeys[req.user.email].clientPublicKey
loginSRPDetailFromDB.clientPublicKey
);

// compare server and client shared keys

@@ -311,16 +323,16 @@ export const getBackupPrivateKey = async (req: Request, res: Response) => {
backupPrivateKey = await BackupPrivateKey.findOne({
user: req.user._id
}).select('+encryptedPrivateKey +iv +tag');

if (!backupPrivateKey) throw new Error('Failed to find backup private key');
} catch (err) {
Sentry.setUser({ email: req.user.email});
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to get backup private key'
});
}

return res.status(200).send({
backupPrivateKey
});

@@ -348,15 +360,15 @@ export const resetPassword = async (req: Request, res: Response) => {
{
new: true
}
);
);
} catch (err) {
Sentry.setUser({ email: req.user.email});
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to get backup private key'
});
});
}

return res.status(200).send({
message: 'Successfully reset password'
});
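The hunks above move the ephemeral SRP handshake state out of the in-process `clientPublicKeys` map and into a `LoginSRPDetail` document: step 1 upserts it with `findOneAndReplace`, step 2 consumes it atomically with `findOneAndDelete`. The schema for that model is not part of this diff; the sketch below is only an assumed shape consistent with how the fields are used here.

```ts
// Hypothetical LoginSRPDetail model (the real schema file is not in this diff).
// Field names come from the hunks above; types are assumptions.
import { Schema, model } from "mongoose";

const loginSRPDetailSchema = new Schema(
  {
    email: { type: String, required: true, unique: true },
    clientPublicKey: { type: String, required: true },
    // stored output of bigintConversion.bigintToBuf(server.bInt)
    serverBInt: { type: Buffer, required: true },
  },
  { timestamps: true }
);

export default model("LoginSRPDetail", loginSRPDetailSchema);
```

Persisting the detail keyed by email means the second SRP step can be served by any backend instance, since `findOneAndDelete` both fetches and invalidates the one-time handshake record.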
@@ -1,6 +1,6 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import { NODE_ENV, JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET } from '../../config';
import { NODE_ENV, JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET, INVITE_ONLY_SIGNUP } from '../../config';
import { User, MembershipOrg } from '../../models';
import { completeAccount } from '../../helpers/user';
import {

@@ -11,6 +11,7 @@ import {
import { issueTokens, createToken } from '../../helpers/auth';
import { INVITED, ACCEPTED } from '../../variables';
import axios from 'axios';
import { BadRequestError } from '../../utils/errors';

/**
* Signup step 1: Initialize account for user under email [email] and send a verification code

@@ -24,6 +25,14 @@ export const beginEmailSignup = async (req: Request, res: Response) => {
try {
email = req.body.email;

if (INVITE_ONLY_SIGNUP) {
// Only one user can create an account without being invited. The rest need to be invited in order to make an account
const userCount = await User.countDocuments({})
if (userCount != 0) {
throw BadRequestError({ message: "New user sign ups are not allowed at this time. You must be invited to sign up." })
}
}

const user = await User.findOne({ email }).select('+publicKey');
if (user && user?.publicKey) {
// case: user has already completed account

@@ -129,7 +138,7 @@ export const completeAccountSignup = async (req: Request, res: Response) => {

// get user
user = await User.findOne({ email });

if (!user || (user && user?.publicKey)) {
// case 1: user doesn't exist.
// case 2: user has already completed account
@@ -1,21 +1,21 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import { Request, Response } from "express";
import * as Sentry from "@sentry/node";
import {
Workspace,
Membership,
MembershipOrg,
Integration,
IntegrationAuth,
IUser,
ServiceToken,
ServiceTokenData
} from '../../models';
Workspace,
Membership,
MembershipOrg,
Integration,
IntegrationAuth,
IUser,
ServiceToken,
ServiceTokenData,
} from "../../models";
import {
createWorkspace as create,
deleteWorkspace as deleteWork
} from '../../helpers/workspace';
import { addMemberships } from '../../helpers/membership';
import { ADMIN } from '../../variables';
createWorkspace as create,
deleteWorkspace as deleteWork,
} from "../../helpers/workspace";
import { addMemberships } from "../../helpers/membership";
import { ADMIN } from "../../variables";

/**
* Return public keys of members of workspace with id [workspaceId]

@@ -24,32 +24,31 @@ import { ADMIN } from '../../variables';
* @returns
*/
export const getWorkspacePublicKeys = async (req: Request, res: Response) => {
let publicKeys;
try {
const { workspaceId } = req.params;
let publicKeys;
try {
const { workspaceId } = req.params;

publicKeys = (
await Membership.find({
workspace: workspaceId
}).populate<{ user: IUser }>('user', 'publicKey')
)
.map((member) => {
return {
publicKey: member.user.publicKey,
userId: member.user._id
};
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to get workspace member public keys'
});
}
publicKeys = (
await Membership.find({
workspace: workspaceId,
}).populate<{ user: IUser }>("user", "publicKey")
).map((member) => {
return {
publicKey: member.user.publicKey,
userId: member.user._id,
};
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to get workspace member public keys",
});
}

return res.status(200).send({
publicKeys
});
return res.status(200).send({
publicKeys,
});
};

/**

@@ -59,24 +58,24 @@ export const getWorkspacePublicKeys = async (req: Request, res: Response) => {
* @returns
*/
export const getWorkspaceMemberships = async (req: Request, res: Response) => {
let users;
try {
const { workspaceId } = req.params;
let users;
try {
const { workspaceId } = req.params;

users = await Membership.find({
workspace: workspaceId
}).populate('user', '+publicKey');
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to get workspace members'
});
}
users = await Membership.find({
workspace: workspaceId,
}).populate("user", "+publicKey");
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to get workspace members",
});
}

return res.status(200).send({
users
});
return res.status(200).send({
users,
});
};

/**

@@ -86,24 +85,24 @@ export const getWorkspaceMemberships = async (req: Request, res: Response) => {
* @returns
*/
export const getWorkspaces = async (req: Request, res: Response) => {
let workspaces;
try {
workspaces = (
await Membership.find({
user: req.user._id
}).populate('workspace')
).map((m) => m.workspace);
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to get workspaces'
});
}
let workspaces;
try {
workspaces = (
await Membership.find({
user: req.user._id,
}).populate("workspace")
).map((m) => m.workspace);
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to get workspaces",
});
}

return res.status(200).send({
workspaces
});
return res.status(200).send({
workspaces,
});
};

/**

@@ -113,24 +112,24 @@ export const getWorkspaces = async (req: Request, res: Response) => {
* @returns
*/
export const getWorkspace = async (req: Request, res: Response) => {
let workspace;
try {
const { workspaceId } = req.params;
let workspace;
try {
const { workspaceId } = req.params;

workspace = await Workspace.findOne({
_id: workspaceId
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to get workspace'
});
}
workspace = await Workspace.findOne({
_id: workspaceId,
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to get workspace",
});
}

return res.status(200).send({
workspace
});
return res.status(200).send({
workspace,
});
};

/**

@@ -141,46 +140,46 @@ export const getWorkspace = async (req: Request, res: Response) => {
* @returns
*/
export const createWorkspace = async (req: Request, res: Response) => {
let workspace;
try {
const { workspaceName, organizationId } = req.body;
let workspace;
try {
const { workspaceName, organizationId } = req.body;

// validate organization membership
const membershipOrg = await MembershipOrg.findOne({
user: req.user._id,
organization: organizationId
});
// validate organization membership
const membershipOrg = await MembershipOrg.findOne({
user: req.user._id,
organization: organizationId,
});

if (!membershipOrg) {
throw new Error('Failed to validate organization membership');
}
if (!membershipOrg) {
throw new Error("Failed to validate organization membership");
}

if (workspaceName.length < 1) {
throw new Error('Workspace names must be at least 1-character long');
}
if (workspaceName.length < 1) {
throw new Error("Workspace names must be at least 1-character long");
}

// create workspace and add user as member
workspace = await create({
name: workspaceName,
organizationId
});
// create workspace and add user as member
workspace = await create({
name: workspaceName,
organizationId,
});

await addMemberships({
userIds: [req.user._id],
workspaceId: workspace._id.toString(),
roles: [ADMIN]
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to create workspace'
});
}
await addMemberships({
userIds: [req.user._id],
workspaceId: workspace._id.toString(),
roles: [ADMIN],
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to create workspace",
});
}

return res.status(200).send({
workspace
});
return res.status(200).send({
workspace,
});
};

/**

@@ -190,24 +189,24 @@ export const createWorkspace = async (req: Request, res: Response) => {
* @returns
*/
export const deleteWorkspace = async (req: Request, res: Response) => {
try {
const { workspaceId } = req.params;
try {
const { workspaceId } = req.params;

// delete workspace
await deleteWork({
id: workspaceId
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to delete workspace'
});
}
// delete workspace
await deleteWork({
id: workspaceId,
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to delete workspace",
});
}

return res.status(200).send({
message: 'Successfully deleted workspace'
});
return res.status(200).send({
message: "Successfully deleted workspace",
});
};

/**

@@ -217,34 +216,34 @@ export const deleteWorkspace = async (req: Request, res: Response) => {
* @returns
*/
export const changeWorkspaceName = async (req: Request, res: Response) => {
let workspace;
try {
const { workspaceId } = req.params;
const { name } = req.body;
let workspace;
try {
const { workspaceId } = req.params;
const { name } = req.body;

workspace = await Workspace.findOneAndUpdate(
{
_id: workspaceId
},
{
name
},
{
new: true
}
);
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to change workspace name'
});
}
workspace = await Workspace.findOneAndUpdate(
{
_id: workspaceId,
},
{
name,
},
{
new: true,
}
);
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to change workspace name",
});
}

return res.status(200).send({
message: 'Successfully changed workspace name',
workspace
});
return res.status(200).send({
message: "Successfully changed workspace name",
workspace,
});
};

/**

@@ -254,24 +253,24 @@ export const changeWorkspaceName = async (req: Request, res: Response) => {
* @returns
*/
export const getWorkspaceIntegrations = async (req: Request, res: Response) => {
let integrations;
try {
const { workspaceId } = req.params;
let integrations;
try {
const { workspaceId } = req.params;

integrations = await Integration.find({
workspace: workspaceId
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to get workspace integrations'
});
}
integrations = await Integration.find({
workspace: workspaceId,
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to get workspace integrations",
});
}

return res.status(200).send({
integrations
});
return res.status(200).send({
integrations,
});
};

/**

@@ -281,56 +280,56 @@ export const getWorkspaceIntegrations = async (req: Request, res: Response) => {
* @returns
*/
export const getWorkspaceIntegrationAuthorizations = async (
req: Request,
res: Response
req: Request,
res: Response
) => {
let authorizations;
try {
const { workspaceId } = req.params;
let authorizations;
try {
const { workspaceId } = req.params;

authorizations = await IntegrationAuth.find({
workspace: workspaceId
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to get workspace integration authorizations'
});
}
authorizations = await IntegrationAuth.find({
workspace: workspaceId,
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to get workspace integration authorizations",
});
}

return res.status(200).send({
authorizations
});
return res.status(200).send({
authorizations,
});
};

/**
* Return service service tokens for workspace [workspaceId] belonging to user
* @param req
* @param res
* @returns
* @param req
* @param res
* @returns
*/
export const getWorkspaceServiceTokens = async (
req: Request,
res: Response
req: Request,
res: Response
) => {
let serviceTokens;
try {
const { workspaceId } = req.params;
// ?? FIX.
serviceTokens = await ServiceToken.find({
user: req.user._id,
workspace: workspaceId
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to get workspace service tokens'
});
}

return res.status(200).send({
serviceTokens
});
}
let serviceTokens;
try {
const { workspaceId } = req.params;
// ?? FIX.
serviceTokens = await ServiceToken.find({
user: req.user._id,
workspace: workspaceId,
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to get workspace service tokens",
});
}

return res.status(200).send({
serviceTokens,
});
};
@@ -6,8 +6,12 @@ import {
Workspace,
Integration,
ServiceTokenData,
Membership,
} from '../../models';
import { SecretVersion } from '../../ee/models';
import { BadRequestError } from '../../utils/errors';
import _ from 'lodash';
import { ABILITY_READ, ABILITY_WRITE } from '../../variables/organization';

/**
* Create new workspace environment named [environmentName] under workspace with id

@@ -120,6 +124,15 @@ export const renameWorkspaceEnvironment = async (
{ workspace: workspaceId, environment: oldEnvironmentSlug },
{ environment: environmentSlug }
);
await Membership.updateMany(
{
workspace: workspaceId,
"deniedPermissions.environmentSlug": oldEnvironmentSlug
},
{ $set: { "deniedPermissions.$[element].environmentSlug": environmentSlug } },
{ arrayFilters: [{ "element.environmentSlug": oldEnvironmentSlug }] }
)

} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);

@@ -188,6 +201,11 @@ export const deleteWorkspaceEnvironment = async (
workspace: workspaceId,
environment: environmentSlug,
});
await Membership.updateMany(
{ workspace: workspaceId },
{ $pull: { deniedPermissions: { environmentSlug: environmentSlug } } }
)

} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);

@@ -202,3 +220,43 @@ export const deleteWorkspaceEnvironment = async (
environment: environmentSlug,
});
};

export const getAllAccessibleEnvironmentsOfWorkspace = async (
req: Request,
res: Response
) => {
const { workspaceId } = req.params;
const workspacesUserIsMemberOf = await Membership.findOne({
workspace: workspaceId,
user: req.user
})

if (!workspacesUserIsMemberOf) {
throw BadRequestError()
}

const accessibleEnvironments: any = []
const deniedPermission = workspacesUserIsMemberOf.deniedPermissions

const relatedWorkspace = await Workspace.findById(workspaceId)
if (!relatedWorkspace) {
throw BadRequestError()
}
relatedWorkspace.environments.forEach(environment => {
const isReadBlocked = _.some(deniedPermission, { environmentSlug: environment.slug, ability: ABILITY_READ })
const isWriteBlocked = _.some(deniedPermission, { environmentSlug: environment.slug, ability: ABILITY_WRITE })
if (isReadBlocked && isWriteBlocked) {
return
} else {
accessibleEnvironments.push({
name: environment.name,
slug: environment.slug,
isWriteDenied: isWriteBlocked,
isReadDenied: isReadBlocked
})
}
})

res.json({ accessibleEnvironments })
};
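The two `Membership.updateMany` calls added above use MongoDB array-update operators that are easy to misread: `$[element]` is the filtered positional operator (it rewrites only array entries matched by the paired `arrayFilters` condition), while `$pull` removes matching entries entirely. A minimal sketch with made-up slugs, assuming the same `deniedPermissions` array shape as in the hunks above:

```ts
// Toy illustration of the update patterns above; "staging"/"stage" are assumed slugs.
import { Membership } from "../../models";

export const renameDeniedEnvironmentSlug = async (workspaceId: string) => {
  // Rewrite only deniedPermissions entries whose environmentSlug is "staging".
  await Membership.updateMany(
    { workspace: workspaceId, "deniedPermissions.environmentSlug": "staging" },
    { $set: { "deniedPermissions.$[element].environmentSlug": "stage" } },
    { arrayFilters: [{ "element.environmentSlug": "staging" }] }
  );

  // Drop the entries outright, as done when an environment is deleted.
  await Membership.updateMany(
    { workspace: workspaceId },
    { $pull: { deniedPermissions: { environmentSlug: "stage" } } }
  );
};
```

Without the `arrayFilters` clause, a bare positional `$` would only update the first matching array element per document, which is why the filtered form is used here.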
@@ -6,6 +6,7 @@ import * as apiKeyDataController from './apiKeyDataController';
import * as secretController from './secretController';
import * as secretsController from './secretsController';
import * as environmentController from './environmentController';
import * as tagController from './tagController';

export {
usersController,

@@ -15,5 +16,6 @@ export {
apiKeyDataController,
secretController,
secretsController,
environmentController
environmentController,
tagController
}
@@ -1,7 +1,7 @@
import to from 'await-to-js';
import { Types } from 'mongoose';
import { Request, Response } from 'express';
import { ISecret, Secret } from '../../models';
import { ISecret, Membership, Secret, Workspace } from '../../models';
import {
SECRET_PERSONAL,
SECRET_SHARED,

@@ -10,13 +10,16 @@ import {
ACTION_UPDATE_SECRETS,
ACTION_DELETE_SECRETS
} from '../../variables';
import { ValidationError } from '../../utils/errors';
import { UnauthorizedRequestError, ValidationError } from '../../utils/errors';
import { EventService } from '../../services';
import { eventPushSecrets } from '../../events';
import { EESecretService, EELogService } from '../../ee/services';
import { postHogClient } from '../../services';
import { BadRequestError } from '../../utils/errors';
import { getChannelFromUserAgent } from '../../utils/posthog';
import { ABILITY_READ, ABILITY_WRITE } from '../../variables/organization';
import { userHasNoAbility, userHasWorkspaceAccess, userHasWriteOnlyAbility } from '../../ee/helpers/checkMembershipPermissions';
import Tag from '../../models/tag';
import _ from 'lodash';

/**
* Create secret(s) for workspace with id [workspaceId] and environment [environment]

@@ -76,20 +79,40 @@ export const createSecrets = async (req: Request, res: Response) => {
}
}
*/
const channel = getChannelFromUserAgent(req.headers['user-agent'])
const { workspaceId, environment } = req.body;

let toAdd;
if (Array.isArray(req.body.secrets)) {
// case: create multiple secrets
toAdd = req.body.secrets;
} else if (typeof req.body.secrets === 'object') {
// case: create 1 secret
toAdd = [req.body.secrets];
const channel = getChannelFromUserAgent(req.headers['user-agent'])
const { workspaceId, environment }: { workspaceId: string, environment: string } = req.body;

const hasAccess = await userHasWorkspaceAccess(req.user, workspaceId, environment, ABILITY_WRITE)
if (!hasAccess) {
throw UnauthorizedRequestError({ message: "You do not have the necessary permission(s) perform this action" })
}

const newSecrets = await Secret.insertMany(
toAdd.map(({
let listOfSecretsToCreate;
if (Array.isArray(req.body.secrets)) {
// case: create multiple secrets
listOfSecretsToCreate = req.body.secrets;
} else if (typeof req.body.secrets === 'object') {
// case: create 1 secret
listOfSecretsToCreate = [req.body.secrets];
}

type secretsToCreateType = {
type: string;
secretKeyCiphertext: string;
secretKeyIV: string;
secretKeyTag: string;
secretValueCiphertext: string;
secretValueIV: string;
secretValueTag: string;
secretCommentCiphertext: string;
secretCommentIV: string;
secretCommentTag: string;
tags: string[]
}

const newlyCreatedSecrets = await Secret.insertMany(
listOfSecretsToCreate.map(({
type,
secretKeyCiphertext,
secretKeyIV,

@@ -97,27 +120,29 @@ export const createSecrets = async (req: Request, res: Response) => {
secretValueCiphertext,
secretValueIV,
secretValueTag,
}: {
type: string;
secretKeyCiphertext: string;
secretKeyIV: string;
secretKeyTag: string;
secretValueCiphertext: string;
secretValueIV: string;
secretValueTag: string;
}) => ({
version: 1,
workspace: new Types.ObjectId(workspaceId),
type,
user: type === SECRET_PERSONAL ? req.user : undefined,
environment,
secretKeyCiphertext,
secretKeyIV,
secretKeyTag,
secretValueCiphertext,
secretValueIV,
secretValueTag
}))
secretCommentCiphertext,
secretCommentIV,
secretCommentTag,
tags
}: secretsToCreateType) => {
return ({
version: 1,
workspace: new Types.ObjectId(workspaceId),
type,
user: type === SECRET_PERSONAL ? req.user : undefined,
environment,
secretKeyCiphertext,
secretKeyIV,
secretKeyTag,
secretValueCiphertext,
secretValueIV,
secretValueTag,
secretCommentCiphertext,
secretCommentIV,
secretCommentTag,
tags
});
})
);

setTimeout(async () => {

@@ -131,7 +156,7 @@ export const createSecrets = async (req: Request, res: Response) => {

// (EE) add secret versions for new secrets
await EESecretService.addSecretVersions({
secretVersions: newSecrets.map(({
secretVersions: newlyCreatedSecrets.map(({
_id,
version,
workspace,

@@ -145,7 +170,11 @@ export const createSecrets = async (req: Request, res: Response) => {
secretValueCiphertext,
secretValueIV,
secretValueTag,
secretValueHash
secretValueHash,
secretCommentCiphertext,
secretCommentIV,
secretCommentTag,
tags
}) => ({
_id: new Types.ObjectId(),
secret: _id,

@@ -162,21 +191,25 @@ export const createSecrets = async (req: Request, res: Response) => {
secretValueCiphertext,
secretValueIV,
secretValueTag,
secretValueHash
secretValueHash,
secretCommentCiphertext,
secretCommentIV,
secretCommentTag,
tags
}))
});

const addAction = await EELogService.createActionSecret({
const addAction = await EELogService.createAction({
name: ACTION_ADD_SECRETS,
userId: req.user._id.toString(),
workspaceId,
secretIds: newSecrets.map((n) => n._id)
userId: req.user._id,
workspaceId: new Types.ObjectId(workspaceId),
secretIds: newlyCreatedSecrets.map((n) => n._id)
});

// (EE) create (audit) log
addAction && await EELogService.createLog({
userId: req.user._id.toString(),
workspaceId,
workspaceId: new Types.ObjectId(workspaceId),
actions: [addAction],
channel,
ipAddress: req.ip

@@ -192,7 +225,7 @@ export const createSecrets = async (req: Request, res: Response) => {
event: 'secrets added',
distinctId: req.user.email,
properties: {
numberOfSecrets: toAdd.length,
numberOfSecrets: listOfSecretsToCreate.length,
environment,
workspaceId,
channel: channel,

@@ -202,7 +235,7 @@ export const createSecrets = async (req: Request, res: Response) => {
}

return res.status(200).send({
secrets: newSecrets
secrets: newlyCreatedSecrets
});
}

@@ -253,8 +286,10 @@ export const getSecrets = async (req: Request, res: Response) => {
}
}
*/
const { workspaceId, environment } = req.query;

const { workspaceId, environment, tagSlugs } = req.query;
const tagNamesList = typeof tagSlugs === 'string' && tagSlugs !== '' ? tagSlugs.split(',') : [];
let userId = "" // used for getting personal secrets for user
let userEmail = "" // used for posthog
if (req.user) {

@@ -267,8 +302,38 @@ export const getSecrets = async (req: Request, res: Response) => {
userEmail = req.serviceTokenData.user.email;
}

const [err, secrets] = await to(Secret.find(
{
// none service token case as service tokens are already scoped to env and project
let hasWriteOnlyAccess
if (!req.serviceTokenData) {
hasWriteOnlyAccess = await userHasWriteOnlyAbility(userId, workspaceId, environment)
const hasNoAccess = await userHasNoAbility(userId, workspaceId, environment)
if (hasNoAccess) {
throw UnauthorizedRequestError({ message: "You do not have the necessary permission(s) perform this action" })
}
}
let secrets: any
let secretQuery: any

if (tagNamesList != undefined && tagNamesList.length != 0) {
const workspaceFromDB = await Tag.find({ workspace: workspaceId })

const tagIds = _.map(tagNamesList, (tagName) => {
const tag = _.find(workspaceFromDB, { slug: tagName });
return tag ? tag.id : null;
});

secretQuery = {
workspace: workspaceId,
environment,
$or: [
{ user: userId },
{ user: { $exists: false } }
],
tags: { $in: tagIds },
type: { $in: [SECRET_SHARED, SECRET_PERSONAL] }
}
} else {
secretQuery = {
workspace: workspaceId,
environment,
$or: [

@@ -277,22 +342,26 @@ export const getSecrets = async (req: Request, res: Response) => {
],
type: { $in: [SECRET_SHARED, SECRET_PERSONAL] }
}
).then())
}

if (err) throw ValidationError({ message: 'Failed to get secrets', stack: err.stack });
if (hasWriteOnlyAccess) {
secrets = await Secret.find(secretQuery).select("secretKeyCiphertext secretKeyIV secretKeyTag")
} else {
secrets = await Secret.find(secretQuery).populate("tags")
}

const channel = getChannelFromUserAgent(req.headers['user-agent'])

const readAction = await EELogService.createActionSecret({
const readAction = await EELogService.createAction({
name: ACTION_READ_SECRETS,
userId: userId,
workspaceId: workspaceId as string,
userId: new Types.ObjectId(userId),
workspaceId: new Types.ObjectId(workspaceId as string),
secretIds: secrets.map((n: any) => n._id)
});

readAction && await EELogService.createLog({
userId: userId,
workspaceId: workspaceId as string,
userId: new Types.ObjectId(userId),
workspaceId: new Types.ObjectId(workspaceId as string),
actions: [readAction],
channel,
ipAddress: req.ip

@@ -317,6 +386,59 @@ export const getSecrets = async (req: Request, res: Response) => {
});
}

export const getOnlySecretKeys = async (req: Request, res: Response) => {
const { workspaceId, environment } = req.query;

let userId = "" // used for getting personal secrets for user
let userEmail = "" // used for posthog
if (req.user) {
userId = req.user._id;
userEmail = req.user.email;
}

if (req.serviceTokenData) {
userId = req.serviceTokenData.user._id
userEmail = req.serviceTokenData.user.email;
}

// none service token case as service tokens are already scoped
if (!req.serviceTokenData) {
const hasAccess = await userHasWorkspaceAccess(userId, workspaceId, environment, ABILITY_READ)
if (!hasAccess) {
throw UnauthorizedRequestError({ message: "You do not have the necessary permission(s) perform this action" })
}
}

const [err, secretKeys] = await to(Secret.find(
{
workspace: workspaceId,
environment,
$or: [
{ user: userId },
{ user: { $exists: false } }
],
type: { $in: [SECRET_SHARED, SECRET_PERSONAL] }
}
)
.select("secretKeyIV secretKeyTag secretKeyCiphertext")
.then())

if (err) throw ValidationError({ message: 'Failed to get secrets', stack: err.stack });

// readAction && await EELogService.createLog({
// userId: new Types.ObjectId(userId),
// workspaceId: new Types.ObjectId(workspaceId as string),
// actions: [readAction],
// channel,
// ipAddress: req.ip
// });

return res.status(200).send({
secretKeys
});
}

/**
* Update secret(s)
* @param req

@@ -369,7 +491,6 @@ export const updateSecrets = async (req: Request, res: Response) => {
*/
const channel = req.headers?.['user-agent']?.toLowerCase().includes('mozilla') ? 'web' : 'cli';

// TODO: move type
interface PatchSecret {
id: string;

@@ -382,6 +503,7 @@ export const updateSecrets = async (req: Request, res: Response) => {
secretCommentCiphertext: string;
secretCommentIV: string;
secretCommentTag: string;
tags: string[]
}

const updateOperationsToPerform = req.body.secrets.map((secret: PatchSecret) => {

@@ -394,7 +516,8 @@ export const updateSecrets = async (req: Request, res: Response) => {
secretValueTag,
secretCommentCiphertext,
secretCommentIV,
secretCommentTag
secretCommentTag,
tags
} = secret;

return ({

@@ -410,8 +533,9 @@ export const updateSecrets = async (req: Request, res: Response) => {
secretValueCiphertext,
secretValueIV,
secretValueTag,
tags,
...((
secretCommentCiphertext &&
secretCommentCiphertext !== undefined &&
secretCommentIV &&
secretCommentTag
) ? {

@@ -444,6 +568,7 @@ export const updateSecrets = async (req: Request, res: Response) => {
secretCommentCiphertext,
secretCommentIV,
secretCommentTag,
tags
} = secretModificationsBySecretId[secret._id.toString()]

return ({

@@ -461,6 +586,7 @@ export const updateSecrets = async (req: Request, res: Response) => {
secretCommentCiphertext: secretCommentCiphertext ? secretCommentCiphertext : secret.secretCommentCiphertext,
secretCommentIV: secretCommentIV ? secretCommentIV : secret.secretCommentIV,
secretCommentTag: secretCommentTag ? secretCommentTag : secret.secretCommentTag,
tags: tags ? tags : secret.tags
});
})
}

@@ -489,17 +615,17 @@ export const updateSecrets = async (req: Request, res: Response) => {
});
}, 10000);

const updateAction = await EELogService.createActionSecret({
const updateAction = await EELogService.createAction({
name: ACTION_UPDATE_SECRETS,
userId: req.user._id.toString(),
workspaceId: key,
userId: req.user._id,
workspaceId: new Types.ObjectId(key),
secretIds: workspaceSecretObj[key].map((secret: ISecret) => secret._id)
});

// (EE) create (audit) log
updateAction && await EELogService.createLog({
userId: req.user._id.toString(),
workspaceId: key,
workspaceId: new Types.ObjectId(key),
actions: [updateAction],
channel,
ipAddress: req.ip

@@ -615,17 +741,17 @@ export const deleteSecrets = async (req: Request, res: Response) => {
workspaceId: key
})
});
const deleteAction = await EELogService.createActionSecret({
const deleteAction = await EELogService.createAction({
name: ACTION_DELETE_SECRETS,
userId: req.user._id.toString(),
workspaceId: key,
userId: req.user._id,
workspaceId: new Types.ObjectId(key),
secretIds: workspaceSecretObj[key].map((secret: ISecret) => secret._id)
});

// (EE) create (audit) log
deleteAction && await EELogService.createLog({
userId: req.user._id.toString(),
workspaceId: key,
workspaceId: new Types.ObjectId(key),
actions: [deleteAction],
channel,
ipAddress: req.ip
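The `getSecrets` changes above add two read-path behaviours worth seeing end to end: tag slugs from the query string are resolved to Tag ObjectIds before filtering, and write-only members get back only key metadata. A minimal sketch of the fragment as it runs inside the handler, with assumed slug values; all identifiers come from the hunks above:

```ts
// For a request carrying ?environment=dev&tagSlugs=backend,infra the handler
// resolves each slug against the workspace's Tag documents, then filters with $in.
const tagNamesList = "backend,infra".split(",");
const workspaceTags = await Tag.find({ workspace: workspaceId });
const tagIds = _.map(tagNamesList, (slug) => {
  const tag = _.find(workspaceTags, { slug });
  return tag ? tag.id : null;
});

const secretQuery = {
  workspace: workspaceId,
  environment: "dev",
  $or: [{ user: userId }, { user: { $exists: false } }],
  tags: { $in: tagIds },
  type: { $in: [SECRET_SHARED, SECRET_PERSONAL] },
};

// Write-only members never receive value ciphertext, only key material.
const secrets = hasWriteOnlyAccess
  ? await Secret.find(secretQuery).select("secretKeyCiphertext secretKeyIV secretKeyTag")
  : await Secret.find(secretQuery).populate("tags");
```

Unmatched slugs resolve to `null` in `tagIds`, so they simply match nothing in the `$in` filter rather than raising an error.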
@@ -8,6 +8,8 @@ import {
import {
SALT_ROUNDS
} from '../../config';
import { userHasWorkspaceAccess } from '../../ee/helpers/checkMembershipPermissions';
import { ABILITY_READ } from '../../variables/organization';

/**
* Return service token data associated with service token on request

@@ -37,6 +39,11 @@ export const createServiceTokenData = async (req: Request, res: Response) => {
expiresIn
} = req.body;

const hasAccess = await userHasWorkspaceAccess(req.user, workspaceId, environment, ABILITY_READ)
if (!hasAccess) {
throw UnauthorizedRequestError({ message: "You do not have the necessary permission(s) perform this action" })
}

const secret = crypto.randomBytes(16).toString('hex');
const secretHash = await bcrypt.hash(secret, SALT_ROUNDS);

@@ -100,4 +107,8 @@ export const deleteServiceTokenData = async (req: Request, res: Response) => {
return res.status(200).send({
serviceTokenData
});
}
}

function UnauthorizedRequestError(arg0: { message: string; }) {
throw new Error('Function not implemented.');
}
backend/src/controllers/v2/tagController.ts (new file, 72 lines)
@@ -0,0 +1,72 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import { Types } from 'mongoose';
import {
Membership, Secret,
} from '../../models';
import Tag, { ITag } from '../../models/tag';
import { Builder } from "builder-pattern"
import to from 'await-to-js';
import { BadRequestError, UnauthorizedRequestError } from '../../utils/errors';
import { MongoError } from 'mongodb';
import { userHasWorkspaceAccess } from '../../ee/helpers/checkMembershipPermissions';

export const createWorkspaceTag = async (req: Request, res: Response) => {
const { workspaceId } = req.params
const { name, slug } = req.body
const sanitizedTagToCreate = Builder<ITag>()
.name(name)
.workspace(new Types.ObjectId(workspaceId))
.slug(slug)
.user(new Types.ObjectId(req.user._id))
.build();

const [err, createdTag] = await to(Tag.create(sanitizedTagToCreate))

if (err) {
if ((err as MongoError).code === 11000) {
throw BadRequestError({ message: "Tags must be unique in a workspace" })
}

throw err
}

res.json(createdTag)
}

export const deleteWorkspaceTag = async (req: Request, res: Response) => {
const { tagId } = req.params

const tagFromDB = await Tag.findById(tagId)
if (!tagFromDB) {
throw BadRequestError()
}

// can only delete if the request user is one that belongs to the same workspace as the tag
const membership = await Membership.findOne({
user: req.user,
workspace: tagFromDB.workspace
});

if (!membership) {
UnauthorizedRequestError({ message: 'Failed to validate membership' });
}

const result = await Tag.findByIdAndDelete(tagId);

// remove the tag from secrets
await Secret.updateMany(
{ tags: { $in: [tagId] } },
{ $pull: { tags: tagId } }
);

res.json(result);
}

export const getWorkspaceTags = async (req: Request, res: Response) => {
const { workspaceId } = req.params
const workspaceTags = await Tag.find({ workspace: workspaceId })
return res.json({
workspaceTags
})
}
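The duplicate-tag guard in `createWorkspaceTag` relies on MongoDB's duplicate-key error (code 11000), which only fires if the Tag schema declares a unique index; that schema is not part of this excerpt, so the index below is an assumption about how the check is backed. The try/catch is just the same handling written without `await-to-js`:

```ts
// Assumed compound unique index on the Tag schema (not shown in this diff);
// without some unique index the 11000 branch above can never trigger.
tagSchema.index({ slug: 1, workspace: 1 }, { unique: true });

// Equivalent duplicate handling with a plain try/catch:
try {
  const createdTag = await Tag.create(sanitizedTagToCreate);
  res.json(createdTag);
} catch (err) {
  if ((err as MongoError).code === 11000) {
    throw BadRequestError({ message: "Tags must be unique in a workspace" });
  }
  throw err;
}
```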
@@ -467,4 +467,42 @@ export const deleteWorkspaceMembership = async (req: Request, res: Response) =>
return res.status(200).send({
membership
});
}
}

/**
* Change autoCapitilzation Rule of workspace
* @param req
* @param res
* @returns
*/
export const toggleAutoCapitalization = async (req: Request, res: Response) => {
let workspace;
try {
const { workspaceId } = req.params;
const { autoCapitalization } = req.body;

workspace = await Workspace.findOneAndUpdate(
{
_id: workspaceId
},
{
autoCapitalization
},
{
new: true
}
);
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to change autoCapitalization setting'
});
}

return res.status(200).send({
message: 'Successfully changed autoCapitalization setting',
workspace
});
};
@@ -3,11 +3,13 @@ import * as secretController from './secretController';
import * as secretSnapshotController from './secretSnapshotController';
import * as workspaceController from './workspaceController';
import * as actionController from './actionController';
import * as membershipController from './membershipController';

export {
stripeController,
secretController,
secretSnapshotController,
workspaceController,
actionController
actionController,
membershipController
}

backend/src/ee/controllers/v1/membershipController.ts (new file, 63 lines)
@@ -0,0 +1,63 @@
import { Request, Response } from "express";
import { Membership, Workspace } from "../../../models";
import { IMembershipPermission } from "../../../models/membership";
import { BadRequestError, UnauthorizedRequestError } from "../../../utils/errors";
import { ABILITY_READ, ABILITY_WRITE, ADMIN, MEMBER } from "../../../variables/organization";
import { Builder } from "builder-pattern"
import _ from "lodash";

export const denyMembershipPermissions = async (req: Request, res: Response) => {
const { membershipId } = req.params;
const { permissions } = req.body;
const sanitizedMembershipPermissions: IMembershipPermission[] = permissions.map((permission: IMembershipPermission) => {
if (!permission.ability || !permission.environmentSlug || ![ABILITY_READ, ABILITY_WRITE].includes(permission.ability)) {
throw BadRequestError({ message: "One or more required fields are missing from the request or have incorrect type" })
}

return Builder<IMembershipPermission>()
.environmentSlug(permission.environmentSlug)
.ability(permission.ability)
.build();
})

const sanitizedMembershipPermissionsUnique = _.uniqWith(sanitizedMembershipPermissions, _.isEqual)

const membershipToModify = await Membership.findById(membershipId)
if (!membershipToModify) {
throw BadRequestError({ message: "Unable to locate resource" })
}

// check if the user making the request is a admin of this project
if (![ADMIN, MEMBER].includes(membershipToModify.role)) {
throw UnauthorizedRequestError()
}

// check if the requested slugs are indeed a part of this related workspace
const relatedWorkspace = await Workspace.findById(membershipToModify.workspace)
if (!relatedWorkspace) {
throw BadRequestError({ message: "Something went wrong when locating the related workspace" })
}

const uniqueEnvironmentSlugs = new Set(_.uniq(_.map(relatedWorkspace.environments, 'slug')));

sanitizedMembershipPermissionsUnique.forEach(permission => {
if (!uniqueEnvironmentSlugs.has(permission.environmentSlug)) {
throw BadRequestError({ message: "Unknown environment slug reference" })
}
})

// update the permissions
const updatedMembershipWithPermissions = await Membership.findByIdAndUpdate(
{ _id: membershipToModify._id },
{ $set: { deniedPermissions: sanitizedMembershipPermissionsUnique } },
{ new: true }
)

if (!updatedMembershipWithPermissions) {
throw BadRequestError({ message: "The resource has been removed before it can be modified" })
}

res.send({
permissionsDenied: updatedMembershipWithPermissions.deniedPermissions
})
}
@@ -15,7 +15,13 @@ export const getSecretSnapshot = async (req: Request, res: Response) => {

secretSnapshot = await SecretSnapshot
.findById(secretSnapshotId)
.populate('secretVersions');
.populate({
path: 'secretVersions',
populate: {
path: 'tags',
model: 'Tag',
}
});

if (!secretSnapshot) throw new Error('Failed to find secret snapshot');
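The nested `populate` above resolves tag references two levels deep: the snapshot's secret versions and, within each version, the Tag documents themselves. A small sketch of how the populated result can then be consumed without issuing extra queries; the loop and field access are illustrative only and use the names from the hunk above:

```ts
// With the nested populate, each version carries full Tag documents.
const secretSnapshot = await SecretSnapshot.findById(secretSnapshotId).populate({
  path: "secretVersions",
  populate: { path: "tags", model: "Tag" },
});

secretSnapshot?.secretVersions.forEach((version: any) => {
  const tagSlugs = version.tags.map((tag: any) => tag.slug);
  console.log(version._id.toString(), tagSlugs);
});
```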
@@ -1,39 +1,40 @@
import * as Sentry from '@sentry/node';
import { Types } from 'mongoose';
import { SecretVersion, Action } from '../models';
import { Action } from '../models';
import {
getLatestSecretVersionIds,
getLatestNSecretSecretVersionIds
} from '../helpers/secretVersion';
import { ACTION_UPDATE_SECRETS } from '../../variables';
import {
ACTION_LOGIN,
ACTION_LOGOUT,
ACTION_ADD_SECRETS,
ACTION_READ_SECRETS,
ACTION_DELETE_SECRETS,
ACTION_UPDATE_SECRETS,
} from '../../variables';

/**
* Create an (audit) action for secrets including
* add, delete, update, and read actions.
* Create an (audit) action for updating secrets
* @param {Object} obj
* @param {String} obj.name - name of action
* @param {ObjectId[]} obj.secretIds - ids of relevant secrets
* @param {Types.ObjectId} obj.secretIds - ids of relevant secrets
* @returns {Action} action - new action
*/
const createActionSecretHelper = async ({
const createActionUpdateSecret = async ({
name,
userId,
workspaceId,
secretIds
}: {
name: string;
userId: string;
workspaceId: string;
userId: Types.ObjectId;
workspaceId: Types.ObjectId;
secretIds: Types.ObjectId[];
}) => {

let action;
let latestSecretVersions;
try {
if (name === ACTION_UPDATE_SECRETS) {
// case: action is updating secrets
// -> add old and new secret versions
latestSecretVersions = (await getLatestNSecretSecretVersionIds({
const latestSecretVersions = (await getLatestNSecretSecretVersionIds({
secretIds,
n: 2
}))

@@ -41,17 +42,7 @@ const createActionSecretHelper = async ({
oldSecretVersion: s.versions[0]._id,
newSecretVersion: s.versions[1]._id
}));
} else {
// case: action is adding, deleting, or reading secrets
// -> add new secret versions
latestSecretVersions = (await getLatestSecretVersionIds({
secretIds
}))
.map((s) => ({
newSecretVersion: s.versionId
}));
}

action = await new Action({
name,
user: userId,

@@ -64,10 +55,148 @@ const createActionSecretHelper = async ({
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed to create update secret action');
}

return action;
}

/**
* Create an (audit) action for creating, reading, and deleting
* secrets
* @param {Object} obj
* @param {String} obj.name - name of action
* @param {Types.ObjectId} obj.secretIds - ids of relevant secrets
* @returns {Action} action - new action
*/
const createActionSecret = async ({
name,
userId,
workspaceId,
secretIds
}: {
name: string;
userId: Types.ObjectId;
workspaceId: Types.ObjectId;
secretIds: Types.ObjectId[];
}) => {
let action;
try {
// case: action is adding, deleting, or reading secrets
// -> add new secret versions
const latestSecretVersions = (await getLatestSecretVersionIds({
secretIds
}))
.map((s) => ({
newSecretVersion: s.versionId
}));

action = await new Action({
name,
user: userId,
workspace: workspaceId,
payload: {
secretVersions: latestSecretVersions
}
}).save();

} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed to create action create/read/delete secret action');
}

return action;
}

/**
* Create an (audit) action for user with id [userId]
* @param {Object} obj
* @param {String} obj.name - name of action
* @param {String} obj.userId - id of user associated with action
* @returns
*/
const createActionUser = ({
name,
userId
}: {
name: string;
userId: Types.ObjectId;
}) => {
let action;
try {
action = new Action({
name,
user: userId
}).save();
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed to create user action');
}

return action;
}

/**
* Create an (audit) action.
* @param {Object} obj
* @param {Object} obj.name - name of action
* @param {Types.ObjectId} obj.userId - id of user associated with action
* @param {Types.ObjectId} obj.workspaceId - id of workspace associated with action
* @param {Types.ObjectId[]} obj.secretIds - ids of secrets associated with action
*/
const createActionHelper = async ({
name,
userId,
workspaceId,
secretIds,
}: {
name: string;
userId: Types.ObjectId;
workspaceId?: Types.ObjectId;
secretIds?: Types.ObjectId[];
}) => {
let action;
try {
switch (name) {
case ACTION_LOGIN:
case ACTION_LOGOUT:
action = await createActionUser({
name,
userId
});
break;
case ACTION_ADD_SECRETS:
case ACTION_READ_SECRETS:
case ACTION_DELETE_SECRETS:
if (!workspaceId || !secretIds) throw new Error('Missing required params workspace id or secret ids to create action secret');
action = await createActionSecret({
name,
userId,
workspaceId,
secretIds
});
break;
case ACTION_UPDATE_SECRETS:
if (!workspaceId || !secretIds) throw new Error('Missing required params workspace id or secret ids to create action secret');
action = await createActionUpdateSecret({
name,
userId,
workspaceId,
secretIds
});
break;
}
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed to create action');
}

return action;
}

export { createActionSecretHelper };
export {
createActionHelper
};
backend/src/ee/helpers/checkMembershipPermissions.ts (new file, 54 lines)
@@ -0,0 +1,54 @@
import _ from "lodash";
import { Membership } from "../../models";
import { ABILITY_READ, ABILITY_WRITE } from "../../variables/organization";

export const userHasWorkspaceAccess = async (userId: any, workspaceId: any, environment: any, action: any) => {
const membershipForWorkspace = await Membership.findOne({ workspace: workspaceId, user: userId })
if (!membershipForWorkspace) {
return false
}

const deniedMembershipPermissions = membershipForWorkspace.deniedPermissions;
const isDisallowed = _.some(deniedMembershipPermissions, { environmentSlug: environment, ability: action });

if (isDisallowed) {
return false
}

return true
}

export const userHasWriteOnlyAbility = async (userId: any, workspaceId: any, environment: any) => {
const membershipForWorkspace = await Membership.findOne({ workspace: workspaceId, user: userId })
if (!membershipForWorkspace) {
return false
}

const deniedMembershipPermissions = membershipForWorkspace.deniedPermissions;
const isWriteDisallowed = _.some(deniedMembershipPermissions, { environmentSlug: environment, ability: ABILITY_WRITE });
const isReadDisallowed = _.some(deniedMembershipPermissions, { environmentSlug: environment, ability: ABILITY_READ });

// case: you have write only if read is blocked and write is not
if (isReadDisallowed && !isWriteDisallowed) {
return true
}

return false
}

export const userHasNoAbility = async (userId: any, workspaceId: any, environment: any) => {
const membershipForWorkspace = await Membership.findOne({ workspace: workspaceId, user: userId })
if (!membershipForWorkspace) {
return true
}

const deniedMembershipPermissions = membershipForWorkspace.deniedPermissions;
const isWriteDisallowed = _.some(deniedMembershipPermissions, { environmentSlug: environment, ability: ABILITY_WRITE });
const isReadBlocked = _.some(deniedMembershipPermissions, { environmentSlug: environment, ability: ABILITY_READ });

if (isReadBlocked && isWriteDisallowed) {
return true
}

return false
}
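These helpers classify access purely from a membership's `deniedPermissions` array via lodash's `_.some` object-shorthand matcher. A worked example on a toy array; the literal "read"/"write" strings stand in for `ABILITY_READ`/`ABILITY_WRITE`, whose exact values are not shown in this excerpt:

```ts
import _ from "lodash";

// Toy deniedPermissions array, shaped like the membership documents above.
const deniedPermissions = [
  { environmentSlug: "prod", ability: "read" },     // assumed ABILITY_READ value
  { environmentSlug: "prod", ability: "write" },    // assumed ABILITY_WRITE value
  { environmentSlug: "staging", ability: "read" },
];

const readBlocked = (env: string) =>
  _.some(deniedPermissions, { environmentSlug: env, ability: "read" });
const writeBlocked = (env: string) =>
  _.some(deniedPermissions, { environmentSlug: env, ability: "write" });

console.log(readBlocked("prod") && writeBlocked("prod"));        // true  -> userHasNoAbility
console.log(readBlocked("staging") && !writeBlocked("staging")); // true  -> userHasWriteOnlyAbility
console.log(readBlocked("dev") || writeBlocked("dev"));          // false -> full access
```

Note that a user with no membership at all returns `false` from `userHasWorkspaceAccess` but `true` from `userHasNoAbility`, so the two checks fail closed in the same direction.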
@@ -1,9 +1,19 @@
import * as Sentry from '@sentry/node';
import { Types } from 'mongoose';
import {
Log,
IAction
} from '../models';

/**
* Create an (audit) log
* @param {Object} obj
* @param {Types.ObjectId} obj.userId - id of user associated with the log
* @param {Types.ObjectId} obj.workspaceId - id of workspace associated with the log
* @param {IAction[]} obj.actions - actions to include in log
* @param {String} obj.channel - channel (web/cli/auto) associated with the log
* @param {String} obj.ipAddress - ip address associated with the log
* @returns {Log} log - new audit log
*/
const createLogHelper = async ({
userId,
workspaceId,

@@ -11,8 +21,8 @@ const createLogHelper = async ({
channel,
ipAddress
}: {
userId: string;
workspaceId: string;
userId: Types.ObjectId;
workspaceId?: Types.ObjectId;
actions: IAction[];
channel: string;
ipAddress: string;

@@ -21,7 +31,7 @@ const createLogHelper = async ({
try {
log = await new Log({
user: userId,
workspace: workspaceId,
workspace: workspaceId ?? undefined,
actionNames: actions.map((a) => a.name),
actions,
channel,
@ -1,10 +1,18 @@
|
||||
import { Schema, model, Types } from 'mongoose';
|
||||
import {
|
||||
ACTION_LOGIN,
|
||||
ACTION_LOGOUT,
|
||||
ACTION_ADD_SECRETS,
|
||||
ACTION_UPDATE_SECRETS,
|
||||
ACTION_READ_SECRETS,
|
||||
ACTION_DELETE_SECRETS
|
||||
} from '../../variables';
|
||||
|
||||
export interface IAction {
|
||||
name: string;
|
||||
user?: Types.ObjectId,
|
||||
workspace?: Types.ObjectId,
|
||||
payload: {
|
||||
payload?: {
|
||||
secretVersions?: Types.ObjectId[]
|
||||
}
|
||||
}
|
||||
@ -13,7 +21,15 @@ const actionSchema = new Schema<IAction>(
|
||||
{
|
||||
name: {
|
||||
type: String,
|
||||
required: true
|
||||
required: true,
|
||||
enum: [
|
||||
ACTION_LOGIN,
|
||||
ACTION_LOGOUT,
|
||||
ACTION_ADD_SECRETS,
|
||||
ACTION_UPDATE_SECRETS,
|
||||
ACTION_READ_SECRETS,
|
||||
ACTION_DELETE_SECRETS
|
||||
]
|
||||
},
|
||||
user: {
|
||||
type: Schema.Types.ObjectId,
|
||||
|
@ -1,5 +1,7 @@
|
||||
import { Schema, model, Types } from 'mongoose';
|
||||
import {
|
||||
ACTION_LOGIN,
|
||||
ACTION_LOGOUT,
|
||||
ACTION_ADD_SECRETS,
|
||||
ACTION_UPDATE_SECRETS,
|
||||
ACTION_READ_SECRETS,
|
||||
@ -29,6 +31,8 @@ const logSchema = new Schema<ILog>(
|
||||
actionNames: {
|
||||
type: [String],
|
||||
enum: [
|
||||
ACTION_LOGIN,
|
||||
ACTION_LOGOUT,
|
||||
ACTION_ADD_SECRETS,
|
||||
ACTION_UPDATE_SECRETS,
|
||||
ACTION_READ_SECRETS,
|
||||
|
@ -21,6 +21,7 @@ export interface ISecretVersion {
|
||||
secretValueIV: string;
|
||||
secretValueTag: string;
|
||||
secretValueHash: string;
|
||||
tags?: string[];
|
||||
}
|
||||
|
||||
const secretVersionSchema = new Schema<ISecretVersion>(
|
||||
@ -88,7 +89,12 @@ const secretVersionSchema = new Schema<ISecretVersion>(
|
||||
},
|
||||
secretValueHash: {
|
||||
type: String
|
||||
}
|
||||
},
|
||||
tags: {
|
||||
ref: 'Tag',
|
||||
type: [Schema.Types.ObjectId],
|
||||
default: []
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true
|
||||
|
@ -1,14 +1,12 @@
|
||||
import { Types } from 'mongoose';
|
||||
import {
|
||||
Log,
|
||||
Action,
|
||||
IAction
|
||||
} from '../models';
|
||||
import {
|
||||
createLogHelper
|
||||
} from '../helpers/log';
|
||||
import {
|
||||
createActionSecretHelper
|
||||
createActionHelper
|
||||
} from '../helpers/action';
|
||||
import EELicenseService from './EELicenseService';
|
||||
|
||||
@ -33,8 +31,8 @@ class EELogService {
|
||||
channel,
|
||||
ipAddress
|
||||
}: {
|
||||
userId: string;
|
||||
workspaceId: string;
|
||||
userId: Types.ObjectId;
|
||||
workspaceId?: Types.ObjectId;
|
||||
actions: IAction[];
|
||||
channel: string;
|
||||
ipAddress: string;
|
||||
@ -50,26 +48,26 @@ class EELogService {
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an (audit) action for secrets including
|
||||
* add, delete, update, and read actions.
|
||||
* Create an (audit) action
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.name - name of action
|
||||
* @param {ObjectId[]} obj.secretIds - secret ids
|
||||
* @param {Types.ObjectId} obj.userId - id of user associated with the action
|
||||
* @param {Types.ObjectId} obj.workspaceId - id of workspace associated with the action
|
||||
* @param {ObjectId[]} obj.secretIds - ids of secrets associated with the action
|
||||
* @returns {Action} action - new action
|
||||
*/
|
||||
static async createActionSecret({
|
||||
static async createAction({
|
||||
name,
|
||||
userId,
|
||||
workspaceId,
|
||||
secretIds
|
||||
}: {
|
||||
name: string;
|
||||
userId: string;
|
||||
workspaceId: string;
|
||||
secretIds: Types.ObjectId[];
|
||||
userId: Types.ObjectId;
|
||||
workspaceId?: Types.ObjectId;
|
||||
secretIds?: Types.ObjectId[];
|
||||
}) {
|
||||
if (!EELicenseService.isLicenseValid) return null;
|
||||
return await createActionSecretHelper({
|
||||
return await createActionHelper({
|
||||
name,
|
||||
userId,
|
||||
workspaceId,
|
||||
|
@ -1,5 +1,4 @@
|
||||
import mongoose from 'mongoose';
|
||||
import { ISecret, Secret } from '../models';
|
||||
import { EESecretService } from '../ee/services';
|
||||
import { getLogger } from '../utils/logger';
|
||||
|
||||
@ -16,6 +15,10 @@ const initDatabaseHelper = async ({
|
||||
}) => {
|
||||
try {
|
||||
await mongoose.connect(mongoURL);
|
||||
|
||||
// allow empty strings to pass the required validator
|
||||
mongoose.Schema.Types.String.checkRequired(v => typeof v === 'string');
|
||||
|
||||
getLogger("database").info("Database connection established");
|
||||
|
||||
await EESecretService.initSecretVersioning();
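The checkRequired override above relaxes mongoose's default rule, under which an empty string fails a required String path; a small illustration of the effect, using a hypothetical model defined only for this sketch.

import mongoose from 'mongoose';

// Hypothetical model used only to show the effect of the override.
const Example = mongoose.model('Example', new mongoose.Schema({
  value: { type: String, required: true }
}));

const demo = async () => {
  // With checkRequired set to `typeof v === 'string'`, an empty string now passes validation.
  await new Example({ value: '' }).validate();
};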
|
||||
|
@ -30,6 +30,7 @@ interface Update {
|
||||
* @param {String} obj.workspaceId - id of workspace
|
||||
* @param {String} obj.integration - name of integration
|
||||
* @param {String} obj.code - code
|
||||
* @returns {IntegrationAuth} integrationAuth - integration auth after OAuth2 code-token exchange
|
||||
*/
|
||||
const handleOAuthExchangeHelper = async ({
|
||||
workspaceId,
|
||||
@ -42,7 +43,6 @@ const handleOAuthExchangeHelper = async ({
|
||||
code: string;
|
||||
environment: string;
|
||||
}) => {
|
||||
let action;
|
||||
let integrationAuth;
|
||||
try {
|
||||
const bot = await Bot.findOne({
|
||||
@ -94,25 +94,18 @@ const handleOAuthExchangeHelper = async ({
|
||||
// set integration auth access token
|
||||
await setIntegrationAuthAccessHelper({
|
||||
integrationAuthId: integrationAuth._id.toString(),
|
||||
accessId: null,
|
||||
accessToken: res.accessToken,
|
||||
accessExpiresAt: res.accessExpiresAt
|
||||
});
|
||||
}
|
||||
|
||||
// initialize new integration after exchange
|
||||
await new Integration({
|
||||
workspace: workspaceId,
|
||||
isActive: false,
|
||||
app: null,
|
||||
environment,
|
||||
integration,
|
||||
integrationAuth: integrationAuth._id
|
||||
}).save();
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to handle OAuth2 code-token exchange')
|
||||
}
|
||||
|
||||
return integrationAuth;
|
||||
}
|
||||
/**
|
||||
* Sync/push environment variables in workspace with id [workspaceId] to
|
||||
@ -146,7 +139,7 @@ const syncIntegrationsHelper = async ({
|
||||
if (!integrationAuth) throw new Error('Failed to find integration auth');
|
||||
|
||||
// get integration auth access token
|
||||
const accessToken = await getIntegrationAuthAccessHelper({
|
||||
const access = await getIntegrationAuthAccessHelper({
|
||||
integrationAuthId: integration.integrationAuth.toString()
|
||||
});
|
||||
|
||||
@ -155,7 +148,8 @@ const syncIntegrationsHelper = async ({
|
||||
integration,
|
||||
integrationAuth,
|
||||
secrets,
|
||||
accessToken
|
||||
accessId: access.accessId,
|
||||
accessToken: access.accessToken
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
@ -211,12 +205,12 @@ const syncIntegrationsHelper = async ({
|
||||
* @returns {String} accessToken - decrypted access token
|
||||
*/
|
||||
const getIntegrationAuthAccessHelper = async ({ integrationAuthId }: { integrationAuthId: string }) => {
|
||||
let accessId;
|
||||
let accessToken;
|
||||
|
||||
try {
|
||||
const integrationAuth = await IntegrationAuth
|
||||
.findById(integrationAuthId)
|
||||
.select('workspace integration +accessCiphertext +accessIV +accessTag +accessExpiresAt + refreshCiphertext');
|
||||
.select('workspace integration +accessCiphertext +accessIV +accessTag +accessExpiresAt + refreshCiphertext +accessIdCiphertext +accessIdIV +accessIdTag');
|
||||
|
||||
if (!integrationAuth) throw UnauthorizedRequestError({message: 'Failed to locate Integration Authentication credentials'});
|
||||
|
||||
@ -240,6 +234,15 @@ const getIntegrationAuthAccessHelper = async ({ integrationAuthId }: { integrati
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (integrationAuth?.accessIdCiphertext && integrationAuth?.accessIdIV && integrationAuth?.accessIdTag) {
|
||||
accessId = await BotService.decryptSymmetric({
|
||||
workspaceId: integrationAuth.workspace.toString(),
|
||||
ciphertext: integrationAuth.accessIdCiphertext as string,
|
||||
iv: integrationAuth.accessIdIV as string,
|
||||
tag: integrationAuth.accessIdTag as string
|
||||
});
|
||||
}
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
@ -250,7 +253,10 @@ const getIntegrationAuthAccessHelper = async ({ integrationAuthId }: { integrati
|
||||
throw new Error('Failed to get integration access token');
|
||||
}
|
||||
|
||||
return accessToken;
|
||||
return ({
|
||||
accessId,
|
||||
accessToken
|
||||
});
|
||||
}
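Because getIntegrationAuthAccessHelper now resolves to an object rather than a bare token, callers destructure both values; a minimal sketch consistent with the syncIntegrationsHelper call above (the surrounding async context and the integration document are assumed).

// Inside an async function; `integration` is an assumed Integration document.
const { accessId, accessToken } = await getIntegrationAuthAccessHelper({
  integrationAuthId: integration.integrationAuth.toString()
});
// accessId stays undefined unless encrypted access-id fields were stored for this provider.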
|
||||
|
||||
/**
|
||||
@ -300,9 +306,9 @@ const setIntegrationAuthRefreshHelper = async ({
|
||||
}
|
||||
|
||||
/**
|
||||
* Encrypt access token [accessToken] using the bot's copy
|
||||
* of the workspace key for workspace belonging to integration auth
|
||||
* with id [integrationAuthId] and store it along with [accessExpiresAt]
|
||||
* Encrypt access token [accessToken] and (optionally) access id [accessId]
|
||||
* using the bot's copy of the workspace key for workspace belonging to
|
||||
* integration auth with id [integrationAuthId] and store it along with [accessExpiresAt]
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.integrationAuthId - id of integration auth
|
||||
* @param {String} obj.accessToken - access token
|
||||
@ -310,10 +316,12 @@ const setIntegrationAuthRefreshHelper = async ({
|
||||
*/
|
||||
const setIntegrationAuthAccessHelper = async ({
|
||||
integrationAuthId,
|
||||
accessId,
|
||||
accessToken,
|
||||
accessExpiresAt
|
||||
}: {
|
||||
integrationAuthId: string;
|
||||
accessId: string | null;
|
||||
accessToken: string;
|
||||
accessExpiresAt: Date | undefined;
|
||||
}) => {
|
||||
@ -323,17 +331,28 @@ const setIntegrationAuthAccessHelper = async ({
|
||||
|
||||
if (!integrationAuth) throw new Error('Failed to find integration auth');
|
||||
|
||||
const obj = await BotService.encryptSymmetric({
|
||||
const encryptedAccessTokenObj = await BotService.encryptSymmetric({
|
||||
workspaceId: integrationAuth.workspace.toString(),
|
||||
plaintext: accessToken
|
||||
});
|
||||
|
||||
let encryptedAccessIdObj;
|
||||
if (accessId) {
|
||||
encryptedAccessIdObj = await BotService.encryptSymmetric({
|
||||
workspaceId: integrationAuth.workspace.toString(),
|
||||
plaintext: accessId
|
||||
});
|
||||
}
|
||||
|
||||
integrationAuth = await IntegrationAuth.findOneAndUpdate({
|
||||
_id: integrationAuthId
|
||||
}, {
|
||||
accessCiphertext: obj.ciphertext,
|
||||
accessIV: obj.iv,
|
||||
accessTag: obj.tag,
|
||||
accessIdCiphertext: encryptedAccessIdObj?.ciphertext ?? undefined,
|
||||
accessIdIV: encryptedAccessIdObj?.iv ?? undefined,
|
||||
accessIdTag: encryptedAccessIdObj?.tag ?? undefined,
|
||||
accessCiphertext: encryptedAccessTokenObj.ciphertext,
|
||||
accessIV: encryptedAccessTokenObj.iv,
|
||||
accessTag: encryptedAccessTokenObj.tag,
|
||||
accessExpiresAt
|
||||
}, {
|
||||
new: true
|
||||
|
@ -3,6 +3,7 @@ import Stripe from 'stripe';
|
||||
import {
|
||||
STRIPE_SECRET_KEY,
|
||||
STRIPE_PRODUCT_STARTER,
|
||||
STRIPE_PRODUCT_TEAM,
|
||||
STRIPE_PRODUCT_PRO
|
||||
} from '../config';
|
||||
const stripe = new Stripe(STRIPE_SECRET_KEY, {
|
||||
@ -14,6 +15,7 @@ import { Organization, MembershipOrg } from '../models';
|
||||
|
||||
const productToPriceMap = {
|
||||
starter: STRIPE_PRODUCT_STARTER,
|
||||
team: STRIPE_PRODUCT_TEAM,
|
||||
pro: STRIPE_PRODUCT_PRO
|
||||
};
|
||||
|
||||
@ -55,7 +57,7 @@ const createOrganization = async ({
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email });
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to create organization');
|
||||
throw new Error(`Failed to create organization [err=${err}]`);
|
||||
}
|
||||
|
||||
return organization;
|
||||
|
@ -12,14 +12,17 @@ import {
|
||||
import {
|
||||
IAction
|
||||
} from '../ee/models';
|
||||
import {
|
||||
SECRET_SHARED,
|
||||
import {
|
||||
SECRET_SHARED,
|
||||
SECRET_PERSONAL,
|
||||
ACTION_ADD_SECRETS,
|
||||
ACTION_UPDATE_SECRETS,
|
||||
ACTION_DELETE_SECRETS,
|
||||
ACTION_READ_SECRETS
|
||||
} from '../variables';
|
||||
import _ from 'lodash';
|
||||
import { ABILITY_WRITE } from '../variables/organization';
|
||||
import { BadRequestError, UnauthorizedRequestError } from '../utils/errors';
|
||||
|
||||
/**
|
||||
* Validate that user with id [userId] can modify secrets with ids [secretIds]
|
||||
@ -34,7 +37,7 @@ const validateSecrets = async ({
|
||||
}: {
|
||||
userId: string;
|
||||
secretIds: string[];
|
||||
}) =>{
|
||||
}) => {
|
||||
let secrets;
|
||||
try {
|
||||
secrets = await Secret.find({
|
||||
@ -42,20 +45,31 @@ const validateSecrets = async ({
|
||||
$in: secretIds.map((secretId: string) => new Types.ObjectId(secretId))
|
||||
}
|
||||
});
|
||||
|
||||
const workspaceIdsSet = new Set((await Membership.find({
|
||||
user: userId
|
||||
}, 'workspace'))
|
||||
.map((m) => m.workspace.toString()));
|
||||
|
||||
|
||||
if (secrets.length != secretIds.length) {
|
||||
throw BadRequestError({ message: 'Unable to validate some secrets' })
|
||||
}
|
||||
|
||||
const userMemberships = await Membership.find({ user: userId })
|
||||
const userMembershipById = _.keyBy(userMemberships, 'workspace');
|
||||
const workspaceIdsSet = new Set(userMemberships.map((m) => m.workspace.toString()));
|
||||
|
||||
// for each secret check if the secret belongs to a workspace the user is a member of
|
||||
secrets.forEach((secret: ISecret) => {
|
||||
if (!workspaceIdsSet.has(secret.workspace.toString())) {
|
||||
throw new Error('Failed to validate secret');
|
||||
if (workspaceIdsSet.has(secret.workspace.toString())) {
|
||||
const deniedMembershipPermissions = userMembershipById[secret.workspace.toString()].deniedPermissions;
|
||||
const isDisallowed = _.some(deniedMembershipPermissions, { environmentSlug: secret.environment, ability: ABILITY_WRITE });
|
||||
|
||||
if (isDisallowed) {
|
||||
throw UnauthorizedRequestError({ message: 'You do not have the required permissions to perform this action' });
|
||||
}
|
||||
} else {
|
||||
throw BadRequestError({ message: 'You cannot edit secrets of a workspace you are not a member of' });
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
} catch (err) {
|
||||
throw new Error('Failed to validate secrets');
|
||||
throw BadRequestError({ message: 'Unable to validate secrets' })
|
||||
}
|
||||
|
||||
return secrets;
|
||||
@ -127,13 +141,13 @@ const v1PushSecrets = async ({
|
||||
workspaceId,
|
||||
environment
|
||||
});
|
||||
|
||||
const oldSecretsObj: any = oldSecrets.reduce((accumulator, s: any) =>
|
||||
|
||||
const oldSecretsObj: any = oldSecrets.reduce((accumulator, s: any) =>
|
||||
({ ...accumulator, [`${s.type}-${s.secretKeyHash}`]: s })
|
||||
, {});
|
||||
const newSecretsObj: any = secrets.reduce((accumulator, s) =>
|
||||
, {});
|
||||
const newSecretsObj: any = secrets.reduce((accumulator, s) =>
|
||||
({ ...accumulator, [`${s.type}-${s.hashKey}`]: s })
|
||||
, {});
|
||||
, {});
|
||||
|
||||
// handle deleting secrets
|
||||
const toDelete = oldSecrets
|
||||
@ -150,12 +164,12 @@ const v1PushSecrets = async ({
|
||||
secretIds: toDelete
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
const toUpdate = oldSecrets
|
||||
.filter((s) => {
|
||||
if (`${s.type}-${s.secretKeyHash}` in newSecretsObj) {
|
||||
if (s.secretValueHash !== newSecretsObj[`${s.type}-${s.secretKeyHash}`].hashValue
|
||||
|| s.secretCommentHash !== newSecretsObj[`${s.type}-${s.secretKeyHash}`].hashComment) {
|
||||
if (s.secretValueHash !== newSecretsObj[`${s.type}-${s.secretKeyHash}`].hashValue
|
||||
|| s.secretCommentHash !== newSecretsObj[`${s.type}-${s.secretKeyHash}`].hashComment) {
|
||||
// case: filter secrets where value or comment changed
|
||||
return true;
|
||||
}
|
||||
@ -165,7 +179,7 @@ const v1PushSecrets = async ({
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return false;
|
||||
});
|
||||
|
||||
@ -217,7 +231,7 @@ const v1PushSecrets = async ({
|
||||
};
|
||||
});
|
||||
await Secret.bulkWrite(operations as any);
|
||||
|
||||
|
||||
// (EE) add secret versions for updated secrets
|
||||
await EESecretService.addSecretVersions({
|
||||
secretVersions: toUpdate.map(({
|
||||
@ -245,7 +259,7 @@ const v1PushSecrets = async ({
|
||||
secretValueTag: newSecret.tagValue,
|
||||
secretValueHash: newSecret.hashValue
|
||||
})
|
||||
})
|
||||
})
|
||||
});
|
||||
|
||||
// handle adding new secrets
|
||||
@ -319,7 +333,7 @@ const v1PushSecrets = async ({
|
||||
}))
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
// (EE) take a secret snapshot
|
||||
await EESecretService.takeSecretSnapshot({
|
||||
workspaceId
|
||||
@ -344,7 +358,7 @@ const v1PushSecrets = async ({
|
||||
* @param {String} obj.channel - channel (web/cli/auto)
|
||||
* @param {String} obj.ipAddress - ip address of request to push secrets
|
||||
*/
|
||||
const v2PushSecrets = async ({
|
||||
const v2PushSecrets = async ({
|
||||
userId,
|
||||
workspaceId,
|
||||
environment,
|
||||
@ -362,20 +376,20 @@ const v1PushSecrets = async ({
|
||||
// TODO: clean up function and fix up types
|
||||
try {
|
||||
const actions: IAction[] = [];
|
||||
|
||||
|
||||
// construct useful data structures
|
||||
const oldSecrets = await getSecrets({
|
||||
userId,
|
||||
workspaceId,
|
||||
environment
|
||||
});
|
||||
|
||||
const oldSecretsObj: any = oldSecrets.reduce((accumulator, s: any) =>
|
||||
|
||||
const oldSecretsObj: any = oldSecrets.reduce((accumulator, s: any) =>
|
||||
({ ...accumulator, [`${s.type}-${s.secretKeyHash}`]: s })
|
||||
, {});
|
||||
const newSecretsObj: any = secrets.reduce((accumulator, s) =>
|
||||
, {});
|
||||
const newSecretsObj: any = secrets.reduce((accumulator, s) =>
|
||||
({ ...accumulator, [`${s.type}-${s.secretKeyHash}`]: s })
|
||||
, {});
|
||||
, {});
|
||||
|
||||
// handle deleting secrets
|
||||
const toDelete = oldSecrets
|
||||
@ -391,22 +405,22 @@ const v1PushSecrets = async ({
|
||||
await EESecretService.markDeletedSecretVersions({
|
||||
secretIds: toDelete
|
||||
});
|
||||
|
||||
const deleteAction = await EELogService.createActionSecret({
|
||||
|
||||
const deleteAction = await EELogService.createAction({
|
||||
name: ACTION_DELETE_SECRETS,
|
||||
userId,
|
||||
workspaceId,
|
||||
userId: new Types.ObjectId(userId),
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
secretIds: toDelete
|
||||
});
|
||||
|
||||
deleteAction && actions.push(deleteAction);
|
||||
}
|
||||
|
||||
|
||||
const toUpdate = oldSecrets
|
||||
.filter((s) => {
|
||||
if (`${s.type}-${s.secretKeyHash}` in newSecretsObj) {
|
||||
if (s.secretValueHash !== newSecretsObj[`${s.type}-${s.secretKeyHash}`].secretValueHash
|
||||
|| s.secretCommentHash !== newSecretsObj[`${s.type}-${s.secretKeyHash}`].secretCommentHash) {
|
||||
if (s.secretValueHash !== newSecretsObj[`${s.type}-${s.secretKeyHash}`].secretValueHash
|
||||
|| s.secretCommentHash !== newSecretsObj[`${s.type}-${s.secretKeyHash}`].secretCommentHash) {
|
||||
// case: filter secrets where value or comment changed
|
||||
return true;
|
||||
}
|
||||
@ -416,7 +430,7 @@ const v1PushSecrets = async ({
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return false;
|
||||
});
|
||||
|
||||
@ -469,7 +483,7 @@ const v1PushSecrets = async ({
|
||||
};
|
||||
});
|
||||
await Secret.bulkWrite(operations as any);
|
||||
|
||||
|
||||
// (EE) add secret versions for updated secrets
|
||||
await EESecretService.addSecretVersions({
|
||||
secretVersions: toUpdate.map((s) => {
|
||||
@ -482,13 +496,13 @@ const v1PushSecrets = async ({
|
||||
environment: s.environment,
|
||||
isDeleted: false
|
||||
})
|
||||
})
|
||||
})
|
||||
});
|
||||
|
||||
const updateAction = await EELogService.createActionSecret({
|
||||
const updateAction = await EELogService.createAction({
|
||||
name: ACTION_UPDATE_SECRETS,
|
||||
userId,
|
||||
workspaceId,
|
||||
userId: new Types.ObjectId(userId),
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
secretIds: toUpdate.map((u) => u._id)
|
||||
});
|
||||
|
||||
@ -507,29 +521,30 @@ const v1PushSecrets = async ({
|
||||
workspace: workspaceId,
|
||||
type: toAdd[idx].type,
|
||||
environment,
|
||||
...( toAdd[idx].type === 'personal' ? { user: userId } : {})
|
||||
...(toAdd[idx].type === 'personal' ? { user: userId } : {})
|
||||
}))
|
||||
);
|
||||
|
||||
// (EE) add secret versions for new secrets
|
||||
EESecretService.addSecretVersions({
|
||||
secretVersions: newSecrets.map((secretDocument) => {
|
||||
secretVersions: newSecrets.map((secretDocument) => {
|
||||
return {
|
||||
...secretDocument.toObject(),
|
||||
secret: secretDocument._id,
|
||||
isDeleted: false
|
||||
}})
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
const addAction = await EELogService.createActionSecret({
|
||||
const addAction = await EELogService.createAction({
|
||||
name: ACTION_ADD_SECRETS,
|
||||
userId,
|
||||
workspaceId,
|
||||
userId: new Types.ObjectId(userId),
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
secretIds: newSecrets.map((n) => n._id)
|
||||
});
|
||||
addAction && actions.push(addAction);
|
||||
}
|
||||
|
||||
|
||||
// (EE) take a secret snapshot
|
||||
await EESecretService.takeSecretSnapshot({
|
||||
workspaceId
|
||||
@ -538,8 +553,8 @@ const v1PushSecrets = async ({
|
||||
// (EE) create (audit) log
|
||||
if (actions.length > 0) {
|
||||
await EELogService.createLog({
|
||||
userId,
|
||||
workspaceId,
|
||||
userId: new Types.ObjectId(userId),
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
actions,
|
||||
channel,
|
||||
ipAddress
|
||||
@ -560,7 +575,7 @@ const v1PushSecrets = async ({
|
||||
* @param {String} obj.workspaceId - id of workspace to pull from
|
||||
* @param {String} obj.environment - environment for secrets
|
||||
*/
|
||||
const getSecrets = async ({
|
||||
const getSecrets = async ({
|
||||
userId,
|
||||
workspaceId,
|
||||
environment
|
||||
@ -570,7 +585,7 @@ const v1PushSecrets = async ({
|
||||
environment: string;
|
||||
}): Promise<ISecret[]> => {
|
||||
let secrets: any; // TODO: FIX any
|
||||
|
||||
|
||||
try {
|
||||
// get shared workspace secrets
|
||||
const sharedSecrets = await Secret.find({
|
||||
@ -622,7 +637,7 @@ const pullSecrets = async ({
|
||||
ipAddress: string;
|
||||
}): Promise<ISecret[]> => {
|
||||
let secrets: any;
|
||||
|
||||
|
||||
try {
|
||||
secrets = await getSecrets({
|
||||
userId,
|
||||
@ -630,16 +645,16 @@ const pullSecrets = async ({
|
||||
environment
|
||||
})
|
||||
|
||||
const readAction = await EELogService.createActionSecret({
|
||||
const readAction = await EELogService.createAction({
|
||||
name: ACTION_READ_SECRETS,
|
||||
userId,
|
||||
workspaceId,
|
||||
userId: new Types.ObjectId(userId),
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
secretIds: secrets.map((n: any) => n._id)
|
||||
});
|
||||
|
||||
readAction && await EELogService.createLog({
|
||||
userId,
|
||||
workspaceId,
|
||||
userId: new Types.ObjectId(userId),
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
actions: [readAction],
|
||||
channel,
|
||||
ipAddress
|
||||
|
@ -7,6 +7,7 @@ import { createWorkspace } from './workspace';
|
||||
import { addMemberships } from './membership';
|
||||
import { OWNER, ADMIN, ACCEPTED } from '../variables';
|
||||
import { sendMail } from '../helpers/nodemailer';
|
||||
import { EMAIL_TOKEN_LIFETIME } from '../config';
|
||||
|
||||
/**
|
||||
* Send magic link to verify email to [email]
|
||||
@ -25,7 +26,8 @@ const sendEmailVerification = async ({ email }: { email: string }) => {
|
||||
{
|
||||
email,
|
||||
token,
|
||||
createdAt: new Date()
|
||||
createdAt: new Date(),
|
||||
ttl: Math.floor(+new Date() / 1000) + EMAIL_TOKEN_LIFETIME // time in seconds, i.e unix
|
||||
},
|
||||
{ upsert: true, new: true }
|
||||
);
|
||||
@ -62,11 +64,20 @@ const checkEmailVerification = async ({
|
||||
code: string;
|
||||
}) => {
|
||||
try {
|
||||
const token = await Token.findOneAndDelete({
|
||||
const token = await Token.findOne({
|
||||
email,
|
||||
token: code
|
||||
});
|
||||
|
||||
|
||||
if (token && Math.floor(Date.now() / 1000) > token.ttl) {
|
||||
await Token.deleteOne({
|
||||
email,
|
||||
token: code
|
||||
});
|
||||
|
||||
throw new Error('Verification token has expired')
|
||||
}
|
||||
|
||||
if (!token) throw new Error('Failed to find email verification token');
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
@ -116,7 +127,7 @@ const initializeDefaultOrg = async ({
|
||||
roles: [ADMIN]
|
||||
});
|
||||
} catch (err) {
|
||||
throw new Error('Failed to initialize default organization and workspace');
|
||||
throw new Error(`Failed to initialize default organization and workspace [err=${err}]`);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -8,6 +8,7 @@ import { DatabaseService } from './services';
|
||||
import { setUpHealthEndpoint } from './services/health';
|
||||
import { initSmtp } from './services/smtp';
|
||||
import { setTransporter } from './helpers/nodemailer';
|
||||
import { createTestUserForDevelopment } from './utils/addDevelopmentUser';
|
||||
|
||||
DatabaseService.initDatabase(MONGO_URL);
|
||||
|
||||
@ -23,3 +24,5 @@ if (NODE_ENV !== 'test') {
|
||||
environment: NODE_ENV
|
||||
});
|
||||
}
|
||||
|
||||
createTestUserForDevelopment()
|
||||
|
@ -1,20 +1,25 @@
|
||||
import axios from 'axios';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Octokit } from '@octokit/rest';
|
||||
import { IIntegrationAuth } from '../models';
|
||||
import axios from "axios";
|
||||
import * as Sentry from "@sentry/node";
|
||||
import { Octokit } from "@octokit/rest";
|
||||
import { IIntegrationAuth } from "../models";
|
||||
import {
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_HEROKU_API_URL,
|
||||
INTEGRATION_VERCEL_API_URL,
|
||||
INTEGRATION_NETLIFY_API_URL,
|
||||
INTEGRATION_RENDER_API_URL,
|
||||
INTEGRATION_FLYIO_API_URL
|
||||
} from '../variables';
|
||||
INTEGRATION_FLYIO_API_URL,
|
||||
INTEGRATION_CIRCLECI_API_URL,
|
||||
} from "../variables";
|
||||
|
||||
/**
|
||||
* Return list of names of apps for integration named [integration]
|
||||
@ -26,7 +31,7 @@ import {
|
||||
*/
|
||||
const getApps = async ({
|
||||
integrationAuth,
|
||||
accessToken
|
||||
accessToken,
|
||||
}: {
|
||||
integrationAuth: IIntegrationAuth;
|
||||
accessToken: string;
|
||||
@ -40,42 +45,56 @@ const getApps = async ({
|
||||
let apps: App[];
|
||||
try {
|
||||
switch (integrationAuth.integration) {
|
||||
case INTEGRATION_AZURE_KEY_VAULT:
|
||||
apps = [];
|
||||
break;
|
||||
case INTEGRATION_AWS_PARAMETER_STORE:
|
||||
apps = [];
|
||||
break;
|
||||
case INTEGRATION_AWS_SECRET_MANAGER:
|
||||
apps = [];
|
||||
break;
|
||||
case INTEGRATION_HEROKU:
|
||||
apps = await getAppsHeroku({
|
||||
accessToken
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_VERCEL:
|
||||
apps = await getAppsVercel({
|
||||
integrationAuth,
|
||||
accessToken
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_NETLIFY:
|
||||
apps = await getAppsNetlify({
|
||||
accessToken
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_GITHUB:
|
||||
apps = await getAppsGithub({
|
||||
accessToken
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_RENDER:
|
||||
apps = await getAppsRender({
|
||||
accessToken
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_FLYIO:
|
||||
apps = await getAppsFlyio({
|
||||
accessToken
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_CIRCLECI:
|
||||
apps = await getAppsCircleCI({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get integration apps');
|
||||
throw new Error("Failed to get integration apps");
|
||||
}
|
||||
|
||||
return apps;
|
||||
@ -94,19 +113,19 @@ const getAppsHeroku = async ({ accessToken }: { accessToken: string }) => {
|
||||
const res = (
|
||||
await axios.get(`${INTEGRATION_HEROKU_API_URL}/apps`, {
|
||||
headers: {
|
||||
Accept: 'application/vnd.heroku+json; version=3',
|
||||
Authorization: `Bearer ${accessToken}`
|
||||
}
|
||||
Accept: "application/vnd.heroku+json; version=3",
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
).data;
|
||||
|
||||
apps = res.map((a: any) => ({
|
||||
name: a.name
|
||||
name: a.name,
|
||||
}));
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get Heroku integration apps');
|
||||
throw new Error("Failed to get Heroku integration apps");
|
||||
}
|
||||
|
||||
return apps;
|
||||
@ -119,10 +138,10 @@ const getAppsHeroku = async ({ accessToken }: { accessToken: string }) => {
|
||||
* @returns {Object[]} apps - names of Vercel apps
|
||||
* @returns {String} apps.name - name of Vercel app
|
||||
*/
|
||||
const getAppsVercel = async ({
|
||||
const getAppsVercel = async ({
|
||||
integrationAuth,
|
||||
accessToken
|
||||
}: {
|
||||
accessToken,
|
||||
}: {
|
||||
integrationAuth: IIntegrationAuth;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
@ -131,23 +150,26 @@ const getAppsVercel = async ({
|
||||
const res = (
|
||||
await axios.get(`${INTEGRATION_VERCEL_API_URL}/v9/projects`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
'Accept-Encoding': 'application/json'
|
||||
},
|
||||
...( integrationAuth?.teamId ? {
|
||||
params: {
|
||||
teamId: integrationAuth.teamId
|
||||
}
|
||||
} : {})
|
||||
...(integrationAuth?.teamId
|
||||
? {
|
||||
params: {
|
||||
teamId: integrationAuth.teamId,
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
})
|
||||
).data;
|
||||
|
||||
|
||||
apps = res.projects.map((a: any) => ({
|
||||
name: a.name
|
||||
name: a.name,
|
||||
}));
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get Vercel integration apps');
|
||||
throw new Error("Failed to get Vercel integration apps");
|
||||
}
|
||||
|
||||
return apps;
|
||||
@ -160,29 +182,26 @@ const getAppsVercel = async ({
|
||||
* @returns {Object[]} apps - names of Netlify sites
|
||||
* @returns {String} apps.name - name of Netlify site
|
||||
*/
|
||||
const getAppsNetlify = async ({
|
||||
accessToken
|
||||
}: {
|
||||
accessToken: string;
|
||||
}) => {
|
||||
const getAppsNetlify = async ({ accessToken }: { accessToken: string }) => {
|
||||
let apps;
|
||||
try {
|
||||
const res = (
|
||||
await axios.get(`${INTEGRATION_NETLIFY_API_URL}/api/v1/sites`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
'Accept-Encoding': 'application/json'
|
||||
}
|
||||
})
|
||||
).data;
|
||||
|
||||
apps = res.map((a: any) => ({
|
||||
name: a.name,
|
||||
appId: a.site_id
|
||||
appId: a.site_id,
|
||||
}));
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get Netlify integration apps');
|
||||
throw new Error("Failed to get Netlify integration apps");
|
||||
}
|
||||
|
||||
return apps;
|
||||
@ -195,35 +214,32 @@ const getAppsNetlify = async ({
|
||||
* @returns {Object[]} apps - names of GitHub repositories
* @returns {String} apps.name - name of GitHub repository
|
||||
*/
|
||||
const getAppsGithub = async ({
|
||||
accessToken
|
||||
}: {
|
||||
accessToken: string;
|
||||
}) => {
|
||||
const getAppsGithub = async ({ accessToken }: { accessToken: string }) => {
|
||||
let apps;
|
||||
try {
|
||||
const octokit = new Octokit({
|
||||
auth: accessToken
|
||||
auth: accessToken,
|
||||
});
|
||||
|
||||
const repos = (await octokit.request(
|
||||
'GET /user/repos{?visibility,affiliation,type,sort,direction,per_page,page,since,before}',
|
||||
{
|
||||
per_page: 100
|
||||
}
|
||||
)).data;
|
||||
const repos = (
|
||||
await octokit.request(
|
||||
"GET /user/repos{?visibility,affiliation,type,sort,direction,per_page,page,since,before}",
|
||||
{
|
||||
per_page: 100,
|
||||
}
|
||||
)
|
||||
).data;
|
||||
|
||||
apps = repos
|
||||
.filter((a:any) => a.permissions.admin === true)
|
||||
.filter((a: any) => a.permissions.admin === true)
|
||||
.map((a: any) => ({
|
||||
name: a.name,
|
||||
owner: a.owner.login
|
||||
})
|
||||
);
|
||||
name: a.name,
|
||||
owner: a.owner.login,
|
||||
}));
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get Github repos');
|
||||
throw new Error("Failed to get Github repos");
|
||||
}
|
||||
|
||||
return apps;
|
||||
@ -237,18 +253,16 @@ const getAppsGithub = async ({
|
||||
* @returns {String} apps.name - name of Render service
|
||||
* @returns {String} apps.appId - id of Render service
|
||||
*/
|
||||
const getAppsRender = async ({
|
||||
accessToken
|
||||
}: {
|
||||
accessToken: string;
|
||||
}) => {
|
||||
const getAppsRender = async ({ accessToken }: { accessToken: string }) => {
|
||||
let apps: any;
|
||||
try {
|
||||
const res = (
|
||||
await axios.get(`${INTEGRATION_RENDER_API_URL}/v1/services`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`
|
||||
}
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
Accept: 'application/json',
|
||||
'Accept-Encoding': 'application/json',
|
||||
},
|
||||
})
|
||||
).data;
|
||||
|
||||
@ -257,14 +271,15 @@ const getAppsRender = async ({
|
||||
name: a.service.name,
|
||||
appId: a.service.id
|
||||
}));
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get Render services');
|
||||
throw new Error("Failed to get Render services");
|
||||
}
|
||||
|
||||
|
||||
return apps;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Return list of apps for Fly.io integration
|
||||
@ -273,11 +288,7 @@ const getAppsRender = async ({
|
||||
* @returns {Object[]} apps - names and ids of Fly.io apps
|
||||
* @returns {String} apps.name - name of Fly.io apps
|
||||
*/
|
||||
const getAppsFlyio = async ({
|
||||
accessToken
|
||||
}: {
|
||||
accessToken: string;
|
||||
}) => {
|
||||
const getAppsFlyio = async ({ accessToken }: { accessToken: string }) => {
|
||||
let apps;
|
||||
try {
|
||||
const query = `
|
||||
@ -291,32 +302,71 @@ const getAppsFlyio = async ({
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const res = (await axios({
|
||||
url: INTEGRATION_FLYIO_API_URL,
|
||||
method: 'post',
|
||||
headers: {
|
||||
'Authorization': 'Bearer ' + accessToken
|
||||
},
|
||||
data: {
|
||||
query,
|
||||
variables: {
|
||||
role: null
|
||||
}
|
||||
}
|
||||
})).data.data.apps.nodes;
|
||||
|
||||
apps = res
|
||||
.map((a: any) => ({
|
||||
name: a.name
|
||||
}));
|
||||
|
||||
const res = (
|
||||
await axios({
|
||||
url: INTEGRATION_FLYIO_API_URL,
|
||||
method: "post",
|
||||
headers: {
|
||||
Authorization: "Bearer " + accessToken,
|
||||
'Accept': 'application/json',
|
||||
'Accept-Encoding': 'application/json',
|
||||
},
|
||||
data: {
|
||||
query,
|
||||
variables: {
|
||||
role: null,
|
||||
},
|
||||
},
|
||||
})
|
||||
).data.data.apps.nodes;
|
||||
|
||||
apps = res.map((a: any) => ({
|
||||
name: a.name,
|
||||
}));
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get Fly.io apps');
|
||||
throw new Error("Failed to get Fly.io apps");
|
||||
}
|
||||
|
||||
return apps;
|
||||
};
|
||||
|
||||
/**
|
||||
* Return list of projects for CircleCI integration
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.accessToken - access token for CircleCI API
|
||||
* @returns {Object[]} apps - names of CircleCI projects
|
||||
* @returns {String} apps.name - name of CircleCI apps
|
||||
*/
|
||||
const getAppsCircleCI = async ({ accessToken }: { accessToken: string }) => {
|
||||
let apps: any;
|
||||
try {
|
||||
const res = (
|
||||
await axios.get(
|
||||
`${INTEGRATION_CIRCLECI_API_URL}/v1.1/projects`,
|
||||
{
|
||||
headers: {
|
||||
"Circle-Token": accessToken,
|
||||
"Accept-Encoding": "application/json",
|
||||
},
|
||||
}
|
||||
)
|
||||
).data
|
||||
|
||||
apps = res?.map((a: any) => {
|
||||
return {
|
||||
name: a?.reponame
|
||||
}
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error("Failed to get CircleCI projects");
|
||||
}
|
||||
|
||||
return apps;
|
||||
}
|
||||
};
|
||||
|
||||
export { getApps };
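A hedged sketch of calling getApps for a CircleCI integration; the integrationAuth document and the token are assumed to come from the database and from getIntegrationAuthAccessHelper respectively.

// Inside an async function; integrationAuth and accessToken are assumed inputs.
const apps = await getApps({
  integrationAuth,
  accessToken
});
// For CircleCI this resolves to entries shaped like { name: '<reponame>' }.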
|
||||
|
@ -1,10 +1,12 @@
|
||||
import axios from 'axios';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import {
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_AZURE_TOKEN_URL,
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
INTEGRATION_VERCEL_TOKEN_URL,
|
||||
INTEGRATION_NETLIFY_TOKEN_URL,
|
||||
@ -12,15 +14,27 @@ import {
|
||||
} from '../variables';
|
||||
import {
|
||||
SITE_URL,
|
||||
CLIENT_ID_AZURE,
|
||||
CLIENT_ID_VERCEL,
|
||||
CLIENT_ID_NETLIFY,
|
||||
CLIENT_ID_GITHUB,
|
||||
CLIENT_SECRET_AZURE,
|
||||
CLIENT_SECRET_HEROKU,
|
||||
CLIENT_SECRET_VERCEL,
|
||||
CLIENT_SECRET_NETLIFY,
|
||||
CLIENT_SECRET_GITHUB
|
||||
} from '../config';
|
||||
|
||||
interface ExchangeCodeAzureResponse {
|
||||
token_type: string;
|
||||
scope: string;
|
||||
expires_in: number;
|
||||
ext_expires_in: number;
|
||||
access_token: string;
|
||||
refresh_token: string;
|
||||
id_token: string;
|
||||
}
|
||||
|
||||
interface ExchangeCodeHerokuResponse {
|
||||
token_type: string;
|
||||
access_token: string;
|
||||
@ -75,6 +89,11 @@ const exchangeCode = async ({
|
||||
|
||||
try {
|
||||
switch (integration) {
|
||||
case INTEGRATION_AZURE_KEY_VAULT:
|
||||
obj = await exchangeCodeAzure({
|
||||
code
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_HEROKU:
|
||||
obj = await exchangeCodeHeroku({
|
||||
code
|
||||
@ -105,6 +124,46 @@ const exchangeCode = async ({
|
||||
return obj;
|
||||
};
|
||||
|
||||
/**
|
||||
* Return [accessToken] for Azure OAuth2 code-token exchange
|
||||
* @param param0
|
||||
*/
|
||||
const exchangeCodeAzure = async ({
|
||||
code
|
||||
}: {
|
||||
code: string;
|
||||
}) => {
|
||||
const accessExpiresAt = new Date();
|
||||
let res: ExchangeCodeAzureResponse;
|
||||
try {
|
||||
res = (await axios.post(
|
||||
INTEGRATION_AZURE_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: 'authorization_code',
|
||||
code: code,
|
||||
scope: 'https://vault.azure.net/.default openid offline_access',
|
||||
client_id: CLIENT_ID_AZURE,
|
||||
client_secret: CLIENT_SECRET_AZURE,
|
||||
redirect_uri: `${SITE_URL}/integrations/azure-key-vault/oauth2/callback`
|
||||
} as any)
|
||||
)).data;
|
||||
|
||||
accessExpiresAt.setSeconds(
|
||||
accessExpiresAt.getSeconds() + res.expires_in
|
||||
);
|
||||
} catch (err: any) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed OAuth2 code-token exchange with Azure');
|
||||
}
|
||||
|
||||
return ({
|
||||
accessToken: res.access_token,
|
||||
refreshToken: res.refresh_token,
|
||||
accessExpiresAt
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return [accessToken], [accessExpiresAt], and [refreshToken] for Heroku
|
||||
* OAuth2 code-token exchange
|
||||
@ -168,7 +227,7 @@ const exchangeCodeVercel = async ({ code }: { code: string }) => {
|
||||
code: code,
|
||||
client_id: CLIENT_ID_VERCEL,
|
||||
client_secret: CLIENT_SECRET_VERCEL,
|
||||
redirect_uri: `${SITE_URL}/vercel`
|
||||
redirect_uri: `${SITE_URL}/integrations/vercel/oauth2/callback`
|
||||
} as any)
|
||||
)
|
||||
).data;
|
||||
@ -208,7 +267,7 @@ const exchangeCodeNetlify = async ({ code }: { code: string }) => {
|
||||
code: code,
|
||||
client_id: CLIENT_ID_NETLIFY,
|
||||
client_secret: CLIENT_SECRET_NETLIFY,
|
||||
redirect_uri: `${SITE_URL}/netlify`
|
||||
redirect_uri: `${SITE_URL}/integrations/netlify/oauth2/callback`
|
||||
} as any)
|
||||
)
|
||||
).data;
|
||||
@ -260,10 +319,11 @@ const exchangeCodeGithub = async ({ code }: { code: string }) => {
|
||||
client_id: CLIENT_ID_GITHUB,
|
||||
client_secret: CLIENT_SECRET_GITHUB,
|
||||
code: code,
|
||||
redirect_uri: `${SITE_URL}/github`
|
||||
redirect_uri: `${SITE_URL}/integrations/github/oauth2/callback`
|
||||
},
|
||||
headers: {
|
||||
Accept: 'application/json'
|
||||
'Accept': 'application/json',
|
||||
'Accept-Encoding': 'application/json'
|
||||
}
|
||||
})
|
||||
).data;
|
||||
|
@ -1,13 +1,26 @@
|
||||
import axios from 'axios';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { INTEGRATION_HEROKU } from '../variables';
|
||||
import { INTEGRATION_AZURE_KEY_VAULT, INTEGRATION_HEROKU } from '../variables';
|
||||
import {
|
||||
CLIENT_SECRET_HEROKU
|
||||
SITE_URL,
|
||||
CLIENT_ID_AZURE,
|
||||
CLIENT_SECRET_AZURE,
|
||||
CLIENT_SECRET_HEROKU
|
||||
} from '../config';
|
||||
import {
|
||||
INTEGRATION_HEROKU_TOKEN_URL
|
||||
INTEGRATION_AZURE_TOKEN_URL,
|
||||
INTEGRATION_HEROKU_TOKEN_URL
|
||||
} from '../variables';
|
||||
|
||||
interface RefreshTokenAzureResponse {
|
||||
token_type: string;
|
||||
scope: string;
|
||||
expires_in: number;
|
||||
ext_expires_in: number;
|
||||
access_token: string;
|
||||
refresh_token: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return new access token by exchanging refresh token [refreshToken] for integration
|
||||
* named [integration]
|
||||
@ -25,6 +38,11 @@ const exchangeRefresh = async ({
|
||||
let accessToken;
|
||||
try {
|
||||
switch (integration) {
|
||||
case INTEGRATION_AZURE_KEY_VAULT:
|
||||
accessToken = await exchangeRefreshAzure({
|
||||
refreshToken
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_HEROKU:
|
||||
accessToken = await exchangeRefreshHeroku({
|
||||
refreshToken
|
||||
@ -40,6 +58,38 @@ const exchangeRefresh = async ({
|
||||
return accessToken;
|
||||
};
|
||||
|
||||
/**
|
||||
* Return new access token by exchanging refresh token [refreshToken] for the
|
||||
* Azure integration
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.refreshToken - refresh token to use to get new access token for Azure
|
||||
* @returns
|
||||
*/
|
||||
const exchangeRefreshAzure = async ({
|
||||
refreshToken
|
||||
}: {
|
||||
refreshToken: string;
|
||||
}) => {
|
||||
try {
|
||||
const res: RefreshTokenAzureResponse = (await axios.post(
|
||||
INTEGRATION_AZURE_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
client_id: CLIENT_ID_AZURE,
|
||||
scope: 'openid offline_access',
|
||||
refresh_token: refreshToken,
|
||||
grant_type: 'refresh_token',
|
||||
client_secret: CLIENT_SECRET_AZURE
|
||||
} as any)
|
||||
)).data;
|
||||
|
||||
return res.access_token;
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to refresh OAuth2 access token for Azure');
|
||||
}
|
||||
}
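A minimal sketch of the refresh path for Azure going through the exchangeRefresh dispatcher shown in this file; the decrypted refresh token and the surrounding async context are assumed.

// Inside an async function; refreshToken is assumed to have been decrypted elsewhere.
const newAccessToken = await exchangeRefresh({
  integration: INTEGRATION_AZURE_KEY_VAULT,
  refreshToken
});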
|
||||
|
||||
/**
|
||||
* Return new access token by exchanging refresh token [refreshToken] for the
|
||||
* Heroku integration
|
||||
@ -52,23 +102,23 @@ const exchangeRefreshHeroku = async ({
|
||||
}: {
|
||||
refreshToken: string;
|
||||
}) => {
|
||||
let accessToken;
|
||||
//TODO: Refactor code to take advantage of using RequestError. It's possible to create new types of errors for more detailed errors
|
||||
try {
|
||||
const res = await axios.post(
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: 'refresh_token',
|
||||
refresh_token: refreshToken,
|
||||
client_secret: CLIENT_SECRET_HEROKU
|
||||
} as any)
|
||||
);
|
||||
|
||||
let accessToken;
|
||||
try {
|
||||
const res = await axios.post(
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: 'refresh_token',
|
||||
refresh_token: refreshToken,
|
||||
client_secret: CLIENT_SECRET_HEROKU
|
||||
} as any)
|
||||
);
|
||||
|
||||
accessToken = res.data.access_token;
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get new OAuth2 access token for Heroku');
|
||||
throw new Error('Failed to refresh OAuth2 access token for Heroku');
|
||||
}
|
||||
|
||||
return accessToken;
|
||||
|
File diff suppressed because it is too large
@ -25,7 +25,6 @@ const requireIntegrationAuthorizationAuth = ({
|
||||
}) => {
|
||||
return async (req: Request, res: Response, next: NextFunction) => {
|
||||
const { integrationAuthId } = req[location];
|
||||
|
||||
const integrationAuth = await IntegrationAuth.findOne({
|
||||
_id: integrationAuthId
|
||||
})
|
||||
@ -46,9 +45,10 @@ const requireIntegrationAuthorizationAuth = ({
|
||||
|
||||
req.integrationAuth = integrationAuth;
|
||||
if (attachAccessToken) {
|
||||
req.accessToken = await IntegrationService.getIntegrationAuthAccess({
|
||||
const access = await IntegrationService.getIntegrationAuthAccess({
|
||||
integrationAuthId: integrationAuth._id.toString()
|
||||
});
|
||||
req.accessToken = access.accessToken;
|
||||
}
|
||||
|
||||
return next();
|
||||
|
@ -16,6 +16,7 @@ import UserAction, { IUserAction } from './userAction';
|
||||
import Workspace, { IWorkspace } from './workspace';
|
||||
import ServiceTokenData, { IServiceTokenData } from './serviceTokenData';
|
||||
import APIKeyData, { IAPIKeyData } from './apiKeyData';
|
||||
import LoginSRPDetail, { ILoginSRPDetail } from './loginSRPDetail';
|
||||
|
||||
export {
|
||||
BackupPrivateKey,
|
||||
@ -53,5 +54,7 @@ export {
|
||||
ServiceTokenData,
|
||||
IServiceTokenData,
|
||||
APIKeyData,
|
||||
IAPIKeyData
|
||||
IAPIKeyData,
|
||||
LoginSRPDetail,
|
||||
ILoginSRPDetail
|
||||
};
|
||||
|
@ -1,12 +1,16 @@
|
||||
import { Schema, model, Types } from 'mongoose';
|
||||
import { Schema, model, Types } from "mongoose";
|
||||
import {
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO
|
||||
} from '../variables';
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
} from "../variables";
|
||||
|
||||
export interface IIntegration {
|
||||
_id: Types.ObjectId;
|
||||
@ -17,68 +21,96 @@ export interface IIntegration {
|
||||
owner: string;
|
||||
targetEnvironment: string;
|
||||
appId: string;
|
||||
integration: 'heroku' | 'vercel' | 'netlify' | 'github' | 'render' | 'flyio';
|
||||
path: string;
|
||||
region: string;
|
||||
integration:
|
||||
| 'azure-key-vault'
|
||||
| 'aws-parameter-store'
|
||||
| 'aws-secret-manager'
|
||||
| 'heroku'
|
||||
| 'vercel'
|
||||
| 'netlify'
|
||||
| 'github'
|
||||
| 'render'
|
||||
| 'flyio'
|
||||
| 'circleci';
|
||||
integrationAuth: Types.ObjectId;
|
||||
}
|
||||
|
||||
const integrationSchema = new Schema<IIntegration>(
|
||||
{
|
||||
workspace: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'Workspace',
|
||||
required: true
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: "Workspace",
|
||||
required: true,
|
||||
},
|
||||
environment: {
|
||||
type: String,
|
||||
required: true
|
||||
required: true,
|
||||
},
|
||||
isActive: {
|
||||
type: Boolean,
|
||||
required: true
|
||||
required: true,
|
||||
},
|
||||
app: {
|
||||
// name of app in provider
|
||||
type: String,
|
||||
default: null
|
||||
default: null,
|
||||
},
|
||||
appId: { // (new)
|
||||
appId: {
|
||||
// (new)
|
||||
// id of app in provider
|
||||
type: String,
|
||||
default: null
|
||||
default: null,
|
||||
},
|
||||
targetEnvironment: { // (new)
|
||||
// target environment
|
||||
targetEnvironment: {
|
||||
// (new)
|
||||
// target environment
|
||||
type: String,
|
||||
default: null
|
||||
default: null,
|
||||
},
|
||||
owner: {
|
||||
// github-specific repo owner-login
|
||||
type: String,
|
||||
default: null,
|
||||
},
|
||||
path: {
|
||||
// aws-parameter-store-specific path
|
||||
type: String,
|
||||
default: null
|
||||
},
|
||||
region: {
|
||||
// aws-parameter-store-specific path
|
||||
type: String,
|
||||
default: null
|
||||
},
|
||||
integration: {
|
||||
type: String,
|
||||
enum: [
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
],
|
||||
required: true
|
||||
required: true,
|
||||
},
|
||||
integrationAuth: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'IntegrationAuth',
|
||||
required: true
|
||||
}
|
||||
ref: "IntegrationAuth",
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true
|
||||
timestamps: true,
|
||||
}
|
||||
);
|
||||
|
||||
const Integration = model<IIntegration>('Integration', integrationSchema);
|
||||
const Integration = model<IIntegration>("Integration", integrationSchema);
|
||||
|
||||
export default Integration;
|
||||
|
@ -1,20 +1,29 @@
|
||||
import { Schema, model, Types } from 'mongoose';
|
||||
import { Schema, model, Types } from "mongoose";
|
||||
import {
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB
|
||||
} from '../variables';
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
} from "../variables";
|
||||
|
||||
export interface IIntegrationAuth {
|
||||
_id: Types.ObjectId;
|
||||
workspace: Types.ObjectId;
|
||||
integration: 'heroku' | 'vercel' | 'netlify' | 'github' | 'render' | 'flyio';
|
||||
integration: 'heroku' | 'vercel' | 'netlify' | 'github' | 'render' | 'flyio' | 'azure-key-vault' | 'circleci' | 'aws-parameter-store' | 'aws-secret-manager';
|
||||
teamId: string;
|
||||
accountId: string;
|
||||
refreshCiphertext?: string;
|
||||
refreshIV?: string;
|
||||
refreshTag?: string;
|
||||
accessIdCiphertext?: string; // new
|
||||
accessIdIV?: string; // new
|
||||
accessIdTag?: string; // new
|
||||
accessCiphertext?: string;
|
||||
accessIV?: string;
|
||||
accessTag?: string;
|
||||
@ -24,64 +33,82 @@ export interface IIntegrationAuth {
|
||||
const integrationAuthSchema = new Schema<IIntegrationAuth>(
|
||||
{
|
||||
workspace: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'Workspace',
|
||||
required: true
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: "Workspace",
|
||||
required: true,
|
||||
},
|
||||
integration: {
|
||||
type: String,
|
||||
enum: [
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
],
|
||||
required: true
|
||||
required: true,
|
||||
},
|
||||
teamId: {
|
||||
// vercel-specific integration param
|
||||
type: String
|
||||
type: String,
|
||||
},
|
||||
accountId: {
|
||||
// netlify-specific integration param
|
||||
type: String
|
||||
type: String,
|
||||
},
|
||||
refreshCiphertext: {
|
||||
type: String,
|
||||
select: false
|
||||
select: false,
|
||||
},
|
||||
refreshIV: {
|
||||
type: String,
|
||||
select: false
|
||||
select: false,
|
||||
},
|
||||
refreshTag: {
|
||||
type: String,
|
||||
select: false,
|
||||
},
|
||||
accessIdCiphertext: {
|
||||
type: String,
|
||||
select: false
|
||||
},
|
||||
accessIdIV: {
|
||||
type: String,
|
||||
select: false
|
||||
},
|
||||
accessIdTag: {
|
||||
type: String,
|
||||
select: false
|
||||
},
|
||||
accessCiphertext: {
|
||||
type: String,
|
||||
select: false
|
||||
select: false,
|
||||
},
|
||||
accessIV: {
|
||||
type: String,
|
||||
select: false
|
||||
select: false,
|
||||
},
|
||||
accessTag: {
|
||||
type: String,
|
||||
select: false
|
||||
select: false,
|
||||
},
|
||||
accessExpiresAt: {
|
||||
type: Date,
|
||||
select: false
|
||||
}
|
||||
select: false,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true
|
||||
timestamps: true,
|
||||
}
|
||||
);
|
||||
|
||||
const IntegrationAuth = model<IIntegrationAuth>(
|
||||
'IntegrationAuth',
|
||||
"IntegrationAuth",
|
||||
integrationAuthSchema
|
||||
);
|
||||
|
||||
|
backend/src/models/loginSRPDetail.ts (new file, +29)
@ -0,0 +1,29 @@
import mongoose, { Schema, model, Types } from 'mongoose';

export interface ILoginSRPDetail {
  _id: Types.ObjectId;
  clientPublicKey: string;
  email: string;
  serverBInt: mongoose.Schema.Types.Buffer;
  expireAt: Date;
}

const loginSRPDetailSchema = new Schema<ILoginSRPDetail>(
  {
    clientPublicKey: {
      type: String,
      required: true
    },
    email: {
      type: String,
      required: true,
      unique: true
    },
    serverBInt: { type: mongoose.Schema.Types.Buffer },
    expireAt: { type: Date }
  }
);

const LoginSRPDetail = model('LoginSRPDetail', loginSRPDetailSchema);

export default LoginSRPDetail;
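A hedged sketch of how this model might be written during the first SRP login step; the helper name, the field values (client A, server B), and the 10-minute window are illustrative assumptions, not part of this diff.

import LoginSRPDetail from '../models/loginSRPDetail'; // assumed path

const storeLoginSRPDetail = async (email: string, clientPublicKey: string, serverBInt: Buffer) => {
  await LoginSRPDetail.findOneAndUpdate(
    { email },
    {
      email,
      clientPublicKey,                                // client's public value A (assumed)
      serverBInt,                                     // server's ephemeral B stored as a Buffer (assumed)
      expireAt: new Date(Date.now() + 10 * 60 * 1000) // assumed 10-minute validity window
    },
    { upsert: true }
  );
};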
@ -1,15 +1,21 @@
|
||||
import { Schema, model, Types } from 'mongoose';
|
||||
import { ADMIN, MEMBER } from '../variables';
|
||||
|
||||
export interface IMembershipPermission {
|
||||
environmentSlug: string,
|
||||
ability: string
|
||||
}
|
||||
|
||||
export interface IMembership {
|
||||
_id: Types.ObjectId;
|
||||
user: Types.ObjectId;
|
||||
inviteEmail?: string;
|
||||
workspace: Types.ObjectId;
|
||||
role: 'admin' | 'member';
|
||||
deniedPermissions: IMembershipPermission[]
|
||||
}
|
||||
|
||||
const membershipSchema = new Schema(
|
||||
const membershipSchema = new Schema<IMembership>(
|
||||
{
|
||||
user: {
|
||||
type: Schema.Types.ObjectId,
|
||||
@ -23,6 +29,18 @@ const membershipSchema = new Schema(
|
||||
ref: 'Workspace',
|
||||
required: true
|
||||
},
|
||||
deniedPermissions: {
|
||||
type: [
|
||||
{
|
||||
environmentSlug: String,
|
||||
ability: {
|
||||
type: String,
|
||||
enum: ['read', 'write']
|
||||
},
|
||||
},
|
||||
],
|
||||
default: []
|
||||
},
|
||||
role: {
|
||||
type: String,
|
||||
enum: [ADMIN, MEMBER],
|
||||
|
@ -23,6 +23,7 @@ export interface ISecret {
  secretCommentIV?: string;
  secretCommentTag?: string;
  secretCommentHash?: string;
  tags?: string[];
}

const secretSchema = new Schema<ISecret>(
@ -47,6 +48,11 @@ const secretSchema = new Schema<ISecret>(
      type: Schema.Types.ObjectId,
      ref: 'User'
    },
    tags: {
      ref: 'Tag',
      type: [Schema.Types.ObjectId],
      default: []
    },
    environment: {
      type: String,
      required: true
@ -103,6 +109,9 @@ const secretSchema = new Schema<ISecret>(
  }
);

secretSchema.index({ tags: 1 }, { background: true })

const Secret = model<ISecret>('Secret', secretSchema);

export default Secret;
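A small sketch of the query the new { tags: 1 } index is meant to serve; the ids and the surrounding async context are hypothetical.

// Inside an async function; workspaceId and tagId are assumed ObjectIds.
const taggedSecrets = await Secret.find({
  workspace: workspaceId,
  tags: tagId               // matches secrets whose tags array contains tagId
});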
backend/src/models/secretApprovalRequest.ts (new file, +83)
@ -0,0 +1,83 @@
import mongoose, { Schema, model } from 'mongoose';
import Secret, { ISecret } from './secret';

interface ISecretApprovalRequest {
  secret: mongoose.Types.ObjectId;
  requestedChanges: ISecret;
  requestedBy: mongoose.Types.ObjectId;
  approvers: IApprover[];
  status: ApprovalStatus;
  timestamp: Date;
  requestType: RequestType;
  requestId: string;
}

interface IApprover {
  userId: mongoose.Types.ObjectId;
  status: ApprovalStatus;
}

export enum ApprovalStatus {
  PENDING = 'pending',
  APPROVED = 'approved',
  REJECTED = 'rejected'
}

export enum RequestType {
  UPDATE = 'update',
  DELETE = 'delete',
  CREATE = 'create'
}

const approverSchema = new mongoose.Schema({
  user: {
    type: mongoose.Schema.Types.ObjectId,
    ref: 'User',
    required: true
  },
  status: {
    type: String,
    enum: [ApprovalStatus],
    default: ApprovalStatus.PENDING
  }
});

const secretApprovalRequestSchema = new Schema<ISecretApprovalRequest>(
  {
    secret: {
      type: mongoose.Schema.Types.ObjectId,
      ref: 'Secret'
    },
    requestedChanges: Secret,
    requestedBy: {
      type: mongoose.Schema.Types.ObjectId,
      ref: 'User'
    },
    approvers: [approverSchema],
    status: {
      type: String,
      enum: ApprovalStatus,
      default: ApprovalStatus.PENDING
    },
    timestamp: {
      type: Date,
      default: Date.now
    },
    requestType: {
      type: String,
      enum: RequestType,
      required: true
    },
    requestId: {
      type: String,
      required: false
    }
  },
  {
    timestamps: true
  }
);

const SecretApprovalRequest = model<ISecretApprovalRequest>('SecretApprovalRequest', secretApprovalRequestSchema);

export default SecretApprovalRequest;
49
backend/src/models/tag.ts
Normal file
49
backend/src/models/tag.ts
Normal file
@ -0,0 +1,49 @@
|
||||
import { Schema, model, Types } from 'mongoose';
|
||||
|
||||
export interface ITag {
|
||||
_id: Types.ObjectId;
|
||||
name: string;
|
||||
slug: string;
|
||||
user: Types.ObjectId;
|
||||
workspace: Types.ObjectId;
|
||||
}
|
||||
|
||||
const tagSchema = new Schema<ITag>(
|
||||
{
|
||||
name: {
|
||||
type: String,
|
||||
required: true,
|
||||
trim: true,
|
||||
},
|
||||
slug: {
|
||||
type: String,
|
||||
required: true,
|
||||
trim: true,
|
||||
lowercase: true,
|
||||
validate: [
|
||||
function (value: any) {
|
||||
return value.indexOf(' ') === -1;
|
||||
},
|
||||
'slug cannot contain spaces'
|
||||
]
|
||||
},
|
||||
user: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'User'
|
||||
},
|
||||
workspace: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'Workspace'
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true
|
||||
}
|
||||
);
|
||||
|
||||
tagSchema.index({ slug: 1, workspace: 1 }, { unique: true })
|
||||
tagSchema.index({ workspace: 1 })
|
||||
|
||||
const Tag = model<ITag>('Tag', tagSchema);
|
||||
|
||||
export default Tag;
|
@ -5,6 +5,7 @@ export interface IToken {
|
||||
email: string;
|
||||
token: string;
|
||||
createdAt: Date;
|
||||
ttl: Number;
|
||||
}
|
||||
|
||||
const tokenSchema = new Schema<IToken>({
|
||||
@ -19,14 +20,13 @@ const tokenSchema = new Schema<IToken>({
|
||||
createdAt: {
|
||||
type: Date,
|
||||
default: Date.now
|
||||
},
|
||||
ttl: {
|
||||
type: Number,
|
||||
}
|
||||
});
|
||||
|
||||
tokenSchema.index({
|
||||
createdAt: 1
|
||||
}, {
|
||||
expireAfterSeconds: parseInt(EMAIL_TOKEN_LIFETIME)
|
||||
});
|
||||
tokenSchema.index({ email: 1 });
|
||||
|
||||
const Token = model<IToken>('Token', tokenSchema);
|
||||
|
||||
|
@ -12,6 +12,7 @@ export interface IUser {
|
||||
salt?: string;
|
||||
verifier?: string;
|
||||
refreshVersion?: number;
|
||||
seenIps: [string];
|
||||
}
|
||||
|
||||
const userSchema = new Schema<IUser>(
|
||||
@ -54,7 +55,8 @@ const userSchema = new Schema<IUser>(
|
||||
type: Number,
|
||||
default: 0,
|
||||
select: false
|
||||
}
|
||||
},
|
||||
seenIps: [String]
|
||||
},
|
||||
{
|
||||
timestamps: true
|
||||
|
@ -8,6 +8,7 @@ export interface IWorkspace {
|
||||
name: string;
|
||||
slug: string;
|
||||
}>;
|
||||
autoCapitalization: boolean;
|
||||
}
|
||||
|
||||
const workspaceSchema = new Schema<IWorkspace>({
|
||||
@ -15,6 +16,10 @@ const workspaceSchema = new Schema<IWorkspace>({
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
autoCapitalization: {
|
||||
type: Boolean,
|
||||
default: true,
|
||||
},
|
||||
organization: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'Organization',
|
||||
|
@ -10,7 +10,7 @@ import { ADMIN, MEMBER } from '../../variables';
|
||||
import { body, param } from 'express-validator';
|
||||
import { integrationController } from '../../controllers/v1';
|
||||
|
||||
router.post( // new: add new integration
|
||||
router.post( // new: add new integration for integration auth
|
||||
'/',
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt', 'apiKey']
|
||||
@ -19,7 +19,15 @@ router.post( // new: add new integration
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
location: 'body'
|
||||
}),
|
||||
body('integrationAuthId').exists().trim(),
|
||||
body('integrationAuthId').exists().isString().trim(),
|
||||
body('app').trim(),
|
||||
body('isActive').exists().isBoolean(),
|
||||
body('appId').trim(),
|
||||
body('sourceEnvironment').trim(),
|
||||
body('targetEnvironment').trim(),
|
||||
body('owner').trim(),
|
||||
body('path').trim(),
|
||||
body('region').trim(),
|
||||
validateRequest,
|
||||
integrationController.createIntegration
|
||||
);
|
||||
|
@ -18,6 +18,19 @@ router.get(
|
||||
integrationAuthController.getIntegrationOptions
|
||||
);
|
||||
|
||||
router.get(
|
||||
'/:integrationAuthId',
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt']
|
||||
}),
|
||||
requireIntegrationAuthorizationAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER]
|
||||
}),
|
||||
param('integrationAuthId'),
|
||||
validateRequest,
|
||||
integrationAuthController.getIntegrationAuth
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/oauth-token',
|
||||
requireAuth({
|
||||
@ -44,6 +57,7 @@ router.post(
|
||||
location: 'body'
|
||||
}),
|
||||
body('workspaceId').exists().trim().notEmpty(),
|
||||
body('accessId').trim(),
|
||||
body('accessToken').exists().trim().notEmpty(),
|
||||
body('integration').exists().trim().notEmpty(),
|
||||
validateRequest,
|
||||
|
@ -3,14 +3,15 @@ const router = express.Router();
|
||||
import { body, param } from 'express-validator';
|
||||
import { requireAuth, validateRequest } from '../../middleware';
|
||||
import { membershipController } from '../../controllers/v1';
|
||||
import { membershipController as EEMembershipControllers } from '../../ee/controllers/v1';
|
||||
|
||||
// note: ALL DEPRECIATED (moved to api/v2/workspace/:workspaceId/memberships/:membershipId)
|
||||
|
||||
router.get( // used for old CLI (deprecate)
|
||||
'/:workspaceId/connect',
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt']
|
||||
}),
|
||||
acceptedAuthModes: ['jwt']
|
||||
}),
|
||||
param('workspaceId').exists().trim(),
|
||||
validateRequest,
|
||||
membershipController.validateMembership
|
||||
@ -19,8 +20,8 @@ router.get( // used for old CLI (deprecate)
|
||||
router.delete(
|
||||
'/:membershipId',
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt']
|
||||
}),
|
||||
acceptedAuthModes: ['jwt']
|
||||
}),
|
||||
param('membershipId').exists().trim(),
|
||||
validateRequest,
|
||||
membershipController.deleteMembership
|
||||
@ -29,11 +30,22 @@ router.delete(
|
||||
router.post(
|
||||
'/:membershipId/change-role',
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt']
|
||||
}),
|
||||
acceptedAuthModes: ['jwt']
|
||||
}),
|
||||
body('role').exists().trim(),
|
||||
validateRequest,
|
||||
membershipController.changeMembershipRole
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/:membershipId/deny-permissions',
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt']
|
||||
}),
|
||||
param('membershipId').isMongoId().exists().trim(),
|
||||
body('permissions').isArray().exists(),
|
||||
validateRequest,
|
||||
EEMembershipControllers.denyMembershipPermissions
|
||||
);
|
||||
|
||||
export default router;
|
||||
|
@ -156,4 +156,19 @@ router.get(
|
||||
organizationController.getOrganizationSubscriptions
|
||||
);
|
||||
|
||||
router.get(
|
||||
'/:organizationId/workspace-memberships',
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt']
|
||||
}),
|
||||
requireOrganizationAuth({
|
||||
acceptedRoles: [OWNER, ADMIN, MEMBER],
|
||||
acceptedStatuses: [ACCEPTED]
|
||||
}),
|
||||
param('organizationId').exists().trim(),
|
||||
validateRequest,
|
||||
organizationController.getOrganizationMembersAndTheirWorkspaces
|
||||
);
|
||||
|
||||
|
||||
export default router;
|
||||
|
@ -54,4 +54,17 @@ router.delete(
|
||||
environmentController.deleteWorkspaceEnvironment
|
||||
);
|
||||
|
||||
router.get(
|
||||
'/:workspaceId/environments',
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt'],
|
||||
}),
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [MEMBER, ADMIN],
|
||||
}),
|
||||
param('workspaceId').exists().trim(),
|
||||
validateRequest,
|
||||
environmentController.getAllAccessibleEnvironmentsOfWorkspace
|
||||
);
|
||||
|
||||
export default router;
|
||||
|
@ -6,6 +6,7 @@ import secrets from './secrets';
|
||||
import serviceTokenData from './serviceTokenData';
|
||||
import apiKeyData from './apiKeyData';
|
||||
import environment from "./environment"
|
||||
import tags from "./tags"
|
||||
|
||||
export {
|
||||
users,
|
||||
@ -15,5 +16,6 @@ export {
|
||||
secrets,
|
||||
serviceTokenData,
|
||||
apiKeyData,
|
||||
environment
|
||||
environment,
|
||||
tags
|
||||
}
|
@ -30,7 +30,7 @@ router.patch(
|
||||
'/:organizationId/memberships/:membershipId',
|
||||
param('organizationId').exists().trim(),
|
||||
param('membershipId').exists().trim(),
|
||||
body('role').exists().isString().trim().isIn([ADMIN, MEMBER]),
|
||||
body('role').exists().isString().trim().isIn([OWNER, ADMIN, MEMBER]),
|
||||
validateRequest,
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt', 'apiKey']
|
||||
|
@ -32,7 +32,7 @@ router.post(
|
||||
!secret.secretKeyCiphertext ||
|
||||
!secret.secretKeyIV ||
|
||||
!secret.secretKeyTag ||
|
||||
!secret.secretValueCiphertext ||
|
||||
(typeof secret.secretValueCiphertext !== 'string') ||
|
||||
!secret.secretValueIV ||
|
||||
!secret.secretValueTag
|
||||
) {
|
||||
@ -74,6 +74,7 @@ router.get(
|
||||
'/',
|
||||
query('workspaceId').exists().trim(),
|
||||
query('environment').exists().trim(),
|
||||
query('tagSlugs'),
|
||||
validateRequest,
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt', 'apiKey', 'serviceToken']
|
||||
|
50
backend/src/routes/v2/tags.ts
Normal file
50
backend/src/routes/v2/tags.ts
Normal file
@ -0,0 +1,50 @@
|
||||
import express, { Response, Request } from 'express';
|
||||
const router = express.Router();
|
||||
import { body, param } from 'express-validator';
|
||||
import { tagController } from '../../controllers/v2';
|
||||
import {
|
||||
requireAuth,
|
||||
requireWorkspaceAuth,
|
||||
validateRequest,
|
||||
} from '../../middleware';
|
||||
import { ADMIN, MEMBER } from '../../variables';
|
||||
|
||||
router.get(
|
||||
'/:workspaceId/tags',
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt'],
|
||||
}),
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [MEMBER, ADMIN],
|
||||
}),
|
||||
param('workspaceId').exists().trim(),
|
||||
validateRequest,
|
||||
tagController.getWorkspaceTags
|
||||
);
|
||||
|
||||
router.delete(
|
||||
'/tags/:tagId',
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt'],
|
||||
}),
|
||||
param('tagId').exists().trim(),
|
||||
validateRequest,
|
||||
tagController.deleteWorkspaceTag
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/:workspaceId/tags',
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt'],
|
||||
}),
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [MEMBER, ADMIN],
|
||||
}),
|
||||
param('workspaceId').exists().trim(),
|
||||
body('name').exists().trim(),
|
||||
body('slug').exists().trim(),
|
||||
validateRequest,
|
||||
tagController.createWorkspaceTag
|
||||
);
|
||||
|
||||
export default router;
|
@ -118,4 +118,19 @@ router.delete( // TODO - rewire dashboard to this route
|
||||
workspaceController.deleteWorkspaceMembership
|
||||
);
|
||||
|
||||
|
||||
router.patch(
|
||||
'/:workspaceId/auto-capitalization',
|
||||
requireAuth({
|
||||
acceptedAuthModes: ['jwt']
|
||||
}),
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER]
|
||||
}),
|
||||
param('workspaceId').exists().trim(),
|
||||
body('autoCapitalization').exists().trim().notEmpty(),
|
||||
validateRequest,
|
||||
workspaceController.toggleAutoCapitalization
|
||||
);
|
||||
|
||||
export default router;
|
||||
|
@ -1,7 +1,3 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import {
|
||||
Integration
|
||||
} from '../models';
|
||||
import {
|
||||
handleOAuthExchangeHelper,
|
||||
syncIntegrationsHelper,
|
||||
@ -10,7 +6,6 @@ import {
|
||||
setIntegrationAuthRefreshHelper,
|
||||
setIntegrationAuthAccessHelper,
|
||||
} from '../helpers/integration';
|
||||
import { exchangeCode } from '../integrations';
|
||||
|
||||
// should sync stuff be here too? Probably.
|
||||
// TODO: move bot functions to IntegrationService.
|
||||
@ -26,11 +21,12 @@ class IntegrationService {
|
||||
* - Store integration access and refresh tokens returned from the OAuth2 code-token exchange
|
||||
* - Add placeholder inactive integration
|
||||
* - Create bot sequence for integration
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.workspaceId - id of workspace
|
||||
* @param {String} obj.environment - workspace environment
|
||||
* @param {String} obj.integration - name of integration
|
||||
* @param {String} obj.code - code
|
||||
* @param {Object} obj1
|
||||
* @param {String} obj1.workspaceId - id of workspace
|
||||
* @param {String} obj1.environment - workspace environment
|
||||
* @param {String} obj1.integration - name of integration
|
||||
* @param {String} obj1.code - code
|
||||
* @returns {IntegrationAuth} integrationAuth - integration authorization after OAuth2 code-token exchange
|
||||
*/
|
||||
static async handleOAuthExchange({
|
||||
workspaceId,
|
||||
@ -43,7 +39,7 @@ class IntegrationService {
|
||||
code: string;
|
||||
environment: string;
|
||||
}) {
|
||||
await handleOAuthExchangeHelper({
|
||||
return await handleOAuthExchangeHelper({
|
||||
workspaceId,
|
||||
integration,
|
||||
code,
|
||||
@ -116,26 +112,30 @@ class IntegrationService {
|
||||
}
|
||||
|
||||
/**
|
||||
* Encrypt access token [accessToken] using the bot's copy
|
||||
* of the workspace key for workspace belonging to integration auth
|
||||
* Encrypt access token [accessToken] and (optionally) access id using the
|
||||
* bot's copy of the workspace key for workspace belonging to integration auth
|
||||
* with id [integrationAuthId]
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.integrationAuthId - id of integration auth
|
||||
* @param {String} obj.accessId - access id
|
||||
* @param {String} obj.accessToken - access token
|
||||
* @param {Date} obj.accessExpiresAt - expiration date of access token
|
||||
* @returns {IntegrationAuth} - updated integration auth
|
||||
*/
|
||||
static async setIntegrationAuthAccess({
|
||||
integrationAuthId,
|
||||
accessId,
|
||||
accessToken,
|
||||
accessExpiresAt
|
||||
}: {
|
||||
integrationAuthId: string;
|
||||
accessId: string | null;
|
||||
accessToken: string;
|
||||
accessExpiresAt: Date | undefined;
|
||||
}) {
|
||||
return await setIntegrationAuthAccessHelper({
|
||||
integrationAuthId,
|
||||
accessId,
|
||||
accessToken,
|
||||
accessExpiresAt
|
||||
});
|
||||
|
@ -6,10 +6,9 @@
|
||||
<title>Email Verification</title>
|
||||
</head>
|
||||
<body>
|
||||
<h2>Infisical</h2>
|
||||
<h2>Confirm your email address</h2>
|
||||
<p>Your confirmation code is below — enter it in the browser window where you've started signing up for Infisical.</p>
|
||||
<h2>{{code}}</h2>
|
||||
<h1>{{code}}</h1>
|
||||
<p>Questions about setting up Infisical? Email us at support@infisical.com</p>
|
||||
</body>
|
||||
</html>
|
@ -7,12 +7,10 @@
|
||||
<title>Organization Invitation</title>
|
||||
</head>
|
||||
<body>
|
||||
<h2>Infisical</h2>
|
||||
<h2>Join your team on Infisical</h2>
|
||||
<p>{{inviterFirstName}}({{inviterEmail}}) has invited you to their Infisical organization — {{organizationName}}</p>
|
||||
<h2>Join your organization on Infisical</h2>
|
||||
<p>{{inviterFirstName}} ({{inviterEmail}}) has invited you to their Infisical organization — {{organizationName}}</p>
|
||||
<a href="{{callback_url}}?token={{token}}&to={{email}}">Join now</a>
|
||||
<h3>What is Infisical?</h3>
|
||||
<p>Infisical is a simple end-to-end encrypted solution that enables teams to sync and manage their environment
|
||||
variables.</p>
|
||||
<p>Infisical is an easy-to-use end-to-end encrypted tool that enables developers to sync and manage their secrets and configs.</p>
|
||||
</body>
|
||||
</html>
|
@ -6,7 +6,6 @@
|
||||
<title>Account Recovery</title>
|
||||
</head>
|
||||
<body>
|
||||
<h2>Infisical</h2>
|
||||
<h2>Reset your password</h2>
|
||||
<p>Someone requested a password reset.</p>
|
||||
<a href="{{callback_url}}?token={{token}}&to={{email}}">Reset password</a>
|
||||
|
@ -6,11 +6,10 @@
|
||||
<title>Project Invitation</title>
|
||||
</head>
|
||||
<body>
|
||||
<h2>Infisical</h2>
|
||||
<h2>Join your team on Infisical</h2>
|
||||
<p>{{inviterFirstName}}({{inviterEmail}}) has invited you to their Infisical workspace — {{workspaceName}}</p>
|
||||
<p>{{inviterFirstName}} ({{inviterEmail}}) has invited you to their Infisical project — {{workspaceName}}</p>
|
||||
<a href="{{callback_url}}">Join now</a>
|
||||
<h3>What is Infisical?</h3>
|
||||
<p>Infisical is a simple end-to-end encrypted solution that enables teams to sync and manage their environment variables.</p>
|
||||
<p>Infisical is an easy-to-use end-to-end encrypted tool that enables developers to sync and manage their secrets and configs.</p>
|
||||
</body>
|
||||
</html>
|
140
backend/src/utils/addDevelopmentUser.ts
Normal file
140
backend/src/utils/addDevelopmentUser.ts
Normal file
@ -0,0 +1,140 @@
|
||||
/************************************************************************************************
|
||||
*
|
||||
* Attention: The credentials below are only for development purposes, it should never be used for production
|
||||
*
|
||||
************************************************************************************************/
|
||||
|
||||
import { NODE_ENV } from "../config"
|
||||
import { Key, Membership, MembershipOrg, Organization, User, Workspace } from "../models";
|
||||
import { Types } from 'mongoose';
|
||||
|
||||
export const createTestUserForDevelopment = async () => {
|
||||
if (NODE_ENV === "development") {
|
||||
const testUserEmail = "test@localhost.local"
|
||||
const testUserPassword = "testInfisical1"
|
||||
const testUserId = "63cefa6ec8d3175601cfa980"
|
||||
const testWorkspaceId = "63cefb15c8d3175601cfa989"
|
||||
const testOrgId = "63cefb15c8d3175601cfa985"
|
||||
const testMembershipId = "63cefb159185d9aa3ef0cf35"
|
||||
const testMembershipOrgId = "63cefb159185d9aa3ef0cf31"
|
||||
const testWorkspaceKeyId = "63cf48f0225e6955acec5eff"
|
||||
|
||||
const testUser = {
|
||||
_id: testUserId,
|
||||
email: testUserEmail,
|
||||
refreshVersion: 0,
|
||||
encryptedPrivateKey: 'ITMdDXtLoxib4+53U/qzvIV/T/UalRwimogFCXv/UsulzEoiKM+aK2aqOb0=',
|
||||
firstName: 'Jake',
|
||||
iv: '9fp0dZHI+UuHeKkWMDvD6w==',
|
||||
lastName: 'Moni',
|
||||
publicKey: 'cf44BhkybbBfsE0fZHe2jvqtCj6KLXvSq4hVjV0svzk=',
|
||||
salt: 'd8099dc70958090346910fb9639262b83cf526fc9b4555a171b36a9e1bcd0240',
|
||||
tag: 'bQ/UTghqcQHRoSMpLQD33g==',
|
||||
verifier: '12271fcd50937ca4512e1e3166adaf9d9fc7a5cd0e4c4cb3eda89f35572ede4d9eef23f64aef9220367abff9437b0b6fa55792c442f177201d87051cf77dadade254ff667170440327355fb7d6ac4745d4db302f4843632c2ed5919ebdcff343287a4cd552255d9e3ce81177edefe089617b7616683901475d393405f554634b9bf9230c041ac85624f37a60401be20b78044932580ae0868323be3749fbf856df1518153ba375fec628275f0c445f237446ea4aa7f12c1aa1d6b5fd74b7f2e88d062845a19819ec63f2d2ed9e9f37c055149649461d997d2ae1482f53b04f9de7493efbb9686fb19b2d559b9aa2b502c22dec83f9fc43290dfea89a1dc6f03580b3642b3824513853e81a441be9a0b2fde2231bac60f3287872617a36884697805eeea673cf1a351697834484ada0f282e4745015c9c2928d61e6d092f1b9c3a27eda8413175d23bb2edae62f82ccaf52bf5a6a90344a766c7e4ebf65dae9ae90b2ad4ae65dbf16e3a6948e429771cc50307ae86d454f71a746939ed061f080dd3ae369c1a0739819aca17af46a085bac1f2a5d936d198e7951a8ac3bb38b893665fe7312835abd3f61811f81efa2a8761af5070085f9b6adcca80bf9b0d81899c3d41487fba90728bb24eceb98bd69770360a232624133700ceb4d153f2ad702e0a5b7dfaf97d20bc8aa71dc8c20024a58c06a8fecdad18cb5a2f89c51eaf7'
|
||||
}
|
||||
|
||||
const testWorkspaceKey = {
|
||||
_id: new Types.ObjectId(testWorkspaceKeyId),
|
||||
workspace: testWorkspaceId,
|
||||
encryptedKey: '96ZIRSU21CjVzIQ4Yp994FGWQvDdyK3gq+z+NCaJLK0ByTlvUePmf+AYGFJjkAdz',
|
||||
nonce: '1jhCGqg9Wx3n0OtVxbDgiYYGq4S3EdgO',
|
||||
sender: '63cefa6ec8d3175601cfa980',
|
||||
receiver: '63cefa6ec8d3175601cfa980',
|
||||
}
|
||||
|
||||
const testWorkspace = {
|
||||
_id: new Types.ObjectId(testWorkspaceId),
|
||||
name: 'Example Project',
|
||||
organization: testOrgId,
|
||||
environments: [
|
||||
{
|
||||
_id: '63cefb15c8d3175601cfa98a',
|
||||
name: 'Development',
|
||||
slug: 'dev'
|
||||
},
|
||||
{
|
||||
_id: '63cefb15c8d3175601cfa98b',
|
||||
name: 'Test',
|
||||
slug: 'test'
|
||||
},
|
||||
{
|
||||
_id: '63cefb15c8d3175601cfa98c',
|
||||
name: 'Staging',
|
||||
slug: 'staging'
|
||||
},
|
||||
{
|
||||
_id: '63cefb15c8d3175601cfa98d',
|
||||
name: 'Production',
|
||||
slug: 'prod'
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
const testOrg = {
|
||||
_id: testOrgId,
|
||||
name: 'Jake\'s organization'
|
||||
}
|
||||
|
||||
const testMembershipOrg = {
|
||||
_id: testMembershipOrgId,
|
||||
organization: testOrgId,
|
||||
role: 'owner',
|
||||
status: 'accepted',
|
||||
user: testUserId,
|
||||
}
|
||||
|
||||
const testMembership = {
|
||||
_id: testMembershipId,
|
||||
role: 'admin',
|
||||
user: testUserId,
|
||||
workspace: testWorkspaceId
|
||||
}
|
||||
|
||||
try {
|
||||
// create user if not exist
|
||||
const userInDB = await User.findById(testUserId)
|
||||
if (!userInDB) {
|
||||
await User.create(testUser)
|
||||
}
|
||||
|
||||
// create org if not exist
|
||||
const orgInDB = await Organization.findById(testOrgId)
|
||||
if (!orgInDB) {
|
||||
await Organization.create(testOrg)
|
||||
}
|
||||
|
||||
// create membership org if not exist
|
||||
const membershipOrgInDB = await MembershipOrg.findById(testMembershipOrgId)
|
||||
if (!membershipOrgInDB) {
|
||||
await MembershipOrg.create(testMembershipOrg)
|
||||
}
|
||||
|
||||
// create membership
|
||||
const membershipInDB = await Membership.findById(testMembershipId)
|
||||
if (!membershipInDB) {
|
||||
await Membership.create(testMembership)
|
||||
}
|
||||
|
||||
// create workspace if not exist
|
||||
const workspaceInDB = await Workspace.findById(testWorkspaceId)
|
||||
if (!workspaceInDB) {
|
||||
await Workspace.create(testWorkspace)
|
||||
}
|
||||
|
||||
// create workspace key if not exist
|
||||
const workspaceKeyInDB = await Key.findById(testWorkspaceKeyId)
|
||||
if (!workspaceKeyInDB) {
|
||||
await Key.create(testWorkspaceKey)
|
||||
}
|
||||
|
||||
/* eslint-disable no-console */
|
||||
console.info(`DEVELOPMENT MODE DETECTED: You may login with test user with email: ${testUserEmail} and password: ${testUserPassword}`)
|
||||
/* eslint-enable no-console */
|
||||
|
||||
} catch (e) {
|
||||
/* eslint-disable no-console */
|
||||
console.error(`Unable to create test user while booting up [err=${e}]`)
|
||||
/* eslint-enable no-console */
|
||||
}
|
||||
}
|
||||
}
|
@ -1,9 +1,13 @@
|
||||
const ACTION_LOGIN = 'login';
|
||||
const ACTION_LOGOUT = 'logout';
|
||||
const ACTION_ADD_SECRETS = 'addSecrets';
|
||||
const ACTION_DELETE_SECRETS = 'deleteSecrets';
|
||||
const ACTION_UPDATE_SECRETS = 'updateSecrets';
|
||||
const ACTION_READ_SECRETS = 'readSecrets';
|
||||
|
||||
export {
|
||||
ACTION_LOGIN,
|
||||
ACTION_LOGOUT,
|
||||
ACTION_ADD_SECRETS,
|
||||
ACTION_DELETE_SECRETS,
|
||||
ACTION_UPDATE_SECRETS,
|
||||
|
@ -3,17 +3,22 @@ import {
|
||||
ENV_TESTING,
|
||||
ENV_STAGING,
|
||||
ENV_PROD,
|
||||
ENV_SET
|
||||
} from './environment';
|
||||
ENV_SET,
|
||||
} from "./environment";
|
||||
import {
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_SET,
|
||||
INTEGRATION_OAUTH2,
|
||||
INTEGRATION_AZURE_TOKEN_URL,
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
INTEGRATION_VERCEL_TOKEN_URL,
|
||||
INTEGRATION_NETLIFY_TOKEN_URL,
|
||||
@ -23,25 +28,22 @@ import {
|
||||
INTEGRATION_NETLIFY_API_URL,
|
||||
INTEGRATION_RENDER_API_URL,
|
||||
INTEGRATION_FLYIO_API_URL,
|
||||
INTEGRATION_OPTIONS
|
||||
} from './integration';
|
||||
import {
|
||||
OWNER,
|
||||
ADMIN,
|
||||
MEMBER,
|
||||
INVITED,
|
||||
ACCEPTED,
|
||||
} from './organization';
|
||||
import { SECRET_SHARED, SECRET_PERSONAL } from './secret';
|
||||
import { EVENT_PUSH_SECRETS, EVENT_PULL_SECRETS } from './event';
|
||||
INTEGRATION_CIRCLECI_API_URL,
|
||||
INTEGRATION_OPTIONS,
|
||||
} from "./integration";
|
||||
import { OWNER, ADMIN, MEMBER, INVITED, ACCEPTED } from "./organization";
|
||||
import { SECRET_SHARED, SECRET_PERSONAL } from "./secret";
|
||||
import { EVENT_PUSH_SECRETS, EVENT_PULL_SECRETS } from "./event";
|
||||
import {
|
||||
ACTION_LOGIN,
|
||||
ACTION_LOGOUT,
|
||||
ACTION_ADD_SECRETS,
|
||||
ACTION_UPDATE_SECRETS,
|
||||
ACTION_DELETE_SECRETS,
|
||||
ACTION_READ_SECRETS
|
||||
} from './action';
|
||||
import { SMTP_HOST_SENDGRID, SMTP_HOST_MAILGUN } from './smtp';
|
||||
import { PLAN_STARTER, PLAN_PRO } from './stripe';
|
||||
ACTION_READ_SECRETS,
|
||||
} from "./action";
|
||||
import { SMTP_HOST_SENDGRID, SMTP_HOST_MAILGUN } from "./smtp";
|
||||
import { PLAN_STARTER, PLAN_PRO } from "./stripe";
|
||||
|
||||
export {
|
||||
OWNER,
|
||||
@ -56,14 +58,19 @@ export {
|
||||
ENV_STAGING,
|
||||
ENV_PROD,
|
||||
ENV_SET,
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_SET,
|
||||
INTEGRATION_OAUTH2,
|
||||
INTEGRATION_AZURE_TOKEN_URL,
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
INTEGRATION_VERCEL_TOKEN_URL,
|
||||
INTEGRATION_NETLIFY_TOKEN_URL,
|
||||
@ -73,8 +80,11 @@ export {
|
||||
INTEGRATION_NETLIFY_API_URL,
|
||||
INTEGRATION_RENDER_API_URL,
|
||||
INTEGRATION_FLYIO_API_URL,
|
||||
INTEGRATION_CIRCLECI_API_URL,
|
||||
EVENT_PUSH_SECRETS,
|
||||
EVENT_PULL_SECRETS,
|
||||
ACTION_LOGIN,
|
||||
ACTION_LOGOUT,
|
||||
ACTION_ADD_SECRETS,
|
||||
ACTION_UPDATE_SECRETS,
|
||||
ACTION_DELETE_SECRETS,
|
||||
|
@ -1,43 +1,55 @@
|
||||
import {
|
||||
CLIENT_ID_HEROKU,
|
||||
CLIENT_ID_NETLIFY,
|
||||
CLIENT_ID_GITHUB,
|
||||
CLIENT_SLUG_VERCEL
|
||||
CLIENT_ID_AZURE,
|
||||
TENANT_ID_AZURE
|
||||
} from '../config';
|
||||
import {
|
||||
CLIENT_ID_HEROKU,
|
||||
CLIENT_ID_NETLIFY,
|
||||
CLIENT_ID_GITHUB,
|
||||
CLIENT_SLUG_VERCEL,
|
||||
} from "../config";
|
||||
|
||||
// integrations
|
||||
const INTEGRATION_HEROKU = 'heroku';
|
||||
const INTEGRATION_VERCEL = 'vercel';
|
||||
const INTEGRATION_NETLIFY = 'netlify';
|
||||
const INTEGRATION_GITHUB = 'github';
|
||||
const INTEGRATION_RENDER = 'render';
|
||||
const INTEGRATION_FLYIO = 'flyio';
|
||||
const INTEGRATION_AZURE_KEY_VAULT = 'azure-key-vault';
|
||||
const INTEGRATION_AWS_PARAMETER_STORE = 'aws-parameter-store';
|
||||
const INTEGRATION_AWS_SECRET_MANAGER = 'aws-secret-manager';
|
||||
const INTEGRATION_HEROKU = "heroku";
|
||||
const INTEGRATION_VERCEL = "vercel";
|
||||
const INTEGRATION_NETLIFY = "netlify";
|
||||
const INTEGRATION_GITHUB = "github";
|
||||
const INTEGRATION_RENDER = "render";
|
||||
const INTEGRATION_FLYIO = "flyio";
|
||||
const INTEGRATION_CIRCLECI = "circleci";
|
||||
const INTEGRATION_SET = new Set([
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
]);
|
||||
|
||||
// integration types
|
||||
const INTEGRATION_OAUTH2 = 'oauth2';
|
||||
const INTEGRATION_OAUTH2 = "oauth2";
|
||||
|
||||
// integration oauth endpoints
|
||||
const INTEGRATION_AZURE_TOKEN_URL = `https://login.microsoftonline.com/${TENANT_ID_AZURE}/oauth2/v2.0/token`;
|
||||
const INTEGRATION_HEROKU_TOKEN_URL = 'https://id.heroku.com/oauth/token';
|
||||
const INTEGRATION_VERCEL_TOKEN_URL =
|
||||
'https://api.vercel.com/v2/oauth/access_token';
|
||||
const INTEGRATION_NETLIFY_TOKEN_URL = 'https://api.netlify.com/oauth/token';
|
||||
"https://api.vercel.com/v2/oauth/access_token";
|
||||
const INTEGRATION_NETLIFY_TOKEN_URL = "https://api.netlify.com/oauth/token";
|
||||
const INTEGRATION_GITHUB_TOKEN_URL =
|
||||
'https://github.com/login/oauth/access_token';
|
||||
"https://github.com/login/oauth/access_token";
|
||||
|
||||
// integration apps endpoints
|
||||
const INTEGRATION_HEROKU_API_URL = 'https://api.heroku.com';
|
||||
const INTEGRATION_VERCEL_API_URL = 'https://api.vercel.com';
|
||||
const INTEGRATION_NETLIFY_API_URL = 'https://api.netlify.com';
|
||||
const INTEGRATION_RENDER_API_URL = 'https://api.render.com';
|
||||
const INTEGRATION_FLYIO_API_URL = 'https://api.fly.io/graphql';
|
||||
const INTEGRATION_HEROKU_API_URL = "https://api.heroku.com";
|
||||
const INTEGRATION_VERCEL_API_URL = "https://api.vercel.com";
|
||||
const INTEGRATION_NETLIFY_API_URL = "https://api.netlify.com";
|
||||
const INTEGRATION_RENDER_API_URL = "https://api.render.com";
|
||||
const INTEGRATION_FLYIO_API_URL = "https://api.fly.io/graphql";
|
||||
const INTEGRATION_CIRCLECI_API_URL = "https://circleci.com/api";
|
||||
|
||||
const INTEGRATION_OPTIONS = [
|
||||
{
|
||||
@ -95,6 +107,43 @@ const INTEGRATION_OPTIONS = [
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'AWS Parameter Store',
|
||||
slug: 'aws-parameter-store',
|
||||
image: 'Amazon Web Services.png',
|
||||
isAvailable: true,
|
||||
type: 'custom',
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'AWS Secret Manager',
|
||||
slug: 'aws-secret-manager',
|
||||
image: 'Amazon Web Services.png',
|
||||
isAvailable: true,
|
||||
type: 'custom',
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Circle CI',
|
||||
slug: 'circleci',
|
||||
image: 'Circle CI.png',
|
||||
isAvailable: true,
|
||||
type: 'pat',
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Azure Key Vault',
|
||||
slug: 'azure-key-vault',
|
||||
image: 'Microsoft Azure.png',
|
||||
isAvailable: false,
|
||||
type: 'oauth',
|
||||
clientId: CLIENT_ID_AZURE,
|
||||
tenantId: TENANT_ID_AZURE,
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Google Cloud Platform',
|
||||
slug: 'gcp',
|
||||
@ -104,24 +153,6 @@ const INTEGRATION_OPTIONS = [
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Amazon Web Services',
|
||||
slug: 'aws',
|
||||
image: 'Amazon Web Services.png',
|
||||
isAvailable: false,
|
||||
type: '',
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Microsoft Azure',
|
||||
slug: 'azure',
|
||||
image: 'Microsoft Azure.png',
|
||||
isAvailable: false,
|
||||
type: '',
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Travis CI',
|
||||
slug: 'travisci',
|
||||
@ -130,35 +161,32 @@ const INTEGRATION_OPTIONS = [
|
||||
type: '',
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Circle CI',
|
||||
slug: 'circleci',
|
||||
image: 'Circle CI.png',
|
||||
isAvailable: false,
|
||||
type: '',
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
}
|
||||
]
|
||||
|
||||
export {
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_SET,
|
||||
INTEGRATION_OAUTH2,
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
INTEGRATION_VERCEL_TOKEN_URL,
|
||||
INTEGRATION_NETLIFY_TOKEN_URL,
|
||||
INTEGRATION_GITHUB_TOKEN_URL,
|
||||
INTEGRATION_HEROKU_API_URL,
|
||||
INTEGRATION_VERCEL_API_URL,
|
||||
INTEGRATION_NETLIFY_API_URL,
|
||||
INTEGRATION_RENDER_API_URL,
|
||||
INTEGRATION_FLYIO_API_URL,
|
||||
INTEGRATION_OPTIONS
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_SET,
|
||||
INTEGRATION_OAUTH2,
|
||||
INTEGRATION_AZURE_TOKEN_URL,
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
INTEGRATION_VERCEL_TOKEN_URL,
|
||||
INTEGRATION_NETLIFY_TOKEN_URL,
|
||||
INTEGRATION_GITHUB_TOKEN_URL,
|
||||
INTEGRATION_HEROKU_API_URL,
|
||||
INTEGRATION_VERCEL_API_URL,
|
||||
INTEGRATION_NETLIFY_API_URL,
|
||||
INTEGRATION_RENDER_API_URL,
|
||||
INTEGRATION_FLYIO_API_URL,
|
||||
INTEGRATION_CIRCLECI_API_URL,
|
||||
INTEGRATION_OPTIONS,
|
||||
};
|
||||
|
@ -1,18 +1,16 @@
|
||||
// membership roles
|
||||
const OWNER = 'owner';
|
||||
const ADMIN = 'admin';
|
||||
const MEMBER = 'member';
|
||||
const OWNER = "owner";
|
||||
const ADMIN = "admin";
|
||||
const MEMBER = "member";
|
||||
|
||||
// membership statuses
|
||||
const INVITED = 'invited';
|
||||
const INVITED = "invited";
|
||||
|
||||
// membership permissions ability
|
||||
const ABILITY_READ = "read";
|
||||
const ABILITY_WRITE = "write";
|
||||
|
||||
// -- organization
|
||||
const ACCEPTED = 'accepted';
|
||||
const ACCEPTED = "accepted";
|
||||
|
||||
export {
|
||||
OWNER,
|
||||
ADMIN,
|
||||
MEMBER,
|
||||
INVITED,
|
||||
ACCEPTED
|
||||
}
|
||||
export { OWNER, ADMIN, MEMBER, INVITED, ACCEPTED, ABILITY_READ, ABILITY_WRITE };
|
||||
|
4
cli/docker/Dockerfile
Normal file
4
cli/docker/Dockerfile
Normal file
@ -0,0 +1,4 @@
|
||||
FROM alpine
|
||||
RUN apk add --no-cache tini
|
||||
COPY infisical /bin/infisical
|
||||
ENTRYPOINT ["/sbin/tini", "--", "/bin/infisical"]
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
|
||||
Copyright (c) 2023 Infisical Inc.
|
||||
*/
|
||||
package main
|
||||
|
||||
|
@ -5,6 +5,7 @@ import (
|
||||
|
||||
"github.com/Infisical/infisical-merge/packages/config"
|
||||
"github.com/go-resty/resty/v2"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
const USER_AGENT = "cli"
|
||||
@ -113,6 +114,7 @@ func CallGetSecretsV2(httpClient *resty.Client, request GetEncryptedSecretsV2Req
|
||||
SetHeader("User-Agent", USER_AGENT).
|
||||
SetQueryParam("environment", request.Environment).
|
||||
SetQueryParam("workspaceId", request.WorkspaceId).
|
||||
SetQueryParam("tagSlugs", request.TagSlugs).
|
||||
Get(fmt.Sprintf("%v/v2/secrets", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
@ -144,3 +146,42 @@ func CallGetAllWorkSpacesUserBelongsTo(httpClient *resty.Client) (GetWorkSpacesR
|
||||
|
||||
return workSpacesResponse, nil
|
||||
}
|
||||
|
||||
func CallIsAuthenticated(httpClient *resty.Client) bool {
|
||||
var workSpacesResponse GetWorkSpacesResponse
|
||||
response, err := httpClient.
|
||||
R().
|
||||
SetResult(&workSpacesResponse).
|
||||
SetHeader("User-Agent", USER_AGENT).
|
||||
Post(fmt.Sprintf("%v/v1/auth/checkAuth", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
log.Debugln(fmt.Errorf("CallIsAuthenticated: Unsuccessful response: [response=%v]", response))
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func CallGetAccessibleEnvironments(httpClient *resty.Client, request GetAccessibleEnvironmentsRequest) (GetAccessibleEnvironmentsResponse, error) {
|
||||
var accessibleEnvironmentsResponse GetAccessibleEnvironmentsResponse
|
||||
response, err := httpClient.
|
||||
R().
|
||||
SetResult(&accessibleEnvironmentsResponse).
|
||||
SetHeader("User-Agent", USER_AGENT).
|
||||
Get(fmt.Sprintf("%v/v2/workspace/%s/environments", config.INFISICAL_URL, request.WorkspaceId))
|
||||
|
||||
if err != nil {
|
||||
return GetAccessibleEnvironmentsResponse{}, err
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return GetAccessibleEnvironmentsResponse{}, fmt.Errorf("CallGetAccessibleEnvironments: Unsuccessful response: [response=%v]", response)
|
||||
}
|
||||
|
||||
return accessibleEnvironmentsResponse, nil
|
||||
}
|
||||
|
@ -197,25 +197,35 @@ type GetSecretsByWorkspaceIdAndEnvironmentRequest struct {
|
||||
type GetEncryptedSecretsV2Request struct {
|
||||
Environment string `json:"environment"`
|
||||
WorkspaceId string `json:"workspaceId"`
|
||||
TagSlugs string `json:"tagSlugs"`
|
||||
}
|
||||
|
||||
type GetEncryptedSecretsV2Response struct {
|
||||
Secrets []struct {
|
||||
ID string `json:"_id"`
|
||||
Version int `json:"version"`
|
||||
Workspace string `json:"workspace"`
|
||||
Type string `json:"type"`
|
||||
Environment string `json:"environment"`
|
||||
SecretKeyCiphertext string `json:"secretKeyCiphertext"`
|
||||
SecretKeyIV string `json:"secretKeyIV"`
|
||||
SecretKeyTag string `json:"secretKeyTag"`
|
||||
SecretValueCiphertext string `json:"secretValueCiphertext"`
|
||||
SecretValueIV string `json:"secretValueIV"`
|
||||
SecretValueTag string `json:"secretValueTag"`
|
||||
V int `json:"__v"`
|
||||
CreatedAt time.Time `json:"createdAt"`
|
||||
UpdatedAt time.Time `json:"updatedAt"`
|
||||
User string `json:"user,omitempty"`
|
||||
ID string `json:"_id"`
|
||||
Version int `json:"version"`
|
||||
Workspace string `json:"workspace"`
|
||||
Type string `json:"type"`
|
||||
Environment string `json:"environment"`
|
||||
SecretKeyCiphertext string `json:"secretKeyCiphertext"`
|
||||
SecretKeyIV string `json:"secretKeyIV"`
|
||||
SecretKeyTag string `json:"secretKeyTag"`
|
||||
SecretValueCiphertext string `json:"secretValueCiphertext"`
|
||||
SecretValueIV string `json:"secretValueIV"`
|
||||
SecretValueTag string `json:"secretValueTag"`
|
||||
SecretCommentCiphertext string `json:"secretCommentCiphertext"`
|
||||
SecretCommentIV string `json:"secretCommentIV"`
|
||||
SecretCommentTag string `json:"secretCommentTag"`
|
||||
V int `json:"__v"`
|
||||
CreatedAt time.Time `json:"createdAt"`
|
||||
UpdatedAt time.Time `json:"updatedAt"`
|
||||
User string `json:"user,omitempty"`
|
||||
Tags []struct {
|
||||
ID string `json:"_id"`
|
||||
Name string `json:"name"`
|
||||
Slug string `json:"slug"`
|
||||
Workspace string `json:"workspace"`
|
||||
} `json:"tags"`
|
||||
} `json:"secrets"`
|
||||
}
|
||||
|
||||
@ -241,3 +251,15 @@ type GetServiceTokenDetailsResponse struct {
|
||||
UpdatedAt time.Time `json:"updatedAt"`
|
||||
V int `json:"__v"`
|
||||
}
|
||||
|
||||
type GetAccessibleEnvironmentsRequest struct {
|
||||
WorkspaceId string `json:"workspaceId"`
|
||||
}
|
||||
|
||||
type GetAccessibleEnvironmentsResponse struct {
|
||||
AccessibleEnvironments []struct {
|
||||
Name string `json:"name"`
|
||||
Slug string `json:"slug"`
|
||||
IsWriteDenied bool `json:"isWriteDenied"`
|
||||
} `json:"accessibleEnvironments"`
|
||||
}
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
|
||||
Copyright (c) 2023 Infisical Inc.
|
||||
*/
|
||||
package cmd
|
||||
|
||||
@ -56,7 +56,17 @@ var exportCmd = &cobra.Command{
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(envName)
|
||||
infisicalToken, err := cmd.Flags().GetString("token")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
tagSlugs, err := cmd.Flags().GetString("tags")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: envName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs})
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to fetch secrets")
|
||||
}
|
||||
@ -91,6 +101,8 @@ func init() {
|
||||
exportCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
|
||||
exportCmd.Flags().StringP("format", "f", "dotenv", "Set the format of the output file (dotenv, json, csv)")
|
||||
exportCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
|
||||
exportCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
|
||||
exportCmd.Flags().StringP("tags", "t", "", "filter secrets by tag slugs")
|
||||
}
|
||||
|
||||
// Format according to the format flag
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
|
||||
Copyright (c) 2023 Infisical Inc.
|
||||
*/
|
||||
package cmd
|
||||
|
||||
@ -56,7 +56,7 @@ var initCmd = &cobra.Command{
|
||||
workspaces := workspaceResponse.Workspaces
|
||||
if len(workspaces) == 0 {
|
||||
message := fmt.Sprintf("You don't have any projects created in Infisical. You must first create a project at %s", util.INFISICAL_TOKEN_NAME)
|
||||
util.PrintMessageAndExit(message)
|
||||
util.PrintErrorMessageAndExit(message)
|
||||
}
|
||||
|
||||
var workspaceNames []string
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
|
||||
Copyright (c) 2023 Infisical Inc.
|
||||
*/
|
||||
package cmd
|
||||
|
||||
@ -33,13 +33,13 @@ var loginCmd = &cobra.Command{
|
||||
PreRun: toggleDebug,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
currentLoggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
|
||||
if err != nil && strings.Contains(err.Error(), "The specified item could not be found in the keyring") { // if the key can't be found allow them to override
|
||||
if err != nil && (strings.Contains(err.Error(), "The specified item could not be found in the keyring") || strings.Contains(err.Error(), "unable to get key from Keyring")) { // if the key can't be found allow them to override
|
||||
log.Debug(err)
|
||||
} else if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
if currentLoggedInUserDetails.IsUserLoggedIn {
|
||||
if currentLoggedInUserDetails.IsUserLoggedIn && !currentLoggedInUserDetails.LoginExpired { // if you are logged in but not expired
|
||||
shouldOverride, err := shouldOverrideLoginPrompt(currentLoggedInUserDetails.UserCredentials.Email)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
@ -101,6 +101,9 @@ var loginCmd = &cobra.Command{
|
||||
util.HandleError(err, "Unable to write write to Infisical Config file. Please try again")
|
||||
}
|
||||
|
||||
// clear backed up secrets from prev account
|
||||
util.DeleteBackupSecrets()
|
||||
|
||||
color.Green("Nice! You are logged in as: %v", email)
|
||||
|
||||
},
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
|
||||
Copyright (c) 2023 Infisical Inc.
|
||||
*/
|
||||
package cmd
|
||||
|
||||
|
45
cli/packages/cmd/reset.go
Normal file
45
cli/packages/cmd/reset.go
Normal file
@ -0,0 +1,45 @@
|
||||
/*
|
||||
Copyright (c) 2023 Infisical Inc.
|
||||
*/
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"os"
|
||||
|
||||
"github.com/Infisical/infisical-merge/packages/util"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var resetCmd = &cobra.Command{
|
||||
Use: "reset",
|
||||
Short: "Used delete all Infisical related data on your machine",
|
||||
DisableFlagsInUseLine: true,
|
||||
Example: "infisical reset",
|
||||
Args: cobra.NoArgs,
|
||||
PreRun: func(cmd *cobra.Command, args []string) {
|
||||
toggleDebug(cmd, args)
|
||||
},
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
// delete config
|
||||
_, pathToDir, err := util.GetFullConfigFilePath()
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
os.RemoveAll(pathToDir)
|
||||
|
||||
// delete keyring
|
||||
keyringInstance, err := util.GetKeyRing()
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
keyringInstance.Remove(util.KEYRING_SERVICE_NAME)
|
||||
|
||||
util.PrintSuccessMessage("Reset successful")
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(resetCmd)
|
||||
}
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
|
||||
Copyright (c) 2023 Infisical Inc.
|
||||
*/
|
||||
package cmd
|
||||
|
||||
@ -17,7 +17,7 @@ var rootCmd = &cobra.Command{
|
||||
Short: "Infisical CLI is used to inject environment variables into any process",
|
||||
Long: `Infisical is a simple, end-to-end encrypted service that enables teams to sync and manage their environment variables across their development life cycle.`,
|
||||
CompletionOptions: cobra.CompletionOptions{HiddenDefaultCmd: true},
|
||||
Version: "0.2.5",
|
||||
Version: util.CLI_VERSION,
|
||||
}
|
||||
|
||||
// Execute adds all child commands to the root command and sets flags appropriately.
|
||||
@ -32,14 +32,15 @@ func Execute() {
|
||||
func init() {
|
||||
rootCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
|
||||
rootCmd.PersistentFlags().BoolVarP(&debugLogging, "debug", "d", false, "Enable verbose logging")
|
||||
rootCmd.PersistentFlags().StringVar(&config.INFISICAL_URL, "domain", util.INFISICAL_DEFAULT_API_URL, "Point the CLI to your own backend [can also set via environment variable name: API_URL]")
|
||||
// rootCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
|
||||
// }
|
||||
rootCmd.PersistentFlags().StringVar(&config.INFISICAL_URL, "domain", util.INFISICAL_DEFAULT_API_URL, "Point the CLI to your own backend [can also set via environment variable name: INFISICAL_API_URL]")
|
||||
rootCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
|
||||
util.CheckForUpdate()
|
||||
}
|
||||
|
||||
// if config.INFISICAL_URL is set to the default value, check if INFISICAL_URL is set in the environment
|
||||
// this is used to allow overrides of the default value
|
||||
if !rootCmd.Flag("domain").Changed {
|
||||
if envInfisicalBackendUrl, ok := os.LookupEnv("API_URL"); ok {
|
||||
if envInfisicalBackendUrl, ok := os.LookupEnv("INFISICAL_API_URL"); ok {
|
||||
config.INFISICAL_URL = envInfisicalBackendUrl
|
||||
}
|
||||
}
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
|
||||
Copyright (c) 2023 Infisical Inc.
|
||||
*/
|
||||
package cmd
|
||||
|
||||
@ -12,6 +12,7 @@ import (
|
||||
"strings"
|
||||
"syscall"
|
||||
|
||||
"github.com/Infisical/infisical-merge/packages/models"
|
||||
"github.com/Infisical/infisical-merge/packages/util"
|
||||
"github.com/fatih/color"
|
||||
log "github.com/sirupsen/logrus"
|
||||
@ -58,8 +59,9 @@ var runCmd = &cobra.Command{
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
if !util.IsSecretEnvironmentValid(envName) {
|
||||
util.PrintMessageAndExit("Invalid environment name passed. Environment names can only be prod, dev, test or staging")
|
||||
infisicalToken, err := cmd.Flags().GetString("token")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secretOverriding, err := cmd.Flags().GetBool("secret-overriding")
|
||||
@ -72,7 +74,13 @@ var runCmd = &cobra.Command{
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(envName)
|
||||
tagSlugs, err := cmd.Flags().GetString("tags")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: envName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs})
|
||||
|
||||
if err != nil {
|
||||
util.HandleError(err, "Could not fetch secrets", "If you are using a service token to fetch secrets, please ensure it is valid")
|
||||
}
|
||||
@ -140,10 +148,12 @@ var runCmd = &cobra.Command{
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(runCmd)
|
||||
runCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
|
||||
runCmd.Flags().StringP("env", "e", "dev", "Set the environment (dev, prod, etc.) from which your secrets should be pulled from")
|
||||
runCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
|
||||
runCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
|
||||
runCmd.Flags().StringP("command", "c", "", "chained commands to execute (e.g. \"npm install && npm run dev; echo ...\")")
|
||||
runCmd.Flags().StringP("tags", "t", "", "filter secrets by tag slugs ")
|
||||
}
|
||||
|
||||
// Will execute a single command and pass in the given secrets into the process
|
||||
@ -166,7 +176,10 @@ func executeMultipleCommandWithEnvs(fullCommand string, secretsCount int, env []
|
||||
if runtime.GOOS == "windows" {
|
||||
shell = [2]string{"cmd", "/C"}
|
||||
} else {
|
||||
shell[0] = os.Getenv("SHELL")
|
||||
currentShell := os.Getenv("SHELL")
|
||||
if currentShell != "" {
|
||||
shell[0] = currentShell
|
||||
}
|
||||
}
|
||||
|
||||
cmd := exec.Command(shell[0], shell[1], fullCommand)
|
||||
|
@ -1,11 +1,13 @@
|
||||
/*
|
||||
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
|
||||
Copyright (c) 2023 Infisical Inc.
|
||||
*/
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
"unicode"
|
||||
|
||||
@ -22,7 +24,7 @@ import (
|
||||
)
|
||||
|
||||
var secretsCmd = &cobra.Command{
|
||||
Example: `infisical secrets"`,
|
||||
Example: `infisical secrets`,
|
||||
Short: "Used to create, read update and delete secrets",
|
||||
Use: "secrets",
|
||||
DisableFlagsInUseLine: true,
|
||||
@ -34,12 +36,22 @@ var secretsCmd = &cobra.Command{
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
infisicalToken, err := cmd.Flags().GetString("token")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
shouldExpandSecrets, err := cmd.Flags().GetBool("expand")
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(environmentName)
|
||||
tagSlugs, err := cmd.Flags().GetString("tags")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs})
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
@ -62,6 +74,16 @@ var secretsGetCmd = &cobra.Command{
|
||||
Run: getSecretsByNames,
|
||||
}
|
||||
|
||||
var secretsGenerateExampleEnvCmd = &cobra.Command{
|
||||
Example: `secrets generate-example-env > .example-env`,
|
||||
Short: "Used to generate a example .env file",
|
||||
Use: "generate-example-env",
|
||||
DisableFlagsInUseLine: true,
|
||||
Args: cobra.NoArgs,
|
||||
PreRun: toggleDebug,
|
||||
Run: generateExampleEnv,
|
||||
}
|
||||
|
||||
var secretsSetCmd = &cobra.Command{
|
||||
Example: `secrets set <secretName=secretValue> <secretName=secretValue>..."`,
|
||||
Short: "Used set secrets",
|
||||
@ -70,15 +92,13 @@ var secretsSetCmd = &cobra.Command{
|
||||
PreRun: toggleDebug,
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
util.RequireLocalWorkspaceFile()
|
||||
|
||||
environmentName, err := cmd.Flags().GetString("env")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
if !util.IsSecretEnvironmentValid(environmentName) {
|
||||
util.PrintMessageAndExit("You have entered a invalid environment name", "Environment names can only be prod, dev, test or staging")
|
||||
}
|
||||
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get your local config details")
|
||||
@ -111,7 +131,7 @@ var secretsSetCmd = &cobra.Command{
|
||||
plainTextEncryptionKey := crypto.DecryptAsymmetric(encryptedWorkspaceKey, encryptedWorkspaceKeyNonce, encryptedWorkspaceKeySenderPublicKey, currentUsersPrivateKey)
|
||||
|
||||
// pull current secrets
|
||||
secrets, err := util.GetAllEnvironmentVariables(environmentName)
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName})
|
||||
if err != nil {
|
||||
util.HandleError(err, "unable to retrieve secrets")
|
||||
}
|
||||
@ -131,11 +151,11 @@ var secretsSetCmd = &cobra.Command{
|
||||
for _, arg := range args {
|
||||
splitKeyValueFromArg := strings.SplitN(arg, "=", 2)
|
||||
if splitKeyValueFromArg[0] == "" || splitKeyValueFromArg[1] == "" {
|
||||
util.PrintMessageAndExit("ensure that each secret has a none empty key and value. Modify the input and try again")
|
||||
util.PrintErrorMessageAndExit("ensure that each secret has a none empty key and value. Modify the input and try again")
|
||||
}
|
||||
|
||||
if unicode.IsNumber(rune(splitKeyValueFromArg[0][0])) {
|
||||
util.PrintMessageAndExit("keys of secrets cannot start with a number. Modify the key name(s) and try again")
|
||||
util.PrintErrorMessageAndExit("keys of secrets cannot start with a number. Modify the key name(s) and try again")
|
||||
}
|
||||
|
||||
// Key and value from argument
|
||||
@ -267,7 +287,7 @@ var secretsDeleteCmd = &cobra.Command{
|
||||
util.HandleError(err, "Unable to get local project details")
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(environmentName)
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName})
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to fetch secrets")
|
||||
}
|
||||
@ -286,7 +306,7 @@ var secretsDeleteCmd = &cobra.Command{
|
||||
|
||||
if len(invalidSecretNamesThatDoNotExist) != 0 {
|
||||
message := fmt.Sprintf("secret name(s) [%v] does not exist in your project. To see which secrets exist run [infisical secrets]", strings.Join(invalidSecretNamesThatDoNotExist, ", "))
|
||||
util.PrintMessageAndExit(message)
|
||||
util.PrintErrorMessageAndExit(message)
|
||||
}
|
||||
|
||||
request := api.BatchDeleteSecretsBySecretIdsRequest{
|
||||
@@ -309,42 +329,23 @@ var secretsDeleteCmd = &cobra.Command{
    },
}

func init() {
    secretsCmd.AddCommand(secretsGetCmd)
    secretsGetCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
        util.RequireLogin()
        util.RequireLocalWorkspaceFile()
    }

    secretsCmd.AddCommand(secretsSetCmd)
    secretsSetCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
        util.RequireLogin()
        util.RequireLocalWorkspaceFile()
    }

    secretsCmd.AddCommand(secretsDeleteCmd)
    secretsDeleteCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
        util.RequireLogin()
        util.RequireLocalWorkspaceFile()
    }

    secretsCmd.PersistentFlags().String("env", "dev", "Used to select the environment name on which actions should be taken on")
    secretsCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
    rootCmd.AddCommand(secretsCmd)
}

func getSecretsByNames(cmd *cobra.Command, args []string) {
    environmentName, err := cmd.Flags().GetString("env")
    if err != nil {
        util.HandleError(err, "Unable to parse flag")
    }

    workspaceFileExists := util.WorkspaceConfigFileExistsInCurrentPath()
    if !workspaceFileExists {
    infisicalToken, err := cmd.Flags().GetString("token")
    if err != nil {
        util.HandleError(err, "Unable to parse flag")
    }

    secrets, err := util.GetAllEnvironmentVariables(environmentName)
    tagSlugs, err := cmd.Flags().GetString("tags")
    if err != nil {
        util.HandleError(err, "Unable to parse flag")
    }

    secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs})
    if err != nil {
        util.HandleError(err, "To fetch all secrets")
    }
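The secrets commands now read the environment, an optional service token, and a comma-separated list of tag slugs, and hand them to util.GetAllEnvironmentVariables as a single models.GetAllSecretsParameters value (defined later in this diff). Illustrative sketch only — the models import path is taken from this diff, the util path is inferred from the cli/packages layout, and the literal values are made up:

package main

import (
    "fmt"

    "github.com/Infisical/infisical-merge/packages/models"
    "github.com/Infisical/infisical-merge/packages/util"
)

func main() {
    // Same parameter struct the CLI now builds from its --env, --token and --tags flags.
    params := models.GetAllSecretsParameters{
        Environment:    "dev",
        InfisicalToken: "",           // left empty here; a service token could be supplied instead
        TagSlugs:       "backend,db", // comma-separated tag slugs to filter by
    }

    secrets, err := util.GetAllEnvironmentVariables(params)
    if err != nil {
        util.HandleError(err, "Unable to fetch secrets")
    }
    fmt.Printf("fetched %d secrets\n", len(secrets))
}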
@@ -371,6 +372,208 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
    visualize.PrintAllSecretDetails(requestedSecrets)
}

func generateExampleEnv(cmd *cobra.Command, args []string) {
    environmentName, err := cmd.Flags().GetString("env")
    if err != nil {
        util.HandleError(err, "Unable to parse flag")
    }

    infisicalToken, err := cmd.Flags().GetString("token")
    if err != nil {
        util.HandleError(err, "Unable to parse flag")
    }

    tagSlugs, err := cmd.Flags().GetString("tags")
    if err != nil {
        util.HandleError(err, "Unable to parse flag")
    }

    secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs})
    if err != nil {
        util.HandleError(err, "To fetch all secrets")
    }

    tagsHashToSecretKey := make(map[string]int)
    slugsToFilerBy := make(map[string]int)

    for _, slug := range strings.Split(tagSlugs, ",") {
        slugsToFilerBy[slug] = 1
    }

    type TagsAndSecrets struct {
        Secrets []models.SingleEnvironmentVariable
        Tags    []struct {
            ID        string `json:"_id"`
            Name      string `json:"name"`
            Slug      string `json:"slug"`
            Workspace string `json:"workspace"`
        }
    }

    // sort secrets by associated tags (most number of tags to least tags)
    sort.Slice(secrets, func(i, j int) bool {
        return len(secrets[i].Tags) > len(secrets[j].Tags)
    })

    for i, secret := range secrets {
        filteredTag := []struct {
            ID        string "json:\"_id\""
            Name      string "json:\"name\""
            Slug      string "json:\"slug\""
            Workspace string "json:\"workspace\""
        }{}

        for _, secretTag := range secret.Tags {
            _, exists := slugsToFilerBy[secretTag.Slug]
            if !exists {
                filteredTag = append(filteredTag, secretTag)
            }
        }

        secret.Tags = filteredTag
        secrets[i] = secret
    }

    for _, secret := range secrets {
        listOfTagSlugs := []string{}

        for _, tag := range secret.Tags {
            listOfTagSlugs = append(listOfTagSlugs, tag.Slug)
        }
        sort.Strings(listOfTagSlugs)

        tagsHash := util.GetHashFromStringList(listOfTagSlugs)

        tagsHashToSecretKey[tagsHash] += 1
    }

    finalTagHashToSecretKey := make(map[string]TagsAndSecrets)

    for _, secret := range secrets {
        listOfTagSlugs := []string{}
        for _, tag := range secret.Tags {
            listOfTagSlugs = append(listOfTagSlugs, tag.Slug)
        }

        // sort the slug so we get the same hash each time
        sort.Strings(listOfTagSlugs)

        tagsHash := util.GetHashFromStringList(listOfTagSlugs)
        occurrence, exists := tagsHashToSecretKey[tagsHash]
        if exists && occurrence > 0 {

            value, exists2 := finalTagHashToSecretKey[tagsHash]
            allSecretsForTags := append(value.Secrets, secret)

            // sort the the secrets by keys so that they can later be sorted by the first item in the secrets array
            sort.Slice(allSecretsForTags, func(i, j int) bool {
                return allSecretsForTags[i].Key < allSecretsForTags[j].Key
            })

            if exists2 {
                finalTagHashToSecretKey[tagsHash] = TagsAndSecrets{
                    Tags:    secret.Tags,
                    Secrets: allSecretsForTags,
                }
            } else {
                finalTagHashToSecretKey[tagsHash] = TagsAndSecrets{
                    Tags:    secret.Tags,
                    Secrets: []models.SingleEnvironmentVariable{secret},
                }
            }

            tagsHashToSecretKey[tagsHash] -= 1
        }
    }

    // sort the fianl result by secret key fo consistent print order
    listOfsecretDetails := make([]TagsAndSecrets, 0, len(finalTagHashToSecretKey))
    for _, secretDetails := range finalTagHashToSecretKey {
        listOfsecretDetails = append(listOfsecretDetails, secretDetails)
    }

    // sort the order of the headings by the order of the secrets
    sort.Slice(listOfsecretDetails, func(i, j int) bool {
        return len(listOfsecretDetails[i].Tags) < len(listOfsecretDetails[j].Tags)
    })

    tableOfContents := []string{}
    fullyGeneratedDocuments := []string{}
    for _, secretDetails := range listOfsecretDetails {
        listOfKeyValue := []string{}

        for _, secret := range secretDetails.Secrets {
            re := regexp.MustCompile(`(?s)(.*)DEFAULT:(.*)`)
            match := re.FindStringSubmatch(secret.Comment)
            defaultValue := ""
            comment := secret.Comment

            // Case: Only has default value
            if len(match) == 2 {
                defaultValue = strings.TrimSpace(match[1])
            }

            // Case: has a comment and a default value
            if len(match) == 3 {
                comment = match[1]
                defaultValue = match[2]
            }

            row := ""
            if comment != "" {
                comment = addHash(comment)
                row = fmt.Sprintf("%s \n%s=%s", strings.TrimSpace(comment), strings.TrimSpace(secret.Key), strings.TrimSpace(defaultValue))
            } else {
                row = fmt.Sprintf("%s=%s", strings.TrimSpace(secret.Key), strings.TrimSpace(defaultValue))
            }

            // each secret row to be added to the file
            listOfKeyValue = append(listOfKeyValue, row)
        }

        listOfTagNames := []string{}
        for _, tag := range secretDetails.Tags {
            listOfTagNames = append(listOfTagNames, tag.Name)
        }

        heading := CenterString(strings.Join(listOfTagNames, " & "), 80)

        if len(listOfTagNames) == 0 {
            fullyGeneratedDocuments = append(fullyGeneratedDocuments, fmt.Sprintf("\n%s \n", strings.Join(listOfKeyValue, "\n")))
        } else {
            fullyGeneratedDocuments = append(fullyGeneratedDocuments, fmt.Sprintf("\n\n\n%s \n%s \n", heading, strings.Join(listOfKeyValue, "\n")))
            tableOfContents = append(tableOfContents, strings.ToUpper(strings.Join(listOfTagNames, " & ")))
        }
    }

    dashedList := []string{}
    for _, item := range tableOfContents {
        dashedList = append(dashedList, fmt.Sprintf("# - %s \n", item))
    }
    if len(dashedList) > 0 {
        fmt.Println(CenterString("TABLE OF CONTENTS", 80))
        fmt.Println(strings.Join(dashedList, ""))
    }
    fmt.Println(strings.Join(fullyGeneratedDocuments, ""))
}

func CenterString(s string, numStars int) string {
    stars := strings.Repeat("*", numStars)
    padding := (numStars - len(s)) / 2
    cenetredTextWithStar := stars[:padding] + " " + strings.ToUpper(s) + " " + stars[padding:]

    hashes := strings.Repeat("#", len(cenetredTextWithStar)+2)
    return fmt.Sprintf("%s \n# %s \n%s", hashes, cenetredTextWithStar, hashes)
}

func addHash(input string) string {
    lines := strings.Split(input, "\n")
    for i, line := range lines {
        lines[i] = "# " + line
    }
    return strings.Join(lines, "\n")
}
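CenterString and addHash only shape the text that generate-example-env prints. A rough illustration of what they produce (not part of the diff; assumes it sits in the same cmd package as the helpers above, and the described output is inferred from the format strings rather than captured from a run):

func exampleEnvFormatting() {
    fmt.Println(CenterString("backend & database", 80))
    // A three-line banner: a row of '#', then the upper-cased heading spliced
    // into the middle of an 80-character row of '*', then another row of '#'.

    fmt.Println(addHash("Connection string for the primary database\nDEFAULT: postgres://localhost:5432"))
    // Every line of the comment gets a "# " prefix, which is how comments and
    // DEFAULT hints end up rendered in the generated example .env output.
}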
func getSecretsByKeys(secrets []models.SingleEnvironmentVariable) map[string]models.SingleEnvironmentVariable {
    secretMapByName := make(map[string]models.SingleEnvironmentVariable)

@@ -380,3 +583,30 @@ func getSecretsByKeys(secrets []models.SingleEnvironmentVariable) map[string]mod

    return secretMapByName
}

func init() {

    secretsGenerateExampleEnvCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
    secretsCmd.AddCommand(secretsGenerateExampleEnvCmd)

    secretsGetCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
    secretsCmd.AddCommand(secretsGetCmd)

    secretsCmd.AddCommand(secretsSetCmd)
    secretsSetCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
        util.RequireLogin()
        util.RequireLocalWorkspaceFile()
    }

    secretsCmd.AddCommand(secretsDeleteCmd)
    secretsDeleteCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
        util.RequireLogin()
        util.RequireLocalWorkspaceFile()
    }

    secretsCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
    secretsCmd.PersistentFlags().String("env", "dev", "Used to select the environment name on which actions should be taken on")
    secretsCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
    secretsCmd.PersistentFlags().StringP("tags", "t", "", "filter secrets by tag slugs")
    rootCmd.AddCommand(secretsCmd)
}
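The reorganised init above follows one pattern per subcommand: register it on secretsCmd, then gate it with util.RequireLogin and util.RequireLocalWorkspaceFile in a PersistentPreRun. A hypothetical additional subcommand wired the same way (the command name and body are invented and assume the same cmd package and imports; only the registration pattern comes from this diff):

var secretsListTagsCmd = &cobra.Command{ // hypothetical, not part of this diff
    Use:   "list-tags",
    Short: "List the tag slugs attached to secrets in this project",
    Run: func(cmd *cobra.Command, args []string) {
        // command body would go here
    },
}

func registerListTagsCmd() {
    secretsCmd.AddCommand(secretsListTagsCmd)
    secretsListTagsCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
        util.RequireLogin()
        util.RequireLocalWorkspaceFile()
    }
}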
@@ -1,5 +1,5 @@
/*
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
Copyright (c) 2023 Infisical Inc.
*/
package cmd

@@ -12,6 +12,11 @@ import (

// will decrypt cipher text to plain text using iv and tag
func DecryptSymmetric(key []byte, cipherText []byte, tag []byte, iv []byte) ([]byte, error) {
    // Case: empty string
    if len(cipherText) == 0 && len(tag) == 0 && len(iv) == 0 {
        return []byte{}, nil
    }

    block, err := aes.NewCipher(key)
    if err != nil {
        return nil, err
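The added early return means an all-empty ciphertext/tag/IV triple now decrypts to an empty byte slice instead of failing inside AES-GCM. A sketch of a caller relying on that (illustrative only; the name of the package exporting DecryptSymmetric is not shown in this hunk and is assumed here to be crypto):

func decryptPossiblyEmptySecret(key, cipherText, tag, iv []byte) (string, error) {
    plainText, err := crypto.DecryptSymmetric(key, cipherText, tag, iv)
    if err != nil {
        return "", err
    }
    // With the new guard, empty inputs yield ([]byte{}, nil), so an empty
    // secret value round-trips as "" rather than producing a cipher error.
    return string(plainText), nil
}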
@@ -1,6 +1,8 @@
package models

import "github.com/99designs/keyring"
import (
    "github.com/99designs/keyring"
)

type UserCredentials struct {
    Email string `json:"email"`

@@ -19,6 +21,13 @@ type SingleEnvironmentVariable struct {
    Value string `json:"value"`
    Type  string `json:"type"`
    ID    string `json:"_id"`
    Tags  []struct {
        ID        string `json:"_id"`
        Name      string `json:"name"`
        Slug      string `json:"slug"`
        Workspace string `json:"workspace"`
    } `json:"tags"`
    Comment string `json:"comment"`
}

type Workspace struct {

@@ -30,11 +39,18 @@ type Workspace struct {
}

type WorkspaceConfigFile struct {
    WorkspaceId string `json:"workspaceId"`
    WorkspaceId        string `json:"workspaceId"`
    DefaultEnvironment string `json:"defaultEnvironment"`
}

type SymmetricEncryptionResult struct {
    CipherText []byte
    Nonce      []byte
    AuthTag    []byte
    CipherText []byte `json:"CipherText"`
    Nonce      []byte `json:"Nonce"`
    AuthTag    []byte `json:"AuthTag"`
}

type GetAllSecretsParameters struct {
    Environment    string
    InfisicalToken string
    TagSlugs       string
}
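The explicit struct tags pin the marshalled field names of SymmetricEncryptionResult to CipherText, Nonce and AuthTag. A quick round-trip sketch (not part of the diff; the models import path is the one used elsewhere in this change set):

package main

import (
    "encoding/json"
    "fmt"

    "github.com/Infisical/infisical-merge/packages/models"
)

func main() {
    result := models.SymmetricEncryptionResult{
        CipherText: []byte("ciphertext-bytes"),
        Nonce:      []byte("nonce-bytes"),
        AuthTag:    []byte("auth-tag-bytes"),
    }

    out, err := json.Marshal(result)
    if err != nil {
        panic(err)
    }
    // encoding/json base64-encodes []byte fields, under the key names fixed
    // by the new tags: {"CipherText":"...","Nonce":"...","AuthTag":"..."}
    fmt.Println(string(out))
}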
cli/packages/util/check-for-update.go (new file, 46 lines)
@@ -0,0 +1,46 @@
package util

import (
    "encoding/json"
    "errors"
    "fmt"
    "io/ioutil"
    "net/http"
)

func CheckForUpdate() {
    latestVersion, err := getLatestTag("Infisical", "infisical")
    if err != nil {
        // do nothing and continue
        return
    }
    if latestVersion != CLI_VERSION {
        PrintWarning(fmt.Sprintf("Please update your CLI. You are running version %s but the latest version is %s", CLI_VERSION, latestVersion))
    }
}

func getLatestTag(repoOwner string, repoName string) (string, error) {
    url := fmt.Sprintf("https://api.github.com/repos/%s/%s/tags", repoOwner, repoName)
    resp, err := http.Get(url)
    if err != nil {
        return "", err
    }
    if resp.StatusCode != 200 {
        return "", errors.New(fmt.Sprintf("GitHub API returned status code %d", resp.StatusCode))
    }

    defer resp.Body.Close()

    body, err := ioutil.ReadAll(resp.Body)
    if err != nil {
        return "", err
    }

    var tags []struct {
        Name string `json:"name"`
    }

    json.Unmarshal(body, &tags)

    return tags[0].Name[1:], nil
}
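CheckForUpdate deliberately swallows lookup failures, so it can be called unconditionally at startup; it only prints a warning when the newest GitHub tag differs from CLI_VERSION. Minimal sketch of a call site (illustrative; the import path is inferred from the new file's location under cli/packages/util):

package main

import "github.com/Infisical/infisical-merge/packages/util"

func main() {
    // Silent when offline, when the GitHub API call fails, or when the
    // running build already matches the latest published tag.
    util.CheckForUpdate()
}

One caveat visible in the new file: getLatestTag indexes tags[0] and ignores the json.Unmarshal error, so an empty or malformed tag list from the API would panic before CheckForUpdate's error path can absorb it.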
@@ -2,6 +2,7 @@ package util

import (
    "fmt"
    "net/http"
    "os"
)

@@ -19,3 +20,11 @@ func WriteToFile(fileName string, dataToWrite []byte, filePerm os.FileMode) erro

    return nil
}

func CheckIsConnectedToInternet() (ok bool) {
    _, err := http.Get("http://clients3.google.com/generate_204")
    if err != nil {
        return false
    }
    return true
}
@@ -12,3 +12,7 @@ const (
    PERSONAL_SECRET_TYPE_NAME = "personal"
    SHARED_SECRET_TYPE_NAME   = "shared"
)

var (
    CLI_VERSION = "devel"
)
@@ -5,7 +5,7 @@ import (
    "fmt"

    "github.com/99designs/keyring"
    "github.com/Infisical/infisical-merge/packages/config"
    "github.com/Infisical/infisical-merge/packages/api"
    "github.com/Infisical/infisical-merge/packages/models"
    "github.com/go-resty/resty/v2"
)

@@ -87,17 +87,10 @@ func GetCurrentLoggedInUserDetails() (LoggedInUserDetails, error) {
        SetAuthToken(userCreds.JTWToken).
        SetHeader("Accept", "application/json")

    response, err := httpClient.
        R().
        Post(fmt.Sprintf("%v/v1/auth/checkAuth", config.INFISICAL_URL))

    if err != nil {
        return LoggedInUserDetails{}, err
    }

    if response.StatusCode() > 299 {
    isAuthenticated := api.CallIsAuthenticated(httpClient)
    if !isAuthenticated {
        return LoggedInUserDetails{
            IsUserLoggedIn:  true,
            IsUserLoggedIn:  true, // was logged in
            LoginExpired:    true,
            UserCredentials: userCreds,
        }, nil
@@ -1,6 +1,7 @@
package util

import (
    "crypto/sha256"
    "encoding/base64"
    "fmt"
    "os"

@@ -64,29 +65,29 @@ func RequireLogin() {
    }

    if !currentUserDetails.IsUserLoggedIn {
        PrintMessageAndExit("You must be logged in to run this command. To login, run [infisical login]")
        PrintErrorMessageAndExit("You must be logged in to run this command. To login, run [infisical login]")
    }

    if currentUserDetails.LoginExpired {
        PrintMessageAndExit("Your login expired, please login in again. To login, run [infisical login]")
        PrintErrorMessageAndExit("Your login expired, please login in again. To login, run [infisical login]")
    }

    if currentUserDetails.UserCredentials.Email == "" && currentUserDetails.UserCredentials.JTWToken == "" && currentUserDetails.UserCredentials.PrivateKey == "" {
        PrintMessageAndExit("One or more of your login details is empty. Please try logging in again via by running [infisical login]")
        PrintErrorMessageAndExit("One or more of your login details is empty. Please try logging in again via by running [infisical login]")
    }
}

func RequireServiceToken() {
    serviceToken := os.Getenv(INFISICAL_TOKEN_NAME)
    if serviceToken == "" {
        PrintMessageAndExit("No service token is found in your terminal")
        PrintErrorMessageAndExit("No service token is found in your terminal")
    }
}

func RequireLocalWorkspaceFile() {
    workspaceFileExists := WorkspaceConfigFileExistsInCurrentPath()
    if !workspaceFileExists {
        PrintMessageAndExit("It looks you have not yet connected this project to Infisical", "To do so, run [infisical init] then run your command again")
        PrintErrorMessageAndExit("It looks you have not yet connected this project to Infisical", "To do so, run [infisical init] then run your command again")
    }

    workspaceFile, err := GetWorkSpaceFromFile()

@@ -95,6 +96,17 @@ func RequireLocalWorkspaceFile() {
    }

    if workspaceFile.WorkspaceId == "" {
        PrintMessageAndExit("Your project id is missing in your local config file. Please add it or run again [infisical init]")
        PrintErrorMessageAndExit("Your project id is missing in your local config file. Please add it or run again [infisical init]")
    }
}

func GetHashFromStringList(list []string) string {
    hash := sha256.New()

    for _, item := range list {
        hash.Write([]byte(item))
    }

    sum := sha256.Sum256(hash.Sum(nil))
    return fmt.Sprintf("%x", sum)
}
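GetHashFromStringList is order-sensitive — it simply feeds each item into a SHA-256 and then hashes that digest again — which is why the tag-grouping code in secrets.go sorts the slugs before hashing. A small sketch, under the same assumption about the util import path as above:

package main

import (
    "fmt"
    "sort"

    "github.com/Infisical/infisical-merge/packages/util"
)

func main() {
    slugs := []string{"database", "backend"}
    sort.Strings(slugs) // normalise order so identical tag sets hash identically
    fmt.Println(util.GetHashFromStringList(slugs)) // hex-encoded SHA-256 digest
}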
@@ -24,13 +24,17 @@ func PrintErrorAndExit(exitCode int, err error, messages ...string) {
}

func PrintWarning(message string) {
    color.Yellow("Warning: %v", message)
    color.New(color.FgYellow).Fprintf(os.Stderr, "Warning: %v \n", message)
}

func PrintMessageAndExit(messages ...string) {
func PrintSuccessMessage(message string) {
    color.New(color.FgGreen).Println(message)
}

func PrintErrorMessageAndExit(messages ...string) {
    if len(messages) > 0 {
        for _, message := range messages {
            fmt.Println(message)
            fmt.Fprintln(os.Stderr, message)
        }
    }

@@ -38,5 +42,5 @@ func PrintMessageAndExit(messages ...string) {
}

func printError(e error) {
    color.Red("Hmm, we ran into an error: %v", e)
    color.New(color.FgRed).Fprintf(os.Stderr, "Hmm, we ran into an error: %v", e)
}
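Routing warnings and error messages to stderr, while normal output (including PrintSuccessMessage) stays on stdout, means shell redirection captures only the generated content — for example when piping generate-example-env into a file. A short illustration (not part of the diff; same inferred util import path as above):

package main

import (
    "fmt"

    "github.com/Infisical/infisical-merge/packages/util"
)

func main() {
    util.PrintWarning("this goes to stderr and never lands in a redirected file")
    fmt.Println("DATABASE_URL=postgres://localhost:5432") // stdout: what '>' captures
}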
Some files were not shown because too many files have changed in this diff.