Compare commits


222 Commits

Author SHA1 Message Date
Maidul Islam
d83220cdc3 Update README.md 2023-03-22 19:37:51 -07:00
Maidul Islam
8aa44ba103 Add cli guide to read me 2023-03-22 13:16:37 -07:00
Maidul Islam
119600b64f remove site url env var from frontend 2023-03-20 10:19:09 -07:00
Maidul Islam
18bbe09af4 remove background from aws deploy btn 2023-03-19 10:46:28 -07:00
Maidul Islam
b944e8bb84 fix typo in self host docs 2023-03-18 21:30:05 -07:00
Maidul Islam
0b2e6a0d77 Update self hosting docs 2023-03-18 20:57:13 -07:00
BlackMagiq
51f4ab473b Merge pull request #440 from Aashish-Upadhyay-101/aashish/backend-test-setup
test setup
2023-03-18 13:58:01 +07:00
Aashish-Upadhyay-101
9e8e538647 example test to resolve CI fail 2023-03-18 12:27:06 +05:45
Maidul Islam
809a551073 Remove 1X1 call 2023-03-17 15:42:22 -07:00
Maidul Islam
83e1900d89 get site url from request 2023-03-17 15:29:49 -07:00
Maidul Islam
9b2a31761a add default jwt life time 2023-03-17 13:23:19 -07:00
Aashish-Upadhyay-101
87c99df13d jest test basic setup 2023-03-17 15:50:22 +05:45
Aashish Upadhyay
8adc53a8bc Merge branch 'Infisical:main' into main 2023-03-17 15:48:12 +05:45
Maidul Islam
1487afb36b Show invite code to send if no email service in modal 2023-03-17 00:04:00 -07:00
Maidul Islam
2bbcd3d9e6 parse invite url from mutationFn 2023-03-17 00:00:15 -07:00
Maidul Islam
fb1f93a3c0 Skip adding users if no email service 2023-03-16 23:59:24 -07:00
Maidul Islam
1489604f82 send back invite url if no email service 2023-03-16 23:57:11 -07:00
Maidul Islam
fdae5105f8 merge backend 2023-03-16 11:26:15 -07:00
Maidul Islam
8e55d17a55 show popup when email not configured 2023-03-16 09:56:42 -07:00
Aashish-Upadhyay-101
ded5c50157 setup done ! 2023-03-16 17:36:49 +05:45
Aashish-Upadhyay-101
46f2b7a3f8 test setup 2023-03-16 17:34:11 +05:45
Tuan Dang
3d818f953d Make root/example default DB user in development 2023-03-16 16:44:47 +07:00
Tuan Dang
3ac98ba326 Improve SDK, quickstart, token, features docs 2023-03-16 15:47:24 +07:00
Maidul Islam
bb2bcb8bd1 Merge pull request #438 from xinity/xinity_infisical_typofix
typo fix
2023-03-15 10:42:59 -07:00
BlackMagiq
d103c81f67 Merge pull request #432 from Infisical/add-infisical-node
Add the new infisical-node SDK to the backend
2023-03-16 00:31:12 +07:00
Tuan Dang
b591d638d0 Update Node SDK docs 2023-03-16 00:28:29 +07:00
Rachid Zarouali
778631f396 typo fix
removed the 'add' from the helm uninstall command
2023-03-15 18:14:09 +01:00
Tuan Dang
2ec3143d27 Merge remote-tracking branch 'origin' into add-infisical-node 2023-03-16 00:01:09 +07:00
Tuan Dang
d705440400 Revamp Node SDK docs 2023-03-15 18:21:52 +07:00
Tuan Dang
e6e3d82fa6 Modify healthcheck and server on-close to close database connection 2023-03-15 16:58:59 +07:00
Tuan Dang
db48ab8f6c Modify healthcheck.test 2023-03-15 15:16:50 +07:00
Tuan Dang
b868b6a5f3 Clean up infisical-node 2023-03-15 14:03:39 +07:00
Vladyslav Matsiiako
dabc7e3eb1 Solved the issue with empty secret names 2023-03-14 22:13:02 -07:00
Vladyslav Matsiiako
38efb6a1e2 Solved the issue with empty secret names 2023-03-14 22:07:52 -07:00
Tuan Dang
a6c8638345 Refactor infisical-node to config file for birds eye view of envars 2023-03-15 00:09:40 +07:00
Tuan Dang
31111fc63b Merge remote-tracking branch 'origin' into add-infisical-node 2023-03-14 16:39:34 +07:00
Tuan Dang
7fd06e36bc Complete preliminary addition of infisical-node to support service tokens 2023-03-14 16:38:30 +07:00
Maidul Islam
5c55e6e508 clean up prod nginx 2023-03-12 19:09:27 -07:00
Maidul Islam
71fe15d56e remove ssl cert in nginx 2023-03-12 17:18:39 -07:00
mv-turtle
0a71c993ed Update README.md 2023-03-12 11:58:27 -07:00
mv-turtle
63adc181c8 Merge pull request #430 from simonemargio/main
README.md Italian translation
2023-03-12 11:57:45 -07:00
Simone Margio
76fc82811a README.md Italian translation 2023-03-12 18:05:29 +01:00
BlackMagiq
1e859c19f4 Merge pull request #429 from Infisical/check-vercel
Add docs for Node SDK
2023-03-12 23:04:48 +07:00
Tuan Dang
175b4a3fb6 Add docs for Node SDK 2023-03-12 23:03:25 +07:00
BlackMagiq
d89976802d Merge pull request #424 from Aashish-Upadhyay-101/aashish/example-docs
docs: python docs for example CRUD
2023-03-11 21:53:51 +07:00
Maidul Islam
dce5c8f621 add emailConfigured to status api 2023-03-10 23:20:08 -08:00
Maidul Islam
d8ff36f59f Merge pull request #400 from Infisical/snyk-fix-0ab98e0c00b32ecebcd11cb2298f542f
[Snyk] Security upgrade styled-components from 5.3.5 to 5.3.7
2023-03-10 20:46:33 -08:00
Aashish-Upadhyay-101
1090a61162 python create_secrets, update_secrets and delete_secrets docs 2023-03-10 22:45:58 +05:45
Maidul Islam
0e11ff198c Merge pull request #400 from Infisical/snyk-fix-0ab98e0c00b32ecebcd11cb2298f542f
[Snyk] Security upgrade styled-components from 5.3.5 to 5.3.7
2023-03-10 08:46:15 -08:00
BlackMagiq
cdbc6f5619 Merge pull request #423 from Infisical/check-integrations
Patch create integration page on no integration projects and add support for groups in GitLab integration
2023-03-10 21:50:10 +07:00
Tuan Dang
78cb18ad0e Fix lint errors 2023-03-10 21:45:51 +07:00
Aashish-Upadhyay-101
42374a775d python retrieve_secrets docs 2023-03-10 20:29:58 +05:45
Tuan Dang
0269b58a3c Finish support for GitLab groups integration 2023-03-10 21:25:04 +07:00
Maidul Islam
ef4a316558 Update docker.mdx 2023-03-09 16:26:32 -08:00
mv-turtle
a676ce7c21 Update features.mdx 2023-03-09 13:13:38 -08:00
mv-turtle
f475daf7a6 Update README.md 2023-03-08 22:00:28 -08:00
Maidul Islam
c8110c31ef update helm chart with rbac for configmaps 2023-03-08 21:37:04 -08:00
Maidul Islam
a5c8c9c279 add rbac for config 2023-03-08 21:33:29 -08:00
Tuan Dang
5860136494 Patch serviceTokenData workspace string comparison 2023-03-09 12:00:30 +07:00
Tuan Dang
3f3516b7ba Checkpoint GitLab integration group support 2023-03-09 11:52:34 +07:00
mv-turtle
06e26da684 Update README.md 2023-03-08 20:41:16 -08:00
Maidul Islam
bb70ff96d2 Add docs for k8 Global configuration 2023-03-08 20:33:18 -08:00
Maidul Islam
c019d57fb6 allow global defaults for secrets operator 2023-03-08 18:52:05 -08:00
Vladyslav Matsiiako
7854a5eea2 Fix stripe checks 2023-03-08 16:20:47 -08:00
Vladyslav Matsiiako
29636173ef Removed the add to project button for new people 2023-03-08 08:22:31 -08:00
BlackMagiq
4edfc1e0be Merge pull request #411 from Infisical/revised-service-token-docs
Add read/write support for service tokens and update CRUD examples in docs to use service tokens
2023-03-07 15:42:07 +07:00
Tuan Dang
61d4da49aa Merge remote-tracking branch 'origin' into revised-service-token-docs 2023-03-07 15:34:22 +07:00
Tuan Dang
56187ec43e Fix lint errors 2023-03-07 15:33:45 +07:00
Tuan Dang
971ac26033 Fix lint errors 2023-03-07 15:23:10 +07:00
mv-turtle
1f316a0b65 Add SveteKit to the docs sidebar 2023-03-06 21:17:33 -08:00
mv-turtle
23d09c37b5 Merge pull request #407 from jerriclynsjohn/patch-2
Adding sveltekit into the index of Integrations
2023-03-06 21:12:50 -08:00
mv-turtle
fc7c3022be Merge pull request #410 from jerriclynsjohn/add-sveltekit
Adding SvelteKit in the frontend app
2023-03-06 21:12:09 -08:00
Tuan Dang
5b65adedbb Resolve merge conflicts 2023-03-07 11:22:56 +07:00
Maidul Islam
6faf9bf4bf bug fix for https://github.com/Infisical/infisical/issues/403 2023-03-06 11:13:35 -05:00
Jerric Lyns John
b5998d7f22 Adding SvelteKit in the frontend app 2023-03-06 20:30:40 +05:30
Tuan Dang
6abbc1c54d Revise docs for working with CRUD secrets 2023-03-06 18:52:30 +07:00
Maidul Islam
85e5319981 Merge pull request #401 from jon4hz/update-check
Update check
2023-03-05 22:38:06 -05:00
Maidul Islam
50da0a753a Add cli docs for supported environment variables 2023-03-05 22:36:00 -05:00
Jerric Lyns John
6a5f2d0566 Adding sveltekit into the index 2023-03-06 06:44:17 +05:30
mv-turtle
d93277155f Merge pull request #405 from jerriclynsjohn/patch-1
Adding documentation for SvelteKit
2023-03-05 15:56:32 -08:00
Jerric Lyns John
cb905e5ee6 Create sveltekit.mdx 2023-03-06 05:18:53 +05:30
BlackMagiq
71261e7594 Merge pull request #394 from ha-sante/patch-1
Update create-secrets.mdx
2023-03-05 12:34:26 +07:00
BlackMagiq
27e4f490d3 Merge pull request #387 from MatthewJohn/main
Correct port in self-host documentation and simplify downloading nginx config
2023-03-05 12:32:20 +07:00
mv-turtle
298c8705d7 Merge pull request #370 from Neeraj138/login-after-delete-all-projects
Fix: Unable to login after deleting all projects
2023-03-04 21:22:54 -08:00
mv-turtle
edc4382a48 Merge pull request #378 from caioluis/fix/update-pt-br-copies
feat(webui-localization): update and fix pt-BR
2023-03-04 20:59:05 -08:00
mv-turtle
5baab76f2e Merge branch 'main' into fix/update-pt-br-copies 2023-03-04 20:54:32 -08:00
Maidul Islam
f9f30efe03 add integrations to main nav in docs 2023-03-04 19:33:11 -05:00
BlackMagiq
12701bdf98 Merge pull request #402 from Aqib-Rime/update_overview_docs
update AWS and AZURE integrations
2023-03-04 15:01:57 +07:00
Aqib-Rime
70967ac7b0 update AWS and AZURE integrations 2023-03-04 13:52:14 +06:00
Maidul Islam
98b443da82 Merge pull request #380 from jon4hz/tf
docs: add terraform
2023-03-03 21:07:51 -05:00
Maidul Islam
10f75c8e55 add terraform docs 2023-03-03 21:07:18 -05:00
jon4hz
b226642853 fix: add debug log 2023-03-03 23:56:14 +01:00
jon4hz
933f837f64 feat: option to disable update check 2023-03-03 23:52:01 +01:00
jon4hz
7327698305 fix: dont use ioutils and handle error 2023-03-03 23:47:54 +01:00
mv-turtle
1dc59d0d41 Merge pull request #399 from eltociear/add-ja
Add Japanese README.md
2023-03-03 14:43:05 -08:00
snyk-bot
13e067dc4f fix: frontend/package.json & frontend/package-lock.json to reduce vulnerabilities
The following vulnerabilities are fixed with an upgrade:
- https://snyk.io/vuln/SNYK-JS-STYLEDCOMPONENTS-3149924
2023-03-03 22:37:03 +00:00
Ikko Eltociear Ashimine
885d348f96 Add Ja link 2023-03-03 21:35:05 +09:00
Ikko Eltociear Ashimine
c71ee77503 Add Japanese README.md 2023-03-03 21:28:59 +09:00
BlackMagiq
14206de926 Merge pull request #398 from Infisical/patch-azure
Add secret recovery for setting deleted secrets for Azure integration
2023-03-03 15:05:41 +07:00
Tuan Dang
c73d64d784 Add secret recovery for setting deleted secrets for Azure integration 2023-03-03 14:59:59 +07:00
mv-turtle
913067f014 Merge pull request #390 from jorgeteixe/main
Feat: add spanish (es) locale
2023-03-02 14:33:58 -08:00
Tuan Dang
ff2ee989d6 Add GitLab and Azure Key Vault docs 2023-03-02 23:44:57 +07:00
Tuan Dang
200cefc1b2 Complete Azure integration 2023-03-02 22:12:56 +07:00
Tuan Dang
721af0f26d Paginate Netlify sites 2023-03-02 14:36:52 +07:00
Maidul Islam
aca6269920 add support link for all major errors 2023-03-02 00:00:31 -05:00
Maidul Islam
a4074c9687 add update Instructions 2023-03-01 23:44:40 -05:00
Maidul Islam
205ec61549 improve export and run command docs 2023-03-01 18:01:09 -05:00
Maidul Islam
0d16f707c2 update export command docs with proejctId flag 2023-03-01 17:40:39 -05:00
Maidul Islam
d3d5ead6ed allow export by explicit projectId 2023-03-01 17:36:02 -05:00
Maidul Islam
1f05d6ea4d update file permissions to be r/w only for owner 2023-03-01 17:22:59 -05:00
Maidul Islam
ff82af8358 remove unused GetAllWorkSpaceConfigsStartingFromCurrentPath method 2023-03-01 17:22:59 -05:00
Maidul Islam
a7da858694 reset cmd also delete secret backups 2023-03-01 17:22:59 -05:00
Maidul Islam
b5c2f6e551 no login override popup when invalid private key 2023-03-01 17:22:59 -05:00
Maidul Islam
77226e0924 check public and private keys before DecryptAsymmetric call 2023-03-01 17:22:59 -05:00
Vladyslav Matsiiako
0cc4286f5f Added notifications for wrong file types when dropping 2023-03-01 11:08:22 -08:00
Vladyslav Matsiiako
99144143ff Added Kubernetes to the integrations list 2023-02-28 20:42:34 -08:00
Vladyslav Matsiiako
efff841121 Updated slack link 2023-02-28 09:44:47 -08:00
BlackMagiq
2f8d914ecb Merge pull request #391 from Aashish-Upadhyay-101/Aashish-Upadhyay-101/GitLab-integration
Feat: GitLab Integration
2023-03-01 00:16:21 +07:00
ha-sante
7dd28a5941 Update create-secrets.mdx
I am making this change to draw your attention to this as it seems that some variables are used wrong.

- Specifically this section:


		util.decodeBase64(encryptedProjectKey),
		util.decodeBase64(encryptedProjectKey.nonce),
		util.decodeBase64(encryptedProjectKey.sender.publicKey),
		util.decodeBase64(PSWD)



- Imported tweetnacl as well so it's easier to understand and for the code to make sense from the get go.
2023-02-28 13:03:32 +00:00
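For context, the snippet quoted in the commit message above is the tweetnacl pattern for decrypting a project key. A minimal sketch of that pattern, with illustrative field and variable names rather than the exact ones from the docs page, looks roughly like this:

import * as nacl from 'tweetnacl';
import * as util from 'tweetnacl-util';

// Rough sketch only: decrypt an encrypted project key with nacl.box.open.
// Field names (ciphertext, nonce, sender.publicKey) and the privateKey argument
// are illustrative; the actual docs example may name them differently.
const decryptProjectKey = (
  encryptedProjectKey: { ciphertext: string; nonce: string; sender: { publicKey: string } },
  privateKey: string // the current user's private key, base64-encoded
): Uint8Array => {
  const decrypted = nacl.box.open(
    util.decodeBase64(encryptedProjectKey.ciphertext),
    util.decodeBase64(encryptedProjectKey.nonce),
    util.decodeBase64(encryptedProjectKey.sender.publicKey),
    util.decodeBase64(privateKey)
  );
  if (decrypted === null) throw new Error('failed to decrypt project key');
  return decrypted;
};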
Tuan Dang
a89fccdc1f Add support for Zoho email 2023-02-28 19:02:17 +07:00
Aashish-Upadhyay-101
40ddd3b2a5 remove console.log() i.e used for testing 2023-02-28 10:19:31 +05:45
Aashish-Upadhyay-101
74d17a20a4 axios changes to request 2023-02-28 10:15:09 +05:45
Aashish-Upadhyay-101
d537bd2f58 merge conflict resolve 2023-02-28 09:52:49 +05:45
Aashish-Upadhyay-101
2f045be8a4 missing break statement 2023-02-28 09:37:47 +05:45
Jorge Teixeira
c5ee4810ad add dropdown option for spanish locale 2023-02-28 00:24:12 +01:00
Jorge Teixeira
1dbda5876f add spanish locale files 2023-02-28 00:23:47 +01:00
Maidul Islam
d948923d95 add typescript types to secret versions 2023-02-27 16:32:13 -05:00
Maidul Islam
fb1085744a Merge pull request #389 from Infisical/revert-374-shell
Revert "fix: always execute cmd in subshell"
2023-02-27 14:26:41 -05:00
Maidul Islam
ec22291aca Revert "fix: always execute cmd in subshell" 2023-02-27 14:24:47 -05:00
Matt John
00a07fd27c Correct port in linux selfhost setup 2023-02-27 18:47:59 +00:00
Matt John
ec0e77cc5a Remove unecessary 'cd' during download of nginx config in linux selfhost setup 2023-02-27 18:47:23 +00:00
Maidul Islam
16c49a9626 update slack link in welcome message 2023-02-26 23:21:36 -05:00
Maidul Islam
06ea809d60 change color of bold welcome text 2023-02-26 23:19:28 -05:00
Maidul Islam
12364005c1 improve login welcome message 2023-02-26 22:49:31 -05:00
Maidul Islam
98573e9e05 Dependabot alerts #13 2023-02-26 21:55:34 -05:00
Maidul Islam
c1a4ca6203 Dependabot alerts #19 2023-02-26 21:43:01 -05:00
Maidul Islam
21c2fd8542 address Dependabot alerts #17 2023-02-26 21:40:55 -05:00
Maidul Islam
b27bc8fc1b address dependabot alerts #18 2023-02-26 21:38:44 -05:00
Vladyslav Matsiiako
091115e6ba Merge branch 'main' of https://github.com/Infisical/infisical 2023-02-26 17:51:45 -08:00
Vladyslav Matsiiako
d9c055872d Fixed minor bugs everywhere 2023-02-26 17:51:26 -08:00
Maidul Islam
f73d18ddc7 merge PR 287 2023-02-26 20:03:40 -05:00
Maidul Islam
eb47126f68 merge PR 288 2023-02-26 20:00:37 -05:00
Maidul Islam
4750767268 merge PR 289 2023-02-26 19:53:03 -05:00
Maidul Islam
b0ed772885 merge PR 290 2023-02-26 19:50:22 -05:00
Maidul Islam
7fdab81b5f merge pr 291 2023-02-26 19:46:40 -05:00
Maidul Islam
c17bf13f8c remove sudo for alpine 2023-02-26 19:32:33 -05:00
Maidul Islam
0e17c9a6db Merge pull request #383 from 5h4k4r/use-multi-stage-build
Builds Docker image of backend component with multi-stage Dockerfile
2023-02-26 10:50:56 -05:00
Maidul Islam
1c4dd78dea use node alpine base image 2023-02-26 10:49:01 -05:00
Shakar
23418b3a09 Builds Docker image of backend component with multi-stage Dockerfile
Updates package.json `npm start` command
Delete unnecessary `npm run start` commands

Signed-off-by: Shakar <5h4k4r.b4kr@gmail.com>
2023-02-26 14:50:49 +03:00
mv-turtle
0f143adbde Merge pull request #377 from caioluis/main
feat(webui localization): add support for pt-PT
2023-02-25 12:01:51 -08:00
Maidul Islam
1f3f4b7900 Merge pull request #353 from Grraahaam/chore/helm-docs
chore(docs): helm charts + local cluster setup script
2023-02-25 13:25:47 -05:00
Maidul Islam
2c5f26380e Merge pull request #373 from jon4hz/default-env
fix: properly support default environment
2023-02-25 13:08:00 -05:00
Maidul Islam
8f974fb087 Add docs for default environment 2023-02-25 13:07:07 -05:00
Maidul Islam
a0722b4ca5 Merge pull request #372 from jon4hz/format-cfg
fix: pretty print workspace file
2023-02-25 12:42:46 -05:00
Maidul Islam
41e039578a Merge pull request #374 from jon4hz/shell
fix: always execute cmd in subshell
2023-02-25 12:39:08 -05:00
jon4hz
515e010065 docs: add terraform 2023-02-25 18:09:06 +01:00
Maidul Islam
c89e8e8a96 Merge pull request #375 from jon4hz/workspace
feat: search for workspace config in parent dir
2023-02-25 11:27:34 -05:00
Maidul Islam
cac83ab927 add debug log to workspace path location 2023-02-25 11:25:49 -05:00
Caio Gomes
2c46e8a2dc feat(webui-localization): fix and update pt-br copies 2023-02-25 08:24:27 +00:00
Caio Gomes
0f0b894363 Revert "feat(webui-localization): fix and update pt-br copies"
I was dizzy and forgot to make a branch

This reverts commit 43f9af1bc6.

Signed-off-by: Caio Gomes <ocaioluis@gmail.com>
2023-02-25 08:05:43 +00:00
Caio Gomes
43f9af1bc6 feat(webui-localization): fix and update pt-br copies 2023-02-25 08:04:02 +00:00
Caio Gomes
f5ed14c84c fix(localization): add pt-PT to the options and fix a copy 2023-02-25 07:22:34 +00:00
Caio Gomes
2dd57d7c73 feat(localization): add locales for pt-PT 2023-02-25 06:53:02 +00:00
Maidul Islam
0b1891b64a Merge pull request #371 from jon4hz/env-filter
Improve env filter
2023-02-24 23:12:20 -05:00
Maidul Islam
5614b0f58a nit: method name change 2023-02-24 22:21:20 -05:00
Maidul Islam
3bb178976d Remove auto delete index 2023-02-24 21:57:13 -05:00
jon4hz
1777f98aef feat: search for workspace config in parent dir 2023-02-25 01:25:39 +01:00
jon4hz
45e3706335 fix: always execute cmd in subshell 2023-02-25 00:19:58 +01:00
jon4hz
337ed1fc46 fix: properly support default environment 2023-02-24 23:20:30 +01:00
jon4hz
d1ea76e5a0 fix: pretty print workspace file 2023-02-24 22:28:28 +01:00
jon4hz
4a72d725b1 fix: use function to get secrets by key 2023-02-24 22:03:50 +01:00
jon4hz
1693db3199 fix: preallocate map size 2023-02-24 22:03:13 +01:00
jon4hz
1ff42991b3 fix: improve filtering of reserved env vars 2023-02-24 21:57:48 +01:00
Neeraj138
eebe3c164a Fix: Unable to login after deleting all projects 2023-02-25 01:16:10 +05:30
mv-turtle
978423ba5b Merge pull request #364 from alexdanilowicz/patch-3
chore(docs): Update README MIT License badge link
2023-02-24 11:41:28 -08:00
mv-turtle
4d0dc0d7b7 Update README.md 2023-02-24 11:40:03 -08:00
BlackMagiq
3817e666a9 Merge pull request #365 from Infisical/travisci
Add Travis CI Docs
2023-02-24 16:44:15 +07:00
Tuan Dang
b61350f6a4 Add Travis CI docs, minor edits 2023-02-24 16:37:26 +07:00
Tuan Dang
0fb1a1dc6f Merge remote-tracking branch 'origin' into travisci 2023-02-24 16:08:53 +07:00
BlackMagiq
9eefc87b7a Merge pull request #350 from Aashish-Upadhyay-101/Aashish-Upadhyay-101/TravisCI-integration
Feat: travis ci integration
2023-02-24 16:08:19 +07:00
Tuan Dang
53d35757ee Make minor changes to TravisCI sync, faster, reliable 2023-02-24 15:59:12 +07:00
Tuan Dang
e80e8e00b1 Replace axios with request 2023-02-24 15:31:46 +07:00
Aashish-Upadhyay-101
0b08e574c7 Merge remote-tracking branch 'refs/remotes/origin/main' into Aashish-Upadhyay-101/TravisCI-integration 2023-02-24 13:36:35 +05:45
Aashish-Upadhyay-101
499323d0e3 Sync travis-ci 2023-02-24 13:36:17 +05:45
Alexander Danilowicz
89ad2f163a chore(docs): README
Another small README typo that I noticed. Links to a different repo.
2023-02-23 23:27:28 -08:00
Maidul Islam
7f04617b7d update brew command to update cli 2023-02-23 19:37:19 -05:00
mv-turtle
44904628bc Merge pull request #360 from bngmnn/main
add german as readme language
2023-02-23 15:10:44 -08:00
Marvin Bangemann
fafde7b1ad update other languages' readme files with 'de' flag 2023-02-23 23:56:48 +01:00
Marvin Bangemann
7e65314670 add german readme 2023-02-23 23:56:17 +01:00
Maidul Islam
df52c56e83 patch login bug in cli 2023-02-23 14:14:41 -05:00
Maidul Islam
4276fb54cc Add docs for git branch mapping 2023-02-23 13:07:18 -05:00
BlackMagiq
bb5a0db79c Merge pull request #359 from Infisical/axios-retry
Add axios-retry for Vercel integration for now
2023-02-24 00:59:29 +07:00
Tuan Dang
b906048ea1 Add axios-retry for Vercel integration for now 2023-02-24 00:52:43 +07:00
Tuan Dang
7ce9c816c5 Merge branch 'main' of https://github.com/Infisical/infisical 2023-02-23 21:38:21 +07:00
Tuan Dang
3fef6e4849 Replace Promise.all with for-await Vercel getting envars 2023-02-23 21:38:11 +07:00
mv-turtle
e7ce1e36e7 Update README.md 2023-02-23 06:25:48 -08:00
mv-turtle
734c915206 Merge pull request #358 from alisson-acioli/main
Readme translation to PT-BR.
2023-02-23 06:23:35 -08:00
Tuan Dang
783174adc6 Add for-await for better Vercel integration reliability 2023-02-23 20:21:17 +07:00
Alisson Acioli
d769db7668 integrações world 2023-02-23 09:52:58 -03:00
Alisson Acioli
00e532fce4 remove english 2023-02-23 09:52:08 -03:00
Alisson Acioli
7cf8cba54b remove word 2023-02-23 09:51:30 -03:00
Alisson Acioli
70b26811d9 Remove word API 2023-02-23 09:50:13 -03:00
Alisson Acioli
e7aafecbc2 Update language readme options 2023-02-23 09:49:03 -03:00
Alisson Acioli
949fb052cd Readme PT-BR 2023-02-23 09:45:58 -03:00
BlackMagiq
fcb1f5a51b Merge pull request #357 from Infisical/vercel-integration-patch
Patch Vercel case where secrets can be of type plain and sensitive
2023-02-23 16:53:44 +07:00
Tuan Dang
e24f70b891 Patch Vercel case where secrets can be of type plain and sensitive 2023-02-23 16:47:21 +07:00
Grraahaam
40c80f417c chore(script): added warning 2023-02-22 10:30:33 +01:00
Grraahaam
7bb2c1c278 fix(script): auto generate secrets at runtime 2023-02-22 10:25:08 +01:00
Grraahaam
a5278affe6 chore(docs): improved charts related documentation 2023-02-22 10:18:22 +01:00
Grraahaam
2f953192d6 feat(script): kind local development setup 2023-02-22 10:18:22 +01:00
Aashish-Upadhyay-101
8bf8968588 Other frontend configurations for travis-ci 2023-02-22 13:36:22 +05:45
Aashish-Upadhyay-101
7e9ce0360a Create.tsx for travis-ci 2023-02-22 13:26:10 +05:45
Aashish-Upadhyay-101
1d35c41dcb Authorize.tsx for travis-ci 2023-02-22 13:20:34 +05:45
Aashish-Upadhyay-101
824315f773 model steup for travis-ci 2023-02-22 13:14:06 +05:45
Aashish-Upadhyay-101
8a74799d64 variable setup for travis-ci 2023-02-22 12:52:42 +05:45
Aashish-Upadhyay-101
f0f6e8a988 Merge remote-tracking branch 'refs/remotes/origin/main' 2023-02-22 12:29:49 +05:45
Aashish-Upadhyay-101
c233fd8ed1 Merge remote-tracking branch 'refs/remotes/origin/main' 2023-02-13 15:39:38 +05:45
snyk-bot
d7acd7aef6 fix: upgrade i18next from 22.4.6 to 22.4.9
Snyk has created this PR to upgrade i18next from 22.4.6 to 22.4.9.

See this package in npm:
https://www.npmjs.com/package/i18next

See this project in Snyk:
https://app.snyk.io/org/maidul98/project/53d4ecb6-6cc1-4918-aa73-bf9cae4ffd13?utm_source=github&utm_medium=referral&page=upgrade-pr
2023-02-02 22:37:54 +00:00
snyk-bot
860b8efd7d fix: upgrade axios-auth-refresh from 3.3.3 to 3.3.6
Snyk has created this PR to upgrade axios-auth-refresh from 3.3.3 to 3.3.6.

See this package in npm:
https://www.npmjs.com/package/axios-auth-refresh

See this project in Snyk:
https://app.snyk.io/org/maidul98/project/53d4ecb6-6cc1-4918-aa73-bf9cae4ffd13?utm_source=github&utm_medium=referral&page=upgrade-pr
2023-02-02 22:37:50 +00:00
snyk-bot
6ca3fc5ad2 fix: upgrade @headlessui/react from 1.6.6 to 1.7.7
Snyk has created this PR to upgrade @headlessui/react from 1.6.6 to 1.7.7.

See this package in npm:
https://www.npmjs.com/package/@headlessui/react

See this project in Snyk:
https://app.snyk.io/org/maidul98/project/53d4ecb6-6cc1-4918-aa73-bf9cae4ffd13?utm_source=github&utm_medium=referral&page=upgrade-pr
2023-02-02 22:37:45 +00:00
snyk-bot
189af07ff5 fix: upgrade @stripe/react-stripe-js from 1.10.0 to 1.16.3
Snyk has created this PR to upgrade @stripe/react-stripe-js from 1.10.0 to 1.16.3.

See this package in npm:
https://www.npmjs.com/package/@stripe/react-stripe-js

See this project in Snyk:
https://app.snyk.io/org/maidul98/project/53d4ecb6-6cc1-4918-aa73-bf9cae4ffd13?utm_source=github&utm_medium=referral&page=upgrade-pr
2023-02-02 22:37:38 +00:00
snyk-bot
caf7426f86 fix: upgrade posthog-js from 1.34.0 to 1.39.4
Snyk has created this PR to upgrade posthog-js from 1.34.0 to 1.39.4.

See this package in npm:
https://www.npmjs.com/package/posthog-js

See this project in Snyk:
https://app.snyk.io/org/maidul98/project/53d4ecb6-6cc1-4918-aa73-bf9cae4ffd13?utm_source=github&utm_medium=referral&page=upgrade-pr
2023-02-02 22:37:32 +00:00
269 changed files with 12927 additions and 8695 deletions


@@ -16,9 +16,6 @@ JWT_AUTH_LIFETIME=
JWT_REFRESH_LIFETIME=
JWT_SIGNUP_LIFETIME=
# Optional lifetimes for OTP expressed in seconds
EMAIL_TOKEN_LIFETIME=
# MongoDB
# Backend will connect to the MongoDB instance at connection string MONGO_URL which can either be a ref
# to the MongoDB container instance or Mongo Cloud
@@ -48,10 +45,12 @@ CLIENT_ID_HEROKU=
CLIENT_ID_VERCEL=
CLIENT_ID_NETLIFY=
CLIENT_ID_GITHUB=
CLIENT_ID_GITLAB=
CLIENT_SECRET_HEROKU=
CLIENT_SECRET_VERCEL=
CLIENT_SECRET_NETLIFY=
CLIENT_SECRET_GITHUB=
CLIENT_SECRET_GITLAB=
CLIENT_SLUG_VERCEL=
# Sentry (optional) for monitoring errors
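For illustration, the MONGO_URL described in the comment above can point either at the docker-compose Mongo container or at a Mongo Cloud cluster. A development value consistent with the "Make root/example default DB user in development" commit could look roughly like the line below; the service name "mongo" is assumed, not taken from the diff.

# assumed docker-compose service name "mongo"; root/example are the development default credentials
MONGO_URL=mongodb://root:example@mongo:27017/?authSource=admin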

README.md (153 lines changed; diff suppressed because one or more lines are too long)


@@ -1,18 +1,27 @@
FROM node:16-bullseye-slim
# Build stage
FROM node:16-alpine AS build
WORKDIR /app
COPY package.json package-lock.json ./
# RUN npm ci --only-production --ignore-scripts
# "prepare": "cd .. && npm install"
COPY package*.json ./
RUN npm ci --only-production
COPY . .
RUN npm run build
# Production stage
FROM node:16-alpine
WORKDIR /app
COPY package*.json ./
RUN npm ci --only-production
COPY --from=build /app .
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js
EXPOSE 4000
CMD ["npm", "run", "start"]
CMD ["npm", "run", "start"]


@@ -1,19 +0,0 @@
import { server } from '../src/app';
import { describe, expect, it, beforeAll, afterAll } from '@jest/globals';
import supertest from 'supertest';
import { setUpHealthEndpoint } from '../src/services/health';
const requestWithSupertest = supertest(server);
describe('Healthcheck endpoint', () => {
beforeAll(async () => {
setUpHealthEndpoint(server);
});
afterAll(async () => {
server.close();
});
it('GET /healthcheck should return OK', async () => {
const res = await requestWithSupertest.get('/healthcheck');
expect(res.status).toEqual(200);
});
});


@@ -4,7 +4,6 @@ declare global {
namespace NodeJS {
interface ProcessEnv {
PORT: string;
EMAIL_TOKEN_LIFETIME: string;
ENCRYPTION_KEY: string;
SALT_ROUNDS: string;
JWT_AUTH_LIFETIME: string;
@@ -22,10 +21,12 @@ declare global {
CLIENT_ID_VERCEL: string;
CLIENT_ID_NETLIFY: string;
CLIENT_ID_GITHUB: string;
CLIENT_ID_GITLAB: string;
CLIENT_SECRET_HEROKU: string;
CLIENT_SECRET_VERCEL: string;
CLIENT_SECRET_NETLIFY: string;
CLIENT_SECRET_GITHUB: string;
CLIENT_SECRET_GITLAB: string;
CLIENT_SLUG_VERCEL: string;
POSTHOG_HOST: string;
POSTHOG_PROJECT_API_KEY: string;

backend/jest.config.ts (new file, 9 lines)

@@ -0,0 +1,9 @@
export default {
preset: 'ts-jest',
testEnvironment: 'node',
collectCoverageFrom: ['src/*.{js,ts}', '!**/node_modules/**'],
modulePaths: ['<rootDir>/src'],
testMatch: ['<rootDir>/tests/**/*.test.ts'],
setupFiles: ['<rootDir>/test-resources/env-vars.js'],
setupFilesAfterEnv: ['<rootDir>/tests/setupTests.ts']
};
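The testMatch pattern above expects test files under backend/tests. As a hedged sketch (the actual file added by the "example test to resolve CI fail" commit may differ), a minimal test that satisfies this setup could be as simple as:

// Hypothetical placeholder test; kept trivial so CI has at least one passing suite.
import { describe, expect, it } from '@jest/globals';

describe('example', () => {
  it('adds numbers', () => {
    expect(1 + 1).toEqual(2);
  });
});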

backend/package-lock.json (generated; 9655 lines changed; diff suppressed because it is too large)

@@ -10,6 +10,7 @@
"await-to-js": "^3.0.0",
"aws-sdk": "^2.1311.0",
"axios": "^1.1.3",
"axios-retry": "^3.4.0",
"bcrypt": "^5.1.0",
"bigint-conversion": "^2.2.2",
"builder-pattern": "^2.2.0",
@@ -22,6 +23,7 @@
"express-validator": "^6.14.2",
"handlebars": "^4.7.7",
"helmet": "^5.1.1",
"infisical-node": "^1.0.37",
"js-yaml": "^4.1.0",
"jsonwebtoken": "^9.0.0",
"jsrp": "^0.2.4",
@@ -47,7 +49,7 @@
"version": "1.0.0",
"main": "src/index.js",
"scripts": {
"start": "npm run build && node build/index.js",
"start": "node build/index.js",
"dev": "nodemon",
"swagger-autogen": "node ./swagger/index.ts",
"build": "rimraf ./build && tsc && cp -R ./src/templates ./build",
@@ -86,7 +88,7 @@
"@types/supertest": "^2.0.12",
"@types/swagger-jsdoc": "^6.0.1",
"@types/swagger-ui-express": "^4.1.3",
"@typescript-eslint/eslint-plugin": "^5.40.1",
"@typescript-eslint/eslint-plugin": "^5.54.0",
"@typescript-eslint/parser": "^5.40.1",
"cross-env": "^7.0.3",
"eslint": "^8.26.0",
@@ -99,17 +101,6 @@
"ts-jest": "^29.0.3",
"ts-node": "^10.9.1"
},
"jest": {
"preset": "ts-jest",
"testEnvironment": "node",
"collectCoverageFrom": [
"src/*.{js,ts}",
"!**/node_modules/**"
],
"setupFiles": [
"<rootDir>/test-resources/env-vars.js"
]
},
"jest-junit": {
"outputDirectory": "reports",
"outputName": "jest-junit.xml",

File diff suppressed because it is too large


@@ -1,144 +0,0 @@
// eslint-disable-next-line @typescript-eslint/no-var-requires
const { patchRouterParam } = require('./utils/patchAsyncRoutes');
import express from 'express';
import helmet from 'helmet';
import cors from 'cors';
import cookieParser from 'cookie-parser';
import dotenv from 'dotenv';
import swaggerUi = require('swagger-ui-express');
// eslint-disable-next-line @typescript-eslint/no-var-requires
const swaggerFile = require('../spec.json');
// eslint-disable-next-line @typescript-eslint/no-var-requires
const requestIp = require('request-ip');
dotenv.config();
import { PORT, NODE_ENV, SITE_URL } from './config';
import { apiLimiter } from './helpers/rateLimiter';
import {
workspace as eeWorkspaceRouter,
secret as eeSecretRouter,
secretSnapshot as eeSecretSnapshotRouter,
action as eeActionRouter
} from './ee/routes/v1';
import {
signup as v1SignupRouter,
auth as v1AuthRouter,
bot as v1BotRouter,
organization as v1OrganizationRouter,
workspace as v1WorkspaceRouter,
membershipOrg as v1MembershipOrgRouter,
membership as v1MembershipRouter,
key as v1KeyRouter,
inviteOrg as v1InviteOrgRouter,
user as v1UserRouter,
userAction as v1UserActionRouter,
secret as v1SecretRouter,
serviceToken as v1ServiceTokenRouter,
password as v1PasswordRouter,
stripe as v1StripeRouter,
integration as v1IntegrationRouter,
integrationAuth as v1IntegrationAuthRouter
} from './routes/v1';
import {
signup as v2SignupRouter,
auth as v2AuthRouter,
users as v2UsersRouter,
organizations as v2OrganizationsRouter,
workspace as v2WorkspaceRouter,
secret as v2SecretRouter, // begin to phase out
secrets as v2SecretsRouter,
serviceTokenData as v2ServiceTokenDataRouter,
apiKeyData as v2APIKeyDataRouter,
environment as v2EnvironmentRouter,
tags as v2TagsRouter,
} from './routes/v2';
import { healthCheck } from './routes/status';
import { getLogger } from './utils/logger';
import { RouteNotFoundError } from './utils/errors';
import { requestErrorHandler } from './middleware/requestErrorHandler';
// patch async route params to handle Promise Rejections
patchRouterParam();
export const app = express();
app.enable('trust proxy');
app.use(express.json());
app.use(cookieParser());
app.use(
cors({
credentials: true,
origin: SITE_URL
})
);
app.use(requestIp.mw())
if (NODE_ENV === 'production') {
// enable app-wide rate-limiting + helmet security
// in production
app.disable('x-powered-by');
app.use(apiLimiter);
app.use(helmet());
}
// (EE) routes
app.use('/api/v1/secret', eeSecretRouter);
app.use('/api/v1/secret-snapshot', eeSecretSnapshotRouter);
app.use('/api/v1/workspace', eeWorkspaceRouter);
app.use('/api/v1/action', eeActionRouter);
// v1 routes
app.use('/api/v1/signup', v1SignupRouter);
app.use('/api/v1/auth', v1AuthRouter);
app.use('/api/v1/bot', v1BotRouter);
app.use('/api/v1/user', v1UserRouter);
app.use('/api/v1/user-action', v1UserActionRouter);
app.use('/api/v1/organization', v1OrganizationRouter);
app.use('/api/v1/workspace', v1WorkspaceRouter);
app.use('/api/v1/membership-org', v1MembershipOrgRouter);
app.use('/api/v1/membership', v1MembershipRouter);
app.use('/api/v1/key', v1KeyRouter);
app.use('/api/v1/invite-org', v1InviteOrgRouter);
app.use('/api/v1/secret', v1SecretRouter);
app.use('/api/v1/service-token', v1ServiceTokenRouter); // deprecated
app.use('/api/v1/password', v1PasswordRouter);
app.use('/api/v1/stripe', v1StripeRouter);
app.use('/api/v1/integration', v1IntegrationRouter);
app.use('/api/v1/integration-auth', v1IntegrationAuthRouter);
// v2 routes
app.use('/api/v2/signup', v2SignupRouter);
app.use('/api/v2/auth', v2AuthRouter);
app.use('/api/v2/users', v2UsersRouter);
app.use('/api/v2/organizations', v2OrganizationsRouter);
app.use('/api/v2/workspace', v2EnvironmentRouter);
app.use('/api/v2/workspace', v2TagsRouter);
app.use('/api/v2/workspace', v2WorkspaceRouter);
app.use('/api/v2/secret', v2SecretRouter); // deprecated
app.use('/api/v2/secrets', v2SecretsRouter);
app.use('/api/v2/service-token', v2ServiceTokenDataRouter); // TODO: turn into plural route
app.use('/api/v2/api-key', v2APIKeyDataRouter);
// api docs
app.use('/api-docs', swaggerUi.serve, swaggerUi.setup(swaggerFile))
// Server status
app.use('/api', healthCheck)
//* Handle unrouted requests and respond with proper error message as well as status code
app.use((req, res, next) => {
if (res.headersSent) return next();
next(RouteNotFoundError({ message: `The requested source '(${req.method})${req.url}' was not found` }))
})
//* Error Handling Middleware (must be after all routing logic)
app.use(requestErrorHandler)
export const server = app.listen(PORT, () => {
getLogger("backend-main").info(`Server started listening at port ${PORT}`)
});


@@ -1,103 +1,51 @@
const PORT = process.env.PORT || 4000;
const EMAIL_TOKEN_LIFETIME = parseInt(process.env.EMAIL_TOKEN_LIFETIME! || '86400');
const INVITE_ONLY_SIGNUP = process.env.INVITE_ONLY_SIGNUP == undefined ? false : process.env.INVITE_ONLY_SIGNUP
const ENCRYPTION_KEY = process.env.ENCRYPTION_KEY!;
const SALT_ROUNDS = parseInt(process.env.SALT_ROUNDS!) || 10;
const JWT_AUTH_LIFETIME = process.env.JWT_AUTH_LIFETIME! || '10d';
const JWT_AUTH_SECRET = process.env.JWT_AUTH_SECRET!;
const JWT_MFA_LIFETIME = process.env.JWT_MFA_LIFETIME! || '5m';
const JWT_MFA_SECRET = process.env.JWT_MFA_SECRET!;
const JWT_REFRESH_LIFETIME = process.env.JWT_REFRESH_LIFETIME! || '90d';
const JWT_REFRESH_SECRET = process.env.JWT_REFRESH_SECRET!;
const JWT_SERVICE_SECRET = process.env.JWT_SERVICE_SECRET!;
const JWT_SIGNUP_LIFETIME = process.env.JWT_SIGNUP_LIFETIME! || '15m';
const JWT_SIGNUP_SECRET = process.env.JWT_SIGNUP_SECRET!;
const MONGO_URL = process.env.MONGO_URL!;
const NODE_ENV = process.env.NODE_ENV! || 'production';
const VERBOSE_ERROR_OUTPUT = process.env.VERBOSE_ERROR_OUTPUT! === 'true' && true;
const LOKI_HOST = process.env.LOKI_HOST || undefined;
const CLIENT_ID_AZURE = process.env.CLIENT_ID_AZURE!;
const TENANT_ID_AZURE = process.env.TENANT_ID_AZURE!;
const CLIENT_ID_HEROKU = process.env.CLIENT_ID_HEROKU!;
const CLIENT_ID_VERCEL = process.env.CLIENT_ID_VERCEL!;
const CLIENT_ID_NETLIFY = process.env.CLIENT_ID_NETLIFY!;
const CLIENT_ID_GITHUB = process.env.CLIENT_ID_GITHUB!;
const CLIENT_SECRET_AZURE = process.env.CLIENT_SECRET_AZURE!;
const CLIENT_SECRET_HEROKU = process.env.CLIENT_SECRET_HEROKU!;
const CLIENT_SECRET_VERCEL = process.env.CLIENT_SECRET_VERCEL!;
const CLIENT_SECRET_NETLIFY = process.env.CLIENT_SECRET_NETLIFY!;
const CLIENT_SECRET_GITHUB = process.env.CLIENT_SECRET_GITHUB!;
const CLIENT_SLUG_VERCEL = process.env.CLIENT_SLUG_VERCEL!;
const POSTHOG_HOST = process.env.POSTHOG_HOST! || 'https://app.posthog.com';
const POSTHOG_PROJECT_API_KEY =
process.env.POSTHOG_PROJECT_API_KEY! ||
'phc_nSin8j5q2zdhpFDI1ETmFNUIuTG4DwKVyIigrY10XiE';
const SENTRY_DSN = process.env.SENTRY_DSN!;
const SITE_URL = process.env.SITE_URL!;
const SMTP_HOST = process.env.SMTP_HOST!;
const SMTP_SECURE = process.env.SMTP_SECURE! === 'true' || false;
const SMTP_PORT = parseInt(process.env.SMTP_PORT!) || 587;
const SMTP_USERNAME = process.env.SMTP_USERNAME!;
const SMTP_PASSWORD = process.env.SMTP_PASSWORD!;
const SMTP_FROM_ADDRESS = process.env.SMTP_FROM_ADDRESS!;
const SMTP_FROM_NAME = process.env.SMTP_FROM_NAME! || 'Infisical';
const STRIPE_PRODUCT_STARTER = process.env.STRIPE_PRODUCT_STARTER!;
const STRIPE_PRODUCT_PRO = process.env.STRIPE_PRODUCT_PRO!;
const STRIPE_PRODUCT_TEAM = process.env.STRIPE_PRODUCT_TEAM!;
const STRIPE_PUBLISHABLE_KEY = process.env.STRIPE_PUBLISHABLE_KEY!;
const STRIPE_SECRET_KEY = process.env.STRIPE_SECRET_KEY!;
const STRIPE_WEBHOOK_SECRET = process.env.STRIPE_WEBHOOK_SECRET!;
const TELEMETRY_ENABLED = process.env.TELEMETRY_ENABLED! !== 'false' && true;
const LICENSE_KEY = process.env.LICENSE_KEY!;
export {
PORT,
EMAIL_TOKEN_LIFETIME,
INVITE_ONLY_SIGNUP,
ENCRYPTION_KEY,
SALT_ROUNDS,
JWT_AUTH_LIFETIME,
JWT_AUTH_SECRET,
JWT_MFA_LIFETIME,
JWT_MFA_SECRET,
JWT_REFRESH_LIFETIME,
JWT_REFRESH_SECRET,
JWT_SERVICE_SECRET,
JWT_SIGNUP_LIFETIME,
JWT_SIGNUP_SECRET,
MONGO_URL,
NODE_ENV,
VERBOSE_ERROR_OUTPUT,
LOKI_HOST,
CLIENT_ID_AZURE,
TENANT_ID_AZURE,
CLIENT_ID_HEROKU,
CLIENT_ID_VERCEL,
CLIENT_ID_NETLIFY,
CLIENT_ID_GITHUB,
CLIENT_SECRET_AZURE,
CLIENT_SECRET_HEROKU,
CLIENT_SECRET_VERCEL,
CLIENT_SECRET_NETLIFY,
CLIENT_SECRET_GITHUB,
CLIENT_SLUG_VERCEL,
POSTHOG_HOST,
POSTHOG_PROJECT_API_KEY,
SENTRY_DSN,
SITE_URL,
SMTP_HOST,
SMTP_PORT,
SMTP_SECURE,
SMTP_USERNAME,
SMTP_PASSWORD,
SMTP_FROM_ADDRESS,
SMTP_FROM_NAME,
STRIPE_PRODUCT_STARTER,
STRIPE_PRODUCT_TEAM,
STRIPE_PRODUCT_PRO,
STRIPE_PUBLISHABLE_KEY,
STRIPE_SECRET_KEY,
STRIPE_WEBHOOK_SECRET,
TELEMETRY_ENABLED,
LICENSE_KEY
};
import infisical from 'infisical-node';
export const getPort = () => infisical.get('PORT')! || 4000;
export const getInviteOnlySignup = () => infisical.get('INVITE_ONLY_SIGNUP')! == undefined ? false : infisical.get('INVITE_ONLY_SIGNUP');
export const getEncryptionKey = () => infisical.get('ENCRYPTION_KEY')!;
export const getSaltRounds = () => parseInt(infisical.get('SALT_ROUNDS')!) || 10;
export const getJwtAuthLifetime = () => infisical.get('JWT_AUTH_LIFETIME')! || '10d';
export const getJwtAuthSecret = () => infisical.get('JWT_AUTH_SECRET')!;
export const getJwtMfaLifetime = () => infisical.get('JWT_MFA_LIFETIME')! || '5m';
export const getJwtMfaSecret = () => infisical.get('JWT_MFA_LIFETIME')! || '5m';
export const getJwtRefreshLifetime = () => infisical.get('JWT_REFRESH_LIFETIME')! || '90d';
export const getJwtRefreshSecret = () => infisical.get('JWT_REFRESH_SECRET')!;
export const getJwtServiceSecret = () => infisical.get('JWT_SERVICE_SECRET')!;
export const getJwtSignupLifetime = () => infisical.get('JWT_SIGNUP_LIFETIME')! || '15m';
export const getJwtSignupSecret = () => infisical.get('JWT_SIGNUP_SECRET')!;
export const getMongoURL = () => infisical.get('MONGO_URL')!;
export const getNodeEnv = () => infisical.get('NODE_ENV')!;
export const getVerboseErrorOutput = () => infisical.get('VERBOSE_ERROR_OUTPUT')! === 'true' && true;
export const getLokiHost = () => infisical.get('LOKI_HOST')!;
export const getClientIdAzure = () => infisical.get('CLIENT_ID_AZURE')!;
export const getClientIdHeroku = () => infisical.get('CLIENT_ID_HEROKU')!;
export const getClientIdVercel = () => infisical.get('CLIENT_ID_VERCEL')!;
export const getClientIdNetlify = () => infisical.get('CLIENT_ID_NETLIFY')!;
export const getClientIdGitHub = () => infisical.get('CLIENT_ID_GITHUB')!;
export const getClientIdGitLab = () => infisical.get('CLIENT_ID_GITLAB')!;
export const getClientSecretAzure = () => infisical.get('CLIENT_SECRET_AZURE')!;
export const getClientSecretHeroku = () => infisical.get('CLIENT_SECRET_HEROKU')!;
export const getClientSecretVercel = () => infisical.get('CLIENT_SECRET_VERCEL')!;
export const getClientSecretNetlify = () => infisical.get('CLIENT_SECRET_NETLIFY')!;
export const getClientSecretGitHub = () => infisical.get('CLIENT_SECRET_GITHUB')!;
export const getClientSecretGitLab = () => infisical.get('CLIENT_SECRET_GITLAB')!;
export const getClientSlugVercel = () => infisical.get('CLIENT_SLUG_VERCEL')!;
export const getPostHogHost = () => infisical.get('POSTHOG_HOST')! || 'https://app.posthog.com';
export const getPostHogProjectApiKey = () => infisical.get('POSTHOG_PROJECT_API_KEY')! || 'phc_nSin8j5q2zdhpFDI1ETmFNUIuTG4DwKVyIigrY10XiE';
export const getSentryDSN = () => infisical.get('SENTRY_DSN')!;
export const getSiteURL = () => infisical.get('SITE_URL')!;
export const getSmtpHost = () => infisical.get('SMTP_HOST')!;
export const getSmtpSecure = () => infisical.get('SMTP_SECURE')! === 'true' || false;
export const getSmtpPort = () => parseInt(infisical.get('SMTP_PORT')!) || 587;
export const getSmtpUsername = () => infisical.get('SMTP_USERNAME')!;
export const getSmtpPassword = () => infisical.get('SMTP_PASSWORD')!;
export const getSmtpFromAddress = () => infisical.get('SMTP_FROM_ADDRESS')!;
export const getSmtpFromName = () => infisical.get('SMTP_FROM_NAME')! || 'Infisical';
export const getStripeProductStarter = () => infisical.get('STRIPE_PRODUCT_STARTER')!;
export const getStripeProductPro = () => infisical.get('STRIPE_PRODUCT_PRO')!;
export const getStripeProductTeam = () => infisical.get('STRIPE_PRODUCT_TEAM')!;
export const getStripePublishableKey = () => infisical.get('STRIPE_PUBLISHABLE_KEY')!;
export const getStripeSecretKey = () => infisical.get('STRIPE_SECRET_KEY')!;
export const getStripeWebhookSecret = () => infisical.get('STRIPE_WEBHOOK_SECRET')!;
export const getTelemetryEnabled = () => infisical.get('TELEMETRY_ENABLED')! !== 'false' && true;
export const getLoopsApiKey = () => infisical.get('LOOPS_API_KEY')!;
export const getSmtpConfigured = () => infisical.get('SMTP_HOST') == '' || infisical.get('SMTP_HOST') == undefined ? false : true


@@ -0,0 +1,16 @@
import axios from 'axios';
import axiosRetry from 'axios-retry';
const axiosInstance = axios.create();
// add retry functionality to the axios instance
axiosRetry(axiosInstance, {
retries: 3,
retryDelay: axiosRetry.exponentialDelay, // exponential back-off delay between retries
retryCondition: (error) => {
// only retry if the error is a network error or a 5xx server error
return axiosRetry.isNetworkError(error) || axiosRetry.isRetryableError(error);
},
});
export default axiosInstance;
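A hedged usage sketch for the instance above: integration syncs (for example the Vercel sync mentioned in the surrounding commits) can import it in place of the bare axios client so that network errors and retryable 5xx responses are retried automatically. The import path, endpoint URL, and payload shape below are placeholders, not the project's real ones.

// Illustrative only; axiosInstance is the retrying client exported above.
import axiosInstance from './requestWithRetry'; // path assumed

const pushSecretsToProvider = async (accessToken: string, payload: Record<string, string>) => {
  // A POST that fails with a network error or a 5xx response is retried up to
  // 3 times with exponential back-off before the error surfaces to the caller.
  const res = await axiosInstance.post(
    'https://provider.example.com/v1/secrets', // placeholder endpoint
    payload,
    { headers: { Authorization: `Bearer ${accessToken}` } }
  );
  return res.data;
};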


@@ -1,8 +1,8 @@
/* eslint-disable @typescript-eslint/no-var-requires */
import * as Sentry from '@sentry/node';
import { Request, Response } from 'express';
import jwt from 'jsonwebtoken';
import * as Sentry from '@sentry/node';
import * as bigintConversion from 'bigint-conversion';
// eslint-disable-next-line @typescript-eslint/no-var-requires
const jsrp = require('jsrp');
import { User, LoginSRPDetail } from '../../models';
import { createToken, issueAuthTokens, clearTokens } from '../../helpers/auth';
@@ -11,15 +11,15 @@ import {
ACTION_LOGIN,
ACTION_LOGOUT
} from '../../variables';
import {
NODE_ENV,
JWT_AUTH_LIFETIME,
JWT_AUTH_SECRET,
JWT_REFRESH_SECRET
} from '../../config';
import { BadRequestError } from '../../utils/errors';
import { EELogService } from '../../ee/services';
import { getChannelFromUserAgent } from '../../utils/posthog'; // TODO: move this
import {
getNodeEnv,
getJwtRefreshSecret,
getJwtAuthLifetime,
getJwtAuthSecret
} from '../../config';
declare module 'jsonwebtoken' {
export interface UserIDJwtPayload extends jwt.JwtPayload {
@@ -126,7 +126,7 @@ export const login2 = async (req: Request, res: Response) => {
httpOnly: true,
path: '/',
sameSite: 'strict',
secure: NODE_ENV === 'production' ? true : false
secure: getNodeEnv() === 'production' ? true : false
});
const loginAction = await EELogService.createAction({
@@ -182,7 +182,7 @@ export const logout = async (req: Request, res: Response) => {
httpOnly: true,
path: '/',
sameSite: 'strict',
secure: NODE_ENV === 'production' ? true : false
secure: getNodeEnv() === 'production' ? true : false
});
const logoutAction = await EELogService.createAction({
@@ -237,7 +237,7 @@ export const getNewToken = async (req: Request, res: Response) => {
}
const decodedToken = <jwt.UserIDJwtPayload>(
jwt.verify(refreshToken, JWT_REFRESH_SECRET)
jwt.verify(refreshToken, getJwtRefreshSecret())
);
const user = await User.findOne({
@@ -252,8 +252,8 @@ export const getNewToken = async (req: Request, res: Response) => {
payload: {
userId: decodedToken.userId
},
expiresIn: JWT_AUTH_LIFETIME,
secret: JWT_AUTH_SECRET
expiresIn: getJwtAuthLifetime(),
secret: getJwtAuthSecret()
});
return res.status(200).send({


@@ -2,13 +2,16 @@ import { Request, Response } from 'express';
import { Types } from 'mongoose';
import * as Sentry from '@sentry/node';
import {
Integration,
IntegrationAuth,
Bot
} from '../../models';
import { INTEGRATION_SET, INTEGRATION_OPTIONS } from '../../variables';
import { INTEGRATION_SET, getIntegrationOptions as getIntegrationOptionsFunc } from '../../variables';
import { IntegrationService } from '../../services';
import { getApps, revokeAccess } from '../../integrations';
import {
getApps,
getTeams,
revokeAccess
} from '../../integrations';
/***
* Return integration authorization with id [integrationAuthId]
@@ -36,9 +39,11 @@ export const getIntegrationAuth = async (req: Request, res: Response) => {
}
export const getIntegrationOptions = async (req: Request, res: Response) => {
return res.status(200).send({
integrationOptions: INTEGRATION_OPTIONS,
});
const INTEGRATION_OPTIONS = getIntegrationOptionsFunc();
return res.status(200).send({
integrationOptions: INTEGRATION_OPTIONS,
});
};
/**
@@ -154,25 +159,54 @@ export const saveIntegrationAccessToken = async (
* @returns
*/
export const getIntegrationAuthApps = async (req: Request, res: Response) => {
let apps;
try {
apps = await getApps({
integrationAuth: req.integrationAuth,
accessToken: req.accessToken,
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to get integration authorization applications",
});
}
let apps;
try {
const teamId = req.query.teamId as string;
apps = await getApps({
integrationAuth: req.integrationAuth,
accessToken: req.accessToken,
...teamId && { teamId }
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to get integration authorization applications",
});
}
return res.status(200).send({
apps,
});
return res.status(200).send({
apps
});
};
/**
* Return list of teams allowed for integration with integration authorization id [integrationAuthId]
* @param req
* @param res
* @returns
*/
export const getIntegrationAuthTeams = async (req: Request, res: Response) => {
let teams;
try {
teams = await getTeams({
integrationAuth: req.integrationAuth,
accessToken: req.accessToken
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to get integration authorization teams"
});
}
return res.status(200).send({
teams
});
}
/**
* Delete integration authorization with id [integrationAuthId]
* @param req


@@ -2,10 +2,7 @@ import { Request, Response } from 'express';
import { Types } from 'mongoose';
import * as Sentry from '@sentry/node';
import {
Integration,
Workspace,
Bot,
BotKey
Integration
} from '../../models';
import { EventService } from '../../services';
import { eventPushSecrets } from '../../events';
@@ -18,6 +15,7 @@ import { eventPushSecrets } from '../../events';
*/
export const createIntegration = async (req: Request, res: Response) => {
let integration;
try {
const {
integrationAuthId,
@@ -34,19 +32,19 @@ export const createIntegration = async (req: Request, res: Response) => {
// TODO: validate [sourceEnvironment] and [targetEnvironment]
// initialize new integration after saving integration access token
integration = await new Integration({
workspace: req.integrationAuth.workspace._id,
environment: sourceEnvironment,
isActive,
app,
integration = await new Integration({
workspace: req.integrationAuth.workspace._id,
environment: sourceEnvironment,
isActive,
app,
appId,
targetEnvironment,
owner,
path,
region,
integration: req.integrationAuth.integration,
integrationAuth: new Types.ObjectId(integrationAuthId)
}).save();
integration: req.integrationAuth.integration,
integrationAuth: new Types.ObjectId(integrationAuthId)
}).save();
if (integration) {
// trigger event - push secrets


@@ -1,13 +1,13 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import { Membership, MembershipOrg, User, Key, IMembership, Workspace } from '../../models';
import { Request, Response } from 'express';
import { Membership, MembershipOrg, User, Key } from '../../models';
import {
findMembership,
deleteMembership as deleteMember
} from '../../helpers/membership';
import { sendMail } from '../../helpers/nodemailer';
import { SITE_URL } from '../../config';
import { ADMIN, MEMBER, ACCEPTED } from '../../variables';
import { getSiteURL } from '../../config';
/**
* Check that user is a member of workspace with id [workspaceId]
@@ -215,7 +215,7 @@ export const inviteUserToWorkspace = async (req: Request, res: Response) => {
inviterFirstName: req.user.firstName,
inviterEmail: req.user.email,
workspaceName: req.membership.workspace.name,
callback_url: SITE_URL + '/login'
callback_url: getSiteURL() + '/login'
}
});
} catch (err) {


@@ -1,6 +1,5 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import { SITE_URL, JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET } from '../../config';
import { MembershipOrg, Organization, User } from '../../models';
import { deleteMembershipOrg as deleteMemberFromOrg } from '../../helpers/membershipOrg';
import { createToken } from '../../helpers/auth';
@@ -8,6 +7,7 @@ import { updateSubscriptionOrgQuantity } from '../../helpers/organization';
import { sendMail } from '../../helpers/nodemailer';
import { TokenService } from '../../services';
import { OWNER, ADMIN, MEMBER, ACCEPTED, INVITED, TOKEN_EMAIL_ORG_INVITATION } from '../../variables';
import { getSiteURL, getJwtSignupLifetime, getJwtSignupSecret, getSmtpConfigured } from '../../config';
/**
* Delete organization membership with id [membershipOrgId] from organization
@@ -99,9 +99,11 @@ export const changeMembershipOrgRole = async (req: Request, res: Response) => {
* @returns
*/
export const inviteUserToOrganization = async (req: Request, res: Response) => {
let invitee, inviteeMembershipOrg;
let invitee, inviteeMembershipOrg, completeInviteLink;
try {
const { organizationId, inviteeEmail } = req.body;
const host = req.headers.host;
const siteUrl = `${req.protocol}://${host}`;
// validate membership
const membershipOrg = await MembershipOrg.findOne({
@@ -178,9 +180,13 @@ export const inviteUserToOrganization = async (req: Request, res: Response) => {
organizationName: organization.name,
email: inviteeEmail,
token,
callback_url: SITE_URL + '/signupinvite'
callback_url: getSiteURL() + '/signupinvite'
}
});
if (!getSmtpConfigured()) {
completeInviteLink = `${siteUrl + '/signupinvite'}?token=${token}&to=${inviteeEmail}`
}
}
await updateSubscriptionOrgQuantity({ organizationId });
@@ -193,7 +199,8 @@ export const inviteUserToOrganization = async (req: Request, res: Response) => {
}
return res.status(200).send({
message: `Sent an invite link to ${req.body.inviteeEmail}`
message: `Sent an invite link to ${req.body.inviteeEmail}`,
completeInviteLink
});
};
@@ -218,7 +225,7 @@ export const verifyUserToOrganization = async (req: Request, res: Response) => {
if (!membershipOrg)
throw new Error('Failed to find any invitations for email');
await TokenService.validateToken({
type: TOKEN_EMAIL_ORG_INVITATION,
email,
@@ -250,8 +257,8 @@ export const verifyUserToOrganization = async (req: Request, res: Response) => {
payload: {
userId: user._id.toString()
},
expiresIn: JWT_SIGNUP_LIFETIME,
secret: JWT_SIGNUP_SECRET
expiresIn: getJwtSignupLifetime(),
secret: getJwtSignupSecret()
});
} catch (err) {
Sentry.setUser(null);


@@ -1,26 +1,18 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import {
SITE_URL,
STRIPE_SECRET_KEY
} from '../../config';
import { Request, Response } from 'express';
import Stripe from 'stripe';
const stripe = new Stripe(STRIPE_SECRET_KEY, {
apiVersion: '2022-08-01'
});
import {
Membership,
MembershipOrg,
Organization,
Workspace,
IncidentContactOrg,
IMembershipOrg
IncidentContactOrg
} from '../../models';
import { createOrganization as create } from '../../helpers/organization';
import { addMembershipsOrg } from '../../helpers/membershipOrg';
import { OWNER, ACCEPTED } from '../../variables';
import _ from 'lodash';
import { getStripeSecretKey, getSiteURL } from '../../config';
export const getOrganizations = async (req: Request, res: Response) => {
let organizations;
@@ -325,6 +317,10 @@ export const createOrganizationPortalSession = async (
) => {
let session;
try {
const stripe = new Stripe(getStripeSecretKey(), {
apiVersion: '2022-08-01'
});
// check if there is a payment method on file
const paymentMethods = await stripe.paymentMethods.list({
customer: req.membershipOrg.organization.customerId,
@@ -337,13 +333,13 @@ export const createOrganizationPortalSession = async (
customer: req.membershipOrg.organization.customerId,
mode: 'setup',
payment_method_types: ['card'],
success_url: SITE_URL + '/dashboard',
cancel_url: SITE_URL + '/dashboard'
success_url: getSiteURL() + '/dashboard',
cancel_url: getSiteURL() + '/dashboard'
});
} else {
session = await stripe.billingPortal.sessions.create({
customer: req.membershipOrg.organization.customerId,
return_url: SITE_URL + '/dashboard'
return_url: getSiteURL() + '/dashboard'
});
}
@@ -369,6 +365,10 @@ export const getOrganizationSubscriptions = async (
) => {
let subscriptions;
try {
const stripe = new Stripe(getStripeSecretKey(), {
apiVersion: '2022-08-01'
});
subscriptions = await stripe.subscriptions.list({
customer: req.membershipOrg.organization.customerId
});


@@ -7,9 +7,9 @@ import { User, BackupPrivateKey, LoginSRPDetail } from '../../models';
import { createToken } from '../../helpers/auth';
import { sendMail } from '../../helpers/nodemailer';
import { TokenService } from '../../services';
import { JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET, SITE_URL } from '../../config';
import { TOKEN_EMAIL_PASSWORD_RESET } from '../../variables';
import { BadRequestError } from '../../utils/errors';
import { getSiteURL, getJwtSignupLifetime, getJwtSignupSecret } from '../../config';
/**
* Password reset step 1: Send email verification link to email [email]
@@ -44,7 +44,7 @@ export const emailPasswordReset = async (req: Request, res: Response) => {
substitutions: {
email,
token,
callback_url: SITE_URL + '/password-reset'
callback_url: getSiteURL() + '/password-reset'
}
});
} catch (err) {
@@ -91,8 +91,8 @@ export const emailPasswordResetVerify = async (req: Request, res: Response) => {
payload: {
userId: user._id.toString()
},
expiresIn: JWT_SIGNUP_LIFETIME,
secret: JWT_SIGNUP_SECRET
expiresIn: getJwtSignupLifetime(),
secret: getJwtSignupSecret()
});
} catch (err) {
Sentry.setUser(null);


@@ -9,7 +9,7 @@ import {
import { pushKeys } from '../../helpers/key';
import { eventPushSecrets } from '../../events';
import { EventService } from '../../services';
import { postHogClient } from '../../services';
import { getPostHogClient } from '../../services';
interface PushSecret {
ciphertextKey: string;
@@ -38,6 +38,7 @@ export const pushSecrets = async (req: Request, res: Response) => {
// upload (encrypted) secrets to workspace with id [workspaceId]
try {
const postHogClient = getPostHogClient();
let { secrets }: { secrets: PushSecret[] } = req.body;
const { keys, environment, channel } = req.body;
const { workspaceId } = req.params;
@@ -111,6 +112,7 @@ export const pullSecrets = async (req: Request, res: Response) => {
let secrets;
let key;
try {
const postHogClient = getPostHogClient();
const environment: string = req.query.environment as string;
const channel: string = req.query.channel as string;
const { workspaceId } = req.params;
@@ -179,6 +181,7 @@ export const pullSecretsServiceToken = async (req: Request, res: Response) => {
let secrets;
let key;
try {
const postHogClient = getPostHogClient();
const environment: string = req.query.environment as string;
const channel: string = req.query.channel as string;
const { workspaceId } = req.params;


@@ -1,7 +1,7 @@
import { Request, Response } from 'express';
import { ServiceToken } from '../../models';
import { createToken } from '../../helpers/auth';
import { JWT_SERVICE_SECRET } from '../../config';
import { getJwtServiceSecret } from '../../config';
/**
* Return service token on request
@@ -61,7 +61,7 @@ export const createServiceToken = async (req: Request, res: Response) => {
workspaceId
},
expiresIn: expiresIn,
secret: JWT_SERVICE_SECRET
secret: getJwtServiceSecret()
});
} catch (err) {
return res.status(400).send({


@@ -1,13 +1,13 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import { User } from '../../models';
import { JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET, INVITE_ONLY_SIGNUP } from '../../config';
import {
sendEmailVerification,
checkEmailVerification,
} from '../../helpers/signup';
import { createToken } from '../../helpers/auth';
import { BadRequestError } from '../../utils/errors';
import { getInviteOnlySignup, getJwtSignupLifetime, getJwtSignupSecret, getSmtpConfigured } from '../../config';
/**
* Signup step 1: Initialize account for user under email [email] and send a verification code
@@ -21,7 +21,7 @@ export const beginEmailSignup = async (req: Request, res: Response) => {
try {
email = req.body.email;
if (INVITE_ONLY_SIGNUP) {
if (getInviteOnlySignup()) {
// Only one user can create an account without being invited. The rest need to be invited in order to make an account
const userCount = await User.countDocuments({})
if (userCount != 0) {
@@ -66,7 +66,7 @@ export const verifyEmailSignup = async (req: Request, res: Response) => {
const { email, code } = req.body;
// initialize user account
user = await User.findOne({ email });
user = await User.findOne({ email }).select('+publicKey');
if (user && user?.publicKey) {
// case: user has already completed account
return res.status(403).send({
@@ -75,10 +75,12 @@ export const verifyEmailSignup = async (req: Request, res: Response) => {
}
// verify email
await checkEmailVerification({
email,
code
});
if (getSmtpConfigured()) {
await checkEmailVerification({
email,
code
});
}
if (!user) {
user = await new User({
@@ -91,8 +93,8 @@ export const verifyEmailSignup = async (req: Request, res: Response) => {
payload: {
userId: user._id.toString()
},
expiresIn: JWT_SIGNUP_LIFETIME,
secret: JWT_SIGNUP_SECRET
expiresIn: getJwtSignupLifetime(),
secret: getJwtSignupSecret()
});
} catch (err) {
Sentry.setUser(null);

View File

@@ -1,10 +1,7 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import Stripe from 'stripe';
import { STRIPE_SECRET_KEY, STRIPE_WEBHOOK_SECRET } from '../../config';
const stripe = new Stripe(STRIPE_SECRET_KEY, {
apiVersion: '2022-08-01'
});
import { getStripeSecretKey, getStripeWebhookSecret } from '../../config';
/**
* Handle service provisioning/un-provisioning via Stripe
@@ -16,11 +13,15 @@ export const handleWebhook = async (req: Request, res: Response) => {
let event;
try {
// check request for valid stripe signature
const stripe = new Stripe(getStripeSecretKey(), {
apiVersion: '2022-08-01'
});
const sig = req.headers['stripe-signature'] as string;
event = stripe.webhooks.constructEvent(
req.body,
sig,
STRIPE_WEBHOOK_SECRET // ?
getStripeWebhookSecret()
);
} catch (err) {
Sentry.setUser({ email: req.user.email });
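Both webhook handlers now read the webhook secret through a getter and construct the Stripe client inside the try block. A standalone sketch of the signature-verification step, assuming the route is mounted with a raw body parser (constructEvent needs the unparsed payload) and reading the secrets straight from process.env for illustration:

import express from 'express';
import Stripe from 'stripe';

const app = express();

// Stripe signature verification requires the raw request body, not parsed JSON
app.post('/api/v1/stripe/webhook', express.raw({ type: 'application/json' }), (req, res) => {
  const stripe = new Stripe(process.env.STRIPE_SECRET_KEY ?? '', { apiVersion: '2022-08-01' });
  const sig = req.headers['stripe-signature'] as string;
  try {
    const event = stripe.webhooks.constructEvent(req.body, sig, process.env.STRIPE_WEBHOOK_SECRET ?? '');
    return res.json({ received: true, type: event.type });
  } catch (err) {
    return res.status(400).send('Invalid Stripe signature');
  }
});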

View File

@@ -1,13 +1,11 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import { Request, Response } from 'express';
import crypto from 'crypto';
import bcrypt from 'bcrypt';
import {
APIKeyData
} from '../../models';
import {
SALT_ROUNDS
} from '../../config';
import { getSaltRounds } from '../../config';
/**
* Return API key data for user with id [req.user_id]
@@ -45,7 +43,7 @@ export const createAPIKeyData = async (req: Request, res: Response) => {
const { name, expiresIn } = req.body;
const secret = crypto.randomBytes(16).toString('hex');
const secretHash = await bcrypt.hash(secret, SALT_ROUNDS);
const secretHash = await bcrypt.hash(secret, getSaltRounds());
const expiresAt = new Date();
expiresAt.setSeconds(expiresAt.getSeconds() + expiresIn);
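The API-key flow keeps the random secret client-side and persists only its bcrypt hash, now with the cost factor read through getSaltRounds(). A minimal sketch of that generate-and-hash step; getSaltRounds here is a stand-in for the config getter:

import crypto from 'crypto';
import bcrypt from 'bcrypt';

const getSaltRounds = () => Number(process.env.SALT_ROUNDS ?? 10); // stand-in getter

const generateApiKey = async (expiresInSeconds: number) => {
  const secret = crypto.randomBytes(16).toString('hex');          // shown to the caller once
  const secretHash = await bcrypt.hash(secret, getSaltRounds());  // only the hash is stored
  const expiresAt = new Date();
  expiresAt.setSeconds(expiresAt.getSeconds() + expiresInSeconds);
  return { secret, secretHash, expiresAt };
};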

View File

@@ -10,17 +10,17 @@ import { checkUserDevice } from '../../helpers/user';
import { sendMail } from '../../helpers/nodemailer';
import { TokenService } from '../../services';
import { EELogService } from '../../ee/services';
import {
NODE_ENV,
JWT_MFA_LIFETIME,
JWT_MFA_SECRET
} from '../../config';
import { BadRequestError, InternalServerError } from '../../utils/errors';
import {
TOKEN_EMAIL_MFA,
ACTION_LOGIN
} from '../../variables';
import { getChannelFromUserAgent } from '../../utils/posthog'; // TODO: move this
import {
getNodeEnv,
getJwtMfaLifetime,
getJwtMfaSecret
} from '../../config';
declare module 'jsonwebtoken' {
export interface UserIDJwtPayload extends jwt.JwtPayload {
@@ -28,8 +28,6 @@ declare module 'jsonwebtoken' {
}
}
const clientPublicKeys: any = {};
/**
* Log in user step 1: Return [salt] and [serverPublicKey] as part of step 1 of SRP protocol
* @param req
@@ -89,7 +87,7 @@ export const login1 = async (req: Request, res: Response) => {
*/
export const login2 = async (req: Request, res: Response) => {
try {
if (!req.headers['user-agent']) throw InternalServerError({ message: 'User-Agent header is required' });
const { email, clientProof } = req.body;
@@ -126,15 +124,15 @@ export const login2 = async (req: Request, res: Response) => {
payload: {
userId: user._id.toString()
},
expiresIn: JWT_MFA_LIFETIME,
secret: JWT_MFA_SECRET
expiresIn: getJwtMfaLifetime(),
secret: getJwtMfaSecret()
});
const code = await TokenService.createToken({
type: TOKEN_EMAIL_MFA,
email
});
// send MFA code [code] to [email]
await sendMail({
template: 'emailMfa.handlebars',
@@ -144,13 +142,13 @@ export const login2 = async (req: Request, res: Response) => {
code
}
});
return res.status(200).send({
mfaEnabled: true,
token
});
}
await checkUserDevice({
user,
ip: req.ip,
@@ -165,7 +163,7 @@ export const login2 = async (req: Request, res: Response) => {
httpOnly: true,
path: '/',
sameSite: 'strict',
secure: NODE_ENV === 'production' ? true : false
secure: getNodeEnv() === 'production' ? true : false
});
// case: user does not have MFA enabled
@@ -183,7 +181,7 @@ export const login2 = async (req: Request, res: Response) => {
iv?: string;
tag?: string;
}
const response: ResponseData = {
mfaEnabled: false,
encryptionVersion: user.encryptionVersion,
@@ -193,7 +191,7 @@ export const login2 = async (req: Request, res: Response) => {
iv: user.iv,
tag: user.tag
}
if (
user?.protectedKey &&
user?.protectedKeyIV &&
@@ -208,14 +206,14 @@ export const login2 = async (req: Request, res: Response) => {
name: ACTION_LOGIN,
userId: user._id
});
loginAction && await EELogService.createLog({
userId: user._id,
actions: [loginAction],
channel: getChannelFromUserAgent(req.headers['user-agent']),
ipAddress: req.ip
});
return res.status(200).send(response);
}
@@ -246,7 +244,7 @@ export const sendMfaToken = async (req: Request, res: Response) => {
type: TOKEN_EMAIL_MFA,
email
});
// send MFA code [code] to [email]
await sendMail({
template: 'emailMfa.handlebars',
@@ -261,9 +259,9 @@ export const sendMfaToken = async (req: Request, res: Response) => {
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to send MFA code'
});
});
}
return res.status(200).send({
message: 'Successfully sent new MFA code'
});
@@ -276,76 +274,87 @@ export const sendMfaToken = async (req: Request, res: Response) => {
* @param res
*/
export const verifyMfaToken = async (req: Request, res: Response) => {
const { email, mfaToken } = req.body;
const { email, mfaToken } = req.body;
await TokenService.validateToken({
type: TOKEN_EMAIL_MFA,
email,
token: mfaToken
});
await TokenService.validateToken({
type: TOKEN_EMAIL_MFA,
email,
token: mfaToken
});
const user = await User.findOne({
email
}).select('+salt +verifier +encryptionVersion +protectedKey +protectedKeyIV +protectedKeyTag +publicKey +encryptedPrivateKey +iv +tag');
const user = await User.findOne({
email
}).select('+salt +verifier +encryptionVersion +protectedKey +protectedKeyIV +protectedKeyTag +publicKey +encryptedPrivateKey +iv +tag');
if (!user) throw new Error('Failed to find user');
if (!user) throw new Error('Failed to find user');
await checkUserDevice({
user,
ip: req.ip,
userAgent: req.headers['user-agent'] ?? ''
});
await checkUserDevice({
user,
ip: req.ip,
userAgent: req.headers['user-agent'] ?? ''
});
// issue tokens
const tokens = await issueAuthTokens({ userId: user._id.toString() });
// issue tokens
const tokens = await issueAuthTokens({ userId: user._id.toString() });
// store (refresh) token in httpOnly cookie
res.cookie('jid', tokens.refreshToken, {
httpOnly: true,
path: '/',
sameSite: 'strict',
secure: NODE_ENV === 'production' ? true : false
});
interface VerifyMfaTokenRes {
encryptionVersion: number;
protectedKey?: string;
protectedKeyIV?: string;
protectedKeyTag?: string;
token: string;
publicKey: string;
encryptedPrivateKey: string;
iv: string;
tag: string;
}
// store (refresh) token in httpOnly cookie
res.cookie('jid', tokens.refreshToken, {
httpOnly: true,
path: '/',
sameSite: 'strict',
secure: getNodeEnv() === 'production' ? true : false
});
const resObj: VerifyMfaTokenRes = {
encryptionVersion: user.encryptionVersion,
token: tokens.token,
publicKey: user.publicKey as string,
encryptedPrivateKey: user.encryptedPrivateKey as string,
iv: user.iv as string,
tag: user.tag as string
}
if (user?.protectedKey && user?.protectedKeyIV && user?.protectedKeyTag) {
resObj.protectedKey = user.protectedKey;
resObj.protectedKeyIV = user.protectedKeyIV;
resObj.protectedKeyTag = user.protectedKeyTag;
}
interface VerifyMfaTokenRes {
encryptionVersion: number;
protectedKey?: string;
protectedKeyIV?: string;
protectedKeyTag?: string;
token: string;
publicKey: string;
encryptedPrivateKey: string;
iv: string;
tag: string;
}
const loginAction = await EELogService.createAction({
name: ACTION_LOGIN,
userId: user._id
});
loginAction && await EELogService.createLog({
userId: user._id,
actions: [loginAction],
channel: getChannelFromUserAgent(req.headers['user-agent']),
ipAddress: req.ip
});
interface VerifyMfaTokenRes {
encryptionVersion: number;
protectedKey?: string;
protectedKeyIV?: string;
protectedKeyTag?: string;
token: string;
publicKey: string;
encryptedPrivateKey: string;
iv: string;
tag: string;
}
return res.status(200).send(resObj);
const resObj: VerifyMfaTokenRes = {
encryptionVersion: user.encryptionVersion,
token: tokens.token,
publicKey: user.publicKey as string,
encryptedPrivateKey: user.encryptedPrivateKey as string,
iv: user.iv as string,
tag: user.tag as string
}
if (user?.protectedKey && user?.protectedKeyIV && user?.protectedKeyTag) {
resObj.protectedKey = user.protectedKey;
resObj.protectedKeyIV = user.protectedKeyIV;
resObj.protectedKeyTag = user.protectedKeyTag;
}
const loginAction = await EELogService.createAction({
name: ACTION_LOGIN,
userId: user._id
});
loginAction && await EELogService.createLog({
userId: user._id,
actions: [loginAction],
channel: getChannelFromUserAgent(req.headers['user-agent']),
ipAddress: req.ip
});
return res.status(200).send(resObj);
}
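verifyMfaToken, like login2 and the signup handlers below, stores the refresh token in an httpOnly cookie whose secure flag depends on getNodeEnv(). A compact sketch of that cookie setup, with a stand-in getter:

import { Response } from 'express';

const getNodeEnv = () => process.env.NODE_ENV ?? 'development'; // stand-in getter

const setRefreshTokenCookie = (res: Response, refreshToken: string) => {
  res.cookie('jid', refreshToken, {
    httpOnly: true,                        // not readable from client-side JS
    path: '/',
    sameSite: 'strict',
    secure: getNodeEnv() === 'production'  // only sent over HTTPS in production
  });
};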

View File

@@ -7,7 +7,7 @@ const { ValidationError } = mongoose.Error;
import { BadRequestError, InternalServerError, UnauthorizedRequestError, ValidationError as RouteValidationError } from '../../utils/errors';
import { AnyBulkWriteOperation } from 'mongodb';
import { SECRET_PERSONAL, SECRET_SHARED } from "../../variables";
import { postHogClient } from '../../services';
import { getPostHogClient } from '../../services';
/**
* Create secret for workspace with id [workspaceId] and environment [environment]
@@ -15,6 +15,7 @@ import { postHogClient } from '../../services';
* @param res
*/
export const createSecret = async (req: Request, res: Response) => {
const postHogClient = getPostHogClient();
const secretToCreate: CreateSecretRequestBody = req.body.secret;
const { workspaceId, environment } = req.params
const sanitizedSecret: SanitizedSecretForCreate = {
@@ -67,6 +68,7 @@ export const createSecret = async (req: Request, res: Response) => {
* @param res
*/
export const createSecrets = async (req: Request, res: Response) => {
const postHogClient = getPostHogClient();
const secretsToCreate: CreateSecretRequestBody[] = req.body.secrets;
const { workspaceId, environment } = req.params
const sanitizedSecretesToCreate: SanitizedSecretForCreate[] = []
@@ -128,6 +130,7 @@ export const createSecrets = async (req: Request, res: Response) => {
* @param res
*/
export const deleteSecrets = async (req: Request, res: Response) => {
const postHogClient = getPostHogClient();
const { workspaceId, environmentName } = req.params
const secretIdsToDelete: string[] = req.body.secretIds
@@ -181,6 +184,7 @@ export const deleteSecrets = async (req: Request, res: Response) => {
* @param res
*/
export const deleteSecret = async (req: Request, res: Response) => {
const postHogClient = getPostHogClient();
await Secret.findByIdAndDelete(req._secret._id)
if (postHogClient) {
@@ -209,6 +213,7 @@ export const deleteSecret = async (req: Request, res: Response) => {
* @returns
*/
export const updateSecrets = async (req: Request, res: Response) => {
const postHogClient = getPostHogClient();
const { workspaceId, environmentName } = req.params
const secretsModificationsRequested: ModifySecretRequestBody[] = req.body.secrets;
const [secretIdsUserCanModifyError, secretIdsUserCanModify] = await to(Secret.find({ workspace: workspaceId, environment: environmentName }, { _id: 1 }).then())
@@ -276,6 +281,7 @@ export const updateSecrets = async (req: Request, res: Response) => {
* @returns
*/
export const updateSecret = async (req: Request, res: Response) => {
const postHogClient = getPostHogClient();
const { workspaceId, environmentName } = req.params
const secretModificationsRequested: ModifySecretRequestBody = req.body.secret;
@@ -329,6 +335,7 @@ export const updateSecret = async (req: Request, res: Response) => {
* @returns
*/
export const getSecrets = async (req: Request, res: Response) => {
const postHogClient = getPostHogClient();
const { environment } = req.query;
const { workspaceId } = req.params;

View File

@@ -15,14 +15,14 @@ import { UnauthorizedRequestError, ValidationError } from '../../utils/errors';
import { EventService } from '../../services';
import { eventPushSecrets } from '../../events';
import { EESecretService, EELogService } from '../../ee/services';
import { postHogClient } from '../../services';
import { getPostHogClient } from '../../services';
import { getChannelFromUserAgent } from '../../utils/posthog';
import { ABILITY_READ, ABILITY_WRITE } from '../../variables/organization';
import { userHasNoAbility, userHasWorkspaceAccess, userHasWriteOnlyAbility } from '../../ee/helpers/checkMembershipPermissions';
import Tag from '../../models/tag';
import _ from 'lodash';
import {
BatchSecretRequest,
BatchSecretRequest,
BatchSecret
} from '../../types/secret';
@@ -33,6 +33,8 @@ import {
*/
export const batchSecrets = async (req: Request, res: Response) => {
const channel = getChannelFromUserAgent(req.headers['user-agent']);
const postHogClient = getPostHogClient();
const {
workspaceId,
environment,
@@ -41,13 +43,13 @@ export const batchSecrets = async (req: Request, res: Response) => {
workspaceId: string;
environment: string;
requests: BatchSecretRequest[];
}= req.body;
} = req.body;
const createSecrets: BatchSecret[] = [];
const updateSecrets: BatchSecret[] = [];
const deleteSecrets: Types.ObjectId[] = [];
const actions: IAction[] = [];
requests.forEach((request) => {
switch (request.method) {
case 'POST':
@@ -70,7 +72,7 @@ export const batchSecrets = async (req: Request, res: Response) => {
break;
}
});
// handle create secrets
let createdSecrets: ISecret[] = [];
if (createSecrets.length > 0) {
@@ -109,18 +111,18 @@ export const batchSecrets = async (req: Request, res: Response) => {
});
}
}
// handle update secrets
let updatedSecrets: ISecret[] = [];
if (updateSecrets.length > 0 && req.secrets) {
// construct object containing all secrets
let listedSecretsObj: {
[key: string]: {
[key: string]: {
version: number;
type: string;
}
} = {};
listedSecretsObj = req.secrets.reduce((obj: any, secret: ISecret) => ({
...obj,
[secret._id.toString()]: secret
@@ -140,7 +142,7 @@ export const batchSecrets = async (req: Request, res: Response) => {
}));
await Secret.bulkWrite(updateOperations);
const secretVersions = updateSecrets.map((u) => ({
secret: new Types.ObjectId(u._id),
version: listedSecretsObj[u._id.toString()].version,
@@ -227,7 +229,7 @@ export const batchSecrets = async (req: Request, res: Response) => {
});
}
}
if (actions.length > 0) {
// (EE) create (audit) log
await EELogService.createLog({
@@ -250,7 +252,7 @@ export const batchSecrets = async (req: Request, res: Response) => {
await EESecretService.takeSecretSnapshot({
workspaceId
});
const resObj: { [key: string]: ISecret[] | string[] } = {}
if (createSecrets.length > 0) {
@@ -260,11 +262,11 @@ export const batchSecrets = async (req: Request, res: Response) => {
if (updateSecrets.length > 0) {
resObj['updatedSecrets'] = updatedSecrets;
}
if (deleteSecrets.length > 0) {
resObj['deletedSecrets'] = deleteSecrets.map((d) => d.toString());
}
return res.status(200).send(resObj);
}
@@ -326,6 +328,7 @@ export const createSecrets = async (req: Request, res: Response) => {
}
}
*/
const postHogClient = getPostHogClient();
const channel = getChannelFromUserAgent(req.headers['user-agent'])
const { workspaceId, environment }: { workspaceId: string, environment: string } = req.body;
@@ -358,9 +361,25 @@ export const createSecrets = async (req: Request, res: Response) => {
tags: string[]
}
const newlyCreatedSecrets = await Secret.insertMany(
listOfSecretsToCreate.map(({
const secretsToInsert: ISecret[] = listOfSecretsToCreate.map(({
type,
secretKeyCiphertext,
secretKeyIV,
secretKeyTag,
secretValueCiphertext,
secretValueIV,
secretValueTag,
secretCommentCiphertext,
secretCommentIV,
secretCommentTag,
tags
}: secretsToCreateType) => {
return ({
version: 1,
workspace: new Types.ObjectId(workspaceId),
type,
user: type === SECRET_PERSONAL ? req.user : undefined,
environment,
secretKeyCiphertext,
secretKeyIV,
secretKeyTag,
@@ -371,26 +390,10 @@ export const createSecrets = async (req: Request, res: Response) => {
secretCommentIV,
secretCommentTag,
tags
}: secretsToCreateType) => {
return ({
version: 1,
workspace: new Types.ObjectId(workspaceId),
type,
user: type === SECRET_PERSONAL ? req.user : undefined,
environment,
secretKeyCiphertext,
secretKeyIV,
secretKeyTag,
secretValueCiphertext,
secretValueIV,
secretValueTag,
secretCommentCiphertext,
secretCommentIV,
secretCommentTag,
tags
});
})
);
});
})
const newlyCreatedSecrets: ISecret[] = (await Secret.insertMany(secretsToInsert)).map((insertedSecret) => insertedSecret.toObject());
setTimeout(async () => {
// trigger event - push secrets
@@ -530,6 +533,7 @@ export const getSecrets = async (req: Request, res: Response) => {
}
*/
const postHogClient = getPostHogClient();
const { workspaceId, environment, tagSlugs } = req.query;
const tagNamesList = typeof tagSlugs === 'string' && tagSlugs !== '' ? tagSlugs.split(',') : [];
@@ -732,6 +736,7 @@ export const updateSecrets = async (req: Request, res: Response) => {
}
}
*/
const postHogClient = getPostHogClient();
const channel = req.headers?.['user-agent']?.toLowerCase().includes('mozilla') ? 'web' : 'cli';
// TODO: move type
@@ -953,6 +958,7 @@ export const deleteSecrets = async (req: Request, res: Response) => {
}
}
*/
const postHogClient = getPostHogClient();
const channel = getChannelFromUserAgent(req.headers['user-agent'])
const toDelete = req.secrets.map((s: any) => s._id);
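In this file, createSecrets now builds the documents first and converts the inserted Mongoose documents to plain objects before returning them. A reduced sketch of that pattern against a generic model; SecretModel and the document shape are placeholders:

import { Model, Types } from 'mongoose';

const insertSecrets = async (SecretModel: Model<any>, docs: any[], workspaceId: string) => {
  const toInsert = docs.map((d) => ({
    ...d,
    version: 1,
    workspace: new Types.ObjectId(workspaceId)
  }));
  // insertMany returns hydrated documents; toObject() strips Mongoose internals
  return (await SecretModel.insertMany(toInsert)).map((doc) => doc.toObject());
};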

View File

@@ -1,15 +1,13 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import { Request, Response } from 'express';
import crypto from 'crypto';
import bcrypt from 'bcrypt';
import {
ServiceTokenData
} from '../../models';
import {
SALT_ROUNDS
} from '../../config';
import { userHasWorkspaceAccess } from '../../ee/helpers/checkMembershipPermissions';
import { ABILITY_READ } from '../../variables/organization';
import { getSaltRounds } from '../../config';
/**
* Return service token data associated with service token on request
@@ -17,7 +15,35 @@ import { ABILITY_READ } from '../../variables/organization';
* @param res
* @returns
*/
export const getServiceTokenData = async (req: Request, res: Response) => res.status(200).json(req.serviceTokenData);
export const getServiceTokenData = async (req: Request, res: Response) => {
/*
#swagger.summary = 'Return Infisical Token data'
#swagger.description = 'Return Infisical Token data'
#swagger.security = [{
"bearerAuth": []
}]
#swagger.responses[200] = {
content: {
"application/json": {
"schema": {
"type": "object",
"properties": {
"serviceTokenData": {
"type": "object",
$ref: "#/components/schemas/ServiceTokenData",
"description": "Details of service token"
}
}
}
}
}
}
*/
return res.status(200).json(req.serviceTokenData);
}
/**
* Create new service token data for workspace with id [workspaceId] and
@@ -28,6 +54,7 @@ export const getServiceTokenData = async (req: Request, res: Response) => res.st
*/
export const createServiceTokenData = async (req: Request, res: Response) => {
let serviceToken, serviceTokenData;
try {
const {
name,
@@ -36,7 +63,8 @@ export const createServiceTokenData = async (req: Request, res: Response) => {
encryptedKey,
iv,
tag,
expiresIn
expiresIn,
permissions
} = req.body;
const hasAccess = await userHasWorkspaceAccess(req.user, workspaceId, environment, ABILITY_READ)
@@ -45,7 +73,7 @@ export const createServiceTokenData = async (req: Request, res: Response) => {
}
const secret = crypto.randomBytes(16).toString('hex');
const secretHash = await bcrypt.hash(secret, SALT_ROUNDS);
const secretHash = await bcrypt.hash(secret, getSaltRounds());
const expiresAt = new Date();
expiresAt.setSeconds(expiresAt.getSeconds() + expiresIn);
@@ -59,7 +87,8 @@ export const createServiceTokenData = async (req: Request, res: Response) => {
secretHash,
encryptedKey,
iv,
tag
tag,
permissions
}).save();
// return service token data without sensitive data
@@ -111,4 +140,4 @@ export const deleteServiceTokenData = async (req: Request, res: Response) => {
function UnauthorizedRequestError(arg0: { message: string; }) {
throw new Error('Function not implemented.');
}
}

View File

@@ -7,8 +7,8 @@ import {
} from '../../helpers/signup';
import { issueAuthTokens } from '../../helpers/auth';
import { INVITED, ACCEPTED } from '../../variables';
import { NODE_ENV } from '../../config';
import axios from 'axios';
import request from '../../config/request';
import { getNodeEnv, getLoopsApiKey } from '../../config';
/**
* Complete setting up user by adding their personal and auth information as part of the
@@ -108,8 +108,8 @@ export const completeAccountSignup = async (req: Request, res: Response) => {
token = tokens.token;
// sending a welcome email to new users
if (process.env.LOOPS_API_KEY) {
await axios.post("https://app.loops.so/api/v1/events/send", {
if (getLoopsApiKey()) {
await request.post("https://app.loops.so/api/v1/events/send", {
"email": email,
"eventName": "Sign Up",
"firstName": firstName,
@@ -117,7 +117,7 @@ export const completeAccountSignup = async (req: Request, res: Response) => {
}, {
headers: {
"Accept": "application/json",
"Authorization": "Bearer " + process.env.LOOPS_API_KEY
"Authorization": "Bearer " + getLoopsApiKey()
},
});
}
@@ -127,7 +127,7 @@ export const completeAccountSignup = async (req: Request, res: Response) => {
httpOnly: true,
path: '/',
sameSite: 'strict',
secure: NODE_ENV === 'production' ? true : false
secure: getNodeEnv() === 'production' ? true : false
});
} catch (err) {
Sentry.setUser(null);
@@ -232,7 +232,7 @@ export const completeAccountInvite = async (req: Request, res: Response) => {
httpOnly: true,
path: '/',
sameSite: 'strict',
secure: NODE_ENV === 'production' ? true : false
secure: getNodeEnv() === 'production' ? true : false
});
} catch (err) {
Sentry.setUser(null);
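Outbound HTTP calls, the Loops request here and the integration helpers further down, now go through a shared request instance from config/request instead of calling axios directly. A sketch of what such a wrapper might be, assuming it is simply a preconfigured axios instance; the defaults shown are illustrative:

// config/request.ts (sketch)
import axios from 'axios';

const request = axios.create({
  timeout: 10_000,
  headers: { 'User-Agent': 'infisical-backend' } // illustrative default, not the real value
});

export default request;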

View File

@@ -41,7 +41,7 @@ export const getMe = async (req: Request, res: Response) => {
try {
user = await User
.findById(req.user._id)
.select('+publicKey +encryptedPrivateKey +iv +tag');
.select('+salt +publicKey +encryptedPrivateKey +iv +tag +encryptionVersion +protectedKey +protectedKeyIV +protectedKeyTag');
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);

View File

@@ -19,7 +19,7 @@ import {
reformatPullSecrets
} from '../../helpers/secret';
import { pushKeys } from '../../helpers/key';
import { postHogClient, EventService } from '../../services';
import { getPostHogClient, EventService } from '../../services';
import { eventPushSecrets } from '../../events';
interface V2PushSecret {
@@ -48,6 +48,7 @@ interface V2PushSecret {
export const pushWorkspaceSecrets = async (req: Request, res: Response) => {
// upload (encrypted) secrets to workspace with id [workspaceId]
try {
const postHogClient = getPostHogClient();
let { secrets }: { secrets: V2PushSecret[] } = req.body;
const { keys, environment, channel } = req.body;
const { workspaceId } = req.params;
@@ -121,6 +122,7 @@ export const pushWorkspaceSecrets = async (req: Request, res: Response) => {
export const pullSecrets = async (req: Request, res: Response) => {
let secrets;
try {
const postHogClient = getPostHogClient();
const environment: string = req.query.environment as string;
const channel: string = req.query.channel as string;
const { workspaceId } = req.params;

View File

@@ -1,10 +1,7 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import { Request, Response } from 'express';
import Stripe from 'stripe';
import { STRIPE_SECRET_KEY, STRIPE_WEBHOOK_SECRET } from '../../../config';
const stripe = new Stripe(STRIPE_SECRET_KEY, {
apiVersion: '2022-08-01'
});
import { getStripeSecretKey, getStripeWebhookSecret } from '../../../config';
/**
* Handle service provisioning/un-provisioning via Stripe
@@ -15,12 +12,16 @@ const stripe = new Stripe(STRIPE_SECRET_KEY, {
export const handleWebhook = async (req: Request, res: Response) => {
let event;
try {
const stripe = new Stripe(getStripeSecretKey(), {
apiVersion: '2022-08-01'
});
// check request for valid stripe signature
const sig = req.headers['stripe-signature'] as string;
event = stripe.webhooks.constructEvent(
req.body,
sig,
STRIPE_WEBHOOK_SECRET // ?
getStripeWebhookSecret()
);
} catch (err) {
Sentry.setUser({ email: req.user.email });

View File

@@ -1,5 +1,3 @@
import { LICENSE_KEY } from '../../config';
/**
* Class to handle Enterprise Edition license actions
*/
@@ -16,4 +14,4 @@ class EELicenseService {
}
}
export default new EELicenseService(LICENSE_KEY);
export default new EELicenseService('N/A');

View File

@@ -1,17 +1,12 @@
import jwt from 'jsonwebtoken';
import * as Sentry from '@sentry/node';
import jwt from 'jsonwebtoken';
import bcrypt from 'bcrypt';
import {
IUser,
User,
ServiceTokenData,
APIKeyData
} from '../models';
import {
JWT_AUTH_LIFETIME,
JWT_AUTH_SECRET,
JWT_REFRESH_LIFETIME,
JWT_REFRESH_SECRET
} from '../config';
import {
AccountNotFoundError,
ServiceTokenDataNotFoundError,
@@ -19,6 +14,12 @@ import {
UnauthorizedRequestError,
BadRequestError
} from '../utils/errors';
import {
getJwtAuthLifetime,
getJwtAuthSecret,
getJwtRefreshLifetime,
getJwtRefreshSecret
} from '../config';
/**
*
@@ -92,7 +93,7 @@ const getAuthUserPayload = async ({
let user;
try {
const decodedToken = <jwt.UserIDJwtPayload>(
jwt.verify(authTokenValue, JWT_AUTH_SECRET)
jwt.verify(authTokenValue, getJwtAuthSecret())
);
user = await User.findOne({
@@ -148,7 +149,10 @@ const getAuthSTDPayload = async ({
serviceTokenData = await ServiceTokenData
.findById(TOKEN_IDENTIFIER)
.select('+encryptedKey +iv +tag').populate('user');
.select('+encryptedKey +iv +tag')
.populate<{user: IUser}>('user');
if (!serviceTokenData) throw ServiceTokenDataNotFoundError({ message: 'Failed to find service token data' });
} catch (err) {
throw UnauthorizedRequestError({
@@ -220,16 +224,16 @@ const issueAuthTokens = async ({ userId }: { userId: string }) => {
payload: {
userId
},
expiresIn: JWT_AUTH_LIFETIME,
secret: JWT_AUTH_SECRET
expiresIn: getJwtAuthLifetime(),
secret: getJwtAuthSecret()
});
refreshToken = createToken({
payload: {
userId
},
expiresIn: JWT_REFRESH_LIFETIME,
secret: JWT_REFRESH_SECRET
expiresIn: getJwtRefreshLifetime(),
secret: getJwtRefreshSecret()
});
} catch (err) {
Sentry.setUser(null);
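issueAuthTokens signs both tokens through createToken, now fed by the JWT getters. A minimal createToken sketch using jsonwebtoken's jwt.sign; the real helper lives in helpers/auth and may differ in shape:

import jwt from 'jsonwebtoken';

interface CreateToken {
  payload: Record<string, unknown>;
  expiresIn: string | number;
  secret: string;
}

// sketch of a createToken helper
const createToken = ({ payload, expiresIn, secret }: CreateToken) =>
  jwt.sign(payload, secret, { expiresIn });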

View File

@@ -12,8 +12,8 @@ import {
decryptSymmetric,
decryptAsymmetric
} from '../utils/crypto';
import { ENCRYPTION_KEY } from '../config';
import { SECRET_SHARED } from '../variables';
import { getEncryptionKey } from '../config';
/**
* Create an inactive bot with name [name] for workspace with id [workspaceId]
@@ -33,7 +33,7 @@ const createBot = async ({
const { publicKey, privateKey } = generateKeyPair();
const { ciphertext, iv, tag } = encryptSymmetric({
plaintext: privateKey,
key: ENCRYPTION_KEY
key: getEncryptionKey()
});
bot = await new Bot({
@@ -130,7 +130,7 @@ const getKey = async ({ workspaceId }: { workspaceId: string }) => {
ciphertext: bot.encryptedPrivateKey,
iv: bot.iv,
tag: bot.tag,
key: ENCRYPTION_KEY
key: getEncryptionKey()
});
key = decryptAsymmetric({

View File

@@ -29,6 +29,23 @@ const initDatabaseHelper = async ({
return mongoose.connection;
}
/**
* Close database connection
*/
const closeDatabaseHelper = async () => {
return Promise.all([
new Promise((resolve) => {
if (mongoose.connection && mongoose.connection.readyState == 1) {
mongoose.connection.close()
.then(() => resolve('Database connection closed'));
} else {
resolve('Database connection already closed');
}
})
]);
}
export {
initDatabaseHelper
initDatabaseHelper,
closeDatabaseHelper
}
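closeDatabaseHelper is wired up further down in index.ts via server.on('close', ...). A self-contained usage sketch for a graceful shutdown path; the SIGTERM handling is illustrative and not part of the diff:

import { Server } from 'http';
import mongoose from 'mongoose';

const registerShutdown = (server: Server) => {
  server.on('close', async () => {
    // same idea as closeDatabaseHelper: only close an open mongoose connection
    if (mongoose.connection && mongoose.connection.readyState === 1) {
      await mongoose.connection.close();
    }
  });
  // stop accepting connections, then let the 'close' handler clean up
  process.on('SIGTERM', () => server.close());
};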

View File

@@ -229,7 +229,7 @@ const getIntegrationAuthAccessHelper = async ({ integrationAuthId }: { integrati
// access token is expired
const refreshToken = await getIntegrationAuthRefreshHelper({ integrationAuthId });
accessToken = await exchangeRefresh({
integration: integrationAuth.integration,
integrationAuth,
refreshToken
});
}

View File

@@ -1,9 +1,9 @@
import * as Sentry from '@sentry/node';
import fs from 'fs';
import path from 'path';
import handlebars from 'handlebars';
import nodemailer from 'nodemailer';
import { SMTP_FROM_NAME, SMTP_FROM_ADDRESS } from '../config';
import * as Sentry from '@sentry/node';
import { getSmtpFromName, getSmtpFromAddress, getSmtpConfigured } from '../config';
let smtpTransporter: nodemailer.Transporter;
@@ -25,23 +25,25 @@ const sendMail = async ({
recipients: string[];
substitutions: any;
}) => {
try {
const html = fs.readFileSync(
path.resolve(__dirname, '../templates/' + template),
'utf8'
);
const temp = handlebars.compile(html);
const htmlToSend = temp(substitutions);
if (getSmtpConfigured()) {
try {
const html = fs.readFileSync(
path.resolve(__dirname, '../templates/' + template),
'utf8'
);
const temp = handlebars.compile(html);
const htmlToSend = temp(substitutions);
await smtpTransporter.sendMail({
from: `"${SMTP_FROM_NAME}" <${SMTP_FROM_ADDRESS}>`,
to: recipients.join(', '),
subject: subjectLine,
html: htmlToSend
});
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
await smtpTransporter.sendMail({
from: `"${getSmtpFromName()}" <${getSmtpFromAddress()}>`,
to: recipients.join(', '),
subject: subjectLine,
html: htmlToSend
});
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
}
}
};
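sendMail now reads the from-name and from-address via getters and is gated on getSmtpConfigured(); the transporter itself is created elsewhere (initSmtp) and injected with setTransporter. A sketch of what such an init might look like, assuming standard nodemailer SMTP options; the env var names are assumptions:

import nodemailer from 'nodemailer';

// sketch of an initSmtp(); the project's real option names may differ
const initSmtp = () =>
  nodemailer.createTransport({
    host: process.env.SMTP_HOST,
    port: Number(process.env.SMTP_PORT ?? 587),
    secure: process.env.SMTP_SECURE === 'true',
    auth: {
      user: process.env.SMTP_USERNAME,
      pass: process.env.SMTP_PASSWORD
    }
  });

export { initSmtp };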

View File

@@ -1,23 +1,14 @@
import * as Sentry from '@sentry/node';
import Stripe from 'stripe';
import {
STRIPE_SECRET_KEY,
STRIPE_PRODUCT_STARTER,
STRIPE_PRODUCT_TEAM,
STRIPE_PRODUCT_PRO
} from '../config';
const stripe = new Stripe(STRIPE_SECRET_KEY, {
apiVersion: '2022-08-01'
});
import { Types } from 'mongoose';
import { ACCEPTED } from '../variables';
import { Organization, MembershipOrg } from '../models';
const productToPriceMap = {
starter: STRIPE_PRODUCT_STARTER,
team: STRIPE_PRODUCT_TEAM,
pro: STRIPE_PRODUCT_PRO
};
import {
getStripeSecretKey,
getStripeProductPro,
getStripeProductTeam,
getStripeProductStarter
} from '../config';
/**
* Create an organization with name [name]
@@ -36,8 +27,11 @@ const createOrganization = async ({
let organization;
try {
// register stripe account
const stripe = new Stripe(getStripeSecretKey(), {
apiVersion: '2022-08-01'
});
if (STRIPE_SECRET_KEY) {
if (getStripeSecretKey()) {
const customer = await stripe.customers.create({
email,
description: name
@@ -87,6 +81,16 @@ const initSubscriptionOrg = async ({
if (organization) {
if (organization.customerId) {
// initialize starter subscription with quantity of 0
const stripe = new Stripe(getStripeSecretKey(), {
apiVersion: '2022-08-01'
});
const productToPriceMap = {
starter: getStripeProductStarter(),
team: getStripeProductTeam(),
pro: getStripeProductPro()
};
stripeSubscription = await stripe.subscriptions.create({
customer: organization.customerId,
items: [
@@ -139,6 +143,10 @@ const updateSubscriptionOrgQuantity = async ({
status: ACCEPTED
});
const stripe = new Stripe(getStripeSecretKey(), {
apiVersion: '2022-08-01'
});
const subscription = (
await stripe.subscriptions.list({
customer: organization.customerId
@@ -167,4 +175,4 @@ export {
createOrganization,
initSubscriptionOrg,
updateSubscriptionOrgQuantity
};
};

View File

@@ -267,7 +267,7 @@ const v1PushSecrets = async ({
if (toAdd.length > 0) {
// add secrets
const newSecrets = await Secret.insertMany(
const newSecrets: ISecret[] = (await Secret.insertMany(
toAdd.map((s, idx) => {
const obj: any = {
version: 1,
@@ -294,7 +294,7 @@ const v1PushSecrets = async ({
return obj;
})
);
)).map((insertedSecret) => insertedSecret.toObject());
// (EE) add secret versions for new secrets
EESecretService.addSecretVersions({

View File

@@ -9,10 +9,8 @@ import {
TOKEN_EMAIL_ORG_INVITATION,
TOKEN_EMAIL_PASSWORD_RESET
} from '../variables';
import {
SALT_ROUNDS
} from '../config';
import { UnauthorizedRequestError } from '../utils/errors';
import { getSaltRounds } from '../config';
/**
* Create and store a token in the database for purpose [type]
@@ -86,7 +84,7 @@ const createTokenHelper = async ({
const query: TokenDataQuery = { type };
const update: TokenDataUpdate = {
type,
tokenHash: await bcrypt.hash(token, SALT_ROUNDS),
tokenHash: await bcrypt.hash(token, getSaltRounds()),
expiresAt
}

View File

@@ -1,28 +1,183 @@
import mongoose from 'mongoose';
import dotenv from 'dotenv';
dotenv.config();
import infisical from 'infisical-node';
import express from 'express';
import helmet from 'helmet';
import cors from 'cors';
import * as Sentry from '@sentry/node';
import { SENTRY_DSN, NODE_ENV, MONGO_URL } from './config';
import { server } from './app';
import { DatabaseService } from './services';
import { setUpHealthEndpoint } from './services/health';
import { initSmtp } from './services/smtp';
import { logTelemetryMessage } from './services';
import { setTransporter } from './helpers/nodemailer';
import { createTestUserForDevelopment } from './utils/addDevelopmentUser';
// eslint-disable-next-line @typescript-eslint/no-var-requires
const { patchRouterParam } = require('./utils/patchAsyncRoutes');
DatabaseService.initDatabase(MONGO_URL);
import cookieParser from 'cookie-parser';
import swaggerUi = require('swagger-ui-express');
// eslint-disable-next-line @typescript-eslint/no-var-requires
const swaggerFile = require('../spec.json');
// eslint-disable-next-line @typescript-eslint/no-var-requires
const requestIp = require('request-ip');
import { apiLimiter } from './helpers/rateLimiter';
import {
workspace as eeWorkspaceRouter,
secret as eeSecretRouter,
secretSnapshot as eeSecretSnapshotRouter,
action as eeActionRouter
} from './ee/routes/v1';
import {
signup as v1SignupRouter,
auth as v1AuthRouter,
bot as v1BotRouter,
organization as v1OrganizationRouter,
workspace as v1WorkspaceRouter,
membershipOrg as v1MembershipOrgRouter,
membership as v1MembershipRouter,
key as v1KeyRouter,
inviteOrg as v1InviteOrgRouter,
user as v1UserRouter,
userAction as v1UserActionRouter,
secret as v1SecretRouter,
serviceToken as v1ServiceTokenRouter,
password as v1PasswordRouter,
stripe as v1StripeRouter,
integration as v1IntegrationRouter,
integrationAuth as v1IntegrationAuthRouter
} from './routes/v1';
import {
signup as v2SignupRouter,
auth as v2AuthRouter,
users as v2UsersRouter,
organizations as v2OrganizationsRouter,
workspace as v2WorkspaceRouter,
secret as v2SecretRouter, // begin to phase out
secrets as v2SecretsRouter,
serviceTokenData as v2ServiceTokenDataRouter,
apiKeyData as v2APIKeyDataRouter,
environment as v2EnvironmentRouter,
tags as v2TagsRouter,
} from './routes/v2';
import { healthCheck } from './routes/status';
import { getLogger } from './utils/logger';
import { RouteNotFoundError } from './utils/errors';
import { requestErrorHandler } from './middleware/requestErrorHandler';
import {
getMongoURL,
getNodeEnv,
getPort,
getSentryDSN,
getSiteURL
} from './config';
setUpHealthEndpoint(server);
const main = async () => {
if (process.env.INFISICAL_TOKEN !== "" && process.env.INFISICAL_TOKEN !== undefined) {
await infisical.connect({
token: process.env.INFISICAL_TOKEN!
});
}
setTransporter(initSmtp());
logTelemetryMessage();
setTransporter(initSmtp());
if (NODE_ENV !== 'test') {
Sentry.init({
dsn: SENTRY_DSN,
tracesSampleRate: 1.0,
debug: NODE_ENV === 'production' ? false : true,
environment: NODE_ENV
});
await DatabaseService.initDatabase(getMongoURL());
if (getNodeEnv() !== 'test') {
Sentry.init({
dsn: getSentryDSN(),
tracesSampleRate: 1.0,
debug: getNodeEnv() === 'production' ? false : true,
environment: getNodeEnv()
});
}
patchRouterParam();
const app = express();
app.enable('trust proxy');
app.use(express.json());
app.use(cookieParser());
app.use(
cors({
credentials: true,
origin: getSiteURL()
})
);
app.use(requestIp.mw());
if (getNodeEnv() === 'production') {
// enable app-wide rate-limiting + helmet security
// in production
app.disable('x-powered-by');
app.use(apiLimiter);
app.use(helmet());
}
// (EE) routes
app.use('/api/v1/secret', eeSecretRouter);
app.use('/api/v1/secret-snapshot', eeSecretSnapshotRouter);
app.use('/api/v1/workspace', eeWorkspaceRouter);
app.use('/api/v1/action', eeActionRouter);
// v1 routes
app.use('/api/v1/signup', v1SignupRouter);
app.use('/api/v1/auth', v1AuthRouter);
app.use('/api/v1/bot', v1BotRouter);
app.use('/api/v1/user', v1UserRouter);
app.use('/api/v1/user-action', v1UserActionRouter);
app.use('/api/v1/organization', v1OrganizationRouter);
app.use('/api/v1/workspace', v1WorkspaceRouter);
app.use('/api/v1/membership-org', v1MembershipOrgRouter);
app.use('/api/v1/membership', v1MembershipRouter);
app.use('/api/v1/key', v1KeyRouter);
app.use('/api/v1/invite-org', v1InviteOrgRouter);
app.use('/api/v1/secret', v1SecretRouter);
app.use('/api/v1/service-token', v1ServiceTokenRouter); // deprecated
app.use('/api/v1/password', v1PasswordRouter);
app.use('/api/v1/stripe', v1StripeRouter);
app.use('/api/v1/integration', v1IntegrationRouter);
app.use('/api/v1/integration-auth', v1IntegrationAuthRouter);
// v2 routes
app.use('/api/v2/signup', v2SignupRouter);
app.use('/api/v2/auth', v2AuthRouter);
app.use('/api/v2/users', v2UsersRouter);
app.use('/api/v2/organizations', v2OrganizationsRouter);
app.use('/api/v2/workspace', v2EnvironmentRouter);
app.use('/api/v2/workspace', v2TagsRouter);
app.use('/api/v2/workspace', v2WorkspaceRouter);
app.use('/api/v2/secret', v2SecretRouter); // deprecated
app.use('/api/v2/secrets', v2SecretsRouter);
app.use('/api/v2/service-token', v2ServiceTokenDataRouter); // TODO: turn into plural route
app.use('/api/v2/api-key', v2APIKeyDataRouter);
// api docs
app.use('/api-docs', swaggerUi.serve, swaggerUi.setup(swaggerFile))
// Server status
app.use('/api', healthCheck)
//* Handle unrouted requests and respond with proper error message as well as status code
app.use((req, res, next) => {
if (res.headersSent) return next();
next(RouteNotFoundError({ message: `The requested source '(${req.method})${req.url}' was not found` }))
})
app.use(requestErrorHandler)
const server = app.listen(getPort(), () => {
getLogger("backend-main").info(`Server started listening at port ${getPort()}`)
});
createTestUserForDevelopment();
setUpHealthEndpoint(server);
server.on('close', async () => {
await DatabaseService.closeDatabase();
})
return server;
}
createTestUserForDevelopment()
export default main();
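index.ts now exports the promise returned by main(), which is what the new Jest setup from the test-setup commits can await and later close. A hedged usage sketch; the import path and test wiring are assumptions:

// healthcheck.test.ts (sketch)
import { Server } from 'http';
import main from '../src/index';

let server: Server;

beforeAll(async () => {
  server = await main; // main() resolves to the listening HTTP server
});

afterAll(() => {
  server.close(); // triggers the 'close' handler, which closes the DB connection
});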

View File

@@ -1,7 +1,7 @@
import axios from "axios";
import * as Sentry from "@sentry/node";
import { Octokit } from "@octokit/rest";
import { IIntegrationAuth } from "../models";
import request from '../config/request';
import {
INTEGRATION_AZURE_KEY_VAULT,
INTEGRATION_AWS_PARAMETER_STORE,
@@ -10,39 +10,47 @@ import {
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_GITLAB,
INTEGRATION_RENDER,
INTEGRATION_FLYIO,
INTEGRATION_CIRCLECI,
INTEGRATION_TRAVISCI,
INTEGRATION_HEROKU_API_URL,
INTEGRATION_GITLAB_API_URL,
INTEGRATION_VERCEL_API_URL,
INTEGRATION_NETLIFY_API_URL,
INTEGRATION_RENDER_API_URL,
INTEGRATION_FLYIO_API_URL,
INTEGRATION_CIRCLECI_API_URL,
INTEGRATION_TRAVISCI_API_URL,
} from "../variables";
interface App {
name: string;
appId?: string;
owner?: string;
}
/**
* Return list of names of apps for integration named [integration]
* @param {Object} obj
* @param {String} obj.integration - name of integration
* @param {String} obj.accessToken - access token for integration
* @param {String} obj.teamId - (optional) id of team for getting integration apps (used for integrations like GitLab)
* @returns {Object[]} apps - names of integration apps
* @returns {String} apps.name - name of integration app
*/
const getApps = async ({
integrationAuth,
accessToken,
teamId
}: {
integrationAuth: IIntegrationAuth;
accessToken: string;
teamId?: string;
}) => {
interface App {
name: string;
appId?: string;
owner?: string;
}
let apps: App[];
let apps: App[] = [];
try {
switch (integrationAuth.integration) {
case INTEGRATION_AZURE_KEY_VAULT:
@@ -75,6 +83,12 @@ const getApps = async ({
accessToken,
});
break;
case INTEGRATION_GITLAB:
apps = await getAppsGitlab({
accessToken,
teamId
});
break;
case INTEGRATION_RENDER:
apps = await getAppsRender({
accessToken,
@@ -90,6 +104,11 @@ const getApps = async ({
accessToken,
});
break;
case INTEGRATION_TRAVISCI:
apps = await getAppsTravisCI({
accessToken,
})
break;
}
} catch (err) {
Sentry.setUser(null);
@@ -111,7 +130,7 @@ const getAppsHeroku = async ({ accessToken }: { accessToken: string }) => {
let apps;
try {
const res = (
await axios.get(`${INTEGRATION_HEROKU_API_URL}/apps`, {
await request.get(`${INTEGRATION_HEROKU_API_URL}/apps`, {
headers: {
Accept: "application/vnd.heroku+json; version=3",
Authorization: `Bearer ${accessToken}`,
@@ -148,7 +167,7 @@ const getAppsVercel = async ({
let apps;
try {
const res = (
await axios.get(`${INTEGRATION_VERCEL_API_URL}/v9/projects`, {
await request.get(`${INTEGRATION_VERCEL_API_URL}/v9/projects`, {
headers: {
Authorization: `Bearer ${accessToken}`,
'Accept-Encoding': 'application/json'
@@ -183,21 +202,40 @@ const getAppsVercel = async ({
* @returns {String} apps.name - name of Netlify site
*/
const getAppsNetlify = async ({ accessToken }: { accessToken: string }) => {
let apps;
const apps: any = [];
try {
const res = (
await axios.get(`${INTEGRATION_NETLIFY_API_URL}/api/v1/sites`, {
let page = 1;
const perPage = 10;
let hasMorePages = true;
// paginate through all sites
while (hasMorePages) {
const params = new URLSearchParams({
page: String(page),
per_page: String(perPage)
});
const { data } = await request.get(`${INTEGRATION_NETLIFY_API_URL}/api/v1/sites`, {
params,
headers: {
Authorization: `Bearer ${accessToken}`,
'Accept-Encoding': 'application/json'
}
})
).data;
});
data.map((a: any) => {
apps.push({
name: a.name,
appId: a.site_id
});
});
if (data.length < perPage) {
hasMorePages = false;
}
apps = res.map((a: any) => ({
name: a.name,
appId: a.site_id,
}));
page++;
}
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
@@ -210,9 +248,9 @@ const getAppsNetlify = async ({ accessToken }: { accessToken: string }) => {
/**
* Return list of repositories for Github integration
* @param {Object} obj
* @param {String} obj.accessToken - access token for Netlify API
* @returns {Object[]} apps - names of Netlify sites
* @returns {String} apps.name - name of Netlify site
* @param {String} obj.accessToken - access token for Github API
* @returns {Object[]} apps - names of Github sites
* @returns {String} apps.name - name of Github site
*/
const getAppsGithub = async ({ accessToken }: { accessToken: string }) => {
let apps;
@@ -257,7 +295,7 @@ const getAppsRender = async ({ accessToken }: { accessToken: string }) => {
let apps: any;
try {
const res = (
await axios.get(`${INTEGRATION_RENDER_API_URL}/v1/services`, {
await request.get(`${INTEGRATION_RENDER_API_URL}/v1/services`, {
headers: {
Authorization: `Bearer ${accessToken}`,
Accept: 'application/json',
@@ -303,23 +341,18 @@ const getAppsFlyio = async ({ accessToken }: { accessToken: string }) => {
}
`;
const res = (
await axios({
url: INTEGRATION_FLYIO_API_URL,
method: "post",
headers: {
Authorization: "Bearer " + accessToken,
const res = (await request.post(INTEGRATION_FLYIO_API_URL, {
query,
variables: {
role: null,
},
}, {
headers: {
Authorization: "Bearer " + accessToken,
'Accept': 'application/json',
'Accept-Encoding': 'application/json',
},
data: {
query,
variables: {
role: null,
},
},
})
).data.data.apps.nodes;
},
})).data.data.apps.nodes;
apps = res.map((a: any) => ({
name: a.name,
@@ -344,7 +377,7 @@ const getAppsCircleCI = async ({ accessToken }: { accessToken: string }) => {
let apps: any;
try {
const res = (
await axios.get(
await request.get(
`${INTEGRATION_CIRCLECI_API_URL}/v1.1/projects`,
{
headers: {
@@ -369,4 +402,147 @@ const getAppsCircleCI = async ({ accessToken }: { accessToken: string }) => {
return apps;
};
const getAppsTravisCI = async ({ accessToken }: { accessToken: string }) => {
let apps: any;
try {
const res = (
await request.get(
`${INTEGRATION_TRAVISCI_API_URL}/repos`,
{
headers: {
"Authorization": `token ${accessToken}`,
"Accept-Encoding": "application/json",
},
}
)
).data;
apps = res?.map((a: any) => {
return {
name: a?.slug?.split("/")[1],
appId: a?.id,
}
});
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error("Failed to get TravisCI projects");
}
return apps;
}
/**
* Return list of repositories for GitLab integration
* @param {Object} obj
* @param {String} obj.accessToken - access token for GitLab API
* @returns {Object[]} apps - names of GitLab sites
* @returns {String} apps.name - name of GitLab site
*/
const getAppsGitlab = async ({
accessToken,
teamId
}: {
accessToken: string;
teamId?: string;
}) => {
const apps: App[] = [];
let page = 1;
const perPage = 10;
let hasMorePages = true;
try {
if (teamId) {
// case: fetch projects for group with id [teamId] in GitLab
while (hasMorePages) {
const params = new URLSearchParams({
page: String(page),
per_page: String(perPage)
});
const { data } = (
await request.get(
`${INTEGRATION_GITLAB_API_URL}/v4/groups/${teamId}/projects`,
{
params,
headers: {
"Authorization": `Bearer ${accessToken}`,
"Accept-Encoding": "application/json",
},
}
)
);
data.map((a: any) => {
apps.push({
name: a.name,
appId: a.id
});
});
if (data.length < perPage) {
hasMorePages = false;
}
page++;
}
} else {
// case: fetch projects for individual in GitLab
const { id } = (
await request.get(
`${INTEGRATION_GITLAB_API_URL}/v4/user`,
{
headers: {
"Authorization": `Bearer ${accessToken}`,
"Accept-Encoding": "application/json",
},
}
)
).data;
while (hasMorePages) {
const params = new URLSearchParams({
page: String(page),
per_page: String(perPage)
});
const { data } = (
await request.get(
`${INTEGRATION_GITLAB_API_URL}/v4/users/${id}/projects`,
{
params,
headers: {
"Authorization": `Bearer ${accessToken}`,
"Accept-Encoding": "application/json",
},
}
)
);
data.map((a: any) => {
apps.push({
name: a.name,
appId: a.id
});
});
if (data.length < perPage) {
hasMorePages = false;
}
page++;
}
}
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error("Failed to get GitLab projects");
}
return apps;
}
export { getApps };
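Both getAppsNetlify and getAppsGitlab above page through results 10 at a time until a short page is returned. The loop can be factored into a small helper; a generic sketch (not the project's code) for any endpoint that accepts page/per_page query params:

import axios, { AxiosRequestConfig } from 'axios';

const fetchAllPages = async <T>(url: string, config: AxiosRequestConfig = {}, perPage = 10): Promise<T[]> => {
  const results: T[] = [];
  let page = 1;
  let hasMorePages = true;
  while (hasMorePages) {
    const { data } = await axios.get<T[]>(url, {
      ...config,
      params: { ...(config.params ?? {}), page, per_page: perPage }
    });
    results.push(...data);
    hasMorePages = data.length === perPage; // a short page means the last page was reached
    page++;
  }
  return results;
};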

View File

@@ -1,28 +1,32 @@
import axios from 'axios';
import * as Sentry from '@sentry/node';
import request from '../config/request';
import {
INTEGRATION_AZURE_KEY_VAULT,
INTEGRATION_HEROKU,
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_GITLAB,
INTEGRATION_AZURE_TOKEN_URL,
INTEGRATION_HEROKU_TOKEN_URL,
INTEGRATION_VERCEL_TOKEN_URL,
INTEGRATION_NETLIFY_TOKEN_URL,
INTEGRATION_GITHUB_TOKEN_URL
INTEGRATION_GITHUB_TOKEN_URL,
INTEGRATION_GITLAB_TOKEN_URL
} from '../variables';
import {
SITE_URL,
CLIENT_ID_AZURE,
CLIENT_ID_VERCEL,
CLIENT_ID_NETLIFY,
CLIENT_ID_GITHUB,
CLIENT_SECRET_AZURE,
CLIENT_SECRET_HEROKU,
CLIENT_SECRET_VERCEL,
CLIENT_SECRET_NETLIFY,
CLIENT_SECRET_GITHUB
getSiteURL,
getClientIdAzure,
getClientSecretAzure,
getClientSecretHeroku,
getClientIdVercel,
getClientSecretVercel,
getClientIdNetlify,
getClientSecretNetlify,
getClientIdGitHub,
getClientSecretGitHub,
getClientIdGitLab,
getClientSecretGitLab
} from '../config';
interface ExchangeCodeAzureResponse {
@@ -66,6 +70,15 @@ interface ExchangeCodeGithubResponse {
token_type: string;
}
interface ExchangeCodeGitlabResponse {
access_token: string;
token_type: string;
expires_in: number;
refresh_token: string;
scope: string;
created_at: number;
}
/**
* Return [accessToken], [accessExpiresAt], and [refreshToken] for OAuth2
* code-token exchange for integration named [integration]
@@ -114,6 +127,10 @@ const exchangeCode = async ({
code
});
break;
case INTEGRATION_GITLAB:
obj = await exchangeCodeGitlab({
code
});
}
} catch (err) {
Sentry.setUser(null);
@@ -136,22 +153,22 @@ const exchangeCodeAzure = async ({
const accessExpiresAt = new Date();
let res: ExchangeCodeAzureResponse;
try {
res = (await axios.post(
res = (await request.post(
INTEGRATION_AZURE_TOKEN_URL,
new URLSearchParams({
grant_type: 'authorization_code',
code: code,
scope: 'https://vault.azure.net/.default openid offline_access',
client_id: CLIENT_ID_AZURE,
client_secret: CLIENT_SECRET_AZURE,
redirect_uri: `${SITE_URL}/integrations/azure-key-vault/oauth2/callback`
client_id: getClientIdAzure(),
client_secret: getClientSecretAzure(),
redirect_uri: `${getSiteURL()}/integrations/azure-key-vault/oauth2/callback`
} as any)
)).data;
accessExpiresAt.setSeconds(
accessExpiresAt.getSeconds() + res.expires_in
);
} catch (err: any) {
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed OAuth2 code-token exchange with Azure');
@@ -182,12 +199,12 @@ const exchangeCodeHeroku = async ({
let res: ExchangeCodeHerokuResponse;
const accessExpiresAt = new Date();
try {
res = (await axios.post(
res = (await request.post(
INTEGRATION_HEROKU_TOKEN_URL,
new URLSearchParams({
grant_type: 'authorization_code',
code: code,
client_secret: CLIENT_SECRET_HEROKU
client_secret: getClientSecretHeroku()
} as any)
)).data;
@@ -221,13 +238,13 @@ const exchangeCodeVercel = async ({ code }: { code: string }) => {
let res: ExchangeCodeVercelResponse;
try {
res = (
await axios.post(
await request.post(
INTEGRATION_VERCEL_TOKEN_URL,
new URLSearchParams({
code: code,
client_id: CLIENT_ID_VERCEL,
client_secret: CLIENT_SECRET_VERCEL,
redirect_uri: `${SITE_URL}/integrations/vercel/oauth2/callback`
client_id: getClientIdVercel(),
client_secret: getClientSecretVercel(),
redirect_uri: `${getSiteURL()}/integrations/vercel/oauth2/callback`
} as any)
)
).data;
@@ -260,26 +277,26 @@ const exchangeCodeNetlify = async ({ code }: { code: string }) => {
let accountId;
try {
res = (
await axios.post(
await request.post(
INTEGRATION_NETLIFY_TOKEN_URL,
new URLSearchParams({
grant_type: 'authorization_code',
code: code,
client_id: CLIENT_ID_NETLIFY,
client_secret: CLIENT_SECRET_NETLIFY,
redirect_uri: `${SITE_URL}/integrations/netlify/oauth2/callback`
client_id: getClientIdNetlify(),
client_secret: getClientSecretNetlify(),
redirect_uri: `${getSiteURL()}/integrations/netlify/oauth2/callback`
} as any)
)
).data;
const res2 = await axios.get('https://api.netlify.com/api/v1/sites', {
const res2 = await request.get('https://api.netlify.com/api/v1/sites', {
headers: {
Authorization: `Bearer ${res.access_token}`
}
});
const res3 = (
await axios.get('https://api.netlify.com/api/v1/accounts', {
await request.get('https://api.netlify.com/api/v1/accounts', {
headers: {
Authorization: `Bearer ${res.access_token}`
}
@@ -314,12 +331,12 @@ const exchangeCodeGithub = async ({ code }: { code: string }) => {
let res: ExchangeCodeGithubResponse;
try {
res = (
await axios.get(INTEGRATION_GITHUB_TOKEN_URL, {
await request.get(INTEGRATION_GITHUB_TOKEN_URL, {
params: {
client_id: CLIENT_ID_GITHUB,
client_secret: CLIENT_SECRET_GITHUB,
client_id: getClientIdGitHub(),
client_secret: getClientSecretGitHub(),
code: code,
redirect_uri: `${SITE_URL}/integrations/github/oauth2/callback`
redirect_uri: `${getSiteURL()}/integrations/github/oauth2/callback`
},
headers: {
'Accept': 'application/json',
@@ -341,4 +358,53 @@ const exchangeCodeGithub = async ({ code }: { code: string }) => {
};
};
/**
* Return [accessToken], [accessExpiresAt], and [refreshToken] for Gitlab
* code-token exchange
* @param {Object} obj1
* @param {Object} obj1.code - code for code-token exchange
* @returns {Object} obj2
* @returns {String} obj2.accessToken - access token for Gitlab API
* @returns {String} obj2.refreshToken - refresh token for Gitlab API
* @returns {Date} obj2.accessExpiresAt - date of expiration for access token
*/
const exchangeCodeGitlab = async ({ code }: { code: string }) => {
let res: ExchangeCodeGitlabResponse;
const accessExpiresAt = new Date();
try {
res = (
await request.post(
INTEGRATION_GITLAB_TOKEN_URL,
new URLSearchParams({
grant_type: 'authorization_code',
code: code,
client_id: getClientIdGitLab(),
client_secret: getClientSecretGitLab(),
redirect_uri: `${getSiteURL()}/integrations/gitlab/oauth2/callback`
} as any),
{
headers: {
"Accept-Encoding": "application/json",
}
}
)
).data;
accessExpiresAt.setSeconds(
accessExpiresAt.getSeconds() + res.expires_in
);
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed OAuth2 code-token exchange with Gitlab');
}
return {
accessToken: res.access_token,
refreshToken: res.refresh_token,
accessExpiresAt
};
}
export { exchangeCode };

View File

@@ -1,6 +1,7 @@
import { exchangeCode } from './exchange';
import { exchangeRefresh } from './refresh';
import { getApps } from './apps';
import { getTeams } from './teams';
import { syncSecrets } from './sync';
import { revokeAccess } from './revoke';
@@ -8,6 +9,7 @@ export {
exchangeCode,
exchangeRefresh,
getApps,
getTeams,
syncSecrets,
revokeAccess
}

View File

@@ -1,16 +1,29 @@
import axios from 'axios';
import * as Sentry from '@sentry/node';
import { INTEGRATION_AZURE_KEY_VAULT, INTEGRATION_HEROKU } from '../variables';
import request from '../config/request';
import {
SITE_URL,
CLIENT_ID_AZURE,
CLIENT_SECRET_AZURE,
CLIENT_SECRET_HEROKU
} from '../config';
IIntegrationAuth
} from '../models';
import {
INTEGRATION_AZURE_KEY_VAULT,
INTEGRATION_HEROKU,
INTEGRATION_GITLAB,
} from '../variables';
import {
INTEGRATION_AZURE_TOKEN_URL,
INTEGRATION_HEROKU_TOKEN_URL
INTEGRATION_HEROKU_TOKEN_URL,
INTEGRATION_GITLAB_TOKEN_URL
} from '../variables';
import {
IntegrationService
} from '../services';
import {
getSiteURL,
getClientIdAzure,
getClientSecretAzure,
getClientSecretHeroku,
getClientIdGitLab,
getClientSecretGitLab
} from '../config';
interface RefreshTokenAzureResponse {
token_type: string;
@@ -21,6 +34,23 @@ interface RefreshTokenAzureResponse {
refresh_token: string;
}
interface RefreshTokenHerokuResponse {
access_token: string;
expires_in: number;
refresh_token: string;
token_type: string;
user_id: string;
}
interface RefreshTokenGitLabResponse {
token_type: string;
scope: string;
expires_in: number;
access_token: string;
refresh_token: string;
created_at: number;
}
/**
* Return new access token by exchanging refresh token [refreshToken] for integration
* named [integration]
@@ -29,33 +59,61 @@ interface RefreshTokenAzureResponse {
* @param {String} obj.refreshToken - refresh token to use to get a new access token for the integration
*/
const exchangeRefresh = async ({
integration,
integrationAuth,
refreshToken
}: {
integration: string;
integrationAuth: IIntegrationAuth;
refreshToken: string;
}) => {
let accessToken;
interface TokenDetails {
accessToken: string;
refreshToken: string;
accessExpiresAt: Date;
}
let tokenDetails: TokenDetails;
try {
switch (integration) {
switch (integrationAuth.integration) {
case INTEGRATION_AZURE_KEY_VAULT:
accessToken = await exchangeRefreshAzure({
tokenDetails = await exchangeRefreshAzure({
refreshToken
});
break;
case INTEGRATION_HEROKU:
accessToken = await exchangeRefreshHeroku({
tokenDetails = await exchangeRefreshHeroku({
refreshToken
});
break;
case INTEGRATION_GITLAB:
tokenDetails = await exchangeRefreshGitLab({
refreshToken
});
break;
default:
throw new Error('Failed to exchange token for incompatible integration');
}
if (tokenDetails?.accessToken && tokenDetails?.refreshToken && tokenDetails?.accessExpiresAt) {
await IntegrationService.setIntegrationAuthAccess({
integrationAuthId: integrationAuth._id.toString(),
accessId: null,
accessToken: tokenDetails.accessToken,
accessExpiresAt: tokenDetails.accessExpiresAt
});
await IntegrationService.setIntegrationAuthRefresh({
integrationAuthId: integrationAuth._id.toString(),
refreshToken: tokenDetails.refreshToken
});
}
return tokenDetails.accessToken;
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed to get new OAuth2 access token');
}
return accessToken;
};
/**
@@ -71,18 +129,27 @@ const exchangeRefreshAzure = async ({
refreshToken: string;
}) => {
try {
const res: RefreshTokenAzureResponse = (await axios.post(
const accessExpiresAt = new Date();
const { data }: { data: RefreshTokenAzureResponse } = await request.post(
INTEGRATION_AZURE_TOKEN_URL,
new URLSearchParams({
client_id: CLIENT_ID_AZURE,
client_id: getClientIdAzure(),
scope: 'openid offline_access',
refresh_token: refreshToken,
grant_type: 'refresh_token',
client_secret: CLIENT_SECRET_AZURE
client_secret: getClientSecretAzure()
} as any)
)).data;
);
return res.access_token;
accessExpiresAt.setSeconds(
accessExpiresAt.getSeconds() + data.expires_in
);
return ({
accessToken: data.access_token,
refreshToken: data.refresh_token,
accessExpiresAt
});
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
@@ -102,26 +169,84 @@ const exchangeRefreshHeroku = async ({
}: {
refreshToken: string;
}) => {
let accessToken;
try {
const res = await axios.post(
const accessExpiresAt = new Date();
const {
data
}: {
data: RefreshTokenHerokuResponse
} = await request.post(
INTEGRATION_HEROKU_TOKEN_URL,
new URLSearchParams({
grant_type: 'refresh_token',
refresh_token: refreshToken,
client_secret: CLIENT_SECRET_HEROKU
client_secret: getClientSecretHeroku()
} as any)
);
accessToken = res.data.access_token;
accessExpiresAt.setSeconds(
accessExpiresAt.getSeconds() + data.expires_in
);
return ({
accessToken: data.access_token,
refreshToken: data.refresh_token,
accessExpiresAt
});
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed to refresh OAuth2 access token for Heroku');
}
};
return accessToken;
/**
* Return new access token by exchanging refresh token [refreshToken] for the
* GitLab integration
* @param {Object} obj
* @param {String} obj.refreshToken - refresh token to use to get new access token for GitLab
* @returns
*/
const exchangeRefreshGitLab = async ({
refreshToken
}: {
refreshToken: string;
}) => {
try {
const accessExpiresAt = new Date();
const {
data
}: {
data: RefreshTokenGitLabResponse
} = await request.post(
INTEGRATION_GITLAB_TOKEN_URL,
new URLSearchParams({
grant_type: 'refresh_token',
refresh_token: refreshToken,
client_id: getClientIdGitLab(),
client_secret: getClientSecretGitLab(),
redirect_uri: `${getSiteURL()}/integrations/gitlab/oauth2/callback`
} as any),
{
headers: {
"Accept-Encoding": "application/json",
}
});
accessExpiresAt.setSeconds(
accessExpiresAt.getSeconds() + data.expires_in
);
return ({
accessToken: data.access_token,
refreshToken: data.refresh_token,
accessExpiresAt
});
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed to refresh OAuth2 access token for GitLab');
}
};
export { exchangeRefresh };
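A minimal sketch of how a caller might use the reworked exchangeRefresh, which now persists the rotated refresh token and access expiry itself via IntegrationService and resolves to just the fresh access token. The lookup helper and relative import paths below are assumptions for illustration, not part of this diff.
import { IntegrationAuth } from '../models';
import { exchangeRefresh } from '../integrations';
// Hypothetical helper: load the integration auth record, then refresh its access token.
const refreshAccessFor = async (integrationAuthId: string, refreshToken: string) => {
  const integrationAuth = await IntegrationAuth.findById(integrationAuthId);
  if (!integrationAuth) throw new Error('Integration auth not found');
  // exchangeRefresh stores the new refresh token and access expiry internally
  // and returns only the access token for immediate use.
  const accessToken = await exchangeRefresh({
    integrationAuth,
    refreshToken
  });
  return accessToken;
};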

View File

@@ -10,7 +10,8 @@ import {
INTEGRATION_HEROKU,
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB
INTEGRATION_GITHUB,
INTEGRATION_GITLAB,
} from '../variables';
const revokeAccess = async ({
@@ -32,6 +33,8 @@ const revokeAccess = async ({
break;
case INTEGRATION_GITHUB:
break;
case INTEGRATION_GITLAB:
break;
}
deletedIntegrationAuth = await IntegrationAuth.findOneAndDelete({

View File

@@ -1,4 +1,3 @@
import axios from "axios";
import * as Sentry from "@sentry/node";
import _ from 'lodash';
import AWS from 'aws-sdk';
@@ -20,16 +19,21 @@ import {
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_GITLAB,
INTEGRATION_RENDER,
INTEGRATION_FLYIO,
INTEGRATION_CIRCLECI,
INTEGRATION_TRAVISCI,
INTEGRATION_HEROKU_API_URL,
INTEGRATION_GITLAB_API_URL,
INTEGRATION_VERCEL_API_URL,
INTEGRATION_NETLIFY_API_URL,
INTEGRATION_RENDER_API_URL,
INTEGRATION_FLYIO_API_URL,
INTEGRATION_CIRCLECI_API_URL,
INTEGRATION_TRAVISCI_API_URL,
} from "../variables";
import request from '../config/request';
/**
* Sync/push [secrets] to [app] in integration named [integration]
@@ -108,6 +112,13 @@ const syncSecrets = async ({
accessToken,
});
break;
case INTEGRATION_GITLAB:
await syncSecretsGitLab({
integration,
secrets,
accessToken,
});
break;
case INTEGRATION_RENDER:
await syncSecretsRender({
integration,
@@ -128,6 +139,14 @@ const syncSecrets = async ({
secrets,
accessToken,
});
break;
case INTEGRATION_TRAVISCI:
await syncSecretsTravisCI({
integration,
secrets,
accessToken,
});
break;
}
} catch (err) {
Sentry.setUser(null);
@@ -153,7 +172,6 @@ const syncSecretsAzureKeyVault = async ({
accessToken: string;
}) => {
try {
interface GetAzureKeyVaultSecret {
id: string; // secret URI
attributes: {
@@ -176,17 +194,22 @@ const syncSecretsAzureKeyVault = async ({
*/
const paginateAzureKeyVaultSecrets = async (url: string) => {
let result: GetAzureKeyVaultSecret[] = [];
while (url) {
const res = await axios.get(url, {
headers: {
Authorization: `Bearer ${accessToken}`,
'Accept-Encoding': 'application/json'
}
});
try {
while (url) {
const res = await request.get(url, {
headers: {
Authorization: `Bearer ${accessToken}`
}
});
result = result.concat(res.data.value);
url = res.data.nextLink;
}
result = result.concat(res.data.value);
url = res.data.nextLink;
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
}
return result;
@@ -200,10 +223,9 @@ const syncSecretsAzureKeyVault = async ({
lastSlashIndex = getAzureKeyVaultSecret.id.lastIndexOf('/');
}
const azureKeyVaultSecret = await axios.get(`${getAzureKeyVaultSecret.id}?api-version=7.3`, {
const azureKeyVaultSecret = await request.get(`${getAzureKeyVaultSecret.id}?api-version=7.3`, {
headers: {
'Authorization': `Bearer ${accessToken}`,
'Accept-Encoding': 'application/json'
'Authorization': `Bearer ${accessToken}`
}
});
@@ -249,33 +271,75 @@ const syncSecretsAzureKeyVault = async ({
deleteSecrets.push(res[key]);
}
});
const setSecretAzureKeyVault = async ({
key,
value,
integration,
accessToken
}: {
key: string;
value: string;
integration: IIntegration;
accessToken: string;
}) => {
let isSecretSet = false;
let maxTries = 6;
while (!isSecretSet && maxTries > 0) {
// try to set secret
try {
await request.put(
`${integration.app}/secrets/${key}?api-version=7.3`,
{
value
},
{
headers: {
Authorization: `Bearer ${accessToken}`
}
}
);
isSecretSet = true;
} catch (err) {
const error: any = err;
if (error?.response?.data?.error?.innererror?.code === 'ObjectIsDeletedButRecoverable') {
await request.post(
`${integration.app}/deletedsecrets/${key}/recover?api-version=7.3`, {},
{
headers: {
Authorization: `Bearer ${accessToken}`
}
}
);
await new Promise(resolve => setTimeout(resolve, 10000));
} else {
await new Promise(resolve => setTimeout(resolve, 10000));
maxTries--;
}
}
}
}
// Sync/push set secrets
if (setSecrets.length > 0) {
setSecrets.forEach(async ({ key, value }) => {
await axios.put(
`${integration.app}/secrets/${key}?api-version=7.3`,
{
value
},
{
headers: {
Authorization: `Bearer ${accessToken}`,
'Accept-Encoding': 'application/json'
}
}
);
for await (const setSecret of setSecrets) {
const { key, value } = setSecret;
setSecretAzureKeyVault({
key,
value,
integration,
accessToken
});
}
if (deleteSecrets.length > 0) {
deleteSecrets.forEach(async (secret) => {
await axios.delete(`${integration.app}/secrets/${secret.key}?api-version=7.3`, {
headers: {
'Authorization': `Bearer ${accessToken}`,
'Accept-Encoding': 'application/json'
}
});
for await (const deleteSecret of deleteSecrets) {
const { key } = deleteSecret;
await request.delete(`${integration.app}/secrets/${key}?api-version=7.3`, {
headers: {
'Authorization': `Bearer ${accessToken}`
}
});
}
} catch (err) {
@@ -485,7 +549,7 @@ const syncSecretsHeroku = async ({
}) => {
try {
const herokuSecrets = (
await axios.get(
await request.get(
`${INTEGRATION_HEROKU_API_URL}/apps/${integration.app}/config-vars`,
{
headers: {
@@ -503,7 +567,7 @@ const syncSecretsHeroku = async ({
}
});
await axios.patch(
await request.patch(
`${INTEGRATION_HEROKU_API_URL}/apps/${integration.app}/config-vars`,
secrets,
{
@@ -545,7 +609,7 @@ const syncSecretsVercel = async ({
value: string;
target: string[];
}
try {
// Get all (decrypted) secrets back from Vercel in
// decrypted format
@@ -558,40 +622,84 @@ const syncSecretsVercel = async ({
: {}),
};
const res = (
await Promise.all(
(
await axios.get(
`${INTEGRATION_VERCEL_API_URL}/v9/projects/${integration.app}/env`,
// const res = (
// await Promise.all(
// (
// await request.get(
// `${INTEGRATION_VERCEL_API_URL}/v9/projects/${integration.app}/env`,
// {
// params,
// headers: {
// Authorization: `Bearer ${accessToken}`,
// 'Accept-Encoding': 'application/json'
// }
// }
// ))
// .data
// .envs
// .filter((secret: VercelSecret) => secret.target.includes(integration.targetEnvironment))
// .map(async (secret: VercelSecret) => {
// if (secret.type === 'encrypted') {
// // case: secret is encrypted -> need to decrypt
// const decryptedSecret = (await request.get(
// `${INTEGRATION_VERCEL_API_URL}/v9/projects/${integration.app}/env/${secret.id}`,
// {
// params,
// headers: {
// Authorization: `Bearer ${accessToken}`,
// 'Accept-Encoding': 'application/json'
// }
// }
// )).data;
// return decryptedSecret;
// }
// return secret;
// }))).reduce((obj: any, secret: any) => ({
// ...obj,
// [secret.key]: secret
// }), {});
const vercelSecrets: VercelSecret[] = (await request.get(
`${INTEGRATION_VERCEL_API_URL}/v9/projects/${integration.app}/env`,
{
params,
headers: {
Authorization: `Bearer ${accessToken}`,
'Accept-Encoding': 'application/json'
}
}
))
.data
.envs
.filter((secret: VercelSecret) => secret.target.includes(integration.targetEnvironment));
const res: { [key: string]: VercelSecret } = {};
for await (const vercelSecret of vercelSecrets) {
if (vercelSecret.type === 'encrypted') {
// case: secret is encrypted -> need to decrypt
const decryptedSecret = (await request.get(
`${INTEGRATION_VERCEL_API_URL}/v9/projects/${integration.app}/env/${vercelSecret.id}`,
{
params,
headers: {
Authorization: `Bearer ${accessToken}`,
'Accept-Encoding': 'application/json'
}
}
))
.data
.envs
.filter((secret: VercelSecret) => secret.target.includes(integration.targetEnvironment))
.map(async (secret: VercelSecret) => (await axios.get(
`${INTEGRATION_VERCEL_API_URL}/v9/projects/${integration.app}/env/${secret.id}`,
{
params,
headers: {
Authorization: `Bearer ${accessToken}`,
'Accept-Encoding': 'application/json'
}
}
)).data)
)).reduce((obj: any, secret: any) => ({
...obj,
[secret.key]: secret
}), {});
}
)).data;
const updateSecrets: VercelSecret[] = [];
const deleteSecrets: VercelSecret[] = [];
const newSecrets: VercelSecret[] = [];
res[vercelSecret.key] = decryptedSecret;
} else {
res[vercelSecret.key] = vercelSecret;
}
}
const updateSecrets: VercelSecret[] = [];
const deleteSecrets: VercelSecret[] = [];
const newSecrets: VercelSecret[] = [];
// Identify secrets to create
Object.keys(secrets).map((key) => {
@@ -615,8 +723,10 @@ const syncSecretsVercel = async ({
id: res[key].id,
key: key,
value: secrets[key],
type: "encrypted",
target: [integration.targetEnvironment],
type: res[key].type,
target: res[key].target.includes(integration.targetEnvironment)
? [...res[key].target]
: [...res[key].target, integration.targetEnvironment]
});
}
} else {
@@ -625,7 +735,7 @@ const syncSecretsVercel = async ({
id: res[key].id,
key: key,
value: res[key].value,
type: "encrypted",
type: "encrypted", // value doesn't matter
target: [integration.targetEnvironment],
});
}
@@ -633,7 +743,7 @@ const syncSecretsVercel = async ({
// Sync/push new secrets
if (newSecrets.length > 0) {
await axios.post(
await request.post(
`${INTEGRATION_VERCEL_API_URL}/v10/projects/${integration.app}/env`,
newSecrets,
{
@@ -645,12 +755,11 @@ const syncSecretsVercel = async ({
}
);
}
// Sync/push updated secrets
if (updateSecrets.length > 0) {
updateSecrets.forEach(async (secret: VercelSecret) => {
for await (const secret of updateSecrets) {
if (secret.type !== 'sensitive') {
const { id, ...updatedSecret } = secret;
await axios.patch(
await request.patch(
`${INTEGRATION_VERCEL_API_URL}/v9/projects/${integration.app}/env/${secret.id}`,
updatedSecret,
{
@@ -661,23 +770,20 @@ const syncSecretsVercel = async ({
},
}
);
});
}
}
// Delete secrets
if (deleteSecrets.length > 0) {
deleteSecrets.forEach(async (secret: VercelSecret) => {
await axios.delete(
`${INTEGRATION_VERCEL_API_URL}/v9/projects/${integration.app}/env/${secret.id}`,
{
params,
headers: {
Authorization: `Bearer ${accessToken}`,
'Accept-Encoding': 'application/json'
},
}
);
});
for await (const secret of deleteSecrets) {
await request.delete(
`${INTEGRATION_VERCEL_API_URL}/v9/projects/${integration.app}/env/${secret.id}`,
{
params,
headers: {
Authorization: `Bearer ${accessToken}`,
'Accept-Encoding': 'application/json'
},
}
);
}
} catch (err) {
Sentry.setUser(null);
@@ -727,7 +833,7 @@ const syncSecretsNetlify = async ({
});
const res = (
await axios.get(
await request.get(
`${INTEGRATION_NETLIFY_API_URL}/api/v1/accounts/${integrationAuth.accountId}/env`,
{
params: getParams,
@@ -841,7 +947,7 @@ const syncSecretsNetlify = async ({
});
if (newSecrets.length > 0) {
await axios.post(
await request.post(
`${INTEGRATION_NETLIFY_API_URL}/api/v1/accounts/${integrationAuth.accountId}/env`,
newSecrets,
{
@@ -856,7 +962,7 @@ const syncSecretsNetlify = async ({
if (updateSecrets.length > 0) {
updateSecrets.forEach(async (secret: NetlifySecret) => {
await axios.patch(
await request.patch(
`${INTEGRATION_NETLIFY_API_URL}/api/v1/accounts/${integrationAuth.accountId}/env/${secret.key}`,
{
context: secret.values[0].context,
@@ -875,7 +981,7 @@ const syncSecretsNetlify = async ({
if (deleteSecrets.length > 0) {
deleteSecrets.forEach(async (key: string) => {
await axios.delete(
await request.delete(
`${INTEGRATION_NETLIFY_API_URL}/api/v1/accounts/${integrationAuth.accountId}/env/${key}`,
{
params: syncParams,
@@ -890,7 +996,7 @@ const syncSecretsNetlify = async ({
if (deleteSecretValues.length > 0) {
deleteSecretValues.forEach(async (secret: NetlifySecret) => {
await axios.delete(
await request.delete(
`${INTEGRATION_NETLIFY_API_URL}/api/v1/accounts/${integrationAuth.accountId}/env/${secret.key}/value/${secret.values[0].id}`,
{
params: syncParams,
@@ -1041,7 +1147,7 @@ const syncSecretsRender = async ({
accessToken: string;
}) => {
try {
await axios.put(
await request.put(
`${INTEGRATION_RENDER_API_URL}/v1/services/${integration.appId}/env-vars`,
Object.keys(secrets).map((key) => ({
key,
@@ -1099,24 +1205,21 @@ const syncSecretsFlyio = async ({
}
`;
await axios({
url: INTEGRATION_FLYIO_API_URL,
method: "post",
await request.post(INTEGRATION_FLYIO_API_URL, {
query: SetSecrets,
variables: {
input: {
appId: integration.app,
secrets: Object.entries(secrets).map(([key, value]) => ({
key,
value,
})),
},
},
}, {
headers: {
Authorization: "Bearer " + accessToken,
'Accept-Encoding': 'application/json'
},
data: {
query: SetSecrets,
variables: {
input: {
appId: integration.app,
secrets: Object.entries(secrets).map(([key, value]) => ({
key,
value,
})),
},
},
'Accept-Encoding': 'application/json',
},
});
@@ -1137,23 +1240,18 @@ const syncSecretsFlyio = async ({
}
}`;
const getSecretsRes = (
await axios({
method: "post",
url: INTEGRATION_FLYIO_API_URL,
headers: {
'Authorization': 'Bearer ' + accessToken,
'Content-Type': 'application/json',
'Accept-Encoding': 'application/json'
},
data: {
query: GetSecrets,
variables: {
appName: integration.app,
},
},
})
).data.data.app.secrets;
const getSecretsRes = (await request.post(INTEGRATION_FLYIO_API_URL, {
query: GetSecrets,
variables: {
appName: integration.app,
},
}, {
headers: {
Authorization: "Bearer " + accessToken,
'Content-Type': 'application/json',
'Accept-Encoding': 'application/json',
},
})).data.data.app.secrets;
const deleteSecretsKeys = getSecretsRes
.filter((secret: FlyioSecret) => !(secret.name in secrets))
@@ -1178,24 +1276,22 @@ const syncSecretsFlyio = async ({
}
}`;
await axios({
method: "post",
url: INTEGRATION_FLYIO_API_URL,
await request.post(INTEGRATION_FLYIO_API_URL, {
query: DeleteSecrets,
variables: {
input: {
appId: integration.app,
keys: deleteSecretsKeys,
},
},
}, {
headers: {
Authorization: "Bearer " + accessToken,
"Content-Type": "application/json",
'Accept-Encoding': 'application/json'
},
data: {
query: DeleteSecrets,
variables: {
input: {
appId: integration.app,
keys: deleteSecretsKeys,
},
},
'Accept-Encoding': 'application/json',
},
});
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
@@ -1221,7 +1317,7 @@ const syncSecretsCircleCI = async ({
}) => {
try {
const circleciOrganizationDetail = (
await axios.get(`${INTEGRATION_CIRCLECI_API_URL}/v2/me/collaborations`, {
await request.get(`${INTEGRATION_CIRCLECI_API_URL}/v2/me/collaborations`, {
headers: {
"Circle-Token": accessToken,
"Accept-Encoding": "application/json",
@@ -1234,7 +1330,7 @@ const syncSecretsCircleCI = async ({
// sync secrets to CircleCI
Object.keys(secrets).forEach(
async (key) =>
await axios.post(
await request.post(
`${INTEGRATION_CIRCLECI_API_URL}/v2/project/${slug}/${integration.app}/envvar`,
{
name: key,
@@ -1251,7 +1347,7 @@ const syncSecretsCircleCI = async ({
// get secrets from CircleCI
const getSecretsRes = (
await axios.get(
await request.get(
`${INTEGRATION_CIRCLECI_API_URL}/v2/project/${slug}/${integration.app}/envvar`,
{
headers: {
@@ -1265,7 +1361,7 @@ const syncSecretsCircleCI = async ({
// delete secrets from CircleCI
getSecretsRes.forEach(async (sec: any) => {
if (!(sec.name in secrets)) {
await axios.delete(
await request.delete(
`${INTEGRATION_CIRCLECI_API_URL}/v2/project/${slug}/${integration.app}/envvar/${sec.name}`,
{
headers: {
@@ -1283,4 +1379,196 @@ const syncSecretsCircleCI = async ({
}
};
/**
* Sync/push [secrets] to TravisCI project
* @param {Object} obj
* @param {IIntegration} obj.integration - integration details
* @param {Object} obj.secrets - secrets to push to integration (object where keys are secret keys and values are secret values)
* @param {String} obj.accessToken - access token for TravisCI integration
*/
const syncSecretsTravisCI = async ({
integration,
secrets,
accessToken,
}: {
integration: IIntegration;
secrets: any;
accessToken: string;
}) => {
try {
// get secrets from travis-ci
const getSecretsRes = (
await request.get(
`${INTEGRATION_TRAVISCI_API_URL}/settings/env_vars?repository_id=${integration.appId}`,
{
headers: {
"Authorization": `token ${accessToken}`,
"Accept-Encoding": "application/json",
},
}
)
)
.data
?.env_vars
.reduce((obj: any, secret: any) => ({
...obj,
[secret.name]: secret
}), {});
// add secrets
for await (const key of Object.keys(secrets)) {
if (!(key in getSecretsRes)) {
// case: secret does not exist in travis ci
// -> add secret
await request.post(
`${INTEGRATION_TRAVISCI_API_URL}/settings/env_vars?repository_id=${integration.appId}`,
{
env_var: {
name: key,
value: secrets[key]
}
},
{
headers: {
"Authorization": `token ${accessToken}`,
"Content-Type": "application/json",
"Accept-Encoding": "application/json",
},
}
);
} else {
// case: secret exists in travis ci
// -> update/set secret
await request.patch(
`${INTEGRATION_TRAVISCI_API_URL}/settings/env_vars/${getSecretsRes[key].id}?repository_id=${getSecretsRes[key].repository_id}`,
{
env_var: {
name: key,
value: secrets[key],
}
},
{
headers: {
"Authorization": `token ${accessToken}`,
"Content-Type": "application/json",
"Accept-Encoding": "application/json",
},
}
);
}
}
for await (const key of Object.keys(getSecretsRes)) {
if (!(key in secrets)){
// delete secret
await request.delete(
`${INTEGRATION_TRAVISCI_API_URL}/settings/env_vars/${getSecretsRes[key].id}?repository_id=${getSecretsRes[key].repository_id}`,
{
headers: {
"Authorization": `token ${accessToken}`,
"Content-Type": "application/json",
"Accept-Encoding": "application/json",
},
}
);
}
}
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error("Failed to sync secrets to GitLab");
}
}
/**
* Sync/push [secrets] to GitLab repo with name [integration.app]
* @param {Object} obj
* @param {IIntegration} obj.integration - integration details
* @param {IIntegrationAuth} obj.integrationAuth - integration auth details
* @param {Object} obj.secrets - secrets to push to integration (object where keys are secret keys and values are secret values)
* @param {String} obj.accessToken - access token for GitLab integration
*/
const syncSecretsGitLab = async ({
integration,
secrets,
accessToken,
}: {
integration: IIntegration;
secrets: any;
accessToken: string;
}) => {
try {
// get secrets from gitlab
const getSecretsRes = (
await request.get(
`${INTEGRATION_GITLAB_API_URL}/v4/projects/${integration?.appId}/variables`,
{
headers: {
"Authorization": `Bearer ${accessToken}`,
"Accept-Encoding": "application/json",
},
}
)
).data;
for await (const key of Object.keys(secrets)) {
const existingSecret = getSecretsRes.find((s: any) => s.key == key);
if (!existingSecret) {
await request.post(
`${INTEGRATION_GITLAB_API_URL}/v4/projects/${integration?.appId}/variables`,
{
key: key,
value: secrets[key],
protected: false,
masked: false,
raw: false,
environment_scope: '*'
},
{
headers: {
"Authorization": `Bearer ${accessToken}`,
"Content-Type": "application/json",
"Accept-Encoding": "application/json",
},
}
)
} else {
// update secret
await request.put(
`${INTEGRATION_GITLAB_API_URL}/v4/projects/${integration?.appId}/variables/${existingSecret.key}`,
{
...existingSecret,
value: secrets[existingSecret.key]
},
{
headers: {
"Authorization": `Bearer ${accessToken}`,
"Content-Type": "application/json",
"Accept-Encoding": "application/json",
},
}
)
}
}
// delete secrets
for await (const sec of getSecretsRes) {
if (!(sec.key in secrets)) {
await request.delete(
`${INTEGRATION_GITLAB_API_URL}/v4/projects/${integration?.appId}/variables/${sec.key}`,
{
headers: {
"Authorization": `Bearer ${accessToken}`,
},
}
);
}
}
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error("Failed to sync secrets to GitLab");
}
}
export { syncSecrets };
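For reference, a hedged sketch of the input shape the new syncSecretsGitLab path expects: a flat key/value map of secrets, inferred from how the function iterates Object.keys(secrets). The example values and the commented dispatcher call are illustrative only; the real values would come from the workspace's decrypted secrets.
// Illustrative secrets map; placeholder values only.
const exampleSecrets: { [key: string]: string } = {
  DATABASE_URL: 'postgres://user:pass@host:5432/db',
  STRIPE_SECRET_KEY: 'sk_test_placeholder'
};
// Hypothetical call into the dispatcher (the remaining arguments come from the integration records):
// await syncSecrets({ integration, integrationAuth, secrets: exampleSecrets, accessToken });
// GitLab variables missing from the map are created, existing ones updated, and stale ones deleted.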

View File

@@ -0,0 +1,92 @@
import * as Sentry from "@sentry/node";
import {
IIntegrationAuth
} from '../models';
import {
INTEGRATION_GITLAB,
INTEGRATION_GITLAB_API_URL
} from '../variables';
import request from '../config/request';
interface Team {
name: string;
teamId: string;
}
/**
* Return list of teams for integration authorization [integrationAuth]
* @param {Object} obj
* @param {IIntegrationAuth} obj.integrationAuth - integration authorization to get teams for
* @param {String} obj.accessToken - access token for integration authorization
* @returns {Object[]} teams - teams of integration authorization
* @returns {String} teams.name - name of team
* @returns {String} teams.teamId - id of team
*/
const getTeams = async ({
integrationAuth,
accessToken
}: {
integrationAuth: IIntegrationAuth;
accessToken: string;
}) => {
let teams: Team[] = [];
try {
switch (integrationAuth.integration) {
case INTEGRATION_GITLAB:
teams = await getTeamsGitLab({
accessToken
});
break;
}
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed to get integration teams');
}
return teams;
}
/**
* Return list of teams for GitLab integration
* @param {Object} obj
* @param {String} obj.accessToken - access token for GitLab API
* @returns {Object[]} teams - teams that user is part of in GitLab
* @returns {String} teams.name - name of team
* @returns {String} teams.teamId - id of team
*/
const getTeamsGitLab = async ({
accessToken
}: {
accessToken: string;
}) => {
let teams: Team[] = [];
try {
const res = (await request.get(
`${INTEGRATION_GITLAB_API_URL}/v4/groups`,
{
headers: {
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
}
}
)).data;
teams = res.map((t: any) => ({
name: t.name,
teamId: t.id
}));
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error("Failed to get GitLab integration teams");
}
return teams;
}
export {
getTeams
}
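A short usage sketch for the new getTeams entry point; the import paths, and the assumption that the access token has already been decrypted elsewhere, are illustrative.
import { IIntegrationAuth } from '../models';
import { getTeams } from '../integrations';
// For a GitLab integration auth, this resolves the user's groups mapped to { name, teamId }.
const listGitLabGroups = async (integrationAuth: IIntegrationAuth, accessToken: string) => {
  const teams = await getTeams({ integrationAuth, accessToken });
  return teams;
};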

View File

@@ -1,16 +1,13 @@
import { ErrorRequestHandler } from "express";
import * as Sentry from '@sentry/node';
import { InternalServerError, UnauthorizedRequestError } from "../utils/errors";
import { ErrorRequestHandler } from "express";
import { InternalServerError } from "../utils/errors";
import { getLogger } from "../utils/logger";
import RequestError, { LogLevel } from "../utils/requestError";
import { NODE_ENV } from "../config";
import { TokenExpiredError } from 'jsonwebtoken';
import { getNodeEnv } from '../config';
export const requestErrorHandler: ErrorRequestHandler = (error: RequestError | Error, req, res, next) => {
if (res.headersSent) return next();
if (NODE_ENV !== "production") {
if (getNodeEnv() !== "production") {
/* eslint-disable no-console */
console.log(error)
/* eslint-enable no-console */

View File

@@ -1,12 +1,14 @@
import jwt from 'jsonwebtoken';
import { Request, Response, NextFunction } from 'express';
import { User, ServiceTokenData } from '../models';
import {
validateAuthMode,
getAuthUserPayload,
getAuthSTDPayload,
getAuthAPIKeyPayload
} from '../helpers/auth';
import {
UnauthorizedRequestError
} from '../utils/errors';
declare module 'jsonwebtoken' {
export interface UserIDJwtPayload extends jwt.JwtPayload {
@@ -25,9 +27,11 @@ declare module 'jsonwebtoken' {
* @returns
*/
const requireAuth = ({
acceptedAuthModes = ['jwt']
acceptedAuthModes = ['jwt'],
requiredServiceTokenPermissions = []
}: {
acceptedAuthModes: string[];
requiredServiceTokenPermissions?: string[];
}) => {
return async (req: Request, res: Response, next: NextFunction) => {
// validate auth token against accepted auth modes [acceptedAuthModes]
@@ -38,11 +42,22 @@ const requireAuth = ({
});
// attach auth payloads
let serviceTokenData: any;
switch (authTokenType) {
case 'serviceToken':
req.serviceTokenData = await getAuthSTDPayload({
serviceTokenData = await getAuthSTDPayload({
authTokenValue
});
requiredServiceTokenPermissions.forEach((requiredServiceTokenPermission) => {
if (!serviceTokenData.permissions.includes(requiredServiceTokenPermission)) {
return next(UnauthorizedRequestError({ message: 'Failed to authorize service token for endpoint' }));
}
});
req.serviceTokenData = serviceTokenData;
req.user = serviceTokenData?.user;
break;
case 'apiKey':
req.user = await getAuthAPIKeyPayload({

View File

@@ -1,8 +1,8 @@
import jwt from 'jsonwebtoken';
import { Request, Response, NextFunction } from 'express';
import { User } from '../models';
import { JWT_MFA_SECRET } from '../config';
import { BadRequestError, UnauthorizedRequestError } from '../utils/errors';
import { getJwtMfaSecret } from '../config';
declare module 'jsonwebtoken' {
export interface UserIDJwtPayload extends jwt.JwtPayload {
@@ -26,7 +26,7 @@ const requireMfaAuth = async (
if(AUTH_TOKEN_VALUE === null) return next(BadRequestError({message: 'Missing Authorization Body in the request header'}))
const decodedToken = <jwt.UserIDJwtPayload>(
jwt.verify(AUTH_TOKEN_VALUE, JWT_MFA_SECRET)
jwt.verify(AUTH_TOKEN_VALUE, getJwtMfaSecret())
);
const user = await User.findOne({

View File

@@ -23,7 +23,7 @@ const requireSecretsAuth = ({
// case: validate 1 secret
secrets = await validateSecrets({
userId: req.user._id.toString(),
secretIds: req.body.secrets.id
secretIds: [req.body.secrets.id]
});
} else if (Array.isArray(req.body.secretIds)) {
secrets = await validateSecrets({

View File

@@ -1,8 +1,8 @@
import jwt from 'jsonwebtoken';
import { Request, Response, NextFunction } from 'express';
import { ServiceToken } from '../models';
import { JWT_SERVICE_SECRET } from '../config';
import { BadRequestError, UnauthorizedRequestError } from '../utils/errors';
import { getJwtServiceSecret } from '../config';
// TODO: deprecate
declare module 'jsonwebtoken' {
@@ -33,7 +33,7 @@ const requireServiceTokenAuth = async (
if(AUTH_TOKEN_VALUE === null) return next(BadRequestError({message: 'Missing Authorization Body in the request header'}))
const decodedToken = <jwt.UserIDJwtPayload>(
jwt.verify(AUTH_TOKEN_VALUE, JWT_SERVICE_SECRET)
jwt.verify(AUTH_TOKEN_VALUE, getJwtServiceSecret())
);
const serviceToken = await ServiceToken.findOne({

View File

@@ -1,8 +1,8 @@
import jwt from 'jsonwebtoken';
import { Request, Response, NextFunction } from 'express';
import { User } from '../models';
import { JWT_SIGNUP_SECRET } from '../config';
import { BadRequestError, UnauthorizedRequestError } from '../utils/errors';
import { getJwtSignupSecret } from '../config';
declare module 'jsonwebtoken' {
export interface UserIDJwtPayload extends jwt.JwtPayload {
@@ -27,7 +27,7 @@ const requireSignupAuth = async (
if(AUTH_TOKEN_VALUE === null) return next(BadRequestError({message: 'Missing Authorization Body in the request header'}))
const decodedToken = <jwt.UserIDJwtPayload>(
jwt.verify(AUTH_TOKEN_VALUE, JWT_SIGNUP_SECRET)
jwt.verify(AUTH_TOKEN_VALUE, getJwtSignupSecret())
);
const user = await User.findOne({

View File

@@ -21,7 +21,7 @@ const requireWorkspaceAuth = ({
return async (req: Request, res: Response, next: NextFunction) => {
try {
const { workspaceId } = req[location];
if (req.user) {
// case: jwt auth
const membership = await validateMembership({
@@ -32,11 +32,11 @@ const requireWorkspaceAuth = ({
req.membership = membership;
}
if (
req.serviceTokenData
&& req.serviceTokenData.workspace !== workspaceId
&& req.serviceTokenData.environment !== req.query.environment
&& req.serviceTokenData.workspace.toString() !== workspaceId
&& req.serviceTokenData.environment !== req.body.environment
) {
next(UnauthorizedRequestError({message: 'Unable to authenticate workspace'}))
}

View File

@@ -7,9 +7,11 @@ import {
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_GITLAB,
INTEGRATION_RENDER,
INTEGRATION_FLYIO,
INTEGRATION_CIRCLECI,
INTEGRATION_TRAVISCI,
} from "../variables";
export interface IIntegration {
@@ -30,10 +32,12 @@ export interface IIntegration {
| 'heroku'
| 'vercel'
| 'netlify'
| 'github'
| 'github'
| 'gitlab'
| 'render'
| 'flyio'
| 'circleci';
| 'circleci'
| 'travisci';
integrationAuth: Types.ObjectId;
}
@@ -58,13 +62,11 @@ const integrationSchema = new Schema<IIntegration>(
default: null,
},
appId: {
// (new)
// id of app in provider
type: String,
default: null,
},
targetEnvironment: {
// (new)
// target environment
type: String,
default: null,
@@ -94,9 +96,11 @@ const integrationSchema = new Schema<IIntegration>(
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_GITLAB,
INTEGRATION_RENDER,
INTEGRATION_FLYIO,
INTEGRATION_CIRCLECI,
INTEGRATION_TRAVISCI,
],
required: true,
},

View File

@@ -7,23 +7,25 @@ import {
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_GITLAB,
INTEGRATION_RENDER,
INTEGRATION_FLYIO,
INTEGRATION_CIRCLECI,
INTEGRATION_TRAVISCI,
} from "../variables";
export interface IIntegrationAuth {
_id: Types.ObjectId;
workspace: Types.ObjectId;
integration: 'heroku' | 'vercel' | 'netlify' | 'github' | 'render' | 'flyio' | 'azure-key-vault' | 'circleci' | 'aws-parameter-store' | 'aws-secret-manager';
integration: 'heroku' | 'vercel' | 'netlify' | 'github' | 'gitlab' | 'render' | 'flyio' | 'azure-key-vault' | 'circleci' | 'travisci' | 'aws-parameter-store' | 'aws-secret-manager';
teamId: string;
accountId: string;
refreshCiphertext?: string;
refreshIV?: string;
refreshTag?: string;
accessIdCiphertext?: string; // new
accessIdIV?: string; // new
accessIdTag?: string; // new
accessIdCiphertext?: string;
accessIdIV?: string;
accessIdTag?: string;
accessCiphertext?: string;
accessIV?: string;
accessTag?: string;
@@ -47,9 +49,11 @@ const integrationAuthSchema = new Schema<IIntegrationAuth>(
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_GITLAB,
INTEGRATION_RENDER,
INTEGRATION_FLYIO,
INTEGRATION_CIRCLECI,
INTEGRATION_TRAVISCI,
],
required: true,
},

View File

@@ -3,13 +3,14 @@ import { Schema, model, Types } from 'mongoose';
export interface IServiceTokenData {
name: string;
workspace: Types.ObjectId;
environment: string; // TODO: adapt to upcoming environment id
environment: string;
user: Types.ObjectId;
expiresAt: Date;
secretHash: string;
encryptedKey: string;
iv: string;
tag: string;
permissions: string[];
}
const serviceTokenDataSchema = new Schema<IServiceTokenData>(
@@ -51,6 +52,11 @@ const serviceTokenDataSchema = new Schema<IServiceTokenData>(
tag: {
type: String,
select: false
},
permissions: {
type: [String],
enum: ['read', 'write'],
default: ['read']
}
},
{

View File

@@ -1,5 +1,4 @@
import { Schema, model } from 'mongoose';
import { EMAIL_TOKEN_LIFETIME } from '../config';
export interface IToken {
email: string;

View File

@@ -16,9 +16,9 @@ const tokenDataSchema = new Schema<ITokenData>({
type: {
type: String,
enum: [
'emailConfirmation',
'emailMfa',
'organizationInvitation',
'emailConfirmation',
'emailMfa',
'organizationInvitation',
'passwordReset'
],
required: true
@@ -50,12 +50,6 @@ const tokenDataSchema = new Schema<ITokenData>({
timestamps: true
});
tokenDataSchema.index({
expiresAt: 1
}, {
expireAfterSeconds: 0
});
const TokenData = model<ITokenData>('TokenData', tokenDataSchema);
export default TokenData;

View File

@@ -1,4 +1,5 @@
import express, { Request, Response } from 'express';
import { getSmtpConfigured } from '../../config';
const router = express.Router();
@@ -8,6 +9,7 @@ router.get(
res.status(200).json({
date: new Date(),
message: 'Ok',
emailConfigured: getSmtpConfigured()
})
}
);
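Since the status payload now includes emailConfigured, a client can gate email-dependent UI on it. A hedged sketch, assuming a runtime with a global fetch (Node 18+ or a browser); the mount path of this router is not shown in the hunk, so the URL is passed in as a placeholder.
// Hypothetical client-side check; statusUrl should point at wherever this status router is mounted.
const isEmailConfigured = async (statusUrl: string): Promise<boolean> => {
  const res = await fetch(statusUrl);
  const body: { date: string; message: string; emailConfigured: boolean } = await res.json();
  return body.emailConfigured;
};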

View File

@@ -1,6 +1,6 @@
import express from 'express';
const router = express.Router();
import { body, param } from 'express-validator';
import { body, param, query } from 'express-validator';
import {
requireAuth,
requireWorkspaceAuth,
@@ -73,10 +73,24 @@ router.get(
acceptedRoles: [ADMIN, MEMBER]
}),
param('integrationAuthId'),
query('teamId'),
validateRequest,
integrationAuthController.getIntegrationAuthApps
);
router.get(
'/:integrationAuthId/teams',
requireAuth({
acceptedAuthModes: ['jwt']
}),
requireIntegrationAuthorizationAuth({
acceptedRoles: [ADMIN, MEMBER]
}),
param('integrationAuthId'),
validateRequest,
integrationAuthController.getIntegrationAuthTeams
);
router.delete(
'/:integrationAuthId',
requireAuth({

View File

@@ -22,7 +22,8 @@ import {
router.post(
'/batch',
requireAuth({
acceptedAuthModes: ['jwt', 'apiKey']
acceptedAuthModes: ['jwt', 'apiKey', 'serviceToken'],
requiredServiceTokenPermissions: ['read', 'write']
}),
requireWorkspaceAuth({
acceptedRoles: [ADMIN, MEMBER],
@@ -99,7 +100,8 @@ router.post(
}),
validateRequest,
requireAuth({
acceptedAuthModes: ['jwt', 'apiKey']
acceptedAuthModes: ['jwt', 'apiKey', 'serviceToken'],
requiredServiceTokenPermissions: ['write']
}),
requireWorkspaceAuth({
acceptedRoles: [ADMIN, MEMBER],
@@ -115,7 +117,8 @@ router.get(
query('tagSlugs'),
validateRequest,
requireAuth({
acceptedAuthModes: ['jwt', 'apiKey', 'serviceToken']
acceptedAuthModes: ['jwt', 'apiKey', 'serviceToken'],
requiredServiceTokenPermissions: ['read']
}),
requireWorkspaceAuth({
acceptedRoles: [ADMIN, MEMBER],
@@ -154,7 +157,8 @@ router.patch(
}),
validateRequest,
requireAuth({
acceptedAuthModes: ['jwt', 'apiKey']
acceptedAuthModes: ['jwt', 'apiKey', 'serviceToken'],
requiredServiceTokenPermissions: ['write']
}),
requireSecretsAuth({
acceptedRoles: [ADMIN, MEMBER]
@@ -182,7 +186,8 @@ router.delete(
.isEmpty(),
validateRequest,
requireAuth({
acceptedAuthModes: ['jwt', 'apiKey']
acceptedAuthModes: ['jwt', 'apiKey', 'serviceToken'],
requiredServiceTokenPermissions: ['write']
}),
requireSecretsAuth({
acceptedRoles: [ADMIN, MEMBER]
@@ -192,5 +197,3 @@ router.delete(
export default router;

View File

@@ -30,13 +30,22 @@ router.post(
acceptedRoles: [ADMIN, MEMBER],
location: 'body'
}),
body('name').exists().trim(),
body('workspaceId'),
body('environment'),
body('encryptedKey'),
body('iv'),
body('tag'),
body('expiresIn'), // measured in ms
body('name').exists().isString().trim(),
body('workspaceId').exists().isString().trim(),
body('environment').exists().isString().trim(),
body('encryptedKey').exists().isString().trim(),
body('iv').exists().isString().trim(),
body('tag').exists().isString().trim(),
body('expiresIn').exists().isNumeric(), // measured in ms
body('permissions').isArray({ min: 1 }).custom((value: string[]) => {
const allowedPermissions = ['read', 'write'];
const invalidValues = value.filter((v) => !allowedPermissions.includes(v));
if (invalidValues.length > 0) {
throw new Error(`permissions contains invalid values: ${invalidValues.join(', ')}`);
}
return true
}),
validateRequest,
serviceTokenDataController.createServiceTokenData
);
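A hedged example of a request body that would pass the tightened validators above, including the new permissions array restricted to 'read' and 'write'; every value is a placeholder.
// Placeholder payload for creating service token data; no real credentials.
const createServiceTokenBody = {
  name: 'ci-token',
  workspaceId: '<workspace object id>',
  environment: 'dev',
  encryptedKey: '<base64 ciphertext>',
  iv: '<base64 iv>',
  tag: '<base64 tag>',
  expiresIn: 2592000000, // 30 days, measured in ms
  permissions: ['read'] // any value outside ['read', 'write'] fails the custom validator
};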

View File

@@ -118,7 +118,6 @@ router.delete( // TODO - rewire dashboard to this route
workspaceController.deleteWorkspaceMembership
);
router.patch(
'/:workspaceId/auto-capitalization',
requireAuth({

View File

@@ -1,16 +1,32 @@
import mongoose from 'mongoose';
import { getLogger } from '../utils/logger';
import { initDatabaseHelper } from '../helpers/database';
import {
initDatabaseHelper,
closeDatabaseHelper
} from '../helpers/database';
/**
* Class to handle database actions
*/
class DatabaseService {
/**
* Initialize database connection
* @param {Object} obj
* @param {String} obj.mongoURL - mongo connection string
* @returns
*/
static async initDatabase(MONGO_URL: string) {
return await initDatabaseHelper({
mongoURL: MONGO_URL
});
}
/**
* Close database connection
*/
static async closeDatabase() {
return await closeDatabaseHelper();
}
}
export default DatabaseService;
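A sketch of how the new closeDatabase hook could be tied into server shutdown; the Server instance, import path, and helper name are assumptions, and the actual wiring lives elsewhere in the backend.
import { Server } from 'http';
import DatabaseService from './services/DatabaseService';
// Hypothetical shutdown helper: stop accepting connections, then close the Mongo connection.
const handleShutdown = (server: Server) => {
  server.close(() => {
    void DatabaseService.closeDatabase();
  });
};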

View File

@@ -1,5 +1,3 @@
import { Bot, IBot } from '../models';
import * as Sentry from '@sentry/node';
import { handleEventHelper } from '../helpers/event';
interface Event {

View File

@@ -7,9 +7,6 @@ import {
setIntegrationAuthAccessHelper,
} from '../helpers/integration';
// should sync stuff be here too? Probably.
// TODO: move bot functions to IntegrationService.
/**
* Class to handle integrations
*/

View File

@@ -1,27 +1,44 @@
import { PostHog } from 'posthog-node';
import {
NODE_ENV,
POSTHOG_HOST,
POSTHOG_PROJECT_API_KEY,
TELEMETRY_ENABLED
} from '../config';
import { getLogger } from '../utils/logger';
import {
getNodeEnv,
getTelemetryEnabled,
getPostHogProjectApiKey,
getPostHogHost
} from '../config';
if(!TELEMETRY_ENABLED){
getLogger("backend-main").info([
"",
"To improve, Infisical collects telemetry data about general usage.",
"This helps us understand how the product is doing and guide our product development to create the best possible platform; it also helps us demonstrate growth as we support Infisical as open-source software.",
"To opt into telemetry, you can set `TELEMETRY_ENABLED=true` within the environment variables.",
].join('\n'))
/**
* Logs telemetry enable/disable notice.
*/
const logTelemetryMessage = () => {
if(!getTelemetryEnabled()){
getLogger("backend-main").info([
"",
"To improve, Infisical collects telemetry data about general usage.",
"This helps us understand how the product is doing and guide our product development to create the best possible platform; it also helps us demonstrate growth as we support Infisical as open-source software.",
"To opt into telemetry, you can set `TELEMETRY_ENABLED=true` within the environment variables.",
].join('\n'))
}
}
let postHogClient: any;
if (NODE_ENV === 'production' && TELEMETRY_ENABLED) {
// case: enable opt-out telemetry in production
postHogClient = new PostHog(POSTHOG_PROJECT_API_KEY, {
host: POSTHOG_HOST
});
/**
* Return an instance of the PostHog client initialized.
* @returns
*/
const getPostHogClient = () => {
let postHogClient: any;
if (getNodeEnv() === 'production' && getTelemetryEnabled()) {
// case: enable opt-out telemetry in production
postHogClient = new PostHog(getPostHogProjectApiKey(), {
host: getPostHogHost()
});
}
return postHogClient;
}
export {
logTelemetryMessage,
getPostHogClient
}
export default postHogClient;
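The refactor implies a guard-then-capture pattern at call sites, since getPostHogClient returns undefined outside production or when telemetry is disabled. A hedged sketch; the event name, properties, and import path are made up for illustration.
import { getPostHogClient } from './services';
const trackSecretsPushed = (userEmail: string, numberOfSecrets: number) => {
  const postHogClient = getPostHogClient();
  // undefined unless NODE_ENV is production and telemetry is enabled
  if (postHogClient) {
    postHogClient.capture({
      distinctId: userEmail,
      event: 'secrets pushed',
      properties: { numberOfSecrets }
    });
  }
};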

View File

@@ -1,13 +1,14 @@
import DatabaseService from './DatabaseService';
import postHogClient from './PostHogClient';
import { logTelemetryMessage, getPostHogClient } from './PostHogClient';
import BotService from './BotService';
import EventService from './EventService';
import IntegrationService from './IntegrationService';
import TokenService from './TokenService';
export {
logTelemetryMessage,
getPostHogClient,
DatabaseService,
postHogClient,
BotService,
EventService,
IntegrationService,

View File

@@ -1,55 +1,68 @@
import nodemailer from 'nodemailer';
import { SMTP_HOST, SMTP_PORT, SMTP_USERNAME, SMTP_PASSWORD, SMTP_SECURE } from '../config';
import {
SMTP_HOST_SENDGRID,
SMTP_HOST_MAILGUN,
SMTP_HOST_SOCKETLABS
SMTP_HOST_SOCKETLABS,
SMTP_HOST_ZOHOMAIL
} from '../variables';
import SMTPConnection from 'nodemailer/lib/smtp-connection';
import * as Sentry from '@sentry/node';
import {
getSmtpHost,
getSmtpUsername,
getSmtpPassword,
getSmtpSecure,
getSmtpPort
} from '../config';
const mailOpts: SMTPConnection.Options = {
host: SMTP_HOST,
port: SMTP_PORT as number
};
if (SMTP_USERNAME && SMTP_PASSWORD) {
mailOpts.auth = {
user: SMTP_USERNAME,
pass: SMTP_PASSWORD
export const initSmtp = () => {
const mailOpts: SMTPConnection.Options = {
host: getSmtpHost(),
port: getSmtpPort()
};
}
if (SMTP_SECURE) {
switch (SMTP_HOST) {
case SMTP_HOST_SENDGRID:
mailOpts.requireTLS = true;
break;
case SMTP_HOST_MAILGUN:
mailOpts.requireTLS = true;
mailOpts.tls = {
ciphers: 'TLSv1.2'
}
break;
case SMTP_HOST_SOCKETLABS:
mailOpts.requireTLS = true;
mailOpts.tls = {
ciphers: 'TLSv1.2'
}
break;
default:
if (SMTP_HOST.includes('amazonaws.com')) {
if (getSmtpUsername() && getSmtpPassword()) {
mailOpts.auth = {
user: getSmtpUsername(),
pass: getSmtpPassword()
};
}
if (getSmtpSecure() ? getSmtpSecure() : false) {
switch (getSmtpHost()) {
case SMTP_HOST_SENDGRID:
mailOpts.requireTLS = true;
break;
case SMTP_HOST_MAILGUN:
mailOpts.requireTLS = true;
mailOpts.tls = {
ciphers: 'TLSv1.2'
}
} else {
mailOpts.secure = true;
}
break;
break;
case SMTP_HOST_SOCKETLABS:
mailOpts.requireTLS = true;
mailOpts.tls = {
ciphers: 'TLSv1.2'
}
break;
case SMTP_HOST_ZOHOMAIL:
mailOpts.requireTLS = true;
mailOpts.tls = {
ciphers: 'TLSv1.2'
}
break;
default:
if (getSmtpHost().includes('amazonaws.com')) {
mailOpts.tls = {
ciphers: 'TLSv1.2'
}
} else {
mailOpts.secure = true;
}
break;
}
}
}
export const initSmtp = () => {
const transporter = nodemailer.createTransport(mailOpts);
transporter
.verify()
@@ -60,7 +73,7 @@ export const initSmtp = () => {
.catch((err) => {
Sentry.setUser(null);
Sentry.captureException(
`SMTP - Failed to connect to ${SMTP_HOST}:${SMTP_PORT} \n\t${err}`
`SMTP - Failed to connect to ${getSmtpHost()}:${getSmtpPort()} \n\t${err}`
);
});
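A minimal sketch of exercising the refactored initSmtp, assuming it ultimately returns the nodemailer transporter (the return statement sits outside this hunk) and that the import path is as guessed; the addresses are placeholders.
import { initSmtp } from './services/smtp';
const sendTestMail = async () => {
  const transporter = initSmtp();
  await transporter.sendMail({
    from: 'noreply@example.com',
    to: 'user@example.com',
    subject: 'SMTP connectivity check',
    text: 'Sent using the settings resolved by the getSmtp*() config getters.'
  });
};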

View File

@@ -4,12 +4,12 @@
*
************************************************************************************************/
import { NODE_ENV } from "../config"
import { Key, Membership, MembershipOrg, Organization, User, Workspace } from "../models";
import { Types } from 'mongoose';
import { getNodeEnv } from '../config';
export const createTestUserForDevelopment = async () => {
if (NODE_ENV === "development") {
if (getNodeEnv() === "development") {
const testUserEmail = "test@localhost.local"
const testUserPassword = "testInfisical1"
const testUserId = "63cefa6ec8d3175601cfa980"

View File

@@ -1,7 +1,7 @@
/* eslint-disable no-console */
import { createLogger, format, transports } from 'winston';
import LokiTransport from 'winston-loki';
import { LOKI_HOST, NODE_ENV } from '../config';
import { getLokiHost, getNodeEnv } from '../config';
const { combine, colorize, label, printf, splat, timestamp } = format;
@@ -25,10 +25,10 @@ const createLoggerWithLabel = (level: string, label: string) => {
})
]
//* Add LokiTransport if it's enabled
if(LOKI_HOST !== undefined){
if(getLokiHost() !== undefined){
_transports.push(
new LokiTransport({
host: LOKI_HOST,
host: getLokiHost(),
handleExceptions: true,
handleRejections: true,
batching: true,
@@ -37,7 +37,11 @@ const createLoggerWithLabel = (level: string, label: string) => {
format: format.combine(
format.json()
),
labels: {app: process.env.npm_package_name, version: process.env.npm_package_version, environment: NODE_ENV},
labels: {
app: process.env.npm_package_name,
version: process.env.npm_package_version,
environment: getNodeEnv()
},
onConnectionError: (err: Error)=> console.error('Connection error while connecting to Loki Server.\n', err)
})
)

View File

@@ -1,5 +1,5 @@
import { Request } from 'express'
import { VERBOSE_ERROR_OUTPUT } from '../config'
import { getVerboseErrorOutput } from '../config';
export enum LogLevel {
DEBUG = 100,
@@ -87,7 +87,7 @@ export default class RequestError extends Error{
}, this.context)
//* Omit sensitive information from context that can leak internal workings of this program if user is not developer
if(!VERBOSE_ERROR_OUTPUT){
if(!getVerboseErrorOutput()){
_context = this._omit(_context, [
'stacktrace',
'exception',

View File

@@ -13,9 +13,11 @@ import {
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_GITLAB,
INTEGRATION_RENDER,
INTEGRATION_FLYIO,
INTEGRATION_CIRCLECI,
INTEGRATION_TRAVISCI,
INTEGRATION_SET,
INTEGRATION_OAUTH2,
INTEGRATION_AZURE_TOKEN_URL,
@@ -23,13 +25,16 @@ import {
INTEGRATION_VERCEL_TOKEN_URL,
INTEGRATION_NETLIFY_TOKEN_URL,
INTEGRATION_GITHUB_TOKEN_URL,
INTEGRATION_GITLAB_TOKEN_URL,
INTEGRATION_HEROKU_API_URL,
INTEGRATION_GITLAB_API_URL,
INTEGRATION_VERCEL_API_URL,
INTEGRATION_NETLIFY_API_URL,
INTEGRATION_RENDER_API_URL,
INTEGRATION_FLYIO_API_URL,
INTEGRATION_CIRCLECI_API_URL,
INTEGRATION_OPTIONS,
INTEGRATION_TRAVISCI_API_URL,
getIntegrationOptions
} from "./integration";
import { OWNER, ADMIN, MEMBER, INVITED, ACCEPTED } from "./organization";
import { SECRET_SHARED, SECRET_PERSONAL } from "./secret";
@@ -45,7 +50,8 @@ import {
import {
SMTP_HOST_SENDGRID,
SMTP_HOST_MAILGUN,
SMTP_HOST_SOCKETLABS
SMTP_HOST_SOCKETLABS,
SMTP_HOST_ZOHOMAIL
} from './smtp';
import { PLAN_STARTER, PLAN_PRO } from './stripe';
import {
@@ -78,9 +84,11 @@ export {
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_GITLAB,
INTEGRATION_RENDER,
INTEGRATION_FLYIO,
INTEGRATION_CIRCLECI,
INTEGRATION_TRAVISCI,
INTEGRATION_SET,
INTEGRATION_OAUTH2,
INTEGRATION_AZURE_TOKEN_URL,
@@ -88,12 +96,15 @@ export {
INTEGRATION_VERCEL_TOKEN_URL,
INTEGRATION_NETLIFY_TOKEN_URL,
INTEGRATION_GITHUB_TOKEN_URL,
INTEGRATION_GITLAB_TOKEN_URL,
INTEGRATION_HEROKU_API_URL,
INTEGRATION_GITLAB_API_URL,
INTEGRATION_VERCEL_API_URL,
INTEGRATION_NETLIFY_API_URL,
INTEGRATION_RENDER_API_URL,
INTEGRATION_FLYIO_API_URL,
INTEGRATION_CIRCLECI_API_URL,
INTEGRATION_TRAVISCI_API_URL,
EVENT_PUSH_SECRETS,
EVENT_PULL_SECRETS,
ACTION_LOGIN,
@@ -102,10 +113,11 @@ export {
ACTION_UPDATE_SECRETS,
ACTION_DELETE_SECRETS,
ACTION_READ_SECRETS,
INTEGRATION_OPTIONS,
getIntegrationOptions,
SMTP_HOST_SENDGRID,
SMTP_HOST_MAILGUN,
SMTP_HOST_SOCKETLABS,
SMTP_HOST_ZOHOMAIL,
PLAN_STARTER,
PLAN_PRO,
MFA_METHOD_EMAIL,

View File

@@ -1,13 +1,11 @@
import {
CLIENT_ID_AZURE,
TENANT_ID_AZURE
getClientIdHeroku,
getClientSlugVercel,
getClientIdNetlify,
getClientIdAzure,
getClientIdGitLab,
getClientIdGitHub
} from '../config';
import {
CLIENT_ID_HEROKU,
CLIENT_ID_NETLIFY,
CLIENT_ID_GITHUB,
CLIENT_SLUG_VERCEL,
} from "../config";
// integrations
const INTEGRATION_AZURE_KEY_VAULT = 'azure-key-vault';
@@ -17,176 +15,201 @@ const INTEGRATION_HEROKU = "heroku";
const INTEGRATION_VERCEL = "vercel";
const INTEGRATION_NETLIFY = "netlify";
const INTEGRATION_GITHUB = "github";
const INTEGRATION_GITLAB = "gitlab";
const INTEGRATION_RENDER = "render";
const INTEGRATION_FLYIO = "flyio";
const INTEGRATION_CIRCLECI = "circleci";
const INTEGRATION_TRAVISCI = "travisci";
const INTEGRATION_SET = new Set([
INTEGRATION_AZURE_KEY_VAULT,
INTEGRATION_HEROKU,
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_GITLAB,
INTEGRATION_RENDER,
INTEGRATION_FLYIO,
INTEGRATION_CIRCLECI,
INTEGRATION_TRAVISCI,
]);
// integration types
const INTEGRATION_OAUTH2 = "oauth2";
// integration oauth endpoints
const INTEGRATION_AZURE_TOKEN_URL = `https://login.microsoftonline.com/${TENANT_ID_AZURE}/oauth2/v2.0/token`;
const INTEGRATION_AZURE_TOKEN_URL = `https://login.microsoftonline.com/common/oauth2/v2.0/token`;
const INTEGRATION_HEROKU_TOKEN_URL = 'https://id.heroku.com/oauth/token';
const INTEGRATION_VERCEL_TOKEN_URL =
"https://api.vercel.com/v2/oauth/access_token";
const INTEGRATION_NETLIFY_TOKEN_URL = "https://api.netlify.com/oauth/token";
const INTEGRATION_GITHUB_TOKEN_URL =
"https://github.com/login/oauth/access_token";
const INTEGRATION_GITLAB_TOKEN_URL = "https://gitlab.com/oauth/token";
// integration apps endpoints
const INTEGRATION_HEROKU_API_URL = "https://api.heroku.com";
const INTEGRATION_GITLAB_API_URL = "https://gitlab.com/api";
const INTEGRATION_VERCEL_API_URL = "https://api.vercel.com";
const INTEGRATION_NETLIFY_API_URL = "https://api.netlify.com";
const INTEGRATION_RENDER_API_URL = "https://api.render.com";
const INTEGRATION_FLYIO_API_URL = "https://api.fly.io/graphql";
const INTEGRATION_CIRCLECI_API_URL = "https://circleci.com/api";
const INTEGRATION_TRAVISCI_API_URL = "https://api.travis-ci.com";
const getIntegrationOptions = () => {
const INTEGRATION_OPTIONS = [
{
name: 'Heroku',
slug: 'heroku',
image: 'Heroku.png',
isAvailable: true,
type: 'oauth',
clientId: getClientIdHeroku(),
docsLink: ''
},
{
name: 'Vercel',
slug: 'vercel',
image: 'Vercel.png',
isAvailable: true,
type: 'oauth',
clientId: '',
clientSlug: getClientSlugVercel(),
docsLink: ''
},
{
name: 'Netlify',
slug: 'netlify',
image: 'Netlify.png',
isAvailable: true,
type: 'oauth',
clientId: getClientIdNetlify(),
docsLink: ''
},
{
name: 'GitHub',
slug: 'github',
image: 'GitHub.png',
isAvailable: true,
type: 'oauth',
clientId: getClientIdGitHub(),
docsLink: ''
},
{
name: 'Render',
slug: 'render',
image: 'Render.png',
isAvailable: true,
type: 'pat',
clientId: '',
docsLink: ''
},
{
name: 'Fly.io',
slug: 'flyio',
image: 'Flyio.svg',
isAvailable: true,
type: 'pat',
clientId: '',
docsLink: ''
},
{
name: 'AWS Parameter Store',
slug: 'aws-parameter-store',
image: 'Amazon Web Services.png',
isAvailable: true,
type: 'custom',
clientId: '',
docsLink: ''
},
{
name: 'AWS Secret Manager',
slug: 'aws-secret-manager',
image: 'Amazon Web Services.png',
isAvailable: true,
type: 'custom',
clientId: '',
docsLink: ''
},
{
name: 'Azure Key Vault',
slug: 'azure-key-vault',
image: 'Microsoft Azure.png',
isAvailable: true,
type: 'oauth',
clientId: getClientIdAzure(),
docsLink: ''
},
{
name: 'Circle CI',
slug: 'circleci',
image: 'Circle CI.png',
isAvailable: true,
type: 'pat',
clientId: '',
docsLink: ''
},
{
name: 'GitLab',
slug: 'gitlab',
image: 'GitLab.png',
isAvailable: true,
type: 'custom',
clientId: getClientIdGitLab(),
docsLink: ''
},
{
name: 'Travis CI',
slug: 'travisci',
image: 'Travis CI.png',
isAvailable: true,
type: 'pat',
clientId: '',
docsLink: ''
},
{
name: 'Google Cloud Platform',
slug: 'gcp',
image: 'Google Cloud Platform.png',
isAvailable: false,
type: '',
clientId: '',
docsLink: ''
}
]
return INTEGRATION_OPTIONS;
}
const INTEGRATION_OPTIONS = [
{
name: 'Heroku',
slug: 'heroku',
image: 'Heroku.png',
isAvailable: true,
type: 'oauth',
clientId: CLIENT_ID_HEROKU,
docsLink: ''
},
{
name: 'Vercel',
slug: 'vercel',
image: 'Vercel.png',
isAvailable: true,
type: 'oauth',
clientId: '',
clientSlug: CLIENT_SLUG_VERCEL,
docsLink: ''
},
{
name: 'Netlify',
slug: 'netlify',
image: 'Netlify.png',
isAvailable: true,
type: 'oauth',
clientId: CLIENT_ID_NETLIFY,
docsLink: ''
},
{
name: 'GitHub',
slug: 'github',
image: 'GitHub.png',
isAvailable: true,
type: 'oauth',
clientId: CLIENT_ID_GITHUB,
docsLink: ''
},
{
name: 'Render',
slug: 'render',
image: 'Render.png',
isAvailable: true,
type: 'pat',
clientId: '',
docsLink: ''
},
{
name: 'Fly.io',
slug: 'flyio',
image: 'Flyio.svg',
isAvailable: true,
type: 'pat',
clientId: '',
docsLink: ''
},
{
name: 'AWS Parameter Store',
slug: 'aws-parameter-store',
image: 'Amazon Web Services.png',
isAvailable: true,
type: 'custom',
clientId: '',
docsLink: ''
},
{
name: 'AWS Secret Manager',
slug: 'aws-secret-manager',
image: 'Amazon Web Services.png',
isAvailable: true,
type: 'custom',
clientId: '',
docsLink: ''
},
{
name: 'Circle CI',
slug: 'circleci',
image: 'Circle CI.png',
isAvailable: true,
type: 'pat',
clientId: '',
docsLink: ''
},
{
name: 'Azure Key Vault',
slug: 'azure-key-vault',
image: 'Microsoft Azure.png',
isAvailable: false,
type: 'oauth',
clientId: CLIENT_ID_AZURE,
tenantId: TENANT_ID_AZURE,
docsLink: ''
},
{
name: 'Google Cloud Platform',
slug: 'gcp',
image: 'Google Cloud Platform.png',
isAvailable: false,
type: '',
clientId: '',
docsLink: ''
},
{
name: 'Travis CI',
slug: 'travisci',
image: 'Travis CI.png',
isAvailable: false,
type: '',
clientId: '',
docsLink: ''
}
]
export {
INTEGRATION_AZURE_KEY_VAULT,
INTEGRATION_AWS_PARAMETER_STORE,
INTEGRATION_AWS_SECRET_MANAGER,
INTEGRATION_HEROKU,
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_RENDER,
INTEGRATION_FLYIO,
INTEGRATION_CIRCLECI,
INTEGRATION_SET,
INTEGRATION_OAUTH2,
INTEGRATION_HEROKU,
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_GITLAB,
INTEGRATION_RENDER,
INTEGRATION_FLYIO,
INTEGRATION_CIRCLECI,
INTEGRATION_TRAVISCI,
INTEGRATION_SET,
INTEGRATION_OAUTH2,
INTEGRATION_AZURE_TOKEN_URL,
INTEGRATION_HEROKU_TOKEN_URL,
INTEGRATION_VERCEL_TOKEN_URL,
INTEGRATION_NETLIFY_TOKEN_URL,
INTEGRATION_GITHUB_TOKEN_URL,
INTEGRATION_HEROKU_API_URL,
INTEGRATION_VERCEL_API_URL,
INTEGRATION_NETLIFY_API_URL,
INTEGRATION_RENDER_API_URL,
INTEGRATION_FLYIO_API_URL,
INTEGRATION_CIRCLECI_API_URL,
INTEGRATION_OPTIONS,
INTEGRATION_HEROKU_TOKEN_URL,
INTEGRATION_VERCEL_TOKEN_URL,
INTEGRATION_NETLIFY_TOKEN_URL,
INTEGRATION_GITHUB_TOKEN_URL,
INTEGRATION_GITLAB_API_URL,
INTEGRATION_HEROKU_API_URL,
INTEGRATION_GITLAB_TOKEN_URL,
INTEGRATION_VERCEL_API_URL,
INTEGRATION_NETLIFY_API_URL,
INTEGRATION_RENDER_API_URL,
INTEGRATION_FLYIO_API_URL,
INTEGRATION_CIRCLECI_API_URL,
INTEGRATION_TRAVISCI_API_URL,
getIntegrationOptions
};
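A minimal illustration of the switch from a module-level INTEGRATION_OPTIONS constant to getIntegrationOptions(), so client IDs are resolved from config at call time rather than at import time; the import path is assumed.
import { getIntegrationOptions } from '../variables';
// Options are built fresh on each call, picking up getClientIdGitLab() and friends lazily.
const gitlabOption = getIntegrationOptions().find((option) => option.slug === 'gitlab');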

View File

@@ -1,9 +1,11 @@
const SMTP_HOST_SENDGRID = 'smtp.sendgrid.net';
const SMTP_HOST_MAILGUN = 'smtp.mailgun.org';
const SMTP_HOST_SOCKETLABS = 'smtp.socketlabs.com';
const SMTP_HOST_ZOHOMAIL = 'smtp.zoho.com';
export {
SMTP_HOST_SENDGRID,
SMTP_HOST_MAILGUN,
SMTP_HOST_SOCKETLABS
SMTP_HOST_SOCKETLABS,
SMTP_HOST_ZOHOMAIL
}

View File

@@ -197,6 +197,23 @@ const generateOpenAPISpec = async () => {
secretValueCiphertext: '',
secretValueIV: '',
secretValueTag: '',
},
ServiceTokenData: {
_id: '',
name: '',
workspace: '',
environment: '',
user: {
_id: '',
firstName: '',
lastName: ''
},
expiresAt: '2023-01-13T14:16:12.210Z',
encryptedKey: '',
iv: '',
tag: '',
updatedAt: '2023-01-13T14:16:12.210Z',
createdAt: '2023-01-13T14:16:12.210Z'
}
}
};

View File

@@ -0,0 +1,5 @@
import { it, expect } from '@jest/globals';
it('should return true', () => {
expect(true).toBeTruthy();
});

View File

@@ -0,0 +1,21 @@
import { Server } from 'http';
import main from '../src';
import { describe, expect, it, beforeAll, afterAll } from '@jest/globals';
import request from 'supertest';
let server: Server;
beforeAll(async () => {
server = await main;
});
afterAll(async () => {
server.close();
});
describe('Healthcheck endpoint', () => {
it('GET /healthcheck should return OK', async () => {
const res = await request(server).get('/healthcheck');
expect(res.status).toEqual(200);
});
});

View File

@@ -7,7 +7,7 @@ require (
github.com/muesli/mango-cobra v1.2.0
github.com/muesli/roff v0.1.0
github.com/spf13/cobra v1.6.1
golang.org/x/crypto v0.6.0
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d
golang.org/x/term v0.5.0
)
@@ -31,7 +31,7 @@ require (
github.com/oklog/ulid v1.3.1 // indirect
github.com/rivo/uniseg v0.2.0 // indirect
go.mongodb.org/mongo-driver v1.10.0 // indirect
golang.org/x/net v0.6.0 // indirect
golang.org/x/net v0.7.0 // indirect
golang.org/x/sys v0.5.0 // indirect
)

View File

@@ -103,17 +103,13 @@ github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgk
github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA=
go.mongodb.org/mongo-driver v1.10.0 h1:UtV6N5k14upNp4LTduX0QCufG124fSu25Wz9tu94GLg=
go.mongodb.org/mongo-driver v1.10.0/go.mod h1:wsihk0Kdgv8Kqu1Anit4sfK+22vSFbUrAVEYRhCXrA8=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d h1:sK3txAijHtOK88l68nt020reeT1ZdKLIYetKl95FzVY=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.3.0 h1:a06MkbcxBrEFc0w0QIZWXrH/9cCX6KJyWbBOIwAn+7A=
golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
golang.org/x/crypto v0.6.0 h1:qfktjS5LUO+fFKeJXZ+ikTRijMmljikvG68fpMMruSc=
golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58=
golang.org/x/net v0.0.0-20211029224645-99673261e6eb/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2 h1:CIJ76btIcR3eFI5EgSo6k1qKw9KJexJuRLI9G7Hp5wE=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.2.0 h1:sZfSu1wtKLGlWI4ZZayP0ck9Y73K1ynO6gqzTdBVdPU=
golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
golang.org/x/net v0.6.0 h1:L4ZwwTvKW9gr0ZMS1yrHD9GZhIuVjOBBnaKH+SPQK0Q=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g=
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -125,13 +121,9 @@ golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20210819135213-f52c844e1c1c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.3.0 h1:w8ZOecv6NaNa/zC8944JTU3vz4u6Lagfk4RPQxv92NQ=
golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.3.0 h1:qoo4akIqOcDME5bhc/NgxUdovd6BSS2uMsVjB56q1xI=
golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA=
golang.org/x/term v0.5.0 h1:n2a8QNdAb0sZNpU9R1ALUXBbY+w51fCQDN+7EdxNBsY=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=

View File

@@ -0,0 +1,49 @@
package cmd
import (
"testing"
"github.com/Infisical/infisical-merge/packages/models"
)
func TestFilterReservedEnvVars(t *testing.T) {
// some test env vars.
// HOME and PATH are reserved key words and should be filtered out
// XDG_SESSION_ID and LC_CTYPE are reserved key word prefixes and should be filtered out
// The filter function only checks the keys of the env map, so we don't need to set any values
env := map[string]models.SingleEnvironmentVariable{
"test": {},
"test2": {},
"HOME": {},
"PATH": {},
"XDG_SESSION_ID": {},
"LC_CTYPE": {},
}
// check to see if there are any reserved key words in secrets to inject
filterReservedEnvVars(env)
if len(env) != 2 {
t.Errorf("Expected 2 secrets to be returned, got %d", len(env))
}
if _, ok := env["test"]; !ok {
t.Errorf("Expected test to be returned")
}
if _, ok := env["test2"]; !ok {
t.Errorf("Expected test2 to be returned")
}
if _, ok := env["HOME"]; ok {
t.Errorf("Expected HOME to be filtered out")
}
if _, ok := env["PATH"]; ok {
t.Errorf("Expected PATH to be filtered out")
}
if _, ok := env["XDG_SESSION_ID"]; ok {
t.Errorf("Expected XDG_SESSION_ID to be filtered out")
}
if _, ok := env["LC_CTYPE"]; ok {
t.Errorf("Expected LC_CTYPE to be filtered out")
}
}

View File

@@ -38,7 +38,7 @@ var exportCmd = &cobra.Command{
Run: func(cmd *cobra.Command, args []string) {
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvelopmentBasedOnGitBranch()
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
@@ -49,6 +49,11 @@ var exportCmd = &cobra.Command{
util.HandleError(err)
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err)
}
format, err := cmd.Flags().GetString("format")
if err != nil {
util.HandleError(err)
@@ -69,7 +74,7 @@ var exportCmd = &cobra.Command{
util.HandleError(err, "Unable to parse flag")
}
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs})
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs, WorkspaceId: projectId})
if err != nil {
util.HandleError(err, "Unable to fetch secrets")
}
@@ -106,6 +111,7 @@ func init() {
exportCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
exportCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
exportCmd.Flags().StringP("tags", "t", "", "filter secrets by tag slugs")
exportCmd.Flags().String("projectId", "", "manually set the projectId to fetch secrets from")
}
// Format according to the format flag

View File

@@ -6,7 +6,6 @@ package cmd
import (
"encoding/json"
"fmt"
"os"
"github.com/Infisical/infisical-merge/packages/api"
"github.com/Infisical/infisical-merge/packages/models"
@@ -91,12 +90,12 @@ func writeWorkspaceFile(selectedWorkspace models.Workspace) error {
WorkspaceId: selectedWorkspace.ID,
}
marshalledWorkspaceFile, err := json.Marshal(workspaceFileToSave)
marshalledWorkspaceFile, err := json.MarshalIndent(workspaceFileToSave, "", " ")
if err != nil {
return err
}
err = util.WriteToFile(util.INFISICAL_WORKSPACE_CONFIG_FILE_NAME, marshalledWorkspaceFile, os.ModePerm)
err = util.WriteToFile(util.INFISICAL_WORKSPACE_CONFIG_FILE_NAME, marshalledWorkspaceFile, 0600)
if err != nil {
return err
}
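The change above switches the saved workspace file from compact JSON to indented JSON and tightens its permissions from os.ModePerm to 0600. A minimal standalone sketch of the same pattern (the file name and struct fields here are illustrative, not the CLI's actual types):

```go
package main

import (
	"encoding/json"
	"log"
	"os"
)

// workspaceFile mirrors the shape of the saved config for illustration only.
type workspaceFile struct {
	WorkspaceId string `json:"workspaceId"`
}

func main() {
	cfg := workspaceFile{WorkspaceId: "12345678"}

	// MarshalIndent produces human-readable output instead of a single line.
	data, err := json.MarshalIndent(cfg, "", "    ")
	if err != nil {
		log.Fatal(err)
	}

	// 0600 keeps the file readable and writable by the owner only,
	// unlike os.ModePerm (0777).
	if err := os.WriteFile(".infisical.json", data, 0600); err != nil {
		log.Fatal(err)
	}
}
```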

View File

@@ -41,13 +41,14 @@ var loginCmd = &cobra.Command{
PreRun: toggleDebug,
Run: func(cmd *cobra.Command, args []string) {
currentLoggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
if err != nil && (strings.Contains(err.Error(), "The specified item could not be found in the keyring") || strings.Contains(err.Error(), "unable to get key from Keyring")) { // if the key can't be found allow them to override
// if the key can't be found or there is an error getting current credentials from key ring, allow them to override
if err != nil && (strings.Contains(err.Error(), "The specified item could not be found in the keyring") || strings.Contains(err.Error(), "unable to get key from Keyring") || strings.Contains(err.Error(), "GetUserCredsFromKeyRing")) {
log.Debug(err)
} else if err != nil {
util.HandleError(err)
}
if currentLoggedInUserDetails.IsUserLoggedIn && !currentLoggedInUserDetails.LoginExpired { // if you are logged in but not expired
if currentLoggedInUserDetails.IsUserLoggedIn && !currentLoggedInUserDetails.LoginExpired && len(currentLoggedInUserDetails.UserCredentials.PrivateKey) != 0 {
shouldOverride, err := shouldOverrideLoginPrompt(currentLoggedInUserDetails.UserCredentials.Email)
if err != nil {
util.HandleError(err)
@@ -120,6 +121,7 @@ var loginCmd = &cobra.Command{
var decryptedPrivateKey []byte
if loginTwoResponse.EncryptionVersion == 1 {
log.Debug("Login version 1")
encryptedPrivateKey, _ := base64.StdEncoding.DecodeString(loginTwoResponse.EncryptedPrivateKey)
tag, err := base64.StdEncoding.DecodeString(loginTwoResponse.Tag)
if err != nil {
@@ -134,11 +136,15 @@ var loginCmd = &cobra.Command{
paddedPassword := fmt.Sprintf("%032s", password)
key := []byte(paddedPassword)
decryptedPrivateKey, err := crypto.DecryptSymmetric(key, encryptedPrivateKey, tag, IV)
if err != nil || len(decryptedPrivateKey) == 0 {
computedDecryptedPrivateKey, err := crypto.DecryptSymmetric(key, encryptedPrivateKey, tag, IV)
if err != nil || len(computedDecryptedPrivateKey) == 0 {
util.HandleError(err)
}
decryptedPrivateKey = computedDecryptedPrivateKey
} else if loginTwoResponse.EncryptionVersion == 2 {
log.Debug("Login version 2")
protectedKey, err := base64.StdEncoding.DecodeString(loginTwoResponse.ProtectedKey)
if err != nil {
util.HandleError(err)
@@ -191,14 +197,21 @@ var loginCmd = &cobra.Command{
util.HandleError(err)
}
decryptedPrivateKey, err = crypto.DecryptSymmetric(decryptedProtectedKeyInHex, encryptedPrivateKey, nonProtectedTag, nonProtectedIv)
computedDecryptedPrivateKey, err := crypto.DecryptSymmetric(decryptedProtectedKeyInHex, encryptedPrivateKey, nonProtectedTag, nonProtectedIv)
if err != nil {
util.HandleError(err)
}
decryptedPrivateKey = computedDecryptedPrivateKey
} else {
util.PrintErrorMessageAndExit("Insufficient details to decrypt private key")
}
if string(decryptedPrivateKey) == "" || email == "" || loginTwoResponse.Token == "" {
log.Debugf("[decryptedPrivateKey=%s] [email=%s] [loginTwoResponse.Token=%s]", string(decryptedPrivateKey), email, loginTwoResponse.Token)
util.PrintErrorMessageAndExit("We were unable to fetch required details to complete your login. Run with -d to see more info")
}
userCredentialsToBeStored := &models.UserCredentials{
Email: email,
PrivateKey: string(decryptedPrivateKey),
@@ -222,8 +235,16 @@ var loginCmd = &cobra.Command{
// clear backed up secrets from prev account
util.DeleteBackupSecrets()
color.Green("Nice! You are logged in as: %v", email)
whilte := color.New(color.FgGreen)
boldWhite := whilte.Add(color.Bold)
boldWhite.Printf(">>>> Welcome to Infisical!")
boldWhite.Printf(" You are now logged in as %v <<<< \n", email)
plainBold := color.New(color.Bold)
plainBold.Println("\nQuick links")
fmt.Println("- Learn to inject secrets into your application at https://infisical.com/docs/cli/usage")
fmt.Println("- Stuck? Join our slack for quick support https://infisical.com/slack")
},
}

View File

@@ -36,6 +36,9 @@ var resetCmd = &cobra.Command{
keyringInstance.Remove(util.KEYRING_SERVICE_NAME)
// delete secrets backup
util.DeleteBackupSecrets()
util.PrintSuccessMessage("Reset successful")
},
}

View File

@@ -56,7 +56,7 @@ var runCmd = &cobra.Command{
Run: func(cmd *cobra.Command, args []string) {
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvelopmentBasedOnGitBranch()
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
@@ -110,13 +110,7 @@ var runCmd = &cobra.Command{
}
// check to see if there are any reserved key words in secrets to inject
reservedEnvironmentVariables := []string{"HOME", "PATH", "PS1", "PS2"}
for _, reservedEnvName := range reservedEnvironmentVariables {
if _, ok := secretsByKey[reservedEnvName]; ok {
delete(secretsByKey, reservedEnvName)
util.PrintWarning(fmt.Sprintf("Infisical secret named [%v] has been removed because it is a reserved secret name", reservedEnvName))
}
}
filterReservedEnvVars(secretsByKey)
// now add infisical secrets
for k, v := range secretsByKey {
@@ -149,6 +143,37 @@ var runCmd = &cobra.Command{
},
}
var (
reservedEnvVars = []string{
"HOME", "PATH", "PS1", "PS2",
"PWD", "EDITOR", "XAUTHORITY", "USER",
"TERM", "TERMINFO", "SHELL", "MAIL",
}
reservedEnvVarPrefixes = []string{
"XDG_",
"LC_",
}
)
func filterReservedEnvVars(env map[string]models.SingleEnvironmentVariable) {
for _, reservedEnvName := range reservedEnvVars {
if _, ok := env[reservedEnvName]; ok {
delete(env, reservedEnvName)
util.PrintWarning(fmt.Sprintf("Infisical secret named [%v] has been removed because it is a reserved secret name", reservedEnvName))
}
}
for _, reservedEnvPrefix := range reservedEnvVarPrefixes {
for envName := range env {
if strings.HasPrefix(envName, reservedEnvPrefix) {
delete(env, envName)
util.PrintWarning(fmt.Sprintf("Infisical secret named [%v] has been removed because it contains a reserved prefix", envName))
}
}
}
}
func init() {
rootCmd.AddCommand(runCmd)
runCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")

View File

@@ -19,6 +19,7 @@ import (
"github.com/Infisical/infisical-merge/packages/util"
"github.com/Infisical/infisical-merge/packages/visualize"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
)
@@ -32,7 +33,7 @@ var secretsCmd = &cobra.Command{
Run: func(cmd *cobra.Command, args []string) {
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvelopmentBasedOnGitBranch()
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
@@ -98,7 +99,7 @@ var secretsSetCmd = &cobra.Command{
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvelopmentBasedOnGitBranch()
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
@@ -132,6 +133,11 @@ var secretsSetCmd = &cobra.Command{
encryptedWorkspaceKeyNonce, _ := base64.StdEncoding.DecodeString(workspaceKeyResponse.Nonce)
currentUsersPrivateKey, _ := base64.StdEncoding.DecodeString(loggedInUserDetails.UserCredentials.PrivateKey)
if len(currentUsersPrivateKey) == 0 || len(encryptedWorkspaceKeySenderPublicKey) == 0 {
log.Debugf("Missing credentials for generating plainTextEncryptionKey: [currentUsersPrivateKey=%s] [encryptedWorkspaceKeySenderPublicKey=%s]", currentUsersPrivateKey, encryptedWorkspaceKeySenderPublicKey)
util.PrintErrorMessageAndExit("Some required user credentials are missing to generate your [plainTextEncryptionKey]. Please run [infisical login] then try again")
}
// decrypt workspace key
plainTextEncryptionKey := crypto.DecryptAsymmetric(encryptedWorkspaceKey, encryptedWorkspaceKeyNonce, encryptedWorkspaceKeySenderPublicKey, currentUsersPrivateKey)
@@ -277,7 +283,7 @@ var secretsDeleteCmd = &cobra.Command{
Run: func(cmd *cobra.Command, args []string) {
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvelopmentBasedOnGitBranch()
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
@@ -338,7 +344,7 @@ var secretsDeleteCmd = &cobra.Command{
func getSecretsByNames(cmd *cobra.Command, args []string) {
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvelopmentBasedOnGitBranch()
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
@@ -361,10 +367,7 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
requestedSecrets := []models.SingleEnvironmentVariable{}
secretsMap := make(map[string]models.SingleEnvironmentVariable)
for _, secret := range secrets {
secretsMap[secret.Key] = secret
}
secretsMap := getSecretsByKeys(secrets)
for _, secretKeyFromArg := range args {
if value, ok := secretsMap[strings.ToUpper(secretKeyFromArg)]; ok {
@@ -384,7 +387,7 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
func generateExampleEnv(cmd *cobra.Command, args []string) {
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvelopmentBasedOnGitBranch()
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
@@ -587,7 +590,7 @@ func addHash(input string) string {
}
func getSecretsByKeys(secrets []models.SingleEnvironmentVariable) map[string]models.SingleEnvironmentVariable {
secretMapByName := make(map[string]models.SingleEnvironmentVariable)
secretMapByName := make(map[string]models.SingleEnvironmentVariable, len(secrets))
for _, secret := range secrets {
secretMapByName[secret.Key] = secret

View File

@@ -55,4 +55,5 @@ type GetAllSecretsParameters struct {
EnvironmentPassedViaFlag bool
InfisicalToken string
TagSlugs string
WorkspaceId string
}

View File

@@ -4,18 +4,47 @@ import (
"encoding/json"
"errors"
"fmt"
"io/ioutil"
log "github.com/sirupsen/logrus"
"io"
"net/http"
"os"
"os/exec"
"runtime"
"github.com/fatih/color"
)
func CheckForUpdate() {
if checkEnv := os.Getenv("INFISICAL_DISABLE_UPDATE_CHECK"); checkEnv != "" {
return
}
latestVersion, err := getLatestTag("Infisical", "infisical")
if err != nil {
log.Debug(err)
// do nothing and continue
return
}
if latestVersion != CLI_VERSION {
PrintWarning(fmt.Sprintf("Please update your CLI. You are running version %s but the latest version is %s", CLI_VERSION, latestVersion))
yellow := color.New(color.FgYellow).SprintFunc()
blue := color.New(color.FgCyan).SprintFunc()
black := color.New(color.FgBlack).SprintFunc()
msg := fmt.Sprintf("%s %s %s %s",
yellow("A new release of infisical is available:"),
blue(CLI_VERSION),
black("->"),
blue(latestVersion),
)
fmt.Fprintln(os.Stderr, msg)
updateInstructions := GetUpdateInstructions()
if updateInstructions != "" {
msg = fmt.Sprintf("\n%s\n", GetUpdateInstructions())
fmt.Fprintln(os.Stderr, msg)
}
}
}
@@ -26,12 +55,12 @@ func getLatestTag(repoOwner string, repoName string) (string, error) {
return "", err
}
if resp.StatusCode != 200 {
return "", errors.New(fmt.Sprintf("GitHub API returned status code %d", resp.StatusCode))
return "", errors.New(fmt.Sprintf("gitHub API returned status code %d", resp.StatusCode))
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
body, err := io.ReadAll(resp.Body)
if err != nil {
return "", err
}
@@ -40,7 +69,59 @@ func getLatestTag(repoOwner string, repoName string) (string, error) {
Name string `json:"name"`
}
json.Unmarshal(body, &tags)
if err := json.Unmarshal(body, &tags); err != nil {
return "", fmt.Errorf("failed to unmarshal github response: %w", err)
}
return tags[0].Name[1:], nil
}
func GetUpdateInstructions() string {
os := runtime.GOOS
switch os {
case "darwin":
return "To update, run: brew update && brew upgrade infisical"
case "windows":
return "To update, run: scoop update infisical"
case "linux":
pkgManager := getLinuxPackageManager()
switch pkgManager {
case "apt-get":
return "To update, run: sudo apt-get update && sudo apt-get install infisical"
case "yum":
return "To update, run: sudo yum update infisical"
case "apk":
return "To update, run: sudo apk update && sudo apk upgrade infisical"
case "yay":
return "To update, run: yay -Syu infisical"
default:
return ""
}
default:
return ""
}
}
func getLinuxPackageManager() string {
cmd := exec.Command("apt-get", "--version")
if err := cmd.Run(); err == nil {
return "apt-get"
}
cmd = exec.Command("yum", "--version")
if err := cmd.Run(); err == nil {
return "yum"
}
cmd = exec.Command("yay", "--version")
if err := cmd.Run(); err == nil {
return "yay"
}
cmd = exec.Command("apk", "--version")
if err := cmd.Run(); err == nil {
return "apk"
}
return ""
}
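getLinuxPackageManager decides which upgrade command to suggest by probing each supported package manager with a `--version` call and keeping the first one that succeeds. A compact standalone restatement of that probe loop:

```go
package main

import (
	"fmt"
	"os/exec"
)

// detectPackageManager probes common Linux package managers in the same order
// as the CLI's helper and returns the first one whose "--version" call succeeds.
func detectPackageManager() string {
	for _, manager := range []string{"apt-get", "yum", "yay", "apk"} {
		if err := exec.Command(manager, "--version").Run(); err == nil {
			return manager
		}
	}
	return ""
}

func main() {
	if m := detectPackageManager(); m != "" {
		fmt.Printf("detected package manager: %s\n", m)
	} else {
		fmt.Println("no known package manager found")
	}
}
```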

View File

@@ -5,6 +5,7 @@ import (
"errors"
"fmt"
"os"
"path/filepath"
"github.com/Infisical/infisical-merge/packages/models"
log "github.com/sirupsen/logrus"
@@ -41,7 +42,7 @@ func WriteInitalConfig(userCredentials *models.UserCredentials) error {
}
// Create file in directory
err = WriteToFile(fullConfigFilePath, configFileMarshalled, os.ModePerm)
err = WriteToFile(fullConfigFilePath, configFileMarshalled, 0600)
if err != nil {
return err
}
@@ -73,7 +74,12 @@ func WorkspaceConfigFileExistsInCurrentPath() bool {
}
func GetWorkSpaceFromFile() (models.WorkspaceConfigFile, error) {
configFileAsBytes, err := os.ReadFile(INFISICAL_WORKSPACE_CONFIG_FILE_NAME)
cfgFile, err := FindWorkspaceConfigFile()
if err != nil {
return models.WorkspaceConfigFile{}, err
}
configFileAsBytes, err := os.ReadFile(cfgFile)
if err != nil {
return models.WorkspaceConfigFile{}, err
}
@@ -87,6 +93,37 @@ func GetWorkSpaceFromFile() (models.WorkspaceConfigFile, error) {
return workspaceConfigFile, nil
}
// FindWorkspaceConfigFile searches for a .infisical.json file in the current directory and all parent directories.
func FindWorkspaceConfigFile() (string, error) {
dir, err := os.Getwd()
if err != nil {
return "", err
}
for {
path := filepath.Join(dir, INFISICAL_WORKSPACE_CONFIG_FILE_NAME)
_, err := os.Stat(path)
if err == nil {
// file found
log.Debugf("FindWorkspaceConfigFile: workspace file found at [path=%s]", path)
return path, nil
}
// check if we have reached the root directory
if dir == filepath.Dir(dir) {
break
}
// move up one directory
dir = filepath.Dir(dir)
}
// file not found
return "", fmt.Errorf("file not found: %s", INFISICAL_WORKSPACE_CONFIG_FILE_NAME)
}
func GetFullConfigFilePath() (fullPathToFile string, fullPathToDirectory string, err error) {
homeDir, err := GetHomeDir()
if err != nil {
@@ -114,52 +151,6 @@ func GetWorkspaceConfigByPath(path string) (workspaceConfig models.WorkspaceConf
return workspaceConfigFile, nil
}
// Will get the list of .infisical.json files that are located
// within the root of each sub folder from where the CLI is ran from
func GetAllWorkSpaceConfigsStartingFromCurrentPath() (workspaces []models.WorkspaceConfigFile, err error) {
currentDir, err := os.Getwd()
if err != nil {
return nil, fmt.Errorf("GetAllProjectConfigs: unable to get the current directory because [%s]", err)
}
files, err := os.ReadDir(currentDir)
if err != nil {
return nil, fmt.Errorf("GetAllProjectConfigs: unable to read the contents of the current directory because [%s]", err)
}
listOfWorkSpaceConfigs := []models.WorkspaceConfigFile{}
for _, file := range files {
if !file.IsDir() && file.Name() == INFISICAL_WORKSPACE_CONFIG_FILE_NAME {
pathToWorkspaceConfigFile := currentDir + "/" + INFISICAL_WORKSPACE_CONFIG_FILE_NAME
workspaceConfig, err := GetWorkspaceConfigByPath(pathToWorkspaceConfigFile)
if err != nil {
return nil, fmt.Errorf("GetAllProjectConfigs: Unable to get config file because [%s]", err)
}
listOfWorkSpaceConfigs = append(listOfWorkSpaceConfigs, workspaceConfig)
} else if file.IsDir() {
pathToSubFolder := currentDir + "/" + file.Name()
pathToMaybeWorkspaceConfigFile := pathToSubFolder + "/" + INFISICAL_WORKSPACE_CONFIG_FILE_NAME
_, err := os.Stat(pathToMaybeWorkspaceConfigFile)
if err != nil {
continue // workspace config file doesn't exist
}
workspaceConfig, err := GetWorkspaceConfigByPath(pathToMaybeWorkspaceConfigFile)
if err != nil {
return nil, fmt.Errorf("GetAllProjectConfigs: Unable to get config file because [%s]", err)
}
listOfWorkSpaceConfigs = append(listOfWorkSpaceConfigs, workspaceConfig)
}
}
return listOfWorkSpaceConfigs, nil
}
// Get the infisical config file and if it doesn't exist, return empty config model, otherwise raise error
func GetConfigFile() (models.ConfigFile, error) {
fullConfigFilePath, _, err := GetFullConfigFilePath()
@@ -206,7 +197,7 @@ func WriteConfigFile(configFile *models.ConfigFile) error {
}
// Create file in directory
err = os.WriteFile(fullConfigFilePath, configFileMarshalled, os.ModePerm)
err = os.WriteFile(fullConfigFilePath, configFileMarshalled, 0600)
if err != nil {
return fmt.Errorf("writeConfigFile: Unable to write to file [err=%s]", err)
}
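FindWorkspaceConfigFile lets commands run from any subdirectory of a project by walking from the current directory toward the filesystem root until it finds .infisical.json. A minimal standalone version of that upward search (the file name constant is the only project-specific piece):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

const workspaceConfigFileName = ".infisical.json"

// findWorkspaceConfig walks from the current directory toward the root and
// returns the first path at which the workspace config file exists.
func findWorkspaceConfig() (string, error) {
	dir, err := os.Getwd()
	if err != nil {
		return "", err
	}
	for {
		candidate := filepath.Join(dir, workspaceConfigFileName)
		if _, err := os.Stat(candidate); err == nil {
			return candidate, nil
		}
		parent := filepath.Dir(dir)
		if parent == dir { // reached the filesystem root
			break
		}
		dir = parent
	}
	return "", fmt.Errorf("file not found: %s", workspaceConfigFileName)
}

func main() {
	path, err := findWorkspaceConfig()
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("workspace config found at", path)
}
```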

View File

@@ -89,8 +89,8 @@ func RequireServiceToken() {
}
func RequireLocalWorkspaceFile() {
workspaceFileExists := WorkspaceConfigFileExistsInCurrentPath()
if !workspaceFileExists {
workspaceFilePath, _ := FindWorkspaceConfigFile()
if workspaceFilePath == "" {
PrintErrorMessageAndExit("It looks you have not yet connected this project to Infisical", "To do so, run [infisical init] then run your command again")
}
@@ -115,8 +115,20 @@ func GetHashFromStringList(list []string) string {
return fmt.Sprintf("%x", sum)
}
// execCmd is a struct that holds the command and arguments to be executed.
// By using this struct, we can easily mock the command and arguments.
type execCmd struct {
cmd string
args []string
}
var getCurrentBranchCmd = execCmd{
cmd: "git",
args: []string{"symbolic-ref", "--short", "HEAD"},
}
func getCurrentBranch() (string, error) {
cmd := exec.Command("git", "symbolic-ref", "--short", "HEAD")
cmd := exec.Command(getCurrentBranchCmd.cmd, getCurrentBranchCmd.args...)
var out bytes.Buffer
cmd.Stdout = &out
err := cmd.Run()
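Wrapping the git invocation in a package-level execCmd value is what makes the workspace-file tests further down possible: a test can temporarily point the variable at a harmless command such as echo and restore it afterwards. A self-contained sketch of the pattern (names simplified from the CLI's helper):

```go
package main

import (
	"bytes"
	"fmt"
	"os/exec"
	"strings"
)

// execCmd holds a command and its arguments so tests can substitute them.
type execCmd struct {
	cmd  string
	args []string
}

// currentBranchCmd is a package-level variable precisely so it can be swapped in tests.
var currentBranchCmd = execCmd{cmd: "git", args: []string{"symbolic-ref", "--short", "HEAD"}}

func currentBranch() (string, error) {
	c := exec.Command(currentBranchCmd.cmd, currentBranchCmd.args...)
	var out bytes.Buffer
	c.Stdout = &out
	if err := c.Run(); err != nil {
		return "", err
	}
	return strings.TrimSpace(out.String()), nil
}

func main() {
	// In a test, mock the git call by pointing the variable at echo.
	original := currentBranchCmd
	currentBranchCmd = execCmd{cmd: "echo", args: []string{"main"}}
	defer func() { currentBranchCmd = original }()

	branch, err := currentBranch()
	fmt.Println(branch, err) // main <nil>
}
```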

View File

@@ -20,6 +20,9 @@ func PrintErrorAndExit(exitCode int, err error, messages ...string) {
}
}
supportMsg := fmt.Sprintf("\n\nIf this issue continues, get support at https://infisical.com/slack")
fmt.Fprintln(os.Stderr, supportMsg)
os.Exit(exitCode)
}

View File

@@ -76,10 +76,31 @@ func GetPlainTextSecretsViaJTW(JTWToken string, receiversPrivateKey string, work
return nil, fmt.Errorf("unable to get your encrypted workspace key. [err=%v]", err)
}
encryptedWorkspaceKey, _ := base64.StdEncoding.DecodeString(workspaceKeyResponse.EncryptedKey)
encryptedWorkspaceKeySenderPublicKey, _ := base64.StdEncoding.DecodeString(workspaceKeyResponse.Sender.PublicKey)
encryptedWorkspaceKeyNonce, _ := base64.StdEncoding.DecodeString(workspaceKeyResponse.Nonce)
currentUsersPrivateKey, _ := base64.StdEncoding.DecodeString(receiversPrivateKey)
encryptedWorkspaceKey, err := base64.StdEncoding.DecodeString(workspaceKeyResponse.EncryptedKey)
if err != nil {
HandleError(err, "Unable to get bytes represented by the base64 for encryptedWorkspaceKey")
}
encryptedWorkspaceKeySenderPublicKey, err := base64.StdEncoding.DecodeString(workspaceKeyResponse.Sender.PublicKey)
if err != nil {
HandleError(err, "Unable to get bytes represented by the base64 for encryptedWorkspaceKeySenderPublicKey")
}
encryptedWorkspaceKeyNonce, err := base64.StdEncoding.DecodeString(workspaceKeyResponse.Nonce)
if err != nil {
HandleError(err, "Unable to get bytes represented by the base64 for encryptedWorkspaceKeyNonce")
}
currentUsersPrivateKey, err := base64.StdEncoding.DecodeString(receiversPrivateKey)
if err != nil {
HandleError(err, "Unable to get bytes represented by the base64 for currentUsersPrivateKey")
}
if len(currentUsersPrivateKey) == 0 || len(encryptedWorkspaceKeySenderPublicKey) == 0 {
log.Debugf("Missing credentials for generating plainTextEncryptionKey: [currentUsersPrivateKey=%s] [encryptedWorkspaceKeySenderPublicKey=%s]", currentUsersPrivateKey, encryptedWorkspaceKeySenderPublicKey)
PrintErrorMessageAndExit("Some required user credentials are missing to generate your [plainTextEncryptionKey]. Please run [infisical login] then try again")
}
plainTextWorkspaceKey := crypto.DecryptAsymmetric(encryptedWorkspaceKey, encryptedWorkspaceKeyNonce, encryptedWorkspaceKeySenderPublicKey, currentUsersPrivateKey)
encryptedSecrets, err := api.CallGetSecretsV2(httpClient, api.GetEncryptedSecretsV2Request{
@@ -131,6 +152,10 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters) ([]models
return nil, err
}
if params.WorkspaceId != "" {
workspaceFile.WorkspaceId = params.WorkspaceId
}
// Verify environment
err = ValidateEnvironmentName(params.Environment, workspaceFile.WorkspaceId, loggedInUserDetails.UserCredentials)
if err != nil {
@@ -415,7 +440,7 @@ func WriteBackupSecrets(workspace string, environment string, encryptionKey []by
}
listOfSecretsMarshalled, _ := json.Marshal(encryptedSecrets)
err = os.WriteFile(fmt.Sprintf("%s/%s", fullPathToSecretsBackupFolder, fileName), listOfSecretsMarshalled, os.ModePerm)
err = os.WriteFile(fmt.Sprintf("%s/%s", fullPathToSecretsBackupFolder, fileName), listOfSecretsMarshalled, 0600)
if err != nil {
return fmt.Errorf("WriteBackupSecrets: Unable to write backup secrets to file [err=%s]", err)
}
@@ -482,21 +507,29 @@ func DeleteBackupSecrets() error {
return os.RemoveAll(fullPathToSecretsBackupFolder)
}
func GetEnvelopmentBasedOnGitBranch() string {
func GetEnvFromWorkspaceFile() string {
workspaceFile, err := GetWorkSpaceFromFile()
if err != nil {
log.Debugf("getEnvFromWorkspaceFile: [err=%s]", err)
return ""
}
if env := GetEnvelopmentBasedOnGitBranch(workspaceFile); env != "" {
return env
}
return workspaceFile.DefaultEnvironment
}
func GetEnvelopmentBasedOnGitBranch(workspaceFile models.WorkspaceConfigFile) string {
branch, err := getCurrentBranch()
if err != nil {
log.Debugf("getEnvelopmentBasedOnGitBranch: [err=%s]", err)
}
workspaceFile, err := GetWorkSpaceFromFile()
if err != nil {
log.Debugf("getEnvelopmentBasedOnGitBranch: [err=%s]", err)
return ""
}
envBasedOnGitBranch, ok := workspaceFile.GitBranchToEnvironmentMapping[branch]
log.Debugf("GetEnvelopmentBasedOnGitBranch: [envBasedOnGitBranch=%s] [ok=%s]", envBasedOnGitBranch, ok)
log.Debugf("GetEnvelopmentBasedOnGitBranch: [envBasedOnGitBranch=%s] [ok=%t]", envBasedOnGitBranch, ok)
if err == nil && ok {
return envBasedOnGitBranch
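GetEnvFromWorkspaceFile resolves the environment in two steps: it first consults gitBranchToEnvironmentMapping for the current git branch and only falls back to defaultEnvironment when no mapping matches. A simplified standalone restatement of that precedence (struct fields mirror the .infisical.json keys exercised by the new testdata fixtures; the CLI's models.WorkspaceConfigFile carries more fields):

```go
package main

import "fmt"

// workspaceConfig mirrors the relevant .infisical.json fields for illustration.
type workspaceConfig struct {
	DefaultEnvironment            string
	GitBranchToEnvironmentMapping map[string]string
}

// resolveEnvironment prefers the environment mapped to the current branch and
// falls back to the workspace's default environment otherwise.
func resolveEnvironment(cfg workspaceConfig, branch string) string {
	if env, ok := cfg.GitBranchToEnvironmentMapping[branch]; ok && env != "" {
		return env
	}
	return cfg.DefaultEnvironment
}

func main() {
	cfg := workspaceConfig{
		DefaultEnvironment:            "myDefaultEnv",
		GitBranchToEnvironmentMapping: map[string]string{"main": "myMainEnv"},
	}
	fmt.Println(resolveEnvironment(cfg, "main"))    // myMainEnv
	fmt.Println(resolveEnvironment(cfg, "feature")) // myDefaultEnv
}
```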

View File

@@ -1,6 +1,9 @@
package util
import (
"io"
"os"
"path"
"testing"
"github.com/Infisical/infisical-merge/packages/models"
@@ -158,3 +161,98 @@ func Test_SubstituteSecrets_When_No_SubstituteNeeded(t *testing.T) {
}
}
}
func Test_Read_Env_From_File(t *testing.T) {
type testCase struct {
TestFile string
ExpectedEnv string
}
var cases = []testCase{
{
TestFile: "testdata/infisical-default-env.json",
ExpectedEnv: "myDefaultEnv",
},
{
TestFile: "testdata/infisical-branch-env.json",
ExpectedEnv: "myMainEnv",
},
{
TestFile: "testdata/infisical-no-matching-branch-env.json",
ExpectedEnv: "myDefaultEnv",
},
}
// create a tmp directory for testing
testDir, err := os.MkdirTemp(os.TempDir(), "infisical-test")
if err != nil {
t.Errorf("Test_Read_DefaultEnv_From_File: Failed to create temp directory: %s", err)
}
// save the current working directory
originalDir, err := os.Getwd()
if err != nil {
t.Errorf("Test_Read_DefaultEnv_From_File: Failed to get current working directory: %s", err)
}
// backup the original git command
originalGitCmd := getCurrentBranchCmd
// make sure to clean up after the test
t.Cleanup(func() {
os.Chdir(originalDir)
os.RemoveAll(testDir)
getCurrentBranchCmd = originalGitCmd
})
// mock the git command to return "main" as the current branch
getCurrentBranchCmd = execCmd{cmd: "echo", args: []string{"main"}}
for _, c := range cases {
// make sure we start in the original directory
err = os.Chdir(originalDir)
if err != nil {
t.Errorf("Test_Read_DefaultEnv_From_File: Failed to change working directory: %s", err)
}
// remove old test file if it exists
err = os.Remove(path.Join(testDir, INFISICAL_WORKSPACE_CONFIG_FILE_NAME))
if err != nil && !os.IsNotExist(err) {
t.Errorf("Test_Read_DefaultEnv_From_File: Failed to remove old test file: %s", err)
}
// deploy the test file
copyTestFile(t, c.TestFile, path.Join(testDir, INFISICAL_WORKSPACE_CONFIG_FILE_NAME))
// change the working directory to the tmp directory
err = os.Chdir(testDir)
if err != nil {
t.Errorf("Test_Read_DefaultEnv_From_File: Failed to change working directory: %s", err)
}
// get env from file
env := GetEnvFromWorkspaceFile()
if env != c.ExpectedEnv {
t.Errorf("Test_Read_DefaultEnv_From_File: Expected env to be %s but got %s", c.ExpectedEnv, env)
}
}
}
func copyTestFile(t *testing.T, src, dst string) {
srcFile, err := os.Open(src)
if err != nil {
t.Errorf("Test_Read_Env_From_File_By_Branch: Failed to open source file: %s", err)
}
defer srcFile.Close()
dstFile, err := os.Create(dst)
if err != nil {
t.Errorf("Test_Read_Env_From_File_By_Branch: Failed to create destination file: %s", err)
}
defer dstFile.Close()
_, err = io.Copy(dstFile, srcFile)
if err != nil {
t.Errorf("Test_Read_Env_From_File_By_Branch: Failed to copy file: %s", err)
}
}

View File

@@ -0,0 +1,7 @@
{
"workspaceId": "12345678",
"defaultEnvironment": "myDefaultEnv",
"gitBranchToEnvironmentMapping": {
"main": "myMainEnv"
}
}

View File

@@ -0,0 +1,5 @@
{
"workspaceId": "12345678",
"defaultEnvironment": "myDefaultEnv",
"gitBranchToEnvironmentMapping": null
}

View File

@@ -0,0 +1,7 @@
{
"workspaceId": "12345678",
"defaultEnvironment": "myDefaultEnv",
"gitBranchToEnvironmentMapping": {
"notmain": "myMainEnv"
}
}
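The three fixtures above cover a matching branch mapping, a null mapping, and a branch with no matching entry. A hedged sketch of how such a file can be decoded (field names follow the JSON keys in the fixtures; the CLI's own models.WorkspaceConfigFile may carry additional fields):

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"
)

// workspaceConfigFile mirrors only the keys used by the fixtures above.
type workspaceConfigFile struct {
	WorkspaceId                   string            `json:"workspaceId"`
	DefaultEnvironment            string            `json:"defaultEnvironment"`
	GitBranchToEnvironmentMapping map[string]string `json:"gitBranchToEnvironmentMapping"`
}

func main() {
	raw := []byte(`{
		"workspaceId": "12345678",
		"defaultEnvironment": "myDefaultEnv",
		"gitBranchToEnvironmentMapping": {"main": "myMainEnv"}
	}`)

	var cfg workspaceConfigFile
	if err := json.Unmarshal(raw, &cfg); err != nil {
		log.Fatal(err)
	}
	// A null mapping in the JSON simply leaves the map nil, which is safe to index.
	fmt.Println(cfg.WorkspaceId, cfg.GitBranchToEnvironmentMapping["main"])
}
```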

Some files were not shown because too many files have changed in this diff.