Mirror of https://github.com/Infisical/infisical.git (synced 2025-04-10 07:25:40 +00:00)
Compare commits
160 Commits
Author | SHA1 | Date | |
---|---|---|---|
2bff7bbb5a | |||
b09ae054dd | |||
dc9c6b9d13 | |||
f01e8cb33b | |||
3ba62d1d97 | |||
5816ce3bf7 | |||
2d77fe9ca3 | |||
6bb24933bf | |||
fbc38c553a | |||
6ba4701db8 | |||
c15a9301af | |||
91052df5f9 | |||
6ea26c135a | |||
5444382d5a | |||
200bb12ad8 | |||
1447e055d1 | |||
4dac03ab94 | |||
20ea50bfaf | |||
5474096ca9 | |||
06c1827f38 | |||
9b7f036fd0 | |||
547555591b | |||
516819507a | |||
02e5be20c2 | |||
de11c50563 | |||
33dddd440c | |||
19b909cd12 | |||
cd59ca745d | |||
e013a4ab93 | |||
19daf1410a | |||
4c29c88fde | |||
6af59e47f5 | |||
8183e61403 | |||
b4c616edd6 | |||
c12eeac9b3 | |||
033275ed69 | |||
a799e1bffc | |||
36300cd19d | |||
c8633bf546 | |||
7fe2e15a98 | |||
72bf160f2e | |||
0ef9db99b4 | |||
805f733499 | |||
595dc78e75 | |||
6c7d232a9e | |||
35fd1520e2 | |||
3626ef2ec2 | |||
a49fcf49f1 | |||
787e54fb91 | |||
90537f2e6d | |||
c9448656bf | |||
f3900213b5 | |||
fe17d8459b | |||
2f54c4dd7e | |||
c33b043f5f | |||
5db60c0dad | |||
1a3d3906da | |||
d86c335671 | |||
62f0b3f6df | |||
3e623922b4 | |||
d1c38513f7 | |||
63253d515f | |||
584d309b80 | |||
07bb3496e7 | |||
c83c75db96 | |||
bcd18ab0af | |||
1ea75eb840 | |||
271c810692 | |||
dd05e2ac01 | |||
c6c2cfaaa5 | |||
6f90064400 | |||
397c15d61e | |||
9e3ac6c31d | |||
10d57e9d88 | |||
95a1e9560e | |||
11e0790f13 | |||
099ddd6805 | |||
182db69ee1 | |||
01982b585f | |||
752ebaa3eb | |||
117acce3f6 | |||
f94bf1f206 | |||
74d5586005 | |||
8896e1232b | |||
d456dcef28 | |||
eae2fc813a | |||
7cdafe0eed | |||
d410b42a34 | |||
bacf9f2d91 | |||
3fc6b0c194 | |||
9dc1645559 | |||
158c51ff3c | |||
864757e428 | |||
fa53a9e41d | |||
36372ebef3 | |||
b3a50d657d | |||
c2f5f19f55 | |||
2b6e69ce1b | |||
c4b4829694 | |||
d503102f75 | |||
4a14753b8c | |||
2c834040b4 | |||
6f682250b6 | |||
52285a1f38 | |||
31d6191251 | |||
d14ed06d4f | |||
e2a84ce52e | |||
999f668f39 | |||
6b546034f4 | |||
c2eaea21f0 | |||
436f408fa8 | |||
da999107f3 | |||
00ea296138 | |||
968af64ee7 | |||
589e811e9b | |||
4f312cfd1a | |||
3dccfc5404 | |||
2b3a996114 | |||
93da106dbc | |||
7e544fcac8 | |||
c2eac43b4f | |||
138acd28e8 | |||
1195398f15 | |||
1fff273abb | |||
b586fcfd2e | |||
45ad639eaf | |||
e5342bd757 | |||
834c32aa7e | |||
df4dcc87e7 | |||
d883c7ea96 | |||
5ed02955f8 | |||
064a9eb9cb | |||
f805691c4f | |||
4548931df3 | |||
6d1dc3845b | |||
40b42fdcb5 | |||
ad907fa373 | |||
ae1088d3f6 | |||
9a3caac75f | |||
1808ab6db8 | |||
aa554405c1 | |||
cd70128ff8 | |||
f49fe3962d | |||
9ee0c8f1b7 | |||
a9bd878057 | |||
a58f91f06b | |||
059f15b172 | |||
caddb45394 | |||
8266c4dd6d | |||
9f82220f4e | |||
3d25baa319 | |||
a8dfcae777 | |||
228c8a7609 | |||
a763d8b8ed | |||
d4e0a4992c | |||
1757f0d690 | |||
b25908d91f | |||
68d51d402a | |||
aa218d2ddc | |||
46fe724012 |
@@ -40,14 +40,19 @@ SITE_URL=http://localhost:8080
# Required to send emails
# By default, SMTP_HOST is set to smtp.gmail.com
SMTP_HOST=smtp.gmail.com
SMTP_PORT=587
SMTP_NAME=Team
SMTP_USERNAME=team@infisical.com
SMTP_PASSWORD=

# Integration
# Optional only if integration is used
OAUTH_CLIENT_SECRET_HEROKU=
OAUTH_TOKEN_URL_HEROKU=
CLIENT_ID_HEROKU=
CLIENT_ID_VERCEL=
CLIENT_ID_NETLIFY=
CLIENT_SECRET_HEROKU=
CLIENT_SECRET_VERCEL=
CLIENT_SECRET_NETLIFY=

# Sentry (optional) for monitoring errors
SENTRY_DSN=
3 .eslintignore Normal file

@@ -0,0 +1,3 @@
node_modules
built
healthcheck.js
30 .github/resources/docker-compose.be-test.yml vendored Normal file

@@ -0,0 +1,30 @@
version: '3'

services:
  backend:
    container_name: infisical-backend-test
    restart: unless-stopped
    depends_on:
      - mongo
    image: infisical/backend:test
    command: npm run start
    environment:
      - NODE_ENV=production
      - MONGO_URL=mongodb://test:example@mongo:27017/?authSource=admin
      - MONGO_USERNAME=test
      - MONGO_PASSWORD=example
    networks:
      - infisical-test

  mongo:
    container_name: infisical-mongo-test
    image: mongo
    restart: always
    environment:
      - MONGO_INITDB_ROOT_USERNAME=test
      - MONGO_INITDB_ROOT_PASSWORD=example
    networks:
      - infisical-test

networks:
  infisical-test:
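For reference, the CI workflow further down in this diff drives this compose file with the following commands (copied from its steps); they can equally be run locally, assuming Docker Compose v2 is installed:

    # Start the backend test container and its MongoDB dependency, waiting for health checks to pass.
    docker compose -f .github/resources/docker-compose.be-test.yml up --wait --quiet-pull

    # Tear the test stack back down afterwards.
    docker compose -f .github/resources/docker-compose.be-test.yml down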
26 .github/resources/healthcheck.sh vendored Executable file

@@ -0,0 +1,26 @@
# Name of the target container to check
container_name="$1"
# Timeout in seconds. Default: 60
timeout=$((${2:-60}));

if [ -z $container_name ]; then
  echo "No container name specified";
  exit 1;
fi

echo "Container: $container_name";
echo "Timeout: $timeout sec";

try=0;
is_healthy="false";
while [ $is_healthy != "true" ];
do
  try=$(($try + 1));
  printf "■";
  is_healthy=$(docker inspect --format='{{json .State.Health}}' $container_name | jq '.Status == "healthy"');
  sleep 1;
  if [[ $try -eq $timeout ]]; then
    echo " Container was not ready within timeout";
    exit 1;
  fi
done
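The script takes a container name plus an optional timeout in seconds (defaulting to 60) and polls docker inspect until the container reports healthy. The workflow below passes only the container name; as a sketch of a local call with an explicit timeout (the 120 is just an illustrative value, and docker plus jq are assumed to be installed):

    # Wait up to 120 seconds for the backend test container to become healthy.
    ./.github/resources/healthcheck.sh infisical-backend-test 120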
88 .github/workflows/docker-image.yml vendored

@@ -3,40 +3,38 @@ name: Push to Docker Hub
 on: [workflow_dispatch]

 jobs:

   backend-image:
     name: Build backend image
     runs-on: ubuntu-latest

     steps:
-      -
-        name: ☁️ Checkout source
+      - name: ☁️ Checkout source
         uses: actions/checkout@v3
-      -
-        name: 🔧 Set up QEMU
+      - name: 🔧 Set up QEMU
         uses: docker/setup-qemu-action@v2
-      -
-        name: 🔧 Set up Docker Buildx
+      - name: 🔧 Set up Docker Buildx
         uses: docker/setup-buildx-action@v2
-      -
-        name: 🐋 Login to Docker Hub
+      - name: 🐋 Login to Docker Hub
         uses: docker/login-action@v2
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
-      # -
-      #   name: 📦 Build backend and export to Docker
-      #   uses: docker/build-push-action@v3
-      #   with:
-      #     load: true
-      #     context: backend
-      #     tags: infisical/backend:test
-      # -
-      #   name: 🧪 Test backend image
-      #   run: |
-      #     docker run --rm infisical/backend:test
-      -
-        name: 🏗️ Build backend and push
+      - name: 📦 Build backend and export to Docker
+        uses: docker/build-push-action@v3
+        with:
+          load: true
+          context: backend
+          tags: infisical/backend:test
+      - name: ⏻ Spawn backend container and dependencies
+        run: |
+          docker compose -f .github/resources/docker-compose.be-test.yml up --wait --quiet-pull
+      - name: 🧪 Test backend image
+        run: |
+          ./.github/resources/healthcheck.sh infisical-backend-test
+      - name: ⏻ Shut down backend container and dependencies
+        run: |
+          docker compose -f .github/resources/docker-compose.be-test.yml down
+      - name: 🏗️ Build backend and push
         uses: docker/build-push-action@v3
         with:
           push: true
@@ -44,42 +42,40 @@ jobs:
           tags: infisical/backend:latest
           platforms: linux/amd64,linux/arm64


   frontend-image:
     name: Build frontend image
     runs-on: ubuntu-latest

     steps:
-      -
-        name: ☁️ Checkout source
+      - name: ☁️ Checkout source
         uses: actions/checkout@v3
-      -
-        name: 🔧 Set up QEMU
+      - name: 🔧 Set up QEMU
         uses: docker/setup-qemu-action@v2
-      -
-        name: 🔧 Set up Docker Buildx
+      - name: 🔧 Set up Docker Buildx
         uses: docker/setup-buildx-action@v2
-      -
-        name: 🐋 Login to Docker Hub
+      - name: 🐋 Login to Docker Hub
         uses: docker/login-action@v2
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
-      # -
-      #   name: 📦 Build frontend and export to Docker
-      #   uses: docker/build-push-action@v3
-      #   with:
-      #     load: true
-      #     context: frontend
-      #     tags: infisical/frontend:test
-      #     build-args: |
-      #       POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
-      # -
-      #   name: 🧪 Test frontend image
-      #   run: |
-      #     docker run --rm infisical/frontend:test
-      -
-        name: 🏗️ Build frontend and push
+      - name: 📦 Build frontend and export to Docker
+        uses: docker/build-push-action@v3
+        with:
+          load: true
+          context: frontend
+          tags: infisical/frontend:test
+          build-args: |
+            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
+      - name: ⏻ Spawn frontend container
+        run: |
+          docker run -d --rm --name infisical-frontend-test infisical/frontend:test
+      - name: 🧪 Test frontend image
+        run: |
+          ./.github/resources/healthcheck.sh infisical-frontend-test
+      - name: ⏻ Shut down frontend container
+        run: |
+          docker stop infisical-frontend-test
+      - name: 🏗️ Build frontend and push
         uses: docker/build-push-action@v3
         with:
           push: true
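Both jobs are gated on workflow_dispatch, so images are only pushed when the workflow is started by hand. A sketch of a manual trigger, assuming the GitHub CLI (gh) is installed and authenticated against the repository:

    # Dispatch the "Push to Docker Hub" workflow defined above.
    gh workflow run docker-image.yml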
29 .github/workflows/release_docker_k8_operator.yaml vendored Normal file

@@ -0,0 +1,29 @@
name: Release Docker image for K8 operator
on: [workflow_dispatch]

jobs:
  release:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      - name: 🔧 Set up QEMU
        uses: docker/setup-qemu-action@v1

      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v1

      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build and push
        id: docker_build
        uses: docker/build-push-action@v2
        with:
          context: k8-operator
          push: true
          platforms: linux/amd64,linux/arm64
          tags: infisical/kubernetes-operator:latest
5 .husky/pre-commit Executable file

@@ -0,0 +1,5 @@

#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"

npx lint-staged
62 README.md

@@ -1,5 +1,5 @@
<h1 align="center">
  <img width="300" src="/img/logoname-black.svg#gh-light-mode-only" alt="ifnisical">
  <img width="300" src="/img/logoname-black.svg#gh-light-mode-only" alt="infisical">
  <img width="300" src="/img/logoname-white.svg#gh-dark-mode-only" alt="infisical">
</h1>
<p align="center">
@@ -27,6 +27,9 @@
  <a href="https://join.slack.com/t/infisical-users/shared_invite/zt-1kdbk07ro-RtoyEt_9E~fyzGo_xQYP6g">
    <img src="https://img.shields.io/badge/chat-on%20Slack-blueviolet" alt="Slack community channel" />
  </a>
  <a href="https://twitter.com/infisical">
    <img src="https://img.shields.io/twitter/follow/infisical?label=Follow" alt="Infisical Twitter" />
  </a>
</h4>

<img src="/img/infisical_github_repo.png" width="100%" alt="Dashboard" />
@@ -60,7 +63,7 @@ To quickly get started, visit our [get started guide](https://infisical.com/docs

## 🔥 What's cool about this?

Infisical makes secret management simple and end-to-end encrypted by default. We're on a mission to make it more accessible to all developers, <i>not just security teams</i>.
Infisical makes secret management simple and end-to-end encrypted by default. We're on a mission to make it more accessible to all developers, <i>not just security teams</i>.

According to a [report](https://www.ekransystem.com/en/blog/secrets-management) in 2019, only 10% of organizations use secret management solutions despite all using digital secrets to some extent.

@@ -73,6 +76,7 @@ We are currently working hard to make Infisical more extensive. Need any integra
Whether it's big or small, we love contributions ❤️ Check out our guide to see how to [get started](https://infisical.com/docs/contributing/overview).

Not sure where to get started? You can:

- [Book a free, non-pressure pairing sessions with one of our teammates](mailto:tony@infisical.com?subject=Pairing%20session&body=I'd%20like%20to%20do%20a%20pairing%20session!)!
- Join our <a href="https://join.slack.com/t/infisical-users/shared_invite/zt-1kdbk07ro-RtoyEt_9E~fyzGo_xQYP6g">Slack</a>, and ask us any questions there.

@@ -81,7 +85,7 @@ Not sure where to get started? You can:
- [Slack](https://join.slack.com/t/infisical-users/shared_invite/zt-1kdbk07ro-RtoyEt_9E~fyzGo_xQYP6g) (For live discussion with the community and the Infisical team)
- [GitHub Discussions](https://github.com/Infisical/infisical/discussions) (For help with building and deeper conversations about features)
- [GitHub Issues](https://github.com/Infisical/infisical-cli/issues) (For any bugs and errors you encounter using Infisical)
- [Twitter](https://twitter.com/infisical) (Get news fast)
- [Twitter](https://twitter.com/infisical) (Get news fast)

## 🐥 Status

@@ -91,12 +95,6 @@ Not sure where to get started? You can:

We're currently in Public Alpha.

## 🚨 Stay Up-to-Date

Infisical officially launched as v.1.0 on November 21st, 2022. However, a lot of new features are coming very quickly. Watch **releases** of this repository to be notified about future updates:


## 🔌 Integrations

We're currently setting the foundation and building [integrations](https://infisical.com/docs/integrations/overview) so secrets can be synced everywhere. Any help is welcome! :)
@@ -130,10 +128,14 @@ We're currently setting the foundation and building [integrations](https://infis
      </tr>
      <tr>
        <td align="left" valign="middle">
          🔜 Vercel (https://github.com/Infisical/infisical/issues/60)
          <a href="https://infisical.com/docs/integrations/cloud/vercel?ref=github.com">
            ✔️ Vercel
          </a>
        </td>
        <td align="left" valign="middle">
          🔜 GitLab CI/CD
          <a href="https://infisical.com/docs/integrations/platforms/kubernetes?ref=github.com">
            ✔️ Kubernetes
          </a>
        </td>
        <td align="left" valign="middle">
          🔜 Fly.io
@@ -155,7 +157,7 @@ We're currently setting the foundation and building [integrations](https://infis
          🔜 GCP
        </td>
        <td align="left" valign="middle">
          🔜 Kubernetes
          🔜 GitLab CI/CD
        </td>
        <td align="left" valign="middle">
          🔜 CircleCI
@@ -179,6 +181,20 @@ We're currently setting the foundation and building [integrations](https://infis
        <td align="left" valign="middle">
          🔜 Netlify (https://github.com/Infisical/infisical/issues/55)
        </td>
        <td align="left" valign="middle">
          🔜 Railway
        </td>
      </tr>
      <tr>
        <td align="left" valign="middle">
          🔜 Bitbucket
        </td>
        <td align="left" valign="middle">
          🔜 Supabase
        </td>
        <td align="left" valign="middle">
          🔜 Serverless
        </td>
      </tr>
    </tbody>
  </table>
@@ -186,7 +202,6 @@ We're currently setting the foundation and building [integrations](https://infis
</td>
<td>

  <table>
    <tbody>
      <tr>
@@ -261,6 +276,18 @@ We're currently setting the foundation and building [integrations](https://infis
          </a>
        </td>
      </tr>
      <tr>
        <td align="left" valign="middle">
          <a href="https://infisical.com/docs/integrations/frameworks/fiber?ref=github.com">
            ✔️ Fiber
          </a>
        </td>
        <td align="left" valign="middle">
          <a href="https://infisical.com/docs/integrations/frameworks/nuxt?ref=github.com">
            ✔️ Nuxt
          </a>
        </td>
      </tr>
    </tbody>
  </table>

@@ -268,7 +295,6 @@ We're currently setting the foundation and building [integrations](https://infis
</tr>
</table>

## 🏘 Open-source vs. paid

This repo is entirely MIT licensed, with the exception of the `ee` directory which will contain premium enterprise features requiring a Infisical license in the future. We're currently focused on developing non-enterprise offerings first that should suit most use-cases.
@@ -277,6 +303,12 @@ This repo is entirely MIT licensed, with the exception of the `ee` directory whi

Looking to report a security vulnerability? Please don't post about it in GitHub issue. Instead, refer to our [SECURITY.md](./SECURITY.md) file.

## 🚨 Stay Up-to-Date

Infisical officially launched as v.1.0 on November 21st, 2022. However, a lot of new features are coming very quickly. Watch **releases** of this repository to be notified about future updates:


## 🦸 Contributors

[//]: contributor-faces
@@ -285,4 +317,4 @@ Looking to report a security vulnerability? Please don't post about it in GitHub
<!-- prettier-ignore-start -->
<!-- markdownlint-disable -->

<a href="https://github.com/dangtony98"><img src="https://avatars.githubusercontent.com/u/25857006?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/mv-turtle"><img src="https://avatars.githubusercontent.com/u/78047717?s=96&v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/maidul98"><img src="https://avatars.githubusercontent.com/u/9300960?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/gangjun06"><img src="https://avatars.githubusercontent.com/u/50910815?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/reginaldbondoc"><img src="https://avatars.githubusercontent.com/u/7693108?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/SH5H"><img src="https://avatars.githubusercontent.com/u/25437192?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/asharonbaltazar"><img src="https://avatars.githubusercontent.com/u/58940073?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/edgarrmondragon"><img src="https://avatars.githubusercontent.com/u/16805946?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/hanywang2"><img src="https://avatars.githubusercontent.com/u/44352119?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/tobias-mintlify"><img src="https://avatars.githubusercontent.com/u/110702161?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/0xflotus"><img src="https://avatars.githubusercontent.com/u/26602940?v=4" width="50" height="50" alt=""/></a>
<a href="https://github.com/dangtony98"><img src="https://avatars.githubusercontent.com/u/25857006?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/mv-turtle"><img src="https://avatars.githubusercontent.com/u/78047717?s=96&v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/maidul98"><img src="https://avatars.githubusercontent.com/u/9300960?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/gangjun06"><img src="https://avatars.githubusercontent.com/u/50910815?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/reginaldbondoc"><img src="https://avatars.githubusercontent.com/u/7693108?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/SH5H"><img src="https://avatars.githubusercontent.com/u/25437192?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/asharonbaltazar"><img src="https://avatars.githubusercontent.com/u/58940073?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/edgarrmondragon"><img src="https://avatars.githubusercontent.com/u/16805946?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/arjunyel"><img src="https://avatars.githubusercontent.com/u/11153289?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/LemmyMwaura"><img src="https://avatars.githubusercontent.com/u/20738858?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/adrianmarinwork"><img src="https://avatars.githubusercontent.com/u/118568289?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/hanywang2"><img src="https://avatars.githubusercontent.com/u/44352119?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/tobias-mintlify"><img src="https://avatars.githubusercontent.com/u/110702161?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/0xflotus"><img src="https://avatars.githubusercontent.com/u/26602940?v=4" width="50" height="50" alt=""/></a> <a href="https://github.com/wanjohiryan"><img src="https://avatars.githubusercontent.com/u/71614375?v=4" width="50" height="50" alt=""/></a>
@@ -1,18 +1,12 @@
 {
-  "root": true,
-  "parser": "@typescript-eslint/parser",
-  "plugins": [
-    "@typescript-eslint",
-    "prettier"
-  ],
-  "extends": [
-    "eslint:recommended",
-    "plugin:@typescript-eslint/eslint-recommended",
-    "plugin:@typescript-eslint/recommended",
-    "prettier"
-  ],
-  "rules": {
-    "no-console": 2,
-    "prettier/prettier": 2
-  }
-}
+  "parser": "@typescript-eslint/parser",
+  "plugins": ["@typescript-eslint"],
+  "extends": [
+    "eslint:recommended",
+    "plugin:@typescript-eslint/eslint-recommended",
+    "plugin:@typescript-eslint/recommended"
+  ],
+  "rules": {
+    "no-console": 2
+  }
+}
@@ -1,7 +0,0 @@
-{
-  "semi": true,
-  "trailingComma": "none",
-  "singleQuote": true,
-  "printWidth": 80,
-  "useTabs": true
-}
@@ -2,11 +2,14 @@ FROM node:16-bullseye-slim

 WORKDIR /app

-COPY package*.json .
+COPY package.json package-lock.json ./

-RUN npm install
+RUN npm ci --only-production --ignore-scripts

 COPY . .

-CMD ["npm", "run", "start"]
+HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
+    CMD node healthcheck.js
+
+
+CMD ["npm", "run", "start"]
8 backend/environment.d.ts vendored

@@ -14,8 +14,12 @@ declare global {
    JWT_SIGNUP_SECRET: string;
    MONGO_URL: string;
    NODE_ENV: 'development' | 'staging' | 'testing' | 'production';
    OAUTH_CLIENT_SECRET_HEROKU: string;
    OAUTH_TOKEN_URL_HEROKU: string;
    CLIENT_ID_HEROKU: string;
    CLIENT_ID_VERCEL: string;
    CLIENT_ID_NETLIFY: string;
    CLIENT_SECRET_HEROKU: string;
    CLIENT_SECRET_VERCEL: string;
    CLIENT_SECRET_NETLIFY: string;
    POSTHOG_HOST: string;
    POSTHOG_PROJECT_API_KEY: string;
    PRIVATE_KEY: string;
24 backend/healthcheck.js Normal file

@@ -0,0 +1,24 @@
const http = require('http');
const PORT = process.env.PORT || 4000;
const options = {
  host: 'localhost',
  port: PORT,
  timeout: 2000,
  path: '/healthcheck'
};

const healthCheck = http.request(options, (res) => {
  console.log(`HEALTHCHECK STATUS: ${res.statusCode}`);
  if (res.statusCode == 200) {
    process.exit(0);
  } else {
    process.exit(1);
  }
});

healthCheck.on('error', function (err) {
  console.error(`HEALTH CHECK ERROR: ${err}`);
  process.exit(1);
});

healthCheck.end();
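The backend Dockerfile earlier in this diff registers this script via HEALTHCHECK (CMD node healthcheck.js), and healthcheck.sh polls the status Docker derives from it. A rough manual check against the test container from the compose file, assuming that container is running:

    # Run the probe directly inside the backend test container (exit code 0 means /healthcheck returned 200).
    docker exec infisical-backend-test node healthcheck.js

    # Or read the aggregated health state Docker tracks (this is what healthcheck.sh inspects).
    docker inspect --format='{{json .State.Health}}' infisical-backend-test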
384
backend/package-lock.json
generated
384
backend/package-lock.json
generated
@ -9,6 +9,7 @@
|
||||
"version": "1.0.0",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@godaddy/terminus": "^4.11.2",
|
||||
"@sentry/node": "^7.14.0",
|
||||
"@sentry/tracing": "^7.19.0",
|
||||
"@types/crypto-js": "^4.1.1",
|
||||
@ -28,7 +29,7 @@
|
||||
"mongoose": "^6.7.2",
|
||||
"nodemailer": "^6.8.0",
|
||||
"posthog-node": "^2.1.0",
|
||||
"query-string": "^7.1.1",
|
||||
"query-string": "^7.1.3",
|
||||
"rimraf": "^3.0.2",
|
||||
"stripe": "^10.7.0",
|
||||
"tweetnacl": "^1.0.3",
|
||||
@ -48,14 +49,10 @@
|
||||
"@typescript-eslint/eslint-plugin": "^5.40.1",
|
||||
"@typescript-eslint/parser": "^5.40.1",
|
||||
"eslint": "^8.26.0",
|
||||
"eslint-config-prettier": "^8.5.0",
|
||||
"eslint-plugin-prettier": "^4.2.1",
|
||||
"husky": "^8.0.1",
|
||||
"install": "^0.13.0",
|
||||
"jest": "^29.3.1",
|
||||
"nodemon": "^2.0.19",
|
||||
"npm": "^8.19.3",
|
||||
"prettier": "^2.7.1",
|
||||
"ts-node": "^10.9.1"
|
||||
}
|
||||
},
|
||||
@ -2029,6 +2026,14 @@
|
||||
"url": "https://opencollective.com/eslint"
|
||||
}
|
||||
},
|
||||
"node_modules/@godaddy/terminus": {
|
||||
"version": "4.11.2",
|
||||
"resolved": "https://registry.npmjs.org/@godaddy/terminus/-/terminus-4.11.2.tgz",
|
||||
"integrity": "sha512-e/kbOWpGKME42eltM/wXM3RxSUOrfureZxEd6Dt6NXyFoJ7E8lnmm7znXydJsL3B7ky4HRFZI+eHrep54NZbeQ==",
|
||||
"dependencies": {
|
||||
"stoppable": "^1.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@humanwhocodes/config-array": {
|
||||
"version": "0.11.7",
|
||||
"resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.7.tgz",
|
||||
@ -2594,19 +2599,6 @@
|
||||
"@maxmind/geoip2-node": "^3.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@sentry/core": {
|
||||
"version": "7.17.4",
|
||||
"resolved": "https://registry.npmjs.org/@sentry/core/-/core-7.17.4.tgz",
|
||||
"integrity": "sha512-U3ABSJBKGK8dJ01nEG2+qNOb6Wv7U3VqoajiZxfV4lpPWNFGCoEhiTytxBlFTOCmdUH8209zSZiWJZaDLy+TSA==",
|
||||
"dependencies": {
|
||||
"@sentry/types": "7.17.4",
|
||||
"@sentry/utils": "7.17.4",
|
||||
"tslib": "^1.9.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/@sentry/node": {
|
||||
"version": "7.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@sentry/node/-/node-7.19.0.tgz",
|
||||
@ -2704,26 +2696,6 @@
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/@sentry/types": {
|
||||
"version": "7.17.4",
|
||||
"resolved": "https://registry.npmjs.org/@sentry/types/-/types-7.17.4.tgz",
|
||||
"integrity": "sha512-QJj8vO4AtxuzQfJIzDnECSmoxwnS+WJsm1Ta2Cwdy+TUCBJyWpW7aIJJGta76zb9gNPGb3UcAbeEjhMJBJeRMQ==",
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/@sentry/utils": {
|
||||
"version": "7.17.4",
|
||||
"resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-7.17.4.tgz",
|
||||
"integrity": "sha512-ioG0ANy8uiWzig82/e7cc+6C9UOxkyBzJDi1luoQVDH6P0/PvM8GzVU+1iUVUipf8+OL1Jh09GrWnd5wLm3XNQ==",
|
||||
"dependencies": {
|
||||
"@sentry/types": "7.17.4",
|
||||
"tslib": "^1.9.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/@sinclair/typebox": {
|
||||
"version": "0.24.51",
|
||||
"resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.24.51.tgz",
|
||||
@ -2943,12 +2915,6 @@
|
||||
"@types/node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/prettier": {
|
||||
"version": "2.7.1",
|
||||
"resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.7.1.tgz",
|
||||
"integrity": "sha512-ri0UmynRRvZiiUJdiz38MmIblKK+oH30MztdBVR95dv/Ubw6neWSb8u1XpRb72L4qsZOhz+L+z9JD40SJmfWow==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/qs": {
|
||||
"version": "6.9.7",
|
||||
"resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz",
|
||||
@ -4035,9 +4001,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/decode-uri-component": {
|
||||
"version": "0.2.0",
|
||||
"resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.0.tgz",
|
||||
"integrity": "sha512-hjf+xovcEn31w/EUYdTXQh/8smFL/dzYjohQGEIgjyNavaJfBY2p5F527Bo1VPATxv0VYTUC2bOcXvqFwk78Og==",
|
||||
"version": "0.2.2",
|
||||
"resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.2.tgz",
|
||||
"integrity": "sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ==",
|
||||
"engines": {
|
||||
"node": ">=0.10"
|
||||
}
|
||||
@ -4291,39 +4257,6 @@
|
||||
"url": "https://opencollective.com/eslint"
|
||||
}
|
||||
},
|
||||
"node_modules/eslint-config-prettier": {
|
||||
"version": "8.5.0",
|
||||
"resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.5.0.tgz",
|
||||
"integrity": "sha512-obmWKLUNCnhtQRKc+tmnYuQl0pFU1ibYJQ5BGhTVB08bHe9wC8qUeG7c08dj9XX+AuPj1YSGSQIHl1pnDHZR0Q==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"eslint-config-prettier": "bin/cli.js"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"eslint": ">=7.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/eslint-plugin-prettier": {
|
||||
"version": "4.2.1",
|
||||
"resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-4.2.1.tgz",
|
||||
"integrity": "sha512-f/0rXLXUt0oFYs8ra4w49wYZBG5GKZpAYsJSm6rnYL5uVDjd+zowwMwVZHnAjf4edNrKpCDYfXDgmRE/Ak7QyQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"prettier-linter-helpers": "^1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"eslint": ">=7.28.0",
|
||||
"prettier": ">=2.0.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"eslint-config-prettier": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/eslint-scope": {
|
||||
"version": "5.1.1",
|
||||
"resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz",
|
||||
@ -4641,12 +4574,6 @@
|
||||
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/fast-diff": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.2.0.tgz",
|
||||
"integrity": "sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/fast-glob": {
|
||||
"version": "3.2.12",
|
||||
"resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz",
|
||||
@ -5146,21 +5073,6 @@
|
||||
"node": ">=10.17.0"
|
||||
}
|
||||
},
|
||||
"node_modules/husky": {
|
||||
"version": "8.0.1",
|
||||
"resolved": "https://registry.npmjs.org/husky/-/husky-8.0.1.tgz",
|
||||
"integrity": "sha512-xs7/chUH/CKdOCs7Zy0Aev9e/dKOMZf3K1Az1nar3tzlv0jfqnYtu235bstsWTmXOR0EfINrPa97yy4Lz6RiKw==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"husky": "lib/bin.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/typicode"
|
||||
}
|
||||
},
|
||||
"node_modules/iconv-lite": {
|
||||
"version": "0.4.24",
|
||||
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
|
||||
@ -5920,7 +5832,6 @@
|
||||
"@jest/transform": "^29.3.1",
|
||||
"@jest/types": "^29.3.1",
|
||||
"@types/babel__traverse": "^7.0.6",
|
||||
"@types/prettier": "^2.1.5",
|
||||
"babel-preset-current-node-syntax": "^1.0.0",
|
||||
"chalk": "^4.0.0",
|
||||
"expect": "^29.3.1",
|
||||
@ -6780,7 +6691,129 @@
|
||||
"treeverse",
|
||||
"validate-npm-package-name",
|
||||
"which",
|
||||
"write-file-atomic"
|
||||
"write-file-atomic",
|
||||
"@colors/colors",
|
||||
"@gar/promisify",
|
||||
"@npmcli/disparity-colors",
|
||||
"@npmcli/git",
|
||||
"@npmcli/installed-package-contents",
|
||||
"@npmcli/metavuln-calculator",
|
||||
"@npmcli/move-file",
|
||||
"@npmcli/name-from-folder",
|
||||
"@npmcli/node-gyp",
|
||||
"@npmcli/promise-spawn",
|
||||
"@npmcli/query",
|
||||
"@tootallnate/once",
|
||||
"agent-base",
|
||||
"agentkeepalive",
|
||||
"aggregate-error",
|
||||
"ansi-regex",
|
||||
"ansi-styles",
|
||||
"aproba",
|
||||
"are-we-there-yet",
|
||||
"asap",
|
||||
"balanced-match",
|
||||
"bin-links",
|
||||
"binary-extensions",
|
||||
"brace-expansion",
|
||||
"builtins",
|
||||
"cidr-regex",
|
||||
"clean-stack",
|
||||
"clone",
|
||||
"cmd-shim",
|
||||
"color-convert",
|
||||
"color-name",
|
||||
"color-support",
|
||||
"common-ancestor-path",
|
||||
"concat-map",
|
||||
"console-control-strings",
|
||||
"cssesc",
|
||||
"debug",
|
||||
"debuglog",
|
||||
"defaults",
|
||||
"delegates",
|
||||
"depd",
|
||||
"dezalgo",
|
||||
"diff",
|
||||
"emoji-regex",
|
||||
"encoding",
|
||||
"env-paths",
|
||||
"err-code",
|
||||
"fs.realpath",
|
||||
"function-bind",
|
||||
"gauge",
|
||||
"has",
|
||||
"has-flag",
|
||||
"has-unicode",
|
||||
"http-cache-semantics",
|
||||
"http-proxy-agent",
|
||||
"https-proxy-agent",
|
||||
"humanize-ms",
|
||||
"iconv-lite",
|
||||
"ignore-walk",
|
||||
"imurmurhash",
|
||||
"indent-string",
|
||||
"infer-owner",
|
||||
"inflight",
|
||||
"inherits",
|
||||
"ip",
|
||||
"ip-regex",
|
||||
"is-core-module",
|
||||
"is-fullwidth-code-point",
|
||||
"is-lambda",
|
||||
"isexe",
|
||||
"json-stringify-nice",
|
||||
"jsonparse",
|
||||
"just-diff",
|
||||
"just-diff-apply",
|
||||
"lru-cache",
|
||||
"minipass-collect",
|
||||
"minipass-fetch",
|
||||
"minipass-flush",
|
||||
"minipass-json-stream",
|
||||
"minipass-sized",
|
||||
"minizlib",
|
||||
"mute-stream",
|
||||
"negotiator",
|
||||
"normalize-package-data",
|
||||
"npm-bundled",
|
||||
"npm-normalize-package-bin",
|
||||
"npm-packlist",
|
||||
"once",
|
||||
"path-is-absolute",
|
||||
"postcss-selector-parser",
|
||||
"promise-all-reject-late",
|
||||
"promise-call-limit",
|
||||
"promise-inflight",
|
||||
"promise-retry",
|
||||
"promzard",
|
||||
"read-cmd-shim",
|
||||
"readable-stream",
|
||||
"retry",
|
||||
"safe-buffer",
|
||||
"safer-buffer",
|
||||
"set-blocking",
|
||||
"signal-exit",
|
||||
"smart-buffer",
|
||||
"socks",
|
||||
"socks-proxy-agent",
|
||||
"spdx-correct",
|
||||
"spdx-exceptions",
|
||||
"spdx-expression-parse",
|
||||
"spdx-license-ids",
|
||||
"string_decoder",
|
||||
"string-width",
|
||||
"strip-ansi",
|
||||
"supports-color",
|
||||
"unique-filename",
|
||||
"unique-slug",
|
||||
"util-deprecate",
|
||||
"validate-npm-package-license",
|
||||
"walk-up-path",
|
||||
"wcwidth",
|
||||
"wide-align",
|
||||
"wrappy",
|
||||
"yallist"
|
||||
],
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
@ -9591,33 +9624,6 @@
|
||||
"node": ">= 0.8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/prettier": {
|
||||
"version": "2.7.1",
|
||||
"resolved": "https://registry.npmjs.org/prettier/-/prettier-2.7.1.tgz",
|
||||
"integrity": "sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"prettier": "bin-prettier.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10.13.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/prettier/prettier?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/prettier-linter-helpers": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz",
|
||||
"integrity": "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"fast-diff": "^1.1.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/pretty-format": {
|
||||
"version": "29.3.1",
|
||||
"resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.3.1.tgz",
|
||||
@ -9703,11 +9709,11 @@
|
||||
}
|
||||
},
|
||||
"node_modules/query-string": {
|
||||
"version": "7.1.1",
|
||||
"resolved": "https://registry.npmjs.org/query-string/-/query-string-7.1.1.tgz",
|
||||
"integrity": "sha512-MplouLRDHBZSG9z7fpuAAcI7aAYjDLhtsiVZsevsfaHWDS2IDdORKbSd1kWUA+V4zyva/HZoSfpwnYMMQDhb0w==",
|
||||
"version": "7.1.3",
|
||||
"resolved": "https://registry.npmjs.org/query-string/-/query-string-7.1.3.tgz",
|
||||
"integrity": "sha512-hh2WYhq4fi8+b+/2Kg9CEge4fDPvHS534aOOvOZeQ3+Vf2mCFsaFBYj0i+iXcAq6I9Vzp5fjMFBlONvayDC1qg==",
|
||||
"dependencies": {
|
||||
"decode-uri-component": "^0.2.0",
|
||||
"decode-uri-component": "^0.2.2",
|
||||
"filter-obj": "^1.1.0",
|
||||
"split-on-first": "^1.0.0",
|
||||
"strict-uri-encode": "^2.0.0"
|
||||
@ -10241,6 +10247,15 @@
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/stoppable": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/stoppable/-/stoppable-1.1.0.tgz",
|
||||
"integrity": "sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw==",
|
||||
"engines": {
|
||||
"node": ">=4",
|
||||
"npm": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/strict-uri-encode": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz",
|
||||
@ -12656,6 +12671,14 @@
|
||||
"strip-json-comments": "^3.1.1"
|
||||
}
|
||||
},
|
||||
"@godaddy/terminus": {
|
||||
"version": "4.11.2",
|
||||
"resolved": "https://registry.npmjs.org/@godaddy/terminus/-/terminus-4.11.2.tgz",
|
||||
"integrity": "sha512-e/kbOWpGKME42eltM/wXM3RxSUOrfureZxEd6Dt6NXyFoJ7E8lnmm7znXydJsL3B7ky4HRFZI+eHrep54NZbeQ==",
|
||||
"requires": {
|
||||
"stoppable": "^1.1.0"
|
||||
}
|
||||
},
|
||||
"@humanwhocodes/config-array": {
|
||||
"version": "0.11.7",
|
||||
"resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.7.tgz",
|
||||
@ -13113,16 +13136,6 @@
|
||||
"@maxmind/geoip2-node": "^3.4.0"
|
||||
}
|
||||
},
|
||||
"@sentry/core": {
|
||||
"version": "7.17.4",
|
||||
"resolved": "https://registry.npmjs.org/@sentry/core/-/core-7.17.4.tgz",
|
||||
"integrity": "sha512-U3ABSJBKGK8dJ01nEG2+qNOb6Wv7U3VqoajiZxfV4lpPWNFGCoEhiTytxBlFTOCmdUH8209zSZiWJZaDLy+TSA==",
|
||||
"requires": {
|
||||
"@sentry/types": "7.17.4",
|
||||
"@sentry/utils": "7.17.4",
|
||||
"tslib": "^1.9.3"
|
||||
}
|
||||
},
|
||||
"@sentry/node": {
|
||||
"version": "7.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@sentry/node/-/node-7.19.0.tgz",
|
||||
@ -13200,20 +13213,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"@sentry/types": {
|
||||
"version": "7.17.4",
|
||||
"resolved": "https://registry.npmjs.org/@sentry/types/-/types-7.17.4.tgz",
|
||||
"integrity": "sha512-QJj8vO4AtxuzQfJIzDnECSmoxwnS+WJsm1Ta2Cwdy+TUCBJyWpW7aIJJGta76zb9gNPGb3UcAbeEjhMJBJeRMQ=="
|
||||
},
|
||||
"@sentry/utils": {
|
||||
"version": "7.17.4",
|
||||
"resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-7.17.4.tgz",
|
||||
"integrity": "sha512-ioG0ANy8uiWzig82/e7cc+6C9UOxkyBzJDi1luoQVDH6P0/PvM8GzVU+1iUVUipf8+OL1Jh09GrWnd5wLm3XNQ==",
|
||||
"requires": {
|
||||
"@sentry/types": "7.17.4",
|
||||
"tslib": "^1.9.3"
|
||||
}
|
||||
},
|
||||
"@sinclair/typebox": {
|
||||
"version": "0.24.51",
|
||||
"resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.24.51.tgz",
|
||||
@ -13433,12 +13432,6 @@
|
||||
"@types/node": "*"
|
||||
}
|
||||
},
|
||||
"@types/prettier": {
|
||||
"version": "2.7.1",
|
||||
"resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.7.1.tgz",
|
||||
"integrity": "sha512-ri0UmynRRvZiiUJdiz38MmIblKK+oH30MztdBVR95dv/Ubw6neWSb8u1XpRb72L4qsZOhz+L+z9JD40SJmfWow==",
|
||||
"dev": true
|
||||
},
|
||||
"@types/qs": {
|
||||
"version": "6.9.7",
|
||||
"resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz",
|
||||
@ -14219,9 +14212,9 @@
|
||||
}
|
||||
},
|
||||
"decode-uri-component": {
|
||||
"version": "0.2.0",
|
||||
"resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.0.tgz",
|
||||
"integrity": "sha512-hjf+xovcEn31w/EUYdTXQh/8smFL/dzYjohQGEIgjyNavaJfBY2p5F527Bo1VPATxv0VYTUC2bOcXvqFwk78Og=="
|
||||
"version": "0.2.2",
|
||||
"resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.2.tgz",
|
||||
"integrity": "sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ=="
|
||||
},
|
||||
"dedent": {
|
||||
"version": "0.7.0",
|
||||
@ -14429,22 +14422,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"eslint-config-prettier": {
|
||||
"version": "8.5.0",
|
||||
"resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.5.0.tgz",
|
||||
"integrity": "sha512-obmWKLUNCnhtQRKc+tmnYuQl0pFU1ibYJQ5BGhTVB08bHe9wC8qUeG7c08dj9XX+AuPj1YSGSQIHl1pnDHZR0Q==",
|
||||
"dev": true,
|
||||
"requires": {}
|
||||
},
|
||||
"eslint-plugin-prettier": {
|
||||
"version": "4.2.1",
|
||||
"resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-4.2.1.tgz",
|
||||
"integrity": "sha512-f/0rXLXUt0oFYs8ra4w49wYZBG5GKZpAYsJSm6rnYL5uVDjd+zowwMwVZHnAjf4edNrKpCDYfXDgmRE/Ak7QyQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"prettier-linter-helpers": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"eslint-scope": {
|
||||
"version": "5.1.1",
|
||||
"resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz",
|
||||
@ -14667,12 +14644,6 @@
|
||||
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
|
||||
"dev": true
|
||||
},
|
||||
"fast-diff": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.2.0.tgz",
|
||||
"integrity": "sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==",
|
||||
"dev": true
|
||||
},
|
||||
"fast-glob": {
|
||||
"version": "3.2.12",
|
||||
"resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz",
|
||||
@ -15034,12 +15005,6 @@
|
||||
"integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==",
|
||||
"dev": true
|
||||
},
|
||||
"husky": {
|
||||
"version": "8.0.1",
|
||||
"resolved": "https://registry.npmjs.org/husky/-/husky-8.0.1.tgz",
|
||||
"integrity": "sha512-xs7/chUH/CKdOCs7Zy0Aev9e/dKOMZf3K1Az1nar3tzlv0jfqnYtu235bstsWTmXOR0EfINrPa97yy4Lz6RiKw==",
|
||||
"dev": true
|
||||
},
|
||||
"iconv-lite": {
|
||||
"version": "0.4.24",
|
||||
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
|
||||
@ -15603,7 +15568,6 @@
|
||||
"@jest/transform": "^29.3.1",
|
||||
"@jest/types": "^29.3.1",
|
||||
"@types/babel__traverse": "^7.0.6",
|
||||
"@types/prettier": "^2.1.5",
|
||||
"babel-preset-current-node-syntax": "^1.0.0",
|
||||
"chalk": "^4.0.0",
|
||||
"expect": "^29.3.1",
|
||||
@ -18246,21 +18210,6 @@
|
||||
"integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==",
|
||||
"dev": true
|
||||
},
|
||||
"prettier": {
|
||||
"version": "2.7.1",
|
||||
"resolved": "https://registry.npmjs.org/prettier/-/prettier-2.7.1.tgz",
|
||||
"integrity": "sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==",
|
||||
"dev": true
|
||||
},
|
||||
"prettier-linter-helpers": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz",
|
||||
"integrity": "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"fast-diff": "^1.1.2"
|
||||
}
|
||||
},
|
||||
"pretty-format": {
|
||||
"version": "29.3.1",
|
||||
"resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.3.1.tgz",
|
||||
@ -18324,11 +18273,11 @@
|
||||
}
|
||||
},
|
||||
"query-string": {
|
||||
"version": "7.1.1",
|
||||
"resolved": "https://registry.npmjs.org/query-string/-/query-string-7.1.1.tgz",
|
||||
"integrity": "sha512-MplouLRDHBZSG9z7fpuAAcI7aAYjDLhtsiVZsevsfaHWDS2IDdORKbSd1kWUA+V4zyva/HZoSfpwnYMMQDhb0w==",
|
||||
"version": "7.1.3",
|
||||
"resolved": "https://registry.npmjs.org/query-string/-/query-string-7.1.3.tgz",
|
||||
"integrity": "sha512-hh2WYhq4fi8+b+/2Kg9CEge4fDPvHS534aOOvOZeQ3+Vf2mCFsaFBYj0i+iXcAq6I9Vzp5fjMFBlONvayDC1qg==",
|
||||
"requires": {
|
||||
"decode-uri-component": "^0.2.0",
|
||||
"decode-uri-component": "^0.2.2",
|
||||
"filter-obj": "^1.1.0",
|
||||
"split-on-first": "^1.0.0",
|
||||
"strict-uri-encode": "^2.0.0"
|
||||
@ -18710,6 +18659,11 @@
|
||||
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
|
||||
"integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ=="
|
||||
},
|
||||
"stoppable": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/stoppable/-/stoppable-1.1.0.tgz",
|
||||
"integrity": "sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw=="
|
||||
},
|
||||
"strict-uri-encode": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz",
|
||||
|
@@ -1,5 +1,6 @@
 {
   "dependencies": {
+    "@godaddy/terminus": "^4.11.2",
     "@sentry/node": "^7.14.0",
     "@sentry/tracing": "^7.19.0",
     "@types/crypto-js": "^4.1.1",
@@ -19,7 +20,7 @@
     "mongoose": "^6.7.2",
     "nodemailer": "^6.8.0",
     "posthog-node": "^2.1.0",
-    "query-string": "^7.1.1",
+    "query-string": "^7.1.3",
     "rimraf": "^3.0.2",
     "stripe": "^10.7.0",
     "tweetnacl": "^1.0.3",
@@ -30,12 +31,13 @@
   "version": "1.0.0",
   "main": "src/index.js",
   "scripts": {
+    "prepare": "cd .. && npm install",
     "start": "npm run build && node build/index.js",
     "dev": "nodemon",
-    "build": "rimraf ./build && tsc && cp -R ./src/templates ./src/json ./build",
+    "build": "rimraf ./build && tsc && cp -R ./src/templates ./build",
     "lint": "eslint . --ext .ts",
     "lint-and-fix": "eslint . --ext .ts --fix",
-    "prettier-format": "prettier --config .prettierrc 'src/**/*.ts' --write"
+    "lint-staged": "lint-staged"
   },
   "repository": {
     "type": "git",
@@ -61,14 +63,10 @@
     "@typescript-eslint/eslint-plugin": "^5.40.1",
     "@typescript-eslint/parser": "^5.40.1",
     "eslint": "^8.26.0",
-    "eslint-config-prettier": "^8.5.0",
-    "eslint-plugin-prettier": "^4.2.1",
-    "husky": "^8.0.1",
     "install": "^0.13.0",
     "jest": "^29.3.1",
     "nodemon": "^2.0.19",
     "npm": "^8.19.3",
-    "prettier": "^2.7.1",
     "ts-node": "^10.9.1"
   }
 }
@@ -10,15 +10,23 @@ const JWT_SIGNUP_LIFETIME = process.env.JWT_SIGNUP_LIFETIME! || '15m';
const JWT_SIGNUP_SECRET = process.env.JWT_SIGNUP_SECRET!;
const MONGO_URL = process.env.MONGO_URL!;
const NODE_ENV = process.env.NODE_ENV! || 'production';
const OAUTH_CLIENT_SECRET_HEROKU = process.env.OAUTH_CLIENT_SECRET_HEROKU!;
const OAUTH_TOKEN_URL_HEROKU = process.env.OAUTH_TOKEN_URL_HEROKU!;
const CLIENT_SECRET_HEROKU = process.env.CLIENT_SECRET_HEROKU!;
const CLIENT_ID_HEROKU = process.env.CLIENT_ID_HEROKU!;
const CLIENT_ID_VERCEL = process.env.CLIENT_ID_VERCEL!;
const CLIENT_ID_NETLIFY = process.env.CLIENT_ID_NETLIFY!;
const CLIENT_SECRET_VERCEL = process.env.CLIENT_SECRET_VERCEL!;
const CLIENT_SECRET_NETLIFY = process.env.CLIENT_SECRET_NETLIFY!;
const CLIENT_SLUG_VERCEL= process.env.CLIENT_SLUG_VERCEL!;
const POSTHOG_HOST = process.env.POSTHOG_HOST! || 'https://app.posthog.com';
const POSTHOG_PROJECT_API_KEY = process.env.POSTHOG_PROJECT_API_KEY! || 'phc_nSin8j5q2zdhpFDI1ETmFNUIuTG4DwKVyIigrY10XiE';
const POSTHOG_PROJECT_API_KEY =
  process.env.POSTHOG_PROJECT_API_KEY! ||
  'phc_nSin8j5q2zdhpFDI1ETmFNUIuTG4DwKVyIigrY10XiE';
const PRIVATE_KEY = process.env.PRIVATE_KEY!;
const PUBLIC_KEY = process.env.PUBLIC_KEY!;
const SENTRY_DSN = process.env.SENTRY_DSN!;
const SITE_URL = process.env.SITE_URL!;
const SMTP_HOST = process.env.SMTP_HOST! || 'smtp.gmail.com';
const SMTP_PORT = process.env.SMTP_PORT! || 587;
const SMTP_NAME = process.env.SMTP_NAME!;
const SMTP_USERNAME = process.env.SMTP_USERNAME!;
const SMTP_PASSWORD = process.env.SMTP_PASSWORD!;
@@ -28,38 +36,44 @@ const STRIPE_PRODUCT_STARTER = process.env.STRIPE_PRODUCT_STARTER!;
const STRIPE_PUBLISHABLE_KEY = process.env.STRIPE_PUBLISHABLE_KEY!;
const STRIPE_SECRET_KEY = process.env.STRIPE_SECRET_KEY!;
const STRIPE_WEBHOOK_SECRET = process.env.STRIPE_WEBHOOK_SECRET!;
const TELEMETRY_ENABLED = (process.env.TELEMETRY_ENABLED! !== 'false') && true;
const TELEMETRY_ENABLED = process.env.TELEMETRY_ENABLED! !== 'false' && true;

export {
  PORT,
  EMAIL_TOKEN_LIFETIME,
  ENCRYPTION_KEY,
  JWT_AUTH_LIFETIME,
  JWT_AUTH_SECRET,
  JWT_REFRESH_LIFETIME,
  JWT_REFRESH_SECRET,
  JWT_SERVICE_SECRET,
  JWT_SIGNUP_LIFETIME,
  JWT_SIGNUP_SECRET,
  MONGO_URL,
  NODE_ENV,
  OAUTH_CLIENT_SECRET_HEROKU,
  OAUTH_TOKEN_URL_HEROKU,
  POSTHOG_HOST,
  POSTHOG_PROJECT_API_KEY,
  PRIVATE_KEY,
  PUBLIC_KEY,
  SENTRY_DSN,
  SITE_URL,
  SMTP_HOST,
  SMTP_NAME,
  SMTP_USERNAME,
  SMTP_PASSWORD,
  STRIPE_PRODUCT_CARD_AUTH,
  STRIPE_PRODUCT_PRO,
  STRIPE_PRODUCT_STARTER,
  STRIPE_PUBLISHABLE_KEY,
  STRIPE_SECRET_KEY,
  STRIPE_WEBHOOK_SECRET,
  TELEMETRY_ENABLED
  PORT,
  EMAIL_TOKEN_LIFETIME,
  ENCRYPTION_KEY,
  JWT_AUTH_LIFETIME,
  JWT_AUTH_SECRET,
  JWT_REFRESH_LIFETIME,
  JWT_REFRESH_SECRET,
  JWT_SERVICE_SECRET,
  JWT_SIGNUP_LIFETIME,
  JWT_SIGNUP_SECRET,
  MONGO_URL,
  NODE_ENV,
  CLIENT_ID_HEROKU,
  CLIENT_ID_VERCEL,
  CLIENT_ID_NETLIFY,
  CLIENT_SECRET_HEROKU,
  CLIENT_SECRET_VERCEL,
  CLIENT_SECRET_NETLIFY,
  CLIENT_SLUG_VERCEL,
  POSTHOG_HOST,
  POSTHOG_PROJECT_API_KEY,
  PRIVATE_KEY,
  PUBLIC_KEY,
  SENTRY_DSN,
  SITE_URL,
  SMTP_HOST,
  SMTP_PORT,
  SMTP_NAME,
  SMTP_USERNAME,
  SMTP_PASSWORD,
  STRIPE_PRODUCT_CARD_AUTH,
  STRIPE_PRODUCT_PRO,
  STRIPE_PRODUCT_STARTER,
  STRIPE_PUBLISHABLE_KEY,
  STRIPE_SECRET_KEY,
  STRIPE_WEBHOOK_SECRET,
  TELEMETRY_ENABLED
};
@ -1,3 +1,4 @@
|
||||
/* eslint-disable @typescript-eslint/no-var-requires */
|
||||
import { Request, Response } from 'express';
|
||||
import jwt from 'jsonwebtoken';
|
||||
import * as Sentry from '@sentry/node';
|
||||
@ -5,17 +6,17 @@ import * as bigintConversion from 'bigint-conversion';
|
||||
const jsrp = require('jsrp');
|
||||
import { User } from '../models';
|
||||
import { createToken, issueTokens, clearTokens } from '../helpers/auth';
|
||||
import {
|
||||
NODE_ENV,
|
||||
JWT_AUTH_LIFETIME,
|
||||
JWT_AUTH_SECRET,
|
||||
JWT_REFRESH_SECRET
|
||||
import {
|
||||
NODE_ENV,
|
||||
JWT_AUTH_LIFETIME,
|
||||
JWT_AUTH_SECRET,
|
||||
JWT_REFRESH_SECRET
|
||||
} from '../config';
|
||||
|
||||
declare module 'jsonwebtoken' {
|
||||
export interface UserIDJwtPayload extends jwt.JwtPayload {
|
||||
userId: string;
|
||||
}
|
||||
export interface UserIDJwtPayload extends jwt.JwtPayload {
|
||||
userId: string;
|
||||
}
|
||||
}
|
||||
|
||||
const clientPublicKeys: any = {};
|
||||
@ -27,47 +28,45 @@ const clientPublicKeys: any = {};
|
||||
* @returns
|
||||
*/
|
||||
export const login1 = async (req: Request, res: Response) => {
|
||||
try {
|
||||
const {
|
||||
email,
|
||||
clientPublicKey
|
||||
}: { email: string; clientPublicKey: string } = req.body;
|
||||
|
||||
const user = await User.findOne({
|
||||
email
|
||||
}).select('+salt +verifier');
|
||||
|
||||
try {
|
||||
const {
|
||||
email,
|
||||
clientPublicKey
|
||||
}: { email: string; clientPublicKey: string } = req.body;
|
||||
|
||||
if (!user) throw new Error('Failed to find user');
|
||||
const user = await User.findOne({
|
||||
email
|
||||
}).select('+salt +verifier');
|
||||
|
||||
const server = new jsrp.server();
|
||||
server.init(
|
||||
{
|
||||
salt: user.salt,
|
||||
verifier: user.verifier
|
||||
},
|
||||
() => {
|
||||
// generate server-side public key
|
||||
const serverPublicKey = server.getPublicKey();
|
||||
clientPublicKeys[email] = {
|
||||
clientPublicKey,
|
||||
serverBInt: bigintConversion.bigintToBuf(server.bInt)
|
||||
};
|
||||
|
||||
if (!user) throw new Error('Failed to find user');
|
||||
|
||||
return res.status(200).send({
|
||||
serverPublicKey,
|
||||
salt: user.salt
|
||||
});
|
||||
}
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to start authentication process'
|
||||
});
|
||||
}
|
||||
const server = new jsrp.server();
|
||||
server.init(
|
||||
{
|
||||
salt: user.salt,
|
||||
verifier: user.verifier
|
||||
},
|
||||
() => {
|
||||
// generate server-side public key
|
||||
const serverPublicKey = server.getPublicKey();
|
||||
clientPublicKeys[email] = {
|
||||
clientPublicKey,
|
||||
serverBInt: bigintConversion.bigintToBuf(server.bInt)
|
||||
};
|
||||
|
||||
return res.status(200).send({
|
||||
serverPublicKey,
|
||||
salt: user.salt
|
||||
});
|
||||
}
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to start authentication process'
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
@ -78,59 +77,59 @@ export const login1 = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const login2 = async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { email, clientProof } = req.body;
|
||||
const user = await User.findOne({
|
||||
email
|
||||
}).select('+salt +verifier +publicKey +encryptedPrivateKey +iv +tag');
|
||||
try {
|
||||
const { email, clientProof } = req.body;
|
||||
const user = await User.findOne({
|
||||
email
|
||||
}).select('+salt +verifier +publicKey +encryptedPrivateKey +iv +tag');
|
||||
|
||||
if (!user) throw new Error('Failed to find user');
|
||||
if (!user) throw new Error('Failed to find user');
|
||||
|
||||
const server = new jsrp.server();
|
||||
server.init(
|
||||
{
|
||||
salt: user.salt,
|
||||
verifier: user.verifier,
|
||||
b: clientPublicKeys[email].serverBInt
|
||||
},
|
||||
async () => {
|
||||
server.setClientPublicKey(clientPublicKeys[email].clientPublicKey);
|
||||
const server = new jsrp.server();
|
||||
server.init(
|
||||
{
|
||||
salt: user.salt,
|
||||
verifier: user.verifier,
|
||||
b: clientPublicKeys[email].serverBInt
|
||||
},
|
||||
async () => {
|
||||
server.setClientPublicKey(clientPublicKeys[email].clientPublicKey);
|
||||
|
||||
// compare server and client shared keys
|
||||
if (server.checkClientProof(clientProof)) {
|
||||
// issue tokens
|
||||
const tokens = await issueTokens({ userId: user._id.toString() });
|
||||
|
||||
// store (refresh) token in httpOnly cookie
|
||||
res.cookie('jid', tokens.refreshToken, {
|
||||
httpOnly: true,
|
||||
path: '/token',
|
||||
sameSite: "strict",
|
||||
secure: NODE_ENV === 'production' ? true : false
|
||||
});
|
||||
// compare server and client shared keys
|
||||
if (server.checkClientProof(clientProof)) {
|
||||
// issue tokens
|
||||
const tokens = await issueTokens({ userId: user._id.toString() });
|
||||
|
||||
// return (access) token in response
|
||||
return res.status(200).send({
|
||||
token: tokens.token,
|
||||
publicKey: user.publicKey,
|
||||
encryptedPrivateKey: user.encryptedPrivateKey,
|
||||
iv: user.iv,
|
||||
tag: user.tag
|
||||
});
|
||||
}
|
||||
// store (refresh) token in httpOnly cookie
|
||||
res.cookie('jid', tokens.refreshToken, {
|
||||
httpOnly: true,
|
||||
path: '/',
|
||||
sameSite: 'strict',
|
||||
secure: NODE_ENV === 'production' ? true : false
|
||||
});
|
||||
|
||||
return res.status(400).send({
|
||||
message: 'Failed to authenticate. Try again?'
|
||||
});
|
||||
}
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to authenticate. Try again?'
|
||||
});
|
||||
}
|
||||
// return (access) token in response
|
||||
return res.status(200).send({
|
||||
token: tokens.token,
|
||||
publicKey: user.publicKey,
|
||||
encryptedPrivateKey: user.encryptedPrivateKey,
|
||||
iv: user.iv,
|
||||
tag: user.tag
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(400).send({
|
||||
message: 'Failed to authenticate. Try again?'
|
||||
});
|
||||
}
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to authenticate. Try again?'
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
@ -140,29 +139,29 @@ export const login2 = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const logout = async (req: Request, res: Response) => {
|
||||
try {
|
||||
await clearTokens({
|
||||
userId: req.user._id.toString()
|
||||
});
|
||||
|
||||
// clear httpOnly cookie
|
||||
res.cookie('jid', '', {
|
||||
httpOnly: true,
|
||||
path: '/token',
|
||||
sameSite: "strict",
|
||||
secure: NODE_ENV === 'production' ? true : false
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to logout'
|
||||
});
|
||||
}
|
||||
try {
|
||||
await clearTokens({
|
||||
userId: req.user._id.toString()
|
||||
});
|
||||
|
||||
return res.status(200).send({
|
||||
message: 'Successfully logged out.'
|
||||
});
|
||||
// clear httpOnly cookie
|
||||
res.cookie('jid', '', {
|
||||
httpOnly: true,
|
||||
path: '/',
|
||||
sameSite: 'strict',
|
||||
secure: NODE_ENV === 'production' ? true : false
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to logout'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
message: 'Successfully logged out.'
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@ -172,9 +171,9 @@ export const logout = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const checkAuth = async (req: Request, res: Response) =>
|
||||
res.status(200).send({
|
||||
message: 'Authenticated'
|
||||
});
|
||||
res.status(200).send({
|
||||
message: 'Authenticated'
|
||||
});
|
||||
|
||||
/**
|
||||
* Return new token by redeeming refresh token
|
||||
@ -183,42 +182,41 @@ export const checkAuth = async (req: Request, res: Response) =>
|
||||
* @returns
|
||||
*/
|
||||
export const getNewToken = async (req: Request, res: Response) => {
|
||||
try {
|
||||
const refreshToken = req.cookies.jid;
|
||||
|
||||
if (!refreshToken) {
|
||||
throw new Error('Failed to find token in request cookies');
|
||||
}
|
||||
|
||||
const decodedToken = <jwt.UserIDJwtPayload>(
|
||||
jwt.verify(refreshToken, JWT_REFRESH_SECRET)
|
||||
);
|
||||
|
||||
const user = await User.findOne({
|
||||
_id: decodedToken.userId
|
||||
}).select('+publicKey');
|
||||
try {
|
||||
const refreshToken = req.cookies.jid;
|
||||
|
||||
if (!user) throw new Error('Failed to authenticate unfound user');
|
||||
if (!user?.publicKey)
|
||||
throw new Error('Failed to authenticate not fully set up account');
|
||||
|
||||
const token = createToken({
|
||||
payload: {
|
||||
userId: decodedToken.userId
|
||||
},
|
||||
expiresIn: JWT_AUTH_LIFETIME,
|
||||
secret: JWT_AUTH_SECRET
|
||||
});
|
||||
|
||||
return res.status(200).send({
|
||||
token
|
||||
});
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Invalid request'
|
||||
});
|
||||
}
|
||||
if (!refreshToken) {
|
||||
throw new Error('Failed to find token in request cookies');
|
||||
}
|
||||
|
||||
const decodedToken = <jwt.UserIDJwtPayload>(
|
||||
jwt.verify(refreshToken, JWT_REFRESH_SECRET)
|
||||
);
|
||||
|
||||
const user = await User.findOne({
|
||||
_id: decodedToken.userId
|
||||
}).select('+publicKey');
|
||||
|
||||
if (!user) throw new Error('Failed to authenticate unfound user');
|
||||
if (!user?.publicKey)
|
||||
throw new Error('Failed to authenticate not fully set up account');
|
||||
|
||||
const token = createToken({
|
||||
payload: {
|
||||
userId: decodedToken.userId
|
||||
},
|
||||
expiresIn: JWT_AUTH_LIFETIME,
|
||||
secret: JWT_AUTH_SECRET
|
||||
});
|
||||
|
||||
return res.status(200).send({
|
||||
token
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Invalid request'
|
||||
});
|
||||
}
|
||||
};
|
||||
|
107	backend/src/controllers/botController.ts	Normal file
@ -0,0 +1,107 @@
|
||||
import { Request, Response } from 'express';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Bot, BotKey } from '../models';
|
||||
import { createBot } from '../helpers/bot';
|
||||
|
||||
interface BotKey {
|
||||
encryptedKey: string;
|
||||
nonce: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return bot for workspace with id [workspaceId]. If a workspace bot doesn't exist,
|
||||
* then create and return a new bot.
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const getBotByWorkspaceId = async (req: Request, res: Response) => {
|
||||
let bot;
|
||||
try {
|
||||
const { workspaceId } = req.params;
|
||||
|
||||
bot = await Bot.findOne({
|
||||
workspace: workspaceId
|
||||
});
|
||||
|
||||
if (!bot) {
|
||||
// case: bot doesn't exist for workspace with id [workspaceId]
|
||||
// -> create a new bot and return it
|
||||
bot = await createBot({
|
||||
name: 'Infisical Bot',
|
||||
workspaceId
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to get bot for workspace'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
bot
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Return bot with id [req.bot._id] with active state set to [isActive].
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const setBotActiveState = async (req: Request, res: Response) => {
|
||||
let bot;
|
||||
try {
|
||||
const { isActive, botKey }: { isActive: boolean, botKey: BotKey } = req.body;
|
||||
|
||||
if (isActive) {
|
||||
// bot state set to active -> share workspace key with bot
|
||||
if (!botKey?.encryptedKey || !botKey?.nonce) {
|
||||
return res.status(400).send({
|
||||
message: 'Failed to set bot state to active - missing bot key'
|
||||
});
|
||||
}
|
||||
|
||||
await BotKey.findOneAndUpdate({
|
||||
workspace: req.bot.workspace
|
||||
}, {
|
||||
encryptedKey: botKey.encryptedKey,
|
||||
nonce: botKey.nonce,
|
||||
sender: req.user._id,
|
||||
bot: req.bot._id,
|
||||
workspace: req.bot.workspace
|
||||
}, {
|
||||
upsert: true,
|
||||
new: true
|
||||
});
|
||||
} else {
|
||||
// case: bot state set to inactive -> delete bot's workspace key
|
||||
await BotKey.deleteOne({
|
||||
bot: req.bot._id
|
||||
});
|
||||
}
|
||||
|
||||
bot = await Bot.findOneAndUpdate({
|
||||
_id: req.bot._id
|
||||
}, {
|
||||
isActive
|
||||
}, {
|
||||
new: true
|
||||
});
|
||||
|
||||
if (!bot) throw new Error('Failed to update bot active state');
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to update bot active state'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
bot
|
||||
});
|
||||
};
|
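When setBotActiveState is called with isActive set to true, the client is expected to supply botKey.encryptedKey and botKey.nonce, i.e. the workspace key sealed to the bot's public key. Below is a minimal sketch of building that payload, assuming the plaintext workspace key is already available on the client and using tweetnacl for the box encryption; the real client may use its own crypto helpers and encodings.

// Sketch only: key encodings and the availability of the plaintext workspace key are assumptions.
import nacl from 'tweetnacl';
import { encodeBase64, decodeBase64, decodeUTF8 } from 'tweetnacl-util';

const buildBotKeyPayload = (
	workspaceKey: string,   // plaintext workspace key
	botPublicKey: string,   // bot.publicKey returned by getBotByWorkspaceId (base64 assumed)
	userPrivateKey: string  // current user's private key (base64 assumed)
) => {
	const nonce = nacl.randomBytes(24);
	const encryptedKey = nacl.box(
		decodeUTF8(workspaceKey),
		nonce,
		decodeBase64(botPublicKey),
		decodeBase64(userPrivateKey)
	);

	return {
		isActive: true,
		botKey: {
			encryptedKey: encodeBase64(encryptedKey),
			nonce: encodeBase64(nonce)
		}
	};
};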
@ -1,4 +1,5 @@
|
||||
import * as authController from './authController';
|
||||
import * as botController from './botController';
|
||||
import * as integrationAuthController from './integrationAuthController';
|
||||
import * as integrationController from './integrationController';
|
||||
import * as keyController from './keyController';
|
||||
@ -16,6 +17,7 @@ import * as workspaceController from './workspaceController';
|
||||
|
||||
export {
|
||||
authController,
|
||||
botController,
|
||||
integrationAuthController,
|
||||
integrationController,
|
||||
keyController,
|
||||
|
@ -3,69 +3,45 @@ import * as Sentry from '@sentry/node';
|
||||
import axios from 'axios';
|
||||
import { readFileSync } from 'fs';
|
||||
import { IntegrationAuth, Integration } from '../models';
|
||||
import { processOAuthTokenRes } from '../helpers/integrationAuth';
|
||||
import { INTEGRATION_SET, ENV_DEV } from '../variables';
|
||||
import { OAUTH_CLIENT_SECRET_HEROKU, OAUTH_TOKEN_URL_HEROKU } from '../config';
|
||||
import { INTEGRATION_SET, INTEGRATION_OPTIONS, ENV_DEV } from '../variables';
|
||||
import { IntegrationService } from '../services';
|
||||
import { getApps, revokeAccess } from '../integrations';
|
||||
|
||||
export const getIntegrationOptions = async (
|
||||
req: Request,
|
||||
res: Response
|
||||
) => {
|
||||
return res.status(200).send({
|
||||
integrationOptions: INTEGRATION_OPTIONS
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform OAuth2 code-token exchange as part of integration [integration] for workspace with id [workspaceId]
|
||||
* Note: integration [integration] must be set up compatible/designed for OAuth2
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const integrationAuthOauthExchange = async (
|
||||
export const oAuthExchange = async (
|
||||
req: Request,
|
||||
res: Response
|
||||
) => {
|
||||
try {
|
||||
let clientSecret;
|
||||
|
||||
const { workspaceId, code, integration } = req.body;
|
||||
|
||||
if (!INTEGRATION_SET.has(integration))
|
||||
throw new Error('Failed to validate integration');
|
||||
|
||||
// use correct client secret
|
||||
switch (integration) {
|
||||
case 'heroku':
|
||||
clientSecret = OAUTH_CLIENT_SECRET_HEROKU;
|
||||
}
|
||||
|
||||
// TODO: unfinished - make compatible with other integration types
|
||||
const res = await axios.post(
|
||||
OAUTH_TOKEN_URL_HEROKU!,
|
||||
new URLSearchParams({
|
||||
grant_type: 'authorization_code',
|
||||
code: code,
|
||||
client_secret: clientSecret
|
||||
} as any)
|
||||
);
|
||||
|
||||
const integrationAuth = await processOAuthTokenRes({
|
||||
|
||||
await IntegrationService.handleOAuthExchange({
|
||||
workspaceId,
|
||||
integration,
|
||||
res
|
||||
code
|
||||
});
|
||||
|
||||
// create or replace integration
|
||||
const integrationObj = await Integration.findOneAndUpdate(
|
||||
{ workspace: workspaceId, integration },
|
||||
{
|
||||
workspace: workspaceId,
|
||||
environment: ENV_DEV,
|
||||
isActive: false,
|
||||
app: null,
|
||||
integration,
|
||||
integrationAuth: integrationAuth._id
|
||||
},
|
||||
{ upsert: true, new: true }
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to get OAuth2 token'
|
||||
message: 'Failed to get OAuth2 code-token exchange'
|
||||
});
|
||||
}
|
||||
|
||||
@ -75,26 +51,25 @@ export const integrationAuthOauthExchange = async (
|
||||
};
|
||||
|
||||
/**
|
||||
* Return list of applications allowed for integration with id [integrationAuthId]
|
||||
* Return list of applications allowed for integration with integration authorization id [integrationAuthId]
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const getIntegrationAuthApps = async (req: Request, res: Response) => {
|
||||
// TODO: unfinished - make compatible with other integration types
|
||||
let apps;
|
||||
try {
|
||||
const res = await axios.get('https://api.heroku.com/apps', {
|
||||
headers: {
|
||||
Accept: 'application/vnd.heroku+json; version=3',
|
||||
Authorization: 'Bearer ' + req.accessToken
|
||||
}
|
||||
apps = await getApps({
|
||||
integrationAuth: req.integrationAuth,
|
||||
accessToken: req.accessToken
|
||||
});
|
||||
|
||||
apps = res.data.map((a: any) => ({
|
||||
name: a.name
|
||||
}));
|
||||
} catch (err) {}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to get integration authorization applications'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
apps
|
||||
@ -108,46 +83,22 @@ export const getIntegrationAuthApps = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const deleteIntegrationAuth = async (req: Request, res: Response) => {
|
||||
// TODO: unfinished - disable application via Heroku API and make compatible with other integration types
|
||||
try {
|
||||
const { integrationAuthId } = req.params;
|
||||
|
||||
// TODO: disable application via Heroku API; figure out what authorization id is
|
||||
|
||||
const integrations = JSON.parse(
|
||||
readFileSync('./src/json/integrations.json').toString()
|
||||
);
|
||||
|
||||
let authorizationId;
|
||||
switch (req.integrationAuth.integration) {
|
||||
case 'heroku':
|
||||
authorizationId = integrations.heroku.clientId;
|
||||
}
|
||||
|
||||
// not sure what authorizationId is?
|
||||
// // revoke authorization
|
||||
// const res2 = await axios.delete(
|
||||
// `https://api.heroku.com/oauth/authorizations/${authorizationId}`,
|
||||
// {
|
||||
// headers: {
|
||||
// 'Accept': 'application/vnd.heroku+json; version=3',
|
||||
// 'Authorization': 'Bearer ' + req.accessToken
|
||||
// }
|
||||
// }
|
||||
// );
|
||||
|
||||
const deletedIntegrationAuth = await IntegrationAuth.findOneAndDelete({
|
||||
_id: integrationAuthId
|
||||
await revokeAccess({
|
||||
integrationAuth: req.integrationAuth,
|
||||
accessToken: req.accessToken
|
||||
});
|
||||
|
||||
if (deletedIntegrationAuth) {
|
||||
await Integration.deleteMany({
|
||||
integrationAuth: deletedIntegrationAuth._id
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to delete integration authorization'
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
return res.status(200).send({
|
||||
message: 'Successfully deleted integration authorization'
|
||||
});
|
||||
}
|
@ -1,11 +1,9 @@
|
||||
import { Request, Response } from 'express';
|
||||
import { readFileSync } from 'fs';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import axios from 'axios';
|
||||
import { Integration } from '../models';
|
||||
import { decryptAsymmetric } from '../utils/crypto';
|
||||
import { decryptSecrets } from '../helpers/secret';
|
||||
import { PRIVATE_KEY } from '../config';
|
||||
import { Integration, Bot, BotKey } from '../models';
|
||||
import { EventService } from '../services';
|
||||
import { eventPushSecrets } from '../events';
|
||||
|
||||
interface Key {
|
||||
encryptedKey: string;
|
||||
@ -24,104 +22,58 @@ interface PushSecret {
|
||||
type: 'shared' | 'personal';
|
||||
}
|
||||
|
||||
/**
|
||||
* Return list of all available integrations on Infisical
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const getIntegrations = async (req: Request, res: Response) => {
|
||||
let integrations;
|
||||
try {
|
||||
integrations = JSON.parse(
|
||||
readFileSync('./src/json/integrations.json').toString()
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to get integrations'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integrations
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Sync secrets [secrets] to integration with id [integrationId]
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const syncIntegration = async (req: Request, res: Response) => {
|
||||
// TODO: unfinished - make more versatile to accomodate for other integrations
|
||||
try {
|
||||
const { key, secrets }: { key: Key; secrets: PushSecret[] } = req.body;
|
||||
const symmetricKey = decryptAsymmetric({
|
||||
ciphertext: key.encryptedKey,
|
||||
nonce: key.nonce,
|
||||
publicKey: req.user.publicKey,
|
||||
privateKey: PRIVATE_KEY
|
||||
});
|
||||
|
||||
// decrypt secrets with symmetric key
|
||||
const content = decryptSecrets({
|
||||
secrets,
|
||||
key: symmetricKey,
|
||||
format: 'object'
|
||||
});
|
||||
|
||||
// TODO: make integration work for other integrations as well
|
||||
const res = await axios.patch(
|
||||
`https://api.heroku.com/apps/${req.integration.app}/config-vars`,
|
||||
content,
|
||||
{
|
||||
headers: {
|
||||
Accept: 'application/vnd.heroku+json; version=3',
|
||||
Authorization: 'Bearer ' + req.accessToken
|
||||
}
|
||||
}
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to sync secrets with integration'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
message: 'Successfully synced secrets with integration'
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Change environment or name of integration with id [integrationId]
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const modifyIntegration = async (req: Request, res: Response) => {
|
||||
export const updateIntegration = async (req: Request, res: Response) => {
|
||||
let integration;
|
||||
|
||||
// TODO: add integration-specific validation to ensure that each
|
||||
// integration has the correct fields populated in [Integration]
|
||||
|
||||
try {
|
||||
const { update } = req.body;
|
||||
|
||||
const {
|
||||
app,
|
||||
environment,
|
||||
isActive,
|
||||
target, // vercel-specific integration param
|
||||
context, // netlify-specific integration param
|
||||
siteId // netlify-specific integration param
|
||||
} = req.body;
|
||||
|
||||
integration = await Integration.findOneAndUpdate(
|
||||
{
|
||||
_id: req.integration._id
|
||||
},
|
||||
update,
|
||||
{
|
||||
environment,
|
||||
isActive,
|
||||
app,
|
||||
target,
|
||||
context,
|
||||
siteId
|
||||
},
|
||||
{
|
||||
new: true
|
||||
}
|
||||
);
|
||||
|
||||
if (integration) {
|
||||
// trigger event - push secrets
|
||||
EventService.handleEvent({
|
||||
event: eventPushSecrets({
|
||||
workspaceId: integration.workspace.toString()
|
||||
})
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to modify integration'
|
||||
message: 'Failed to update integration'
|
||||
});
|
||||
}
|
||||
|
||||
@ -131,7 +83,8 @@ export const modifyIntegration = async (req: Request, res: Response) => {
|
||||
};
|
||||
|
||||
/**
|
||||
* Delete integration with id [integrationId]
|
||||
* Delete integration with id [integrationId] and deactivate bot if there are
|
||||
* no integrations left
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
@ -144,6 +97,29 @@ export const deleteIntegration = async (req: Request, res: Response) => {
|
||||
deletedIntegration = await Integration.findOneAndDelete({
|
||||
_id: integrationId
|
||||
});
|
||||
|
||||
if (!deletedIntegration) throw new Error('Failed to find integration');
|
||||
|
||||
const integrations = await Integration.find({
|
||||
workspace: deletedIntegration.workspace
|
||||
});
|
||||
|
||||
if (integrations.length === 0) {
|
||||
// case: no integrations left, deactivate bot
|
||||
const bot = await Bot.findOneAndUpdate({
|
||||
workspace: deletedIntegration.workspace
|
||||
}, {
|
||||
isActive: false
|
||||
}, {
|
||||
new: true
|
||||
});
|
||||
|
||||
if (bot) {
|
||||
await BotKey.deleteOne({
|
||||
bot: bot._id
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
|
@ -17,16 +17,6 @@ export const uploadKey = async (req: Request, res: Response) => {
|
||||
const { workspaceId } = req.params;
|
||||
const { key } = req.body;
|
||||
|
||||
// validate membership of sender
|
||||
const senderMembership = await findMembership({
|
||||
user: req.user._id,
|
||||
workspace: workspaceId
|
||||
});
|
||||
|
||||
if (!senderMembership) {
|
||||
throw new Error('Failed sender membership validation for workspace');
|
||||
}
|
||||
|
||||
// validate membership of receiver
|
||||
const receiverMembership = await findMembership({
|
||||
user: key.userId,
|
||||
|
@ -217,7 +217,7 @@ export const verifyUserToOrganization = async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { email, code } = req.body;
|
||||
|
||||
user = await User.findOne({ email });
|
||||
user = await User.findOne({ email }).select('+publicKey');
|
||||
if (user && user?.publicKey) {
|
||||
// case: user has already completed account
|
||||
return res.status(403).send({
|
||||
@ -257,7 +257,7 @@ export const verifyUserToOrganization = async (req: Request, res: Response) => {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
error: 'Failed email magic link confirmation'
|
||||
error: 'Failed email magic link verification for organization invitation'
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -1,11 +1,121 @@
|
||||
import { Request, Response } from 'express';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import crypto from 'crypto';
|
||||
const jsrp = require('jsrp');
|
||||
import * as bigintConversion from 'bigint-conversion';
|
||||
import { User, BackupPrivateKey } from '../models';
|
||||
import { User, Token, BackupPrivateKey } from '../models';
|
||||
import { checkEmailVerification } from '../helpers/signup';
|
||||
import { createToken } from '../helpers/auth';
|
||||
import { sendMail } from '../helpers/nodemailer';
|
||||
import { JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET, SITE_URL } from '../config';
|
||||
|
||||
const clientPublicKeys: any = {};
|
||||
|
||||
/**
|
||||
* Password reset step 1: Send email verification link to email [email]
|
||||
* for account recovery.
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const emailPasswordReset = async (req: Request, res: Response) => {
|
||||
let email: string;
|
||||
try {
|
||||
email = req.body.email;
|
||||
|
||||
const user = await User.findOne({ email }).select('+publicKey');
|
||||
if (!user || !user?.publicKey) {
|
||||
// case: user has already completed account
|
||||
|
||||
return res.status(403).send({
|
||||
error: 'Failed to send email verification for password reset'
|
||||
});
|
||||
}
|
||||
|
||||
const token = crypto.randomBytes(16).toString('hex');
|
||||
|
||||
await Token.findOneAndUpdate(
|
||||
{ email },
|
||||
{
|
||||
email,
|
||||
token,
|
||||
createdAt: new Date()
|
||||
},
|
||||
{ upsert: true, new: true }
|
||||
);
|
||||
|
||||
await sendMail({
|
||||
template: 'passwordReset.handlebars',
|
||||
subjectLine: 'Infisical password reset',
|
||||
recipients: [email],
|
||||
substitutions: {
|
||||
email,
|
||||
token,
|
||||
callback_url: SITE_URL + '/password-reset'
|
||||
}
|
||||
});
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to send email for account recovery'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
message: `Sent an email for account recovery to ${email}`
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Password reset step 2: Verify email verification link sent to email [email]
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const emailPasswordResetVerify = async (req: Request, res: Response) => {
|
||||
let user, token;
|
||||
try {
|
||||
const { email, code } = req.body;
|
||||
|
||||
user = await User.findOne({ email }).select('+publicKey');
|
||||
if (!user || !user?.publicKey) {
|
||||
// case: user doesn't exist with email [email] or
|
||||
// hasn't even completed their account
|
||||
return res.status(403).send({
|
||||
error: 'Failed email verification for password reset'
|
||||
});
|
||||
}
|
||||
|
||||
await checkEmailVerification({
|
||||
email,
|
||||
code
|
||||
});
|
||||
|
||||
// generate temporary password-reset token
|
||||
token = createToken({
|
||||
payload: {
|
||||
userId: user._id.toString()
|
||||
},
|
||||
expiresIn: JWT_SIGNUP_LIFETIME,
|
||||
secret: JWT_SIGNUP_SECRET
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed email verification for password reset'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
message: 'Successfully verified email',
|
||||
user,
|
||||
token
|
||||
});
|
||||
}
|
||||
|
||||
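Taken together, emailPasswordReset, emailPasswordResetVerify and resetPassword form a three-step recovery flow: request the email, redeem the code for a temporary token, then overwrite the SRP verifier and encrypted private key. A rough client-side sketch follows; the endpoint paths, response shapes, and the re-encryption of the private key are assumptions and placeholders, not details confirmed by this diff.

// Sketch only: routes, response shapes and the re-encryption step are assumptions.
import axios from 'axios';
const jsrp = require('jsrp');

const completePasswordReset = async (email: string, code: string, newPassword: string) => {
	// redeem the emailed code for a temporary token (emailPasswordResetVerify)
	const { data } = await axios.post('/api/v1/password/email/password-reset-verify', { email, code });

	// derive a fresh salt/verifier pair for the new password
	const client = new jsrp.client();
	await new Promise<void>((resolve) => client.init({ username: email, password: newPassword }, () => resolve()));
	const { salt, verifier } = await new Promise<{ salt: string; verifier: string }>((resolve, reject) =>
		client.createVerifier((err: any, result: any) => (err ? reject(err) : resolve(result)))
	);

	// resetPassword expects the private key re-encrypted under the new password;
	// that step is elided here (placeholder values)
	await axios.post(
		'/api/v1/password/password-reset',
		{ encryptedPrivateKey: '...', iv: '...', tag: '...', salt, verifier },
		{ headers: { Authorization: `Bearer ${data.token}` } }
	);
};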
/**
|
||||
* Return [salt] and [serverPublicKey] as part of step 1 of SRP protocol
|
||||
* @param req
|
||||
@ -43,7 +153,7 @@ export const srp1 = async (req: Request, res: Response) => {
|
||||
}
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
error: 'Failed to start change password process'
|
||||
@ -110,7 +220,7 @@ export const changePassword = async (req: Request, res: Response) => {
|
||||
}
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
error: 'Failed to change password. Try again?'
|
||||
@ -180,10 +290,73 @@ export const createBackupPrivateKey = async (req: Request, res: Response) => {
|
||||
}
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to update backup private key'
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Return backup private key for user
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const getBackupPrivateKey = async (req: Request, res: Response) => {
|
||||
let backupPrivateKey;
|
||||
try {
|
||||
backupPrivateKey = await BackupPrivateKey.findOne({
|
||||
user: req.user._id
|
||||
}).select('+encryptedPrivateKey +iv +tag');
|
||||
|
||||
if (!backupPrivateKey) throw new Error('Failed to find backup private key');
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email});
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to get backup private key'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
backupPrivateKey
|
||||
});
|
||||
}
|
||||
|
||||
export const resetPassword = async (req: Request, res: Response) => {
|
||||
try {
|
||||
const {
|
||||
encryptedPrivateKey,
|
||||
iv,
|
||||
tag,
|
||||
salt,
|
||||
verifier,
|
||||
} = req.body;
|
||||
|
||||
await User.findByIdAndUpdate(
|
||||
req.user._id.toString(),
|
||||
{
|
||||
encryptedPrivateKey,
|
||||
iv,
|
||||
tag,
|
||||
salt,
|
||||
verifier
|
||||
},
|
||||
{
|
||||
new: true
|
||||
}
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email});
|
||||
Sentry.captureException(err);
|
||||
return res.status(400).send({
|
||||
message: 'Failed to get backup private key'
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
message: 'Successfully reset password'
|
||||
});
|
||||
}
|
@ -7,8 +7,9 @@ import {
|
||||
reformatPullSecrets
|
||||
} from '../helpers/secret';
|
||||
import { pushKeys } from '../helpers/key';
|
||||
import { eventPushSecrets } from '../events';
|
||||
import { EventService } from '../services';
|
||||
import { ENV_SET } from '../variables';
|
||||
|
||||
import { postHogClient } from '../services';
|
||||
|
||||
interface PushSecret {
|
||||
@ -60,7 +61,8 @@ export const pushSecrets = async (req: Request, res: Response) => {
|
||||
workspaceId,
|
||||
keys
|
||||
});
|
||||
|
||||
|
||||
|
||||
if (postHogClient) {
|
||||
postHogClient.capture({
|
||||
event: 'secrets pushed',
|
||||
@ -74,6 +76,13 @@ export const pushSecrets = async (req: Request, res: Response) => {
|
||||
});
|
||||
}
|
||||
|
||||
// trigger event - push secrets
|
||||
EventService.handleEvent({
|
||||
event: eventPushSecrets({
|
||||
workspaceId
|
||||
})
|
||||
});
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser({ email: req.user.email });
|
||||
Sentry.captureException(err);
|
||||
@ -192,7 +201,7 @@ export const pullSecretsServiceToken = async (req: Request, res: Response) => {
|
||||
};
|
||||
|
||||
if (postHogClient) {
|
||||
// capture secrets pushed event in production
|
||||
// capture secrets pulled event in production
|
||||
postHogClient.capture({
|
||||
distinctId: req.serviceToken.user.email,
|
||||
event: 'secrets pulled',
|
||||
|
5	backend/src/events/index.ts	Normal file
@@ -0,0 +1,5 @@
import { eventPushSecrets } from "./secret"

export {
	eventPushSecrets
}
37	backend/src/events/secret.ts	Normal file
@@ -0,0 +1,37 @@
import { EVENT_PUSH_SECRETS } from '../variables';

interface PushSecret {
	ciphertextKey: string;
	ivKey: string;
	tagKey: string;
	hashKey: string;
	ciphertextValue: string;
	ivValue: string;
	tagValue: string;
	hashValue: string;
	type: 'shared' | 'personal';
}

/**
 * Return event for pushing secrets
 * @param {Object} obj
 * @param {String} obj.workspaceId - id of workspace to push secrets to
 * @returns
 */
const eventPushSecrets = ({
	workspaceId,
}: {
	workspaceId: string;
}) => {
	return ({
		name: EVENT_PUSH_SECRETS,
		workspaceId,
		payload: {}
	});
}

export {
	eventPushSecrets
}
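eventPushSecrets only builds the event object; dispatch happens through EventService.handleEvent, as the secret and integration controllers do elsewhere in this diff. A minimal usage sketch:

// Mirrors the call sites in secretController/integrationController above.
import { eventPushSecrets } from '../events';
import { EventService } from '../services';

const notifyIntegrations = (workspaceId: string) => {
	// trigger event - push secrets
	EventService.handleEvent({
		event: eventPushSecrets({ workspaceId })
	});
};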
230	backend/src/helpers/bot.ts	Normal file
@ -0,0 +1,230 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import {
|
||||
Bot,
|
||||
BotKey,
|
||||
Secret,
|
||||
ISecret,
|
||||
IUser
|
||||
} from '../models';
|
||||
import {
|
||||
generateKeyPair,
|
||||
encryptSymmetric,
|
||||
decryptSymmetric,
|
||||
decryptAsymmetric
|
||||
} from '../utils/crypto';
|
||||
import { decryptSecrets } from '../helpers/secret';
|
||||
import { ENCRYPTION_KEY } from '../config';
|
||||
import { SECRET_SHARED } from '../variables';
|
||||
|
||||
/**
|
||||
* Create an inactive bot with name [name] for workspace with id [workspaceId]
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.name - name of bot
|
||||
* @param {String} obj.workspaceId - id of workspace that bot belongs to
|
||||
*/
|
||||
const createBot = async ({
|
||||
name,
|
||||
workspaceId,
|
||||
}: {
|
||||
name: string;
|
||||
workspaceId: string;
|
||||
}) => {
|
||||
let bot;
|
||||
try {
|
||||
const { publicKey, privateKey } = generateKeyPair();
|
||||
const { ciphertext, iv, tag } = encryptSymmetric({
|
||||
plaintext: privateKey,
|
||||
key: ENCRYPTION_KEY
|
||||
});
|
||||
|
||||
bot = await new Bot({
|
||||
name,
|
||||
workspace: workspaceId,
|
||||
isActive: false,
|
||||
publicKey,
|
||||
encryptedPrivateKey: ciphertext,
|
||||
iv,
|
||||
tag
|
||||
}).save();
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to create bot');
|
||||
}
|
||||
|
||||
return bot;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return decrypted secrets for workspace with id [workspaceId]
|
||||
* and [environment] using bot
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.workspaceId - id of workspace
|
||||
* @param {String} obj.environment - environment
|
||||
*/
|
||||
const getSecretsHelper = async ({
|
||||
workspaceId,
|
||||
environment
|
||||
}: {
|
||||
workspaceId: string;
|
||||
environment: string;
|
||||
}) => {
|
||||
let content = {} as any;
|
||||
try {
|
||||
const key = await getKey({ workspaceId });
|
||||
const secrets = await Secret.find({
|
||||
workspaceId,
|
||||
environment,
|
||||
type: SECRET_SHARED
|
||||
});
|
||||
|
||||
secrets.forEach((secret: ISecret) => {
|
||||
const secretKey = decryptSymmetric({
|
||||
ciphertext: secret.secretKeyCiphertext,
|
||||
iv: secret.secretKeyIV,
|
||||
tag: secret.secretKeyTag,
|
||||
key
|
||||
});
|
||||
|
||||
const secretValue = decryptSymmetric({
|
||||
ciphertext: secret.secretValueCiphertext,
|
||||
iv: secret.secretValueIV,
|
||||
tag: secret.secretValueTag,
|
||||
key
|
||||
});
|
||||
|
||||
content[secretKey] = secretValue;
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get secrets');
|
||||
}
|
||||
|
||||
return content;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return bot's copy of the workspace key for workspace
|
||||
* with id [workspaceId]
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.workspaceId - id of workspace
|
||||
* @returns {String} key - decrypted workspace key
|
||||
*/
|
||||
const getKey = async ({ workspaceId }: { workspaceId: string }) => {
|
||||
let key;
|
||||
try {
|
||||
const botKey = await BotKey.findOne({
|
||||
workspace: workspaceId
|
||||
}).populate<{ sender: IUser }>('sender', 'publicKey');
|
||||
|
||||
if (!botKey) throw new Error('Failed to find bot key');
|
||||
|
||||
const bot = await Bot.findOne({
|
||||
workspace: workspaceId
|
||||
}).select('+encryptedPrivateKey +iv +tag');
|
||||
|
||||
if (!bot) throw new Error('Failed to find bot');
|
||||
if (!bot.isActive) throw new Error('Bot is not active');
|
||||
|
||||
const privateKeyBot = decryptSymmetric({
|
||||
ciphertext: bot.encryptedPrivateKey,
|
||||
iv: bot.iv,
|
||||
tag: bot.tag,
|
||||
key: ENCRYPTION_KEY
|
||||
});
|
||||
|
||||
key = decryptAsymmetric({
|
||||
ciphertext: botKey.encryptedKey,
|
||||
nonce: botKey.nonce,
|
||||
publicKey: botKey.sender.publicKey as string,
|
||||
privateKey: privateKeyBot
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get workspace key');
|
||||
}
|
||||
|
||||
return key;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return symmetrically encrypted [plaintext] using the
|
||||
* key for workspace with id [workspaceId]
|
||||
* @param {Object} obj1
|
||||
* @param {String} obj1.workspaceId - id of workspace
|
||||
* @param {String} obj1.plaintext - plaintext to encrypt
|
||||
*/
|
||||
const encryptSymmetricHelper = async ({
|
||||
workspaceId,
|
||||
plaintext
|
||||
}: {
|
||||
workspaceId: string;
|
||||
plaintext: string;
|
||||
}) => {
|
||||
|
||||
try {
|
||||
const key = await getKey({ workspaceId });
|
||||
const { ciphertext, iv, tag } = encryptSymmetric({
|
||||
plaintext,
|
||||
key
|
||||
});
|
||||
|
||||
return ({
|
||||
ciphertext,
|
||||
iv,
|
||||
tag
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to perform symmetric encryption with bot');
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Return symmetrically decrypted [ciphertext] using the
|
||||
* key for workspace with id [workspaceId]
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.workspaceId - id of workspace
|
||||
* @param {String} obj.ciphertext - ciphertext to decrypt
|
||||
* @param {String} obj.iv - iv
|
||||
* @param {String} obj.tag - tag
|
||||
*/
|
||||
const decryptSymmetricHelper = async ({
	workspaceId,
	ciphertext,
	iv,
	tag
}: {
	workspaceId: string;
	ciphertext: string;
	iv: string;
	tag: string;
}) => {
	let plaintext;
	try {
		const key = await getKey({ workspaceId });
		// assign to the outer [plaintext] rather than shadowing it with a new const,
		// so the decrypted value survives past the try block and is returned once below
		plaintext = decryptSymmetric({
			ciphertext,
			iv,
			tag,
			key
		});
	} catch (err) {
		Sentry.setUser(null);
		Sentry.captureException(err);
		throw new Error('Failed to perform symmetric decryption with bot');
	}

	return plaintext;
}
|
||||
|
||||
export {
|
||||
createBot,
|
||||
getSecretsHelper,
|
||||
encryptSymmetricHelper,
|
||||
decryptSymmetricHelper
|
||||
}
|
51	backend/src/helpers/event.ts	Normal file
@@ -0,0 +1,51 @@
import { Bot, IBot } from '../models';
import * as Sentry from '@sentry/node';
import { EVENT_PUSH_SECRETS } from '../variables';
import { IntegrationService } from '../services';

interface Event {
	name: string;
	workspaceId: string;
	payload: any;
}

/**
 * Handle event [event]
 * @param {Object} obj
 * @param {Event} obj.event - an event
 * @param {String} obj.event.name - name of event
 * @param {String} obj.event.workspaceId - id of workspace that event is part of
 * @param {Object} obj.event.payload - payload of event (depends on event)
 */
const handleEventHelper = async ({
	event
}: {
	event: Event;
}) => {
	const { workspaceId } = event;

	// TODO: modularize bot check into separate function
	const bot = await Bot.findOne({
		workspace: workspaceId,
		isActive: true
	});

	if (!bot) return;

	try {
		switch (event.name) {
			case EVENT_PUSH_SECRETS:
				IntegrationService.syncIntegrations({
					workspaceId
				});
				break;
		}
	} catch (err) {
		Sentry.setUser(null);
		Sentry.captureException(err);
	}
}

export {
	handleEventHelper
}
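handleEventHelper is consumed through the EventService import used by the controllers; the service itself is not shown in this section, so the wrapper below is only a guess at its shape, delegating straight to the helper.

// Assumed shape of services/EventService; not shown in this diff.
import { handleEventHelper } from '../helpers/event';

interface Event {
	name: string;
	workspaceId: string;
	payload: any;
}

class EventService {
	/**
	 * Forward [event] to the event helper for handling.
	 */
	static handleEvent({ event }: { event: Event }) {
		return handleEventHelper({ event });
	}
}

export default EventService;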
@ -0,0 +1,350 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import {
|
||||
Bot,
|
||||
Integration,
|
||||
IIntegration,
|
||||
IntegrationAuth,
|
||||
IIntegrationAuth
|
||||
} from '../models';
|
||||
import { exchangeCode, exchangeRefresh, syncSecrets } from '../integrations';
|
||||
import { BotService, IntegrationService } from '../services';
|
||||
import {
|
||||
ENV_DEV,
|
||||
EVENT_PUSH_SECRETS,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY
|
||||
} from '../variables';
|
||||
|
||||
interface Update {
|
||||
workspace: string;
|
||||
integration: string;
|
||||
teamId?: string;
|
||||
accountId?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform OAuth2 code-token exchange for workspace with id [workspaceId] and integration
|
||||
* named [integration]
|
||||
* - Store integration access and refresh tokens returned from the OAuth2 code-token exchange
|
||||
* - Add placeholder inactive integration
|
||||
* - Create bot sequence for integration
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.workspaceId - id of workspace
|
||||
* @param {String} obj.integration - name of integration
|
||||
* @param {String} obj.code - code
|
||||
*/
|
||||
const handleOAuthExchangeHelper = async ({
|
||||
workspaceId,
|
||||
integration,
|
||||
code
|
||||
}: {
|
||||
workspaceId: string;
|
||||
integration: string;
|
||||
code: string;
|
||||
}) => {
|
||||
let action;
|
||||
let integrationAuth;
|
||||
try {
|
||||
const bot = await Bot.findOne({
|
||||
workspace: workspaceId,
|
||||
isActive: true
|
||||
});
|
||||
|
||||
if (!bot) throw new Error('Bot must be enabled for OAuth2 code-token exchange');
|
||||
|
||||
// exchange code for access and refresh tokens
|
||||
let res = await exchangeCode({
|
||||
integration,
|
||||
code
|
||||
});
|
||||
|
||||
let update: Update = {
|
||||
workspace: workspaceId,
|
||||
integration
|
||||
}
|
||||
|
||||
switch (integration) {
|
||||
case INTEGRATION_VERCEL:
|
||||
update.teamId = res.teamId;
|
||||
break;
|
||||
case INTEGRATION_NETLIFY:
|
||||
update.accountId = res.accountId;
|
||||
break;
|
||||
}
|
||||
|
||||
integrationAuth = await IntegrationAuth.findOneAndUpdate({
|
||||
workspace: workspaceId,
|
||||
integration
|
||||
}, update, {
|
||||
new: true,
|
||||
upsert: true
|
||||
});
|
||||
|
||||
if (res.refreshToken) {
|
||||
// case: refresh token returned from exchange
|
||||
// set integration auth refresh token
|
||||
await setIntegrationAuthRefreshHelper({
|
||||
integrationAuthId: integrationAuth._id.toString(),
|
||||
refreshToken: res.refreshToken
|
||||
});
|
||||
}
|
||||
|
||||
if (res.accessToken) {
|
||||
// case: access token returned from exchange
|
||||
// set integration auth access token
|
||||
await setIntegrationAuthAccessHelper({
|
||||
integrationAuthId: integrationAuth._id.toString(),
|
||||
accessToken: res.accessToken,
|
||||
accessExpiresAt: res.accessExpiresAt
|
||||
});
|
||||
}
|
||||
|
||||
// initialize new integration after exchange
|
||||
await new Integration({
|
||||
workspace: workspaceId,
|
||||
environment: ENV_DEV,
|
||||
isActive: false,
|
||||
app: null,
|
||||
integration,
|
||||
integrationAuth: integrationAuth._id
|
||||
}).save();
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to handle OAuth2 code-token exchange')
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Sync/push environment variables in workspace with id [workspaceId] to
|
||||
* all active integrations for that workspace
|
||||
* @param {Object} obj
|
||||
* @param {Object} obj.workspaceId - id of workspace
|
||||
*/
|
||||
const syncIntegrationsHelper = async ({
|
||||
workspaceId
|
||||
}: {
|
||||
workspaceId: string;
|
||||
}) => {
|
||||
let integrations;
|
||||
try {
|
||||
|
||||
integrations = await Integration.find({
|
||||
workspace: workspaceId,
|
||||
isActive: true,
|
||||
app: { $ne: null }
|
||||
});
|
||||
|
||||
// for each workspace integration, sync/push secrets
|
||||
// to that integration
|
||||
for await (const integration of integrations) {
|
||||
// get workspace, environment (shared) secrets
|
||||
const secrets = await BotService.getSecrets({
|
||||
workspaceId: integration.workspace.toString(),
|
||||
environment: integration.environment
|
||||
});
|
||||
|
||||
const integrationAuth = await IntegrationAuth.findById(integration.integrationAuth);
|
||||
if (!integrationAuth) throw new Error('Failed to find integration auth');
|
||||
|
||||
// get integration auth access token
|
||||
const accessToken = await getIntegrationAuthAccessHelper({
|
||||
integrationAuthId: integration.integrationAuth.toString()
|
||||
});
|
||||
|
||||
// sync secrets to integration
|
||||
await syncSecrets({
|
||||
integration,
|
||||
integrationAuth,
|
||||
secrets,
|
||||
accessToken
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to sync secrets to integrations');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Return decrypted refresh token using the bot's copy
|
||||
* of the workspace key for workspace belonging to integration auth
|
||||
* with id [integrationAuthId]
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.integrationAuthId - id of integration auth
|
||||
* @param {String} refreshToken - decrypted refresh token
|
||||
*/
|
||||
const getIntegrationAuthRefreshHelper = async ({ integrationAuthId }: { integrationAuthId: string }) => {
|
||||
let refreshToken;
|
||||
try {
|
||||
const integrationAuth = await IntegrationAuth
|
||||
.findById(integrationAuthId)
|
||||
.select('+refreshCiphertext +refreshIV +refreshTag');
|
||||
|
||||
if (!integrationAuth) throw new Error('Failed to find integration auth');
|
||||
|
||||
refreshToken = await BotService.decryptSymmetric({
|
||||
workspaceId: integrationAuth.workspace.toString(),
|
||||
ciphertext: integrationAuth.refreshCiphertext as string,
|
||||
iv: integrationAuth.refreshIV as string,
|
||||
tag: integrationAuth.refreshTag as string
|
||||
});
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get integration refresh token');
|
||||
}
|
||||
|
||||
return refreshToken;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return decrypted access token using the bot's copy
|
||||
* of the workspace key for workspace belonging to integration auth
|
||||
* with id [integrationAuthId]
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.integrationAuthId - id of integration auth
|
||||
* @returns {String} accessToken - decrypted access token
|
||||
*/
|
||||
const getIntegrationAuthAccessHelper = async ({ integrationAuthId }: { integrationAuthId: string }) => {
|
||||
let accessToken;
|
||||
try {
|
||||
const integrationAuth = await IntegrationAuth
|
||||
.findById(integrationAuthId)
|
||||
.select('workspace integration +accessCiphertext +accessIV +accessTag +accessExpiresAt + refreshCiphertext');
|
||||
|
||||
if (!integrationAuth) throw new Error('Failed to find integration auth');
|
||||
|
||||
accessToken = await BotService.decryptSymmetric({
|
||||
workspaceId: integrationAuth.workspace.toString(),
|
||||
ciphertext: integrationAuth.accessCiphertext as string,
|
||||
iv: integrationAuth.accessIV as string,
|
||||
tag: integrationAuth.accessTag as string
|
||||
});
|
||||
|
||||
if (integrationAuth?.accessExpiresAt && integrationAuth?.refreshCiphertext) {
|
||||
// there is a access token expiration date
|
||||
// and refresh token to exchange with the OAuth2 server
|
||||
|
||||
if (integrationAuth.accessExpiresAt < new Date()) {
|
||||
// access token is expired
|
||||
const refreshToken = await getIntegrationAuthRefreshHelper({ integrationAuthId });
|
||||
accessToken = await exchangeRefresh({
|
||||
integration: integrationAuth.integration,
|
||||
refreshToken
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get integration access token');
|
||||
}
|
||||
|
||||
return accessToken;
|
||||
}
|
||||
|
||||
/**
|
||||
* Encrypt refresh token [refreshToken] using the bot's copy
|
||||
* of the workspace key for workspace belonging to integration auth
|
||||
* with id [integrationAuthId] and store it
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.integrationAuthId - id of integration auth
|
||||
* @param {String} obj.refreshToken - refresh token
|
||||
*/
|
||||
const setIntegrationAuthRefreshHelper = async ({
|
||||
integrationAuthId,
|
||||
refreshToken
|
||||
}: {
|
||||
integrationAuthId: string;
|
||||
refreshToken: string;
|
||||
}) => {
|
||||
|
||||
let integrationAuth;
|
||||
try {
|
||||
integrationAuth = await IntegrationAuth
|
||||
.findById(integrationAuthId);
|
||||
|
||||
if (!integrationAuth) throw new Error('Failed to find integration auth');
|
||||
|
||||
const obj = await BotService.encryptSymmetric({
|
||||
workspaceId: integrationAuth.workspace.toString(),
|
||||
plaintext: refreshToken
|
||||
});
|
||||
|
||||
integrationAuth = await IntegrationAuth.findOneAndUpdate({
|
||||
_id: integrationAuthId
|
||||
}, {
|
||||
refreshCiphertext: obj.ciphertext,
|
||||
refreshIV: obj.iv,
|
||||
refreshTag: obj.tag
|
||||
}, {
|
||||
new: true
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to set integration auth refresh token');
|
||||
}
|
||||
|
||||
return integrationAuth;
|
||||
}
|
||||
|
||||
/**
|
||||
* Encrypt access token [accessToken] using the bot's copy
|
||||
* of the workspace key for workspace belonging to integration auth
|
||||
* with id [integrationAuthId] and store it along with [accessExpiresAt]
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.integrationAuthId - id of integration auth
|
||||
* @param {String} obj.accessToken - access token
|
||||
* @param {Date} obj.accessExpiresAt - expiration date of access token
|
||||
*/
|
||||
const setIntegrationAuthAccessHelper = async ({
|
||||
integrationAuthId,
|
||||
accessToken,
|
||||
accessExpiresAt
|
||||
}: {
|
||||
integrationAuthId: string;
|
||||
accessToken: string;
|
||||
accessExpiresAt: Date;
|
||||
}) => {
|
||||
let integrationAuth;
|
||||
try {
|
||||
integrationAuth = await IntegrationAuth.findById(integrationAuthId);
|
||||
|
||||
if (!integrationAuth) throw new Error('Failed to find integration auth');
|
||||
|
||||
const obj = await BotService.encryptSymmetric({
|
||||
workspaceId: integrationAuth.workspace.toString(),
|
||||
plaintext: accessToken
|
||||
});
|
||||
|
||||
integrationAuth = await IntegrationAuth.findOneAndUpdate({
|
||||
_id: integrationAuthId
|
||||
}, {
|
||||
accessCiphertext: obj.ciphertext,
|
||||
accessIV: obj.iv,
|
||||
accessTag: obj.tag,
|
||||
accessExpiresAt
|
||||
}, {
|
||||
new: true
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to save integration auth access token');
|
||||
}
|
||||
|
||||
return integrationAuth;
|
||||
}
|
||||
|
||||
export {
|
||||
handleOAuthExchangeHelper,
|
||||
syncIntegrationsHelper,
|
||||
getIntegrationAuthRefreshHelper,
|
||||
getIntegrationAuthAccessHelper,
|
||||
setIntegrationAuthRefreshHelper,
|
||||
setIntegrationAuthAccessHelper
|
||||
}
|
@ -1,174 +0,0 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import axios from 'axios';
|
||||
import { IntegrationAuth } from '../models';
|
||||
import { encryptSymmetric, decryptSymmetric } from '../utils/crypto';
|
||||
import { IIntegrationAuth } from '../models';
|
||||
import {
|
||||
ENCRYPTION_KEY,
|
||||
OAUTH_CLIENT_SECRET_HEROKU,
|
||||
OAUTH_TOKEN_URL_HEROKU
|
||||
} from '../config';
|
||||
|
||||
/**
|
||||
* Process token exchange and refresh responses from respective OAuth2 authorization servers by
|
||||
* encrypting access and refresh tokens, computing new access token expiration times [accessExpiresAt],
|
||||
* and upserting them into the DB for workspace with id [workspaceId] and integration [integration].
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.workspaceId - id of workspace
|
||||
* @param {String} obj.integration - name of integration (e.g. heroku)
|
||||
* @param {Object} obj.res - response from OAuth2 authorization server
|
||||
*/
|
||||
const processOAuthTokenRes = async ({
|
||||
workspaceId,
|
||||
integration,
|
||||
res
|
||||
}: {
|
||||
workspaceId: string;
|
||||
integration: string;
|
||||
res: any;
|
||||
}): Promise<IIntegrationAuth> => {
|
||||
let integrationAuth;
|
||||
try {
|
||||
// encrypt refresh + access tokens
|
||||
const {
|
||||
ciphertext: refreshCiphertext,
|
||||
iv: refreshIV,
|
||||
tag: refreshTag
|
||||
} = encryptSymmetric({
|
||||
plaintext: res.data.refresh_token,
|
||||
key: ENCRYPTION_KEY
|
||||
});
|
||||
|
||||
const {
|
||||
ciphertext: accessCiphertext,
|
||||
iv: accessIV,
|
||||
tag: accessTag
|
||||
} = encryptSymmetric({
|
||||
plaintext: res.data.access_token,
|
||||
key: ENCRYPTION_KEY
|
||||
});
|
||||
|
||||
// compute access token expiration date
|
||||
const accessExpiresAt = new Date();
|
||||
accessExpiresAt.setSeconds(
|
||||
accessExpiresAt.getSeconds() + res.data.expires_in
|
||||
);
|
||||
|
||||
// create or replace integration authorization with encrypted tokens
|
||||
// and access token expiration date
|
||||
integrationAuth = await IntegrationAuth.findOneAndUpdate(
|
||||
{ workspace: workspaceId, integration },
|
||||
{
|
||||
workspace: workspaceId,
|
||||
integration,
|
||||
refreshCiphertext,
|
||||
refreshIV,
|
||||
refreshTag,
|
||||
accessCiphertext,
|
||||
accessIV,
|
||||
accessTag,
|
||||
accessExpiresAt
|
||||
},
|
||||
{ upsert: true, new: true }
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error(
|
||||
'Failed to process OAuth2 authorization server token response'
|
||||
);
|
||||
}
|
||||
|
||||
return integrationAuth;
|
||||
};
|
||||
|
||||
/**
|
||||
* Return access token for integration either by decrypting a non-expired access token [accessCiphertext] on
|
||||
* the integration authorization document or by requesting a new one by decrypting and exchanging the
|
||||
* refresh token [refreshCiphertext] with the respective OAuth2 authorization server.
|
||||
* @param {Object} obj
|
||||
* @param {IIntegrationAuth} obj.integrationAuth - an integration authorization document
|
||||
* @returns {String} access token - new access token
|
||||
*/
|
||||
const getOAuthAccessToken = async ({
|
||||
integrationAuth
|
||||
}: {
|
||||
integrationAuth: IIntegrationAuth;
|
||||
}) => {
|
||||
let accessToken;
|
||||
try {
|
||||
const {
|
||||
refreshCiphertext,
|
||||
refreshIV,
|
||||
refreshTag,
|
||||
accessCiphertext,
|
||||
accessIV,
|
||||
accessTag,
|
||||
accessExpiresAt
|
||||
} = integrationAuth;
|
||||
|
||||
if (
|
||||
refreshCiphertext &&
|
||||
refreshIV &&
|
||||
refreshTag &&
|
||||
accessCiphertext &&
|
||||
accessIV &&
|
||||
accessTag &&
|
||||
accessExpiresAt
|
||||
) {
|
||||
if (accessExpiresAt < new Date()) {
|
||||
// case: access token expired
|
||||
// TODO: fetch another access token
|
||||
|
||||
let clientSecret;
|
||||
switch (integrationAuth.integration) {
|
||||
case 'heroku':
|
||||
clientSecret = OAUTH_CLIENT_SECRET_HEROKU;
|
||||
}
|
||||
|
||||
// record new access token and refresh token
|
||||
// encrypt refresh + access tokens
|
||||
const refreshToken = decryptSymmetric({
|
||||
ciphertext: refreshCiphertext,
|
||||
iv: refreshIV,
|
||||
tag: refreshTag,
|
||||
key: ENCRYPTION_KEY
|
||||
});
|
||||
|
||||
// TODO: make route compatible with other integration types
|
||||
const res = await axios.post(
|
||||
OAUTH_TOKEN_URL_HEROKU, // maybe shouldn't be a config variable?
|
||||
new URLSearchParams({
|
||||
grant_type: 'refresh_token',
|
||||
refresh_token: refreshToken,
|
||||
client_secret: clientSecret
|
||||
} as any)
|
||||
);
|
||||
|
||||
accessToken = res.data.access_token;
|
||||
|
||||
await processOAuthTokenRes({
|
||||
workspaceId: integrationAuth.workspace.toString(),
|
||||
integration: integrationAuth.integration,
|
||||
res
|
||||
});
|
||||
} else {
|
||||
// case: access token still works
|
||||
accessToken = decryptSymmetric({
|
||||
ciphertext: accessCiphertext,
|
||||
iv: accessIV,
|
||||
tag: accessTag,
|
||||
key: ENCRYPTION_KEY
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get OAuth2 access token');
|
||||
}
|
||||
|
||||
return accessToken;
|
||||
};
|
||||
|
||||
export { processOAuthTokenRes, getOAuthAccessToken };
|
||||
|
@ -1,6 +1,51 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Membership, Key } from '../models';
|
||||
|
||||
/**
|
||||
* Validate that user with id [userId] is a member of workspace with id [workspaceId]
|
||||
* and has at least one of the roles in [acceptedRoles] and statuses in [acceptedStatuses]
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.userId - id of user to validate
|
||||
* @param {String} obj.workspaceId - id of workspace
|
||||
*/
|
||||
const validateMembership = async ({
|
||||
userId,
|
||||
workspaceId,
|
||||
acceptedRoles,
|
||||
acceptedStatuses
|
||||
}: {
|
||||
userId: string;
|
||||
workspaceId: string;
|
||||
acceptedRoles: string[];
|
||||
acceptedStatuses: string[];
|
||||
}) => {
|
||||
|
||||
let membership;
|
||||
try {
|
||||
membership = await Membership.findOne({
|
||||
user: userId,
|
||||
workspace: workspaceId
|
||||
});
|
||||
|
||||
if (!membership) throw new Error('Failed to find membership');
|
||||
|
||||
if (!acceptedRoles.includes(membership.role)) {
|
||||
throw new Error('Failed to validate membership role');
|
||||
}
|
||||
|
||||
if (!acceptedStatuses.includes(membership.status)) {
|
||||
throw new Error('Failed to validate membership status');
|
||||
}
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to validate membership');
|
||||
}
|
||||
|
||||
return membership;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return membership matching criteria specified in query [queryObj]
|
||||
* @param {Object} queryObj - query object
|
||||
@ -97,4 +142,9 @@ const deleteMembership = async ({ membershipId }: { membershipId: string }) => {
|
||||
return deletedMembership;
|
||||
};
|
||||
|
||||
export { addMemberships, findMembership, deleteMembership };
|
||||
export {
|
||||
validateMembership,
|
||||
addMemberships,
|
||||
findMembership,
|
||||
deleteMembership
|
||||
};
|
||||
|
@ -2,21 +2,40 @@ import fs from 'fs';
|
||||
import path from 'path';
|
||||
import handlebars from 'handlebars';
|
||||
import nodemailer from 'nodemailer';
|
||||
import { SMTP_HOST, SMTP_NAME, SMTP_USERNAME, SMTP_PASSWORD } from '../config';
|
||||
import {
|
||||
SMTP_HOST,
|
||||
SMTP_PORT,
|
||||
SMTP_NAME,
|
||||
SMTP_USERNAME,
|
||||
SMTP_PASSWORD
|
||||
} from '../config';
|
||||
import SMTPConnection from 'nodemailer/lib/smtp-connection';
|
||||
import * as Sentry from '@sentry/node';
|
||||
|
||||
const mailOpts: SMTPConnection.Options = {
|
||||
host: SMTP_HOST,
|
||||
port: SMTP_PORT as number
|
||||
};
|
||||
if (SMTP_USERNAME && SMTP_PASSWORD) {
|
||||
mailOpts.auth = {
|
||||
user: SMTP_USERNAME,
|
||||
pass: SMTP_PASSWORD
|
||||
};
|
||||
}
|
||||
// create nodemailer transporter
|
||||
const transporter = nodemailer.createTransport({
|
||||
host: SMTP_HOST,
|
||||
port: 587,
|
||||
auth: {
|
||||
user: SMTP_USERNAME,
|
||||
pass: SMTP_PASSWORD
|
||||
}
|
||||
});
|
||||
const transporter = nodemailer.createTransport(mailOpts);
|
||||
transporter
|
||||
.verify()
|
||||
.then(() => console.log('SMTP - Successfully connected'))
|
||||
.catch((err) => console.log('SMTP - Failed to connect'));
|
||||
.verify()
|
||||
.then(() => {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureMessage('SMTP - Successfully connected');
|
||||
})
|
||||
.catch((err) => {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(
|
||||
`SMTP - Failed to connect to ${SMTP_HOST}:${SMTP_PORT} \n\t${err}`
|
||||
);
|
||||
});
|
||||
|
||||
/**
|
||||
* @param {Object} obj
|
||||
@ -26,33 +45,34 @@ transporter
|
||||
* @param {Object} obj.substitutions - object containing template substitutions
|
||||
*/
|
||||
const sendMail = async ({
|
||||
template,
|
||||
subjectLine,
|
||||
recipients,
|
||||
substitutions
|
||||
template,
|
||||
subjectLine,
|
||||
recipients,
|
||||
substitutions
|
||||
}: {
|
||||
template: string;
|
||||
subjectLine: string;
|
||||
recipients: string[];
|
||||
substitutions: any;
|
||||
template: string;
|
||||
subjectLine: string;
|
||||
recipients: string[];
|
||||
substitutions: any;
|
||||
}) => {
|
||||
try {
|
||||
const html = fs.readFileSync(
|
||||
path.resolve(__dirname, '../templates/' + template),
|
||||
'utf8'
|
||||
);
|
||||
const temp = handlebars.compile(html);
|
||||
const htmlToSend = temp(substitutions);
|
||||
try {
|
||||
const html = fs.readFileSync(
|
||||
path.resolve(__dirname, '../templates/' + template),
|
||||
'utf8'
|
||||
);
|
||||
const temp = handlebars.compile(html);
|
||||
const htmlToSend = temp(substitutions);
|
||||
|
||||
await transporter.sendMail({
|
||||
from: `"${SMTP_NAME}" <${SMTP_USERNAME}>`,
|
||||
to: recipients.join(', '),
|
||||
subject: subjectLine,
|
||||
html: htmlToSend
|
||||
});
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
}
|
||||
await transporter.sendMail({
|
||||
from: `"${SMTP_NAME}" <${SMTP_USERNAME}>`,
|
||||
to: recipients.join(', '),
|
||||
subject: subjectLine,
|
||||
html: htmlToSend
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
}
|
||||
};
|
||||
|
||||
export { sendMail };
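
A minimal usage sketch of sendMail. The template name, subject line, and code substitution mirror the email verification helper elsewhere in this diff; the wrapper function and import path are assumptions.

import { sendMail } from '../helpers/nodemailer'; // import path is an assumption

const sendConfirmationCode = async (email: string, code: string) => {
  await sendMail({
    template: 'emailVerification.handlebars',
    subjectLine: 'Infisical confirmation code',
    recipients: [email],
    substitutions: { code } // the template reads a `code` substitution
  });
};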
|
||||
|
@ -2,34 +2,35 @@ import rateLimit from 'express-rate-limit';
|
||||
|
||||
// 400 requests per 15 minutes
|
||||
const apiLimiter = rateLimit({
|
||||
windowMs: 15 * 60 * 1000,
|
||||
max: 400,
|
||||
standardHeaders: true,
|
||||
legacyHeaders: false
|
||||
windowMs: 15 * 60 * 1000,
|
||||
max: 400,
|
||||
standardHeaders: true,
|
||||
legacyHeaders: false,
|
||||
skip: (request) => request.path === '/healthcheck'
|
||||
});
|
||||
|
||||
// 10 requests per hour
|
||||
const signupLimiter = rateLimit({
|
||||
windowMs: 60 * 60 * 1000,
|
||||
max: 10,
|
||||
standardHeaders: true,
|
||||
legacyHeaders: false
|
||||
windowMs: 60 * 60 * 1000,
|
||||
max: 10,
|
||||
standardHeaders: true,
|
||||
legacyHeaders: false
|
||||
});
|
||||
|
||||
// 20 requests per hour
|
||||
const loginLimiter = rateLimit({
|
||||
windowMs: 60 * 60 * 1000,
|
||||
max: 20,
|
||||
standardHeaders: true,
|
||||
legacyHeaders: false
|
||||
windowMs: 60 * 60 * 1000,
|
||||
max: 20,
|
||||
standardHeaders: true,
|
||||
legacyHeaders: false
|
||||
});
|
||||
|
||||
// 10 requests per hour
|
||||
const passwordLimiter = rateLimit({
|
||||
windowMs: 60 * 60 * 1000,
|
||||
max: 10,
|
||||
standardHeaders: true,
|
||||
legacyHeaders: false
|
||||
windowMs: 60 * 60 * 1000,
|
||||
max: 10,
|
||||
standardHeaders: true,
|
||||
legacyHeaders: false
|
||||
});
|
||||
|
||||
export { apiLimiter, signupLimiter, loginLimiter, passwordLimiter };
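
A sketch of how these limiters might be wired up; apiLimiter is applied app-wide in production per index.ts later in this diff, while the per-route pairings below are assumptions.

import express from 'express';
import { apiLimiter, signupLimiter, loginLimiter } from './helpers/rateLimiter';

const app = express();

app.use(apiLimiter); // app-wide: 400 requests / 15 min, skips /healthcheck
app.use('/api/v1/signup', signupLimiter); // assumed pairing: 10 requests / hour
app.use('/api/v1/auth', loginLimiter); // assumed pairing: 20 requests / hour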
|
||||
|
@ -33,7 +33,7 @@ const sendEmailVerification = async ({ email }: { email: string }) => {
|
||||
// send mail
|
||||
await sendMail({
|
||||
template: 'emailVerification.handlebars',
|
||||
subjectLine: 'Infisical workspace invitation',
|
||||
subjectLine: 'Infisical confirmation code',
|
||||
recipients: [email],
|
||||
substitutions: {
|
||||
code: token
|
||||
@ -66,7 +66,7 @@ const checkEmailVerification = async ({
|
||||
email,
|
||||
token: code
|
||||
});
|
||||
|
||||
|
||||
if (!token) throw new Error('Failed to find email verification token');
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
|
@ -1,13 +1,16 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import {
|
||||
Workspace,
|
||||
Bot,
|
||||
Membership,
|
||||
Key,
|
||||
Secret
|
||||
} from '../models';
|
||||
import { createBot } from '../helpers/bot';
|
||||
|
||||
/**
|
||||
* Create a workspace with name [name] in organization with id [organizationId]
|
||||
* and a bot for it.
|
||||
* @param {String} name - name of workspace to create.
|
||||
* @param {String} organizationId - id of organization to create workspace in
|
||||
* @param {Object} workspace - new workspace
|
||||
@ -21,10 +24,16 @@ const createWorkspace = async ({
|
||||
}) => {
|
||||
let workspace;
|
||||
try {
|
||||
// create workspace
|
||||
workspace = await new Workspace({
|
||||
name,
|
||||
organization: organizationId
|
||||
}).save();
|
||||
|
||||
const bot = await createBot({
|
||||
name: 'Infisical Bot',
|
||||
workspaceId: workspace._id.toString()
|
||||
});
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
@ -43,6 +52,9 @@ const createWorkspace = async ({
|
||||
const deleteWorkspace = async ({ id }: { id: string }) => {
|
||||
try {
|
||||
await Workspace.deleteOne({ _id: id });
|
||||
await Bot.deleteOne({
|
||||
workspace: id
|
||||
});
|
||||
await Membership.deleteMany({
|
||||
workspace: id
|
||||
});
|
||||
|
@ -1,3 +1,5 @@
|
||||
/* eslint-disable no-console */
|
||||
import http from 'http';
|
||||
import express from 'express';
|
||||
import helmet from 'helmet';
|
||||
import cors from 'cors';
|
||||
@ -7,21 +9,23 @@ import dotenv from 'dotenv';
|
||||
|
||||
dotenv.config();
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { PORT, SENTRY_DSN, NODE_ENV, MONGO_URL, SITE_URL, POSTHOG_PROJECT_API_KEY, POSTHOG_HOST, TELEMETRY_ENABLED } from './config';
|
||||
import { PORT, SENTRY_DSN, NODE_ENV, MONGO_URL, SITE_URL } from './config';
|
||||
import { apiLimiter } from './helpers/rateLimiter';
|
||||
import { createTerminus } from '@godaddy/terminus';
|
||||
|
||||
const app = express();
|
||||
|
||||
Sentry.init({
|
||||
dsn: SENTRY_DSN,
|
||||
tracesSampleRate: 1.0,
|
||||
debug: NODE_ENV === 'production' ? false : true,
|
||||
environment: NODE_ENV
|
||||
dsn: SENTRY_DSN,
|
||||
tracesSampleRate: 1.0,
|
||||
debug: NODE_ENV === 'production' ? false : true,
|
||||
environment: NODE_ENV
|
||||
});
|
||||
|
||||
import {
|
||||
signup as signupRouter,
|
||||
auth as authRouter,
|
||||
bot as botRouter,
|
||||
organization as organizationRouter,
|
||||
workspace as workspaceRouter,
|
||||
membershipOrg as membershipOrgRouter,
|
||||
@ -39,31 +43,35 @@ import {
|
||||
} from './routes';
|
||||
|
||||
const connectWithRetry = () => {
|
||||
mongoose.connect(MONGO_URL)
|
||||
.then(() => console.log('Successfully connected to DB'))
|
||||
.catch((e) => {
|
||||
console.log('Failed to connect to DB ', e);
|
||||
setTimeout(() => {
|
||||
console.log(e);
|
||||
}, 5000);
|
||||
});
|
||||
}
|
||||
mongoose
|
||||
.connect(MONGO_URL)
|
||||
.then(() => console.log('Successfully connected to DB'))
|
||||
.catch((e) => {
|
||||
console.log('Failed to connect to DB ', e);
|
||||
setTimeout(() => {
|
||||
console.log(e);
|
||||
}, 5000);
|
||||
});
|
||||
return mongoose.connection;
|
||||
};
|
||||
|
||||
connectWithRetry();
|
||||
const dbConnection = connectWithRetry();
|
||||
|
||||
app.enable('trust proxy');
|
||||
app.use(cookieParser());
|
||||
app.use(cors({
|
||||
credentials: true,
|
||||
origin: SITE_URL
|
||||
}));
|
||||
app.use(
|
||||
cors({
|
||||
credentials: true,
|
||||
origin: SITE_URL
|
||||
})
|
||||
);
|
||||
|
||||
if (NODE_ENV === 'production') {
|
||||
// enable app-wide rate-limiting + helmet security
|
||||
// in production
|
||||
app.disable('x-powered-by');
|
||||
app.use(apiLimiter);
|
||||
app.use(helmet());
|
||||
// enable app-wide rate-limiting + helmet security
|
||||
// in production
|
||||
app.disable('x-powered-by');
|
||||
app.use(apiLimiter);
|
||||
app.use(helmet());
|
||||
}
|
||||
|
||||
app.use(express.json());
|
||||
@ -71,6 +79,7 @@ app.use(express.json());
|
||||
// routers
|
||||
app.use('/api/v1/signup', signupRouter);
|
||||
app.use('/api/v1/auth', authRouter);
|
||||
app.use('/api/v1/bot', botRouter);
|
||||
app.use('/api/v1/user', userRouter);
|
||||
app.use('/api/v1/user-action', userActionRouter);
|
||||
app.use('/api/v1/organization', organizationRouter);
|
||||
@ -86,6 +95,35 @@ app.use('/api/v1/stripe', stripeRouter);
|
||||
app.use('/api/v1/integration', integrationRouter);
|
||||
app.use('/api/v1/integration-auth', integrationAuthRouter);
|
||||
|
||||
app.listen(PORT, () => {
|
||||
console.log('Listening on PORT ' + PORT);
|
||||
const server = http.createServer(app);
|
||||
|
||||
const onSignal = () => {
|
||||
console.log('Server is starting clean-up');
|
||||
return Promise.all([
|
||||
() => {
|
||||
dbConnection.close(() => {
|
||||
console.info('Database connection closed');
|
||||
});
|
||||
}
|
||||
]);
|
||||
};
|
||||
|
||||
const healthCheck = () => {
|
||||
// `state.isShuttingDown` (boolean) shows whether the server is shutting down or not
|
||||
// optionally include a resolve value to be included as
// info in the health check response
return Promise.resolve();
|
||||
};
|
||||
|
||||
createTerminus(server, {
|
||||
healthChecks: {
|
||||
'/healthcheck': healthCheck,
|
||||
onSignal
|
||||
}
|
||||
});
|
||||
|
||||
server.listen(PORT, () => {
|
||||
console.log('Listening on PORT ' + PORT);
|
||||
});
|
||||
|
169
backend/src/integrations/apps.ts
Normal file
@ -0,0 +1,169 @@
|
||||
import axios from 'axios';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import {
|
||||
IIntegrationAuth
|
||||
} from '../models';
|
||||
import {
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_HEROKU_API_URL,
|
||||
INTEGRATION_VERCEL_API_URL,
|
||||
INTEGRATION_NETLIFY_API_URL
|
||||
} from '../variables';
|
||||
|
||||
/**
|
||||
* Return list of names of apps for integration named [integration]
|
||||
* @param {Object} obj
|
||||
* @param {IIntegrationAuth} obj.integrationAuth - integration auth document for the integration
|
||||
* @param {String} obj.accessToken - access token for integration
|
||||
* @returns {Object[]} apps - names of integration apps
|
||||
* @returns {String} apps.name - name of integration app
|
||||
*/
|
||||
const getApps = async ({
|
||||
integrationAuth,
|
||||
accessToken
|
||||
}: {
|
||||
integrationAuth: IIntegrationAuth;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
|
||||
interface App {
|
||||
name: string;
|
||||
siteId?: string;
|
||||
}
|
||||
|
||||
let apps: App[]; // TODO: add type and define payloads for apps
|
||||
try {
|
||||
switch (integrationAuth.integration) {
|
||||
case INTEGRATION_HEROKU:
|
||||
apps = await getAppsHeroku({
|
||||
accessToken
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_VERCEL:
|
||||
apps = await getAppsVercel({
|
||||
accessToken
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_NETLIFY:
|
||||
apps = await getAppsNetlify({
|
||||
integrationAuth,
|
||||
accessToken
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get integration apps');
|
||||
}
|
||||
|
||||
return apps;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return list of names of apps for Heroku integration
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.accessToken - access token for Heroku API
|
||||
* @returns {Object[]} apps - names of Heroku apps
|
||||
* @returns {String} apps.name - name of Heroku app
|
||||
*/
|
||||
const getAppsHeroku = async ({
|
||||
accessToken
|
||||
}: {
|
||||
accessToken: string;
|
||||
}) => {
|
||||
let apps;
|
||||
try {
|
||||
const res = (await axios.get(`${INTEGRATION_HEROKU_API_URL}/apps`, {
|
||||
headers: {
|
||||
Accept: 'application/vnd.heroku+json; version=3',
|
||||
Authorization: `Bearer ${accessToken}`
|
||||
}
|
||||
})).data;
|
||||
|
||||
apps = res.map((a: any) => ({
|
||||
name: a.name
|
||||
}));
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get Heroku integration apps');
|
||||
}
|
||||
|
||||
return apps;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return list of names of apps for Vercel integration
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.accessToken - access token for Vercel API
|
||||
* @returns {Object[]} apps - names of Vercel apps
|
||||
* @returns {String} apps.name - name of Vercel app
|
||||
*/
|
||||
const getAppsVercel = async ({
|
||||
accessToken
|
||||
}: {
|
||||
accessToken: string;
|
||||
}) => {
|
||||
let apps;
|
||||
try {
|
||||
const res = (await axios.get(`${INTEGRATION_VERCEL_API_URL}/v9/projects`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`
|
||||
}
|
||||
})).data;
|
||||
|
||||
apps = res.projects.map((a: any) => ({
|
||||
name: a.name
|
||||
}));
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get Vercel integration apps');
|
||||
}
|
||||
|
||||
return apps;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return list of names of sites for Netlify integration
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.accessToken - access token for Netlify API
|
||||
* @returns {Object[]} apps - names of Netlify sites
|
||||
* @returns {String} apps.name - name of Netlify site
|
||||
*/
|
||||
const getAppsNetlify = async ({
|
||||
integrationAuth,
|
||||
accessToken
|
||||
}: {
|
||||
integrationAuth: IIntegrationAuth;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
let apps;
|
||||
try {
|
||||
const res = (await axios.get(`${INTEGRATION_NETLIFY_API_URL}/api/v1/sites`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`
|
||||
}
|
||||
})).data;
|
||||
|
||||
apps = res.map((a: any) => ({
|
||||
name: a.name,
|
||||
siteId: a.site_id
|
||||
}));
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get Netlify integration apps');
|
||||
}
|
||||
|
||||
return apps;
|
||||
}
|
||||
|
||||
export {
|
||||
getApps
|
||||
}
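
A minimal sketch of calling getApps from a route handler; the variable names are placeholders, and the access token is assumed to already be decrypted.

import { getApps } from '../integrations';
import { IIntegrationAuth } from '../models';

const listIntegrationApps = async (integrationAuth: IIntegrationAuth, accessToken: string) => {
  // resolves to e.g. [{ name: 'my-app' }] for Heroku/Vercel, or [{ name: 'my-site', siteId: '...' }] for Netlify
  return await getApps({ integrationAuth, accessToken });
};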
|
241
backend/src/integrations/exchange.ts
Normal file
@ -0,0 +1,241 @@
|
||||
import axios from 'axios';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import {
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
INTEGRATION_VERCEL_TOKEN_URL,
|
||||
INTEGRATION_NETLIFY_TOKEN_URL,
|
||||
ACTION_PUSH_TO_HEROKU
|
||||
} from '../variables';
|
||||
import {
|
||||
SITE_URL,
|
||||
CLIENT_SECRET_HEROKU,
|
||||
CLIENT_ID_VERCEL,
|
||||
CLIENT_ID_NETLIFY,
|
||||
CLIENT_SECRET_VERCEL,
|
||||
CLIENT_SECRET_NETLIFY
|
||||
} from '../config';
|
||||
|
||||
interface ExchangeCodeHerokuResponse {
|
||||
token_type: string;
|
||||
access_token: string;
|
||||
expires_in: number;
|
||||
refresh_token: string;
|
||||
user_id: string;
|
||||
session_nonce?: string;
|
||||
}
|
||||
|
||||
interface ExchangeCodeVercelResponse {
|
||||
token_type: string;
|
||||
access_token: string;
|
||||
installation_id: string;
|
||||
user_id: string;
|
||||
team_id?: string;
|
||||
}
|
||||
|
||||
interface ExchangeCodeNetlifyResponse {
|
||||
access_token: string;
|
||||
token_type: string;
|
||||
refresh_token: string;
|
||||
scope: string;
|
||||
created_at: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return [accessToken], [accessExpiresAt], and [refreshToken] for OAuth2
|
||||
* code-token exchange for integration named [integration]
|
||||
* @param {Object} obj1
|
||||
* @param {String} obj1.integration - name of integration
|
||||
* @param {String} obj1.code - code for code-token exchange
|
||||
* @returns {Object} obj
|
||||
* @returns {String} obj.accessToken - access token for integration
|
||||
* @returns {String} obj.refreshToken - refresh token for integration
|
||||
* @returns {Date} obj.accessExpiresAt - date of expiration for access token
|
||||
* @returns {String} obj.action - integration action for bot sequence
|
||||
*/
|
||||
const exchangeCode = async ({
|
||||
integration,
|
||||
code
|
||||
}: {
|
||||
integration: string;
|
||||
code: string;
|
||||
}) => {
|
||||
let obj = {} as any;
|
||||
|
||||
try {
|
||||
switch (integration) {
|
||||
case INTEGRATION_HEROKU:
|
||||
obj = await exchangeCodeHeroku({
|
||||
code
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_VERCEL:
|
||||
obj = await exchangeCodeVercel({
|
||||
code
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_NETLIFY:
|
||||
obj = await exchangeCodeNetlify({
|
||||
code
|
||||
});
|
||||
break;
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed OAuth2 code-token exchange');
|
||||
}
|
||||
|
||||
return obj;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return [accessToken], [accessExpiresAt], and [refreshToken] for Heroku
|
||||
* OAuth2 code-token exchange
|
||||
* @param {Object} obj1
|
||||
* @param {Object} obj1.code - code for code-token exchange
|
||||
* @returns {Object} obj2
|
||||
* @returns {String} obj2.accessToken - access token for Heroku API
|
||||
* @returns {String} obj2.refreshToken - refresh token for Heroku API
|
||||
* @returns {Date} obj2.accessExpiresAt - date of expiration for access token
|
||||
*/
|
||||
const exchangeCodeHeroku = async ({
|
||||
code
|
||||
}: {
|
||||
code: string;
|
||||
}) => {
|
||||
let res: ExchangeCodeHerokuResponse;
|
||||
let accessExpiresAt = new Date();
|
||||
try {
|
||||
res = (await axios.post(
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: 'authorization_code',
|
||||
code: code,
|
||||
client_secret: CLIENT_SECRET_HEROKU
|
||||
} as any)
|
||||
)).data;
|
||||
|
||||
accessExpiresAt.setSeconds(
|
||||
accessExpiresAt.getSeconds() + res.expires_in
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed OAuth2 code-token exchange with Heroku');
|
||||
}
|
||||
|
||||
return ({
|
||||
accessToken: res.access_token,
|
||||
refreshToken: res.refresh_token,
|
||||
accessExpiresAt
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return [accessToken], [accessExpiresAt], and [refreshToken] for Vercel
|
||||
* code-token exchange
|
||||
* @param {Object} obj1
|
||||
* @param {Object} obj1.code - code for code-token exchange
|
||||
* @returns {Object} obj2
|
||||
* @returns {String} obj2.accessToken - access token for Vercel API
|
||||
* @returns {String} obj2.refreshToken - refresh token for Vercel API
|
||||
* @returns {Date} obj2.accessExpiresAt - date of expiration for access token
|
||||
*/
|
||||
const exchangeCodeVercel = async ({
|
||||
code
|
||||
}: {
|
||||
code: string;
|
||||
}) => {
|
||||
let res: ExchangeCodeVercelResponse;
|
||||
try {
|
||||
res = (await axios.post(
|
||||
INTEGRATION_VERCEL_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
code: code,
|
||||
client_id: CLIENT_ID_VERCEL,
|
||||
client_secret: CLIENT_SECRET_VERCEL,
|
||||
redirect_uri: `${SITE_URL}/vercel`
|
||||
} as any)
|
||||
)).data;
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed OAuth2 code-token exchange with Vercel');
|
||||
}
|
||||
|
||||
return ({
|
||||
accessToken: res.access_token,
|
||||
refreshToken: null,
|
||||
accessExpiresAt: null,
|
||||
teamId: res.team_id
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return [accessToken], [accessExpiresAt], and [refreshToken] for Netlify
|
||||
* code-token exchange
|
||||
* @param {Object} obj1
|
||||
* @param {Object} obj1.code - code for code-token exchange
|
||||
* @returns {Object} obj2
|
||||
* @returns {String} obj2.accessToken - access token for Netlify API
|
||||
* @returns {String} obj2.refreshToken - refresh token for Netlify API
|
||||
* @returns {Date} obj2.accessExpiresAt - date of expiration for access token
|
||||
*/
|
||||
const exchangeCodeNetlify = async ({
|
||||
code
|
||||
}: {
|
||||
code: string;
|
||||
}) => {
|
||||
let res: ExchangeCodeNetlifyResponse;
|
||||
let accountId;
|
||||
try {
|
||||
res = (await axios.post(
|
||||
INTEGRATION_NETLIFY_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: 'authorization_code',
|
||||
code: code,
|
||||
client_id: CLIENT_ID_NETLIFY,
|
||||
client_secret: CLIENT_SECRET_NETLIFY,
|
||||
redirect_uri: `${SITE_URL}/netlify`
|
||||
} as any)
|
||||
)).data;
|
||||
|
||||
const res2 = await axios.get(
|
||||
'https://api.netlify.com/api/v1/sites',
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${res.access_token}`
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const res3 = (await axios.get(
|
||||
'https://api.netlify.com/api/v1/accounts',
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${res.access_token}`
|
||||
}
|
||||
}
|
||||
)).data;
|
||||
|
||||
accountId = res3[0].id;
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed OAuth2 code-token exchange with Netlify');
|
||||
}
|
||||
|
||||
return ({
|
||||
accessToken: res.access_token,
|
||||
refreshToken: res.refresh_token,
|
||||
accountId
|
||||
});
|
||||
}
|
||||
|
||||
export {
|
||||
exchangeCode
|
||||
}
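
A sketch of the code-token exchange from an OAuth callback; the callback wrapper and variable names are assumptions. For Heroku the resolved object contains accessToken, refreshToken, and accessExpiresAt, per the helpers above.

import { exchangeCode } from '../integrations';
import { INTEGRATION_HEROKU } from '../variables';

const handleHerokuCallback = async (code: string) => {
  // exchanges the authorization code for tokens via INTEGRATION_HEROKU_TOKEN_URL
  const { accessToken, refreshToken, accessExpiresAt } = await exchangeCode({
    integration: INTEGRATION_HEROKU,
    code
  });
  return { accessToken, refreshToken, accessExpiresAt };
};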
|
13
backend/src/integrations/index.ts
Normal file
@ -0,0 +1,13 @@
|
||||
import { exchangeCode } from './exchange';
|
||||
import { exchangeRefresh } from './refresh';
|
||||
import { getApps } from './apps';
|
||||
import { syncSecrets } from './sync';
|
||||
import { revokeAccess } from './revoke';
|
||||
|
||||
export {
|
||||
exchangeCode,
|
||||
exchangeRefresh,
|
||||
getApps,
|
||||
syncSecrets,
|
||||
revokeAccess
|
||||
}
|
78
backend/src/integrations/refresh.ts
Normal file
@ -0,0 +1,78 @@
|
||||
import axios from 'axios';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { INTEGRATION_HEROKU } from '../variables';
|
||||
import {
|
||||
CLIENT_SECRET_HEROKU
|
||||
} from '../config';
|
||||
import {
|
||||
INTEGRATION_HEROKU_TOKEN_URL
|
||||
} from '../variables';
|
||||
|
||||
/**
|
||||
* Return new access token by exchanging refresh token [refreshToken] for integration
|
||||
* named [integration]
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.integration - name of integration
|
||||
* @param {String} obj.refreshToken - refresh token to use to get new access token for Heroku
|
||||
*/
|
||||
const exchangeRefresh = async ({
|
||||
integration,
|
||||
refreshToken
|
||||
}: {
|
||||
integration: string;
|
||||
refreshToken: string;
|
||||
}) => {
|
||||
let accessToken;
|
||||
try {
|
||||
switch (integration) {
|
||||
case INTEGRATION_HEROKU:
|
||||
accessToken = await exchangeRefreshHeroku({
|
||||
refreshToken
|
||||
});
|
||||
break;
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get new OAuth2 access token');
|
||||
}
|
||||
|
||||
return accessToken;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return new access token by exchanging refresh token [refreshToken] for the
|
||||
* Heroku integration
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.refreshToken - refresh token to use to get new access token for Heroku
|
||||
* @returns
|
||||
*/
|
||||
const exchangeRefreshHeroku = async ({
|
||||
refreshToken
|
||||
}: {
|
||||
refreshToken: string;
|
||||
}) => {
|
||||
let accessToken;
|
||||
try {
|
||||
const res = await axios.post(
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: 'refresh_token',
|
||||
refresh_token: refreshToken,
|
||||
client_secret: CLIENT_SECRET_HEROKU
|
||||
} as any)
|
||||
);
|
||||
|
||||
accessToken = res.data.access_token;
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to get new OAuth2 access token for Heroku');
|
||||
}
|
||||
|
||||
return accessToken;
|
||||
}
|
||||
|
||||
export {
|
||||
exchangeRefresh
|
||||
}
|
50
backend/src/integrations/revoke.ts
Normal file
@ -0,0 +1,50 @@
|
||||
import axios from 'axios';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import {
|
||||
IIntegrationAuth,
|
||||
IntegrationAuth,
|
||||
Integration
|
||||
} from '../models';
|
||||
import {
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY
|
||||
} from '../variables';
|
||||
|
||||
const revokeAccess = async ({
|
||||
integrationAuth,
|
||||
accessToken
|
||||
}: {
|
||||
integrationAuth: IIntegrationAuth,
|
||||
accessToken: String
|
||||
}) => {
|
||||
try {
|
||||
// add any integration-specific revocation logic
|
||||
switch (integrationAuth.integration) {
|
||||
case INTEGRATION_HEROKU:
|
||||
break;
|
||||
case INTEGRATION_VERCEL:
|
||||
break;
|
||||
case INTEGRATION_NETLIFY:
|
||||
break;
|
||||
}
|
||||
|
||||
const deletedIntegrationAuth = await IntegrationAuth.findOneAndDelete({
|
||||
_id: integrationAuth._id
|
||||
});
|
||||
|
||||
if (deletedIntegrationAuth) {
|
||||
await Integration.deleteMany({
|
||||
integrationAuth: deletedIntegrationAuth._id
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to delete integration authorization');
|
||||
}
|
||||
}
|
||||
|
||||
export {
|
||||
revokeAccess
|
||||
}
|
598
backend/src/integrations/sync.ts
Normal file
@ -0,0 +1,598 @@
|
||||
import axios from 'axios';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import {
|
||||
IIntegration, IIntegrationAuth
|
||||
} from '../models';
|
||||
import {
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_HEROKU_API_URL,
|
||||
INTEGRATION_VERCEL_API_URL,
|
||||
INTEGRATION_NETLIFY_API_URL
|
||||
} from '../variables';
|
||||
|
||||
// TODO: need a helper function in the future to handle integration
|
||||
// envar priorities (i.e. prioritize secrets within integration or those on Infisical)
|
||||
|
||||
/**
|
||||
* Sync/push [secrets] to [app] in integration named [integration]
|
||||
* @param {Object} obj
|
||||
* @param {IIntegration} obj.integration - integration details
|
||||
* @param {IIntegrationAuth} obj.integrationAuth - integration auth details
|
||||
* @param {Object} obj.app - app in integration
|
||||
* @param {Object} obj.target - (optional) target (environment) in integration
|
||||
* @param {Object} obj.secrets - secrets to push to integration (object where keys are secret keys and values are secret values)
|
||||
* @param {String} obj.accessToken - access token for integration
|
||||
*/
|
||||
const syncSecrets = async ({
|
||||
integration,
|
||||
integrationAuth,
|
||||
secrets,
|
||||
accessToken,
|
||||
}: {
|
||||
integration: IIntegration;
|
||||
integrationAuth: IIntegrationAuth;
|
||||
secrets: any;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
try {
|
||||
switch (integration.integration) {
|
||||
case INTEGRATION_HEROKU:
|
||||
await syncSecretsHeroku({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_VERCEL:
|
||||
await syncSecretsVercel({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_NETLIFY:
|
||||
await syncSecretsNetlify({
|
||||
integration,
|
||||
integrationAuth,
|
||||
secrets,
|
||||
accessToken
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
// TODO: set integration to inactive if it was not synced correctly (send alert?)
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to sync secrets to integration');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync/push [secrets] to Heroku [app]
|
||||
* @param {Object} obj
|
||||
* @param {IIntegration} obj.integration - integration details
|
||||
* @param {Object} obj.secrets - secrets to push to integration (object where keys are secret keys and values are secret values)
|
||||
*/
|
||||
const syncSecretsHeroku = async ({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken
|
||||
}: {
|
||||
integration: IIntegration,
|
||||
secrets: any;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
try {
|
||||
const herokuSecrets = (await axios.get(
|
||||
`${INTEGRATION_HEROKU_API_URL}/apps/${integration.app}/config-vars`,
|
||||
{
|
||||
headers: {
|
||||
Accept: 'application/vnd.heroku+json; version=3',
|
||||
Authorization: `Bearer ${accessToken}`
|
||||
}
|
||||
}
|
||||
)).data;
|
||||
|
||||
Object.keys(herokuSecrets).forEach(key => {
|
||||
if (!(key in secrets)) {
|
||||
secrets[key] = null;
|
||||
}
|
||||
});
|
||||
|
||||
await axios.patch(
|
||||
`${INTEGRATION_HEROKU_API_URL}/apps/${integration.app}/config-vars`,
|
||||
secrets,
|
||||
{
|
||||
headers: {
|
||||
Accept: 'application/vnd.heroku+json; version=3',
|
||||
Authorization: `Bearer ${accessToken}`
|
||||
}
|
||||
}
|
||||
);
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to sync secrets to Heroku');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync/push [secrets] to Vercel [app]
|
||||
* @param {Object} obj
|
||||
* @param {IIntegration} obj.integration - integration details
|
||||
* @param {Object} obj.secrets - secrets to push to integration (object where keys are secret keys and values are secret values)
|
||||
*/
|
||||
const syncSecretsVercel = async ({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken
|
||||
}: {
|
||||
integration: IIntegration,
|
||||
secrets: any;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
|
||||
interface VercelSecret {
|
||||
id?: string;
|
||||
type: string;
|
||||
key: string;
|
||||
value: string;
|
||||
target: string[];
|
||||
}
|
||||
|
||||
try {
|
||||
// Get all secrets back from Vercel in
|
||||
// decrypted format
|
||||
const params = new URLSearchParams({
|
||||
decrypt: "true"
|
||||
});
|
||||
|
||||
const res = (await Promise.all((await axios.get(
|
||||
`${INTEGRATION_VERCEL_API_URL}/v9/projects/${integration.app}/env`,
|
||||
{
|
||||
params,
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`
|
||||
}
|
||||
}
|
||||
))
|
||||
.data
|
||||
.envs
|
||||
.filter((secret: VercelSecret) => secret.target.includes(integration.target))
|
||||
.map(async (secret: VercelSecret) => (await axios.get(
|
||||
`${INTEGRATION_VERCEL_API_URL}/v9/projects/${integration.app}/env/${secret.id}`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`
|
||||
}
|
||||
|
||||
}
|
||||
)).data)
|
||||
)).reduce((obj: any, secret: any) => ({
|
||||
...obj,
|
||||
[secret.key]: secret
|
||||
}), {});
|
||||
|
||||
const updateSecrets: VercelSecret[] = [];
|
||||
const deleteSecrets: VercelSecret[] = [];
|
||||
const newSecrets: VercelSecret[] = [];
|
||||
|
||||
// Identify secrets to create
|
||||
Object.keys(secrets).map((key) => {
|
||||
if (!(key in res)) {
|
||||
// case: secret has been created
|
||||
newSecrets.push({
|
||||
key: key,
|
||||
value: secrets[key],
|
||||
type: 'encrypted',
|
||||
target: [integration.target]
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Identify secrets to update and delete
|
||||
Object.keys(res).map((key) => {
|
||||
if (key in secrets) {
|
||||
if (res[key].value !== secrets[key]) {
|
||||
// case: secret value has changed
|
||||
updateSecrets.push({
|
||||
id: res[key].id,
|
||||
key: key,
|
||||
value: secrets[key],
|
||||
type: 'encrypted',
|
||||
target: [integration.target]
|
||||
});
|
||||
}
|
||||
} else {
|
||||
// case: secret has been deleted
|
||||
deleteSecrets.push({
|
||||
id: res[key].id,
|
||||
key: key,
|
||||
value: res[key].value,
|
||||
type: 'encrypted',
|
||||
target: [integration.target],
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Sync/push new secrets
|
||||
if (newSecrets.length > 0) {
|
||||
await axios.post(
|
||||
`${INTEGRATION_VERCEL_API_URL}/v10/projects/${integration.app}/env`,
|
||||
newSecrets,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
// Sync/push updated secrets
|
||||
if (updateSecrets.length > 0) {
|
||||
updateSecrets.forEach(async (secret: VercelSecret) => {
|
||||
const {
|
||||
id,
|
||||
...updatedSecret
|
||||
} = secret;
|
||||
await axios.patch(
|
||||
`${INTEGRATION_VERCEL_API_URL}/v9/projects/${integration.app}/env/${secret.id}`,
|
||||
updatedSecret,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
// Delete secrets
|
||||
if (deleteSecrets.length > 0) {
|
||||
deleteSecrets.forEach(async (secret: VercelSecret) => {
|
||||
await axios.delete(
|
||||
`${INTEGRATION_VERCEL_API_URL}/v9/projects/${integration.app}/env/${secret.id}`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to sync secrets to Vercel');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync/push [secrets] to Netlify site [app]
|
||||
* @param {Object} obj
|
||||
* @param {IIntegration} obj.integration - integration details
|
||||
* @param {IIntegrationAuth} obj.integrationAuth - integration auth details
|
||||
* @param {Object} obj.secrets - secrets to push to integration (object where keys are secret keys and values are secret values)
|
||||
*/
|
||||
const syncSecretsNetlify = async ({
|
||||
integration,
|
||||
integrationAuth,
|
||||
secrets,
|
||||
accessToken
|
||||
}: {
|
||||
integration: IIntegration;
|
||||
integrationAuth: IIntegrationAuth;
|
||||
secrets: any;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
// TODO: Netlify revision in progress
|
||||
// try {
|
||||
// const getParams = new URLSearchParams({
|
||||
// context_name: integration.context,
|
||||
// site_id: integration.siteId
|
||||
// });
|
||||
|
||||
// const res = (await axios.get(
|
||||
// `${INTEGRATION_NETLIFY_API_URL}/api/v1/accounts/${integrationAuth.accountId}/env`,
|
||||
// {
|
||||
// params: getParams,
|
||||
// headers: {
|
||||
// Authorization: `Bearer ${accessToken}`
|
||||
// }
|
||||
// }
|
||||
// ))
|
||||
// .data
|
||||
// .reduce((obj: any, secret: any) => ({
|
||||
// ...obj,
|
||||
// [secret.key]: secret
|
||||
// }), {});
|
||||
|
||||
// res.forEach((r: any) => console.log(r));
|
||||
// console.log('getParams', getParams);
|
||||
|
||||
// interface UpdateNetlifySecret {
|
||||
// key: string;
|
||||
// context: string;
|
||||
// value: string;
|
||||
// }
|
||||
|
||||
// interface DeleteNetlifySecret {
|
||||
// key: string;
|
||||
// }
|
||||
|
||||
// interface NewNetlifySecretValue {
|
||||
// value: string;
|
||||
// context: string;
|
||||
// }
|
||||
|
||||
// interface NewNetlifySecret {
|
||||
// key: string;
|
||||
// values: NewNetlifySecretValue[];
|
||||
// }
|
||||
|
||||
// let updateSecrets: UpdateNetlifySecret[] = [];
|
||||
// let deleteSecrets: DeleteNetlifySecret[] = [];
|
||||
// let newSecrets: NewNetlifySecret[] = [];
|
||||
|
||||
// interface NetlifyValue {
|
||||
// id: string;
|
||||
// value: string;
|
||||
// context: string;
|
||||
// role: string;
|
||||
// }
|
||||
|
||||
// // Identify secrets to create - GOOD
|
||||
// Object.keys(secrets).map((key) => {
|
||||
// if (!(key in res)) {
|
||||
// // case: secret does not exist in Netlify -> create secret
|
||||
// newSecrets.push({
|
||||
// key: key,
|
||||
// values: [{
|
||||
// value: secrets[key],
|
||||
// context: integration.context
|
||||
// }]
|
||||
// });
|
||||
// } else {
|
||||
// // case: secret exists in Netlify
|
||||
|
||||
// const netlifyContextsSet = new Set (res[key].values.map((netlifyValue: NetlifyValue) => netlifyValue.context));
|
||||
|
||||
// // TODO: check context/env.
|
||||
// res[key].values.forEach((netlifyValue: NetlifyValue) => {
|
||||
// if (netlifyValue.context === integration.context) {
|
||||
// // case: Netlify value context matches integration context
|
||||
// // TODO: check if value has changed
|
||||
// }
|
||||
// });
|
||||
// }
|
||||
// });
|
||||
|
||||
// // Identify secrets to update and delete
|
||||
// Object.keys(res).map((key) => {
|
||||
// if (key in secrets) {
|
||||
// // if (res[key] !== secrets[key]) {
|
||||
// // // case: secret value has changed
|
||||
// // updateSecrets.push({
|
||||
// // key: key,
|
||||
// // context: integration.context,
|
||||
// // value: secrets[key]
|
||||
// // });
|
||||
// // }
|
||||
|
||||
// // TODO: modify check and record of updated secrets
|
||||
|
||||
// // case 1: new context added.
|
||||
// } else {
|
||||
// // case: secret has been deleted
|
||||
// deleteSecrets.push({
|
||||
// key
|
||||
// });
|
||||
// }
|
||||
// });
|
||||
|
||||
// const syncParams = new URLSearchParams({
|
||||
// site_id: integration.siteId
|
||||
// });
|
||||
|
||||
// console.log('Netlify newSecrets', newSecrets);
|
||||
// newSecrets.forEach(secret => console.log(secret.values));
|
||||
// // Sync/push new secrets
|
||||
// if (newSecrets.length > 0) {
|
||||
// await axios.post(
|
||||
// `${INTEGRATION_NETLIFY_API_URL}/api/v1/accounts/${integrationAuth.accountId}/env`,
|
||||
// newSecrets,
|
||||
// {
|
||||
// params: syncParams,
|
||||
// headers: {
|
||||
// Authorization: `Bearer ${accessToken}`
|
||||
// }
|
||||
// }
|
||||
// );
|
||||
// }
|
||||
|
||||
// console.log('Netlify updateSecrets', updateSecrets);
|
||||
// // Sync/push updated secrets
|
||||
// if (updateSecrets.length > 0) {
|
||||
|
||||
// updateSecrets.forEach(async (secret: UpdateNetlifySecret) => {
|
||||
// await axios.patch(
|
||||
// `${INTEGRATION_NETLIFY_API_URL}/api/v1/accounts/${integrationAuth.accountId}/env/${secret.key}`,
|
||||
// {
|
||||
// context: secret.context,
|
||||
// value: secret.value
|
||||
// },
|
||||
// {
|
||||
// params: syncParams,
|
||||
// headers: {
|
||||
// Authorization: `Bearer ${accessToken}`
|
||||
// }
|
||||
// }
|
||||
// );
|
||||
// });
|
||||
// }
|
||||
|
||||
// console.log('Netlify deleteSecrets', deleteSecrets);
|
||||
// // Delete secrets
|
||||
// if (deleteSecrets.length > 0) {
|
||||
// deleteSecrets.forEach(async (secret: DeleteNetlifySecret) => {
|
||||
// await axios.delete(
|
||||
// `${INTEGRATION_NETLIFY_API_URL}/api/v1/accounts/${integrationAuth.accountId}/env/${secret.key}`,
|
||||
// {
|
||||
// params: syncParams,
|
||||
// headers: {
|
||||
// Authorization: `Bearer ${accessToken}`
|
||||
// }
|
||||
// }
|
||||
// );
|
||||
// });
|
||||
// }
|
||||
// } catch (err) {
|
||||
// Sentry.setUser(null);
|
||||
// Sentry.captureException(err);
|
||||
// throw new Error('Failed to sync secrets to Heroku');
|
||||
// }
|
||||
|
||||
try {
|
||||
const getParams = new URLSearchParams({
|
||||
context_name: integration.context,
|
||||
site_id: integration.siteId
|
||||
});
|
||||
|
||||
const res = (await axios.get(
|
||||
`${INTEGRATION_NETLIFY_API_URL}/api/v1/accounts/${integrationAuth.accountId}/env`,
|
||||
{
|
||||
params: getParams,
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`
|
||||
}
|
||||
}
|
||||
))
|
||||
.data
|
||||
.reduce((obj: any, secret: any) => ({
|
||||
...obj,
|
||||
[secret.key]: secret.values[0].value
|
||||
}), {});
|
||||
|
||||
interface UpdateNetlifySecret {
|
||||
key: string;
|
||||
context: string;
|
||||
value: string;
|
||||
}
|
||||
|
||||
interface DeleteNetlifySecret {
|
||||
key: string;
|
||||
}
|
||||
|
||||
interface NewNetlifySecretValue {
|
||||
value: string;
|
||||
context: string;
|
||||
}
|
||||
|
||||
interface NewNetlifySecret {
|
||||
key: string;
|
||||
values: NewNetlifySecretValue[];
|
||||
}
|
||||
|
||||
const updateSecrets: UpdateNetlifySecret[] = [];
|
||||
const deleteSecrets: DeleteNetlifySecret[] = [];
|
||||
const newSecrets: NewNetlifySecret[] = [];
|
||||
|
||||
// Identify secrets to create
|
||||
Object.keys(secrets).map((key) => {
|
||||
if (!(key in res)) {
|
||||
// case: secret has been created
|
||||
newSecrets.push({
|
||||
key: key,
|
||||
values: [{
|
||||
value: secrets[key], // include id?
|
||||
context: integration.context
|
||||
}]
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Identify secrets to update and delete
|
||||
Object.keys(res).map((key) => {
|
||||
if (key in secrets) {
|
||||
if (res[key] !== secrets[key]) {
|
||||
// case: secret value has changed
|
||||
updateSecrets.push({
|
||||
key: key,
|
||||
context: integration.context,
|
||||
value: secrets[key]
|
||||
});
|
||||
}
|
||||
} else {
|
||||
// case: secret has been deleted
|
||||
deleteSecrets.push({
|
||||
key
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
const syncParams = new URLSearchParams({
|
||||
site_id: integration.siteId
|
||||
});
|
||||
|
||||
// Sync/push new secrets
|
||||
if (newSecrets.length > 0) {
|
||||
await axios.post(
|
||||
`${INTEGRATION_NETLIFY_API_URL}/api/v1/accounts/${integrationAuth.accountId}/env`,
|
||||
newSecrets,
|
||||
{
|
||||
params: syncParams,
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
// Sync/push updated secrets
|
||||
if (updateSecrets.length > 0) {
|
||||
|
||||
updateSecrets.forEach(async (secret: UpdateNetlifySecret) => {
|
||||
await axios.patch(
|
||||
`${INTEGRATION_NETLIFY_API_URL}/api/v1/accounts/${integrationAuth.accountId}/env/${secret.key}`,
|
||||
{
|
||||
context: secret.context,
|
||||
value: secret.value
|
||||
},
|
||||
{
|
||||
params: syncParams,
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
// Delete secrets
|
||||
if (deleteSecrets.length > 0) {
|
||||
deleteSecrets.forEach(async (secret: DeleteNetlifySecret) => {
|
||||
await axios.delete(
|
||||
`${INTEGRATION_NETLIFY_API_URL}/api/v1/accounts/${integrationAuth.accountId}/env/${secret.key}`,
|
||||
{
|
||||
params: syncParams,
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
throw new Error('Failed to sync secrets to Netlify');
|
||||
}
|
||||
}
|
||||
|
||||
export {
|
||||
syncSecrets
|
||||
}
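
A minimal sketch of invoking syncSecrets once secrets have been decrypted; the secrets object shape follows the docstring above (keys are secret names, values are secret values), and the key/value pairs shown are placeholders.

import { syncSecrets } from '../integrations';
import { IIntegration, IIntegrationAuth } from '../models';

const pushSecrets = async (
  integration: IIntegration,
  integrationAuth: IIntegrationAuth,
  accessToken: string
) => {
  const secrets = { NODE_ENV: 'production', API_URL: 'https://example.com' }; // placeholder values
  await syncSecrets({ integration, integrationAuth, secrets, accessToken });
};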
|
@ -1,50 +0,0 @@
|
||||
{
|
||||
"heroku": {
|
||||
"name": "Heroku",
|
||||
"type": "oauth2",
|
||||
"clientId": "bc132901-935a-4590-b010-f1857efc380d",
|
||||
"docsLink": ""
|
||||
},
|
||||
"netlify": {
|
||||
"name": "Netlify",
|
||||
"type": "oauth2",
|
||||
"clientId": "",
|
||||
"docsLink": ""
|
||||
},
|
||||
"digitalocean": {
|
||||
"name": "Digital Ocean",
|
||||
"type": "oauth2",
|
||||
"clientId": "",
|
||||
"docsLink": ""
|
||||
},
|
||||
"gcp": {
|
||||
"name": "Google Cloud Platform",
|
||||
"type": "oauth2",
|
||||
"clientId": "",
|
||||
"docsLink": ""
|
||||
},
|
||||
"aws": {
|
||||
"name": "Amazon Web Services",
|
||||
"type": "oauth2",
|
||||
"clientId": "",
|
||||
"docsLink": ""
|
||||
},
|
||||
"azure": {
|
||||
"name": "Microsoft Azure",
|
||||
"type": "oauth2",
|
||||
"clientId": "",
|
||||
"docsLink": ""
|
||||
},
|
||||
"travisci": {
|
||||
"name": "Travis CI",
|
||||
"type": "oauth2",
|
||||
"clientId": "",
|
||||
"docsLink": ""
|
||||
},
|
||||
"circleci": {
|
||||
"name": "Circle CI",
|
||||
"type": "oauth2",
|
||||
"clientId": "",
|
||||
"docsLink": ""
|
||||
}
|
||||
}
|
@ -1,4 +1,5 @@
|
||||
import requireAuth from './requireAuth';
|
||||
import requireBotAuth from './requireBotAuth';
|
||||
import requireSignupAuth from './requireSignupAuth';
|
||||
import requireWorkspaceAuth from './requireWorkspaceAuth';
|
||||
import requireOrganizationAuth from './requireOrganizationAuth';
|
||||
@ -9,6 +10,7 @@ import validateRequest from './validateRequest';
|
||||
|
||||
export {
|
||||
requireAuth,
|
||||
requireBotAuth,
|
||||
requireSignupAuth,
|
||||
requireWorkspaceAuth,
|
||||
requireOrganizationAuth,
|
||||
|
45
backend/src/middleware/requireBotAuth.ts
Normal file
@ -0,0 +1,45 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
import { Bot } from '../models';
|
||||
import { validateMembership } from '../helpers/membership';
|
||||
|
||||
type req = 'params' | 'body' | 'query';
|
||||
|
||||
const requireBotAuth = ({
|
||||
acceptedRoles,
|
||||
acceptedStatuses,
|
||||
location = 'params'
|
||||
}: {
|
||||
acceptedRoles: string[];
|
||||
acceptedStatuses: string[];
|
||||
location?: req;
|
||||
}) => {
|
||||
return async (req: Request, res: Response, next: NextFunction) => {
|
||||
try {
|
||||
const bot = await Bot.findOne({ _id: req[location].botId });
|
||||
|
||||
if (!bot) {
|
||||
throw new Error('Failed to find bot');
|
||||
}
|
||||
|
||||
await validateMembership({
|
||||
userId: req.user._id.toString(),
|
||||
workspaceId: bot.workspace.toString(),
|
||||
acceptedRoles,
|
||||
acceptedStatuses
|
||||
});
|
||||
|
||||
req.bot = bot;
|
||||
|
||||
next();
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureException(err);
|
||||
return res.status(401).send({
|
||||
error: 'Failed bot authorization'
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default requireBotAuth;
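
A sketch of mounting requireBotAuth on a route; the route path and the accepted role/status values are assumptions for illustration.

import express from 'express';
import { requireAuth, requireBotAuth } from '../middleware';

const router = express.Router();

router.get(
  '/:botId', // assumed route shape; requireBotAuth reads req.params.botId by default
  requireAuth,
  requireBotAuth({
    acceptedRoles: ['admin', 'member'], // assumed values
    acceptedStatuses: ['granted'] // assumed values
  }),
  (req, res) => res.json({ bot: req.bot })
);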
|
@ -1,7 +1,8 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
import { Integration, IntegrationAuth, Membership } from '../models';
|
||||
import { getOAuthAccessToken } from '../helpers/integrationAuth';
|
||||
import { Bot, Integration, IntegrationAuth, Membership } from '../models';
|
||||
import { IntegrationService } from '../services';
|
||||
import { validateMembership } from '../helpers/membership';
|
||||
|
||||
/**
|
||||
* Validate if user on request is a member of workspace with proper roles associated
|
||||
@ -31,24 +32,14 @@ const requireIntegrationAuth = ({
|
||||
if (!integration) {
|
||||
throw new Error('Failed to find integration');
|
||||
}
|
||||
|
||||
const membership = await Membership.findOne({
|
||||
user: req.user._id,
|
||||
workspace: integration.workspace
|
||||
|
||||
await validateMembership({
|
||||
userId: req.user._id.toString(),
|
||||
workspaceId: integration.workspace.toString(),
|
||||
acceptedRoles,
|
||||
acceptedStatuses
|
||||
});
|
||||
|
||||
if (!membership) {
|
||||
throw new Error('Failed to find integration workspace membership');
|
||||
}
|
||||
|
||||
if (!acceptedRoles.includes(membership.role)) {
|
||||
throw new Error('Failed to validate workspace membership role');
|
||||
}
|
||||
|
||||
if (!acceptedStatuses.includes(membership.status)) {
|
||||
throw new Error('Failed to validate workspace membership status');
|
||||
}
|
||||
|
||||
const integrationAuth = await IntegrationAuth.findOne({
|
||||
_id: integration.integrationAuth
|
||||
}).select(
|
||||
@ -60,7 +51,9 @@ const requireIntegrationAuth = ({
|
||||
}
|
||||
|
||||
req.integration = integration;
|
||||
req.accessToken = await getOAuthAccessToken({ integrationAuth });
|
||||
req.accessToken = await IntegrationService.getIntegrationAuthAccess({
|
||||
integrationAuthId: integrationAuth._id.toString()
|
||||
});
|
||||
|
||||
return next();
|
||||
} catch (err) {
|
||||
|
@ -1,8 +1,8 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
import { IntegrationAuth, Membership } from '../models';
|
||||
import { decryptSymmetric } from '../utils/crypto';
|
||||
import { getOAuthAccessToken } from '../helpers/integrationAuth';
|
||||
import { IntegrationAuth } from '../models';
|
||||
import { IntegrationService } from '../services';
|
||||
import { validateMembership } from '../helpers/membership';
|
||||
|
||||
/**
|
||||
* Validate if user on request is a member of workspace with proper roles associated
|
||||
@ -10,18 +10,18 @@ import { getOAuthAccessToken } from '../helpers/integrationAuth';
|
||||
* @param {Object} obj
|
||||
* @param {String[]} obj.acceptedRoles - accepted workspace roles
|
||||
* @param {String[]} obj.acceptedStatuses - accepted workspace statuses
|
||||
* @param {Boolean} obj.attachRefresh - whether or not to decrypt and attach integration authorization refresh token onto request
|
||||
* @param {Boolean} obj.attachAccessToken - whether or not to decrypt and attach integration authorization access token onto request
|
||||
*/
|
||||
const requireIntegrationAuthorizationAuth = ({
|
||||
acceptedRoles,
|
||||
acceptedStatuses
|
||||
acceptedStatuses,
|
||||
attachAccessToken = true
|
||||
}: {
|
||||
acceptedRoles: string[];
|
||||
acceptedStatuses: string[];
|
||||
attachAccessToken?: boolean;
|
||||
}) => {
|
||||
return async (req: Request, res: Response, next: NextFunction) => {
|
||||
// (authorization) integration authorization middleware
|
||||
|
||||
try {
|
||||
const { integrationAuthId } = req.params;
|
||||
|
||||
@ -34,30 +34,21 @@ const requireIntegrationAuthorizationAuth = ({
|
||||
if (!integrationAuth) {
|
||||
throw new Error('Failed to find integration authorization');
|
||||
}
|
||||
|
||||
const membership = await Membership.findOne({
|
||||
user: req.user._id,
|
||||
workspace: integrationAuth.workspace
|
||||
|
||||
await validateMembership({
|
||||
userId: req.user._id.toString(),
|
||||
workspaceId: integrationAuth.workspace.toString(),
|
||||
acceptedRoles,
|
||||
acceptedStatuses
|
||||
});
|
||||
|
||||
if (!membership) {
|
||||
throw new Error(
|
||||
'Failed to find integration authorization workspace membership'
|
||||
);
|
||||
}
|
||||
|
||||
if (!acceptedRoles.includes(membership.role)) {
|
||||
throw new Error('Failed to validate workspace membership role');
|
||||
}
|
||||
|
||||
if (!acceptedStatuses.includes(membership.status)) {
|
||||
throw new Error('Failed to validate workspace membership status');
|
||||
}
|
||||
|
||||
req.integrationAuth = integrationAuth;
|
||||
|
||||
// TODO: make compatible with other integration types since they won't necessarily have access tokens
|
||||
req.accessToken = await getOAuthAccessToken({ integrationAuth });
|
||||
if (attachAccessToken) {
|
||||
req.accessToken = await IntegrationService.getIntegrationAuthAccess({
|
||||
integrationAuthId: integrationAuth._id.toString()
|
||||
});
|
||||
}
|
||||
|
||||
return next();
|
||||
} catch (err) {
|
||||
Sentry.setUser(null);
|
||||
|
@ -1,6 +1,6 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
import { Membership, IWorkspace } from '../models';
|
||||
import { validateMembership } from '../helpers/membership';
|
||||
|
||||
type req = 'params' | 'body' | 'query';
|
||||
|
||||
@ -25,24 +25,12 @@ const requireWorkspaceAuth = ({
|
||||
// workspace authorization middleware
|
||||
|
||||
try {
|
||||
// validate workspace membership
|
||||
|
||||
const membership = await Membership.findOne({
|
||||
user: req.user._id,
|
||||
workspace: req[location].workspaceId
|
||||
}).populate<{ workspace: IWorkspace }>('workspace');
|
||||
|
||||
if (!membership) {
|
||||
throw new Error('Failed to find workspace membership');
|
||||
}
|
||||
|
||||
if (!acceptedRoles.includes(membership.role)) {
|
||||
throw new Error('Failed to validate workspace membership role');
|
||||
}
|
||||
|
||||
if (!acceptedStatuses.includes(membership.status)) {
|
||||
throw new Error('Failed to validate workspace membership status');
|
||||
}
|
||||
const membership = await validateMembership({
|
||||
userId: req.user._id.toString(),
|
||||
workspaceId: req[location].workspaceId,
|
||||
acceptedRoles,
|
||||
acceptedStatuses
|
||||
});
|
||||
|
||||
req.membership = membership;
|
||||
|
||||
|
57
backend/src/models/bot.ts
Normal file
@ -0,0 +1,57 @@
|
||||
import { Schema, model, Types } from 'mongoose';
|
||||
|
||||
export interface IBot {
|
||||
_id: Types.ObjectId;
|
||||
name: string;
|
||||
workspace: Types.ObjectId;
|
||||
isActive: boolean;
|
||||
publicKey: string;
|
||||
encryptedPrivateKey: string;
|
||||
iv: string;
|
||||
tag: string;
|
||||
}
|
||||
|
||||
const botSchema = new Schema<IBot>(
|
||||
{
|
||||
name: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
workspace: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'Workspace',
|
||||
required: true
|
||||
},
|
||||
isActive: {
|
||||
type: Boolean,
|
||||
required: true,
|
||||
default: false
|
||||
},
|
||||
publicKey: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
encryptedPrivateKey: {
|
||||
type: String,
|
||||
required: true,
|
||||
select: false
|
||||
},
|
||||
iv: {
|
||||
type: String,
|
||||
required: true,
|
||||
select: false
|
||||
},
|
||||
tag: {
|
||||
type: String,
|
||||
required: true,
|
||||
select: false
|
||||
}
|
||||
},
|
||||
{
|
||||
timestamps: true
|
||||
}
|
||||
);
|
||||
|
||||
const Bot = model<IBot>('Bot', botSchema);
|
||||
|
||||
export default Bot;
|
45
backend/src/models/botKey.ts
Normal file
@ -0,0 +1,45 @@
|
||||
import { Schema, model, Types } from 'mongoose';
|
||||
|
||||
export interface IBotKey {
|
||||
_id: Types.ObjectId;
|
||||
encryptedKey: string;
|
||||
nonce: string;
|
||||
sender: Types.ObjectId;
|
||||
bot: Types.ObjectId;
|
||||
workspace: Types.ObjectId;
|
||||
}
|
||||
|
||||
const botKeySchema = new Schema<IBotKey>(
|
||||
{
|
||||
encryptedKey: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
nonce: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
sender: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'User',
|
||||
required: true
|
||||
},
|
||||
bot: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'Bot',
|
||||
required: true
|
||||
},
|
||||
workspace: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'Workspace',
|
||||
required: true
|
||||
}
|
||||
},
|
||||
{
|
||||
timestamps: true
|
||||
}
|
||||
);
|
||||
|
||||
const BotKey = model<IBotKey>('BotKey', botKeySchema);
|
||||
|
||||
export default BotKey;
|
@ -1,4 +1,6 @@
|
||||
import BackupPrivateKey, { IBackupPrivateKey } from './backupPrivateKey';
|
||||
import Bot, { IBot } from './bot';
|
||||
import BotKey, { IBotKey } from './botKey';
|
||||
import IncidentContactOrg, { IIncidentContactOrg } from './incidentContactOrg';
|
||||
import Integration, { IIntegration } from './integration';
|
||||
import IntegrationAuth, { IIntegrationAuth } from './integrationAuth';
|
||||
@ -16,6 +18,10 @@ import Workspace, { IWorkspace } from './workspace';
|
||||
export {
|
||||
BackupPrivateKey,
|
||||
IBackupPrivateKey,
|
||||
Bot,
|
||||
IBot,
|
||||
BotKey,
|
||||
IBotKey,
|
||||
IncidentContactOrg,
|
||||
IIncidentContactOrg,
|
||||
Integration,
|
||||
|
@ -5,6 +5,7 @@ import {
|
||||
ENV_STAGING,
|
||||
ENV_PROD,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY
|
||||
} from '../variables';
|
||||
|
||||
@ -14,7 +15,10 @@ export interface IIntegration {
|
||||
environment: 'dev' | 'test' | 'staging' | 'prod';
|
||||
isActive: boolean;
|
||||
app: string;
|
||||
integration: 'heroku' | 'netlify';
|
||||
target: string;
|
||||
context: string;
|
||||
siteId: string;
|
||||
integration: 'heroku' | 'vercel' | 'netlify';
|
||||
integrationAuth: Types.ObjectId;
|
||||
}
|
||||
|
||||
@ -34,15 +38,29 @@ const integrationSchema = new Schema<IIntegration>(
|
||||
type: Boolean,
|
||||
required: true
|
||||
},
|
||||
app: {
|
||||
// name of app in provider
|
||||
app: { // name of app in provider
|
||||
type: String,
|
||||
default: null,
|
||||
required: true
|
||||
default: null
|
||||
},
|
||||
target: { // vercel-specific target (environment)
|
||||
type: String,
|
||||
default: null
|
||||
},
|
||||
context: { // netlify-specific context (deploy)
|
||||
type: String,
|
||||
default: null
|
||||
},
|
||||
siteId: { // netlify-specific site (app) id
|
||||
type: String,
|
||||
default: null
|
||||
},
|
||||
integration: {
|
||||
type: String,
|
||||
enum: [INTEGRATION_HEROKU, INTEGRATION_NETLIFY],
|
||||
enum: [
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY
|
||||
],
|
||||
required: true
|
||||
},
|
||||
integrationAuth: {
|
||||
|
@ -1,10 +1,16 @@
|
||||
import { Schema, model, Types } from 'mongoose';
|
||||
import { INTEGRATION_HEROKU, INTEGRATION_NETLIFY } from '../variables';
|
||||
import {
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY
|
||||
} from '../variables';
|
||||
|
||||
export interface IIntegrationAuth {
|
||||
_id: Types.ObjectId;
|
||||
workspace: Types.ObjectId;
|
||||
integration: 'heroku' | 'netlify';
|
||||
integration: 'heroku' | 'vercel' | 'netlify';
|
||||
teamId: string;
|
||||
accountId: string;
|
||||
refreshCiphertext?: string;
|
||||
refreshIV?: string;
|
||||
refreshTag?: string;
|
||||
@ -22,9 +28,19 @@ const integrationAuthSchema = new Schema<IIntegrationAuth>(
|
||||
},
|
||||
integration: {
|
||||
type: String,
|
||||
enum: [INTEGRATION_HEROKU, INTEGRATION_NETLIFY],
|
||||
enum: [
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY
|
||||
],
|
||||
required: true
|
||||
},
|
||||
teamId: { // vercel-specific integration param
|
||||
type: String
|
||||
},
|
||||
accountId: { // netlify-specific integration param
|
||||
type: String
|
||||
},
|
||||
refreshCiphertext: {
|
||||
type: String,
|
||||
select: false
|
||||
|
@ -2,25 +2,30 @@ import { Schema, model } from 'mongoose';
|
||||
import { EMAIL_TOKEN_LIFETIME } from '../config';
|
||||
|
||||
export interface IToken {
|
||||
email: String;
|
||||
token: String;
|
||||
createdAt: Date;
|
||||
email: string;
|
||||
token: string;
|
||||
createdAt: Date;
|
||||
}
|
||||
|
||||
const tokenSchema = new Schema<IToken>({
|
||||
email: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
token: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
createdAt: {
|
||||
type: Date,
|
||||
expires: EMAIL_TOKEN_LIFETIME,
|
||||
default: Date.now
|
||||
}
|
||||
email: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
token: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
createdAt: {
|
||||
type: Date,
|
||||
default: Date.now
|
||||
}
|
||||
});
|
||||
|
||||
tokenSchema.index({
|
||||
createdAt: 1
|
||||
}, {
|
||||
expireAfterSeconds: parseInt(EMAIL_TOKEN_LIFETIME)
|
||||
});
|
||||
|
||||
const Token = model<IToken>('Token', tokenSchema);
|
||||
|
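The token schema above replaces the field-level `expires` option with an explicit TTL index built from `EMAIL_TOKEN_LIFETIME`. A minimal, self-contained sketch of how that index behaves (the model name is illustrative, not project code, and it assumes `EMAIL_TOKEN_LIFETIME` is a string number of seconds such as '86400'):

```ts
// Minimal sketch, not project code: MongoDB's TTL monitor deletes documents once
// createdAt is older than expireAfterSeconds, which is how emailed tokens expire.
import { Schema, model } from 'mongoose';

const EMAIL_TOKEN_LIFETIME = process.env.EMAIL_TOKEN_LIFETIME || '86400'; // assumed seconds

const demoTokenSchema = new Schema({
  email: { type: String, required: true },
  token: { type: String, required: true },
  createdAt: { type: Date, default: Date.now }
});

demoTokenSchema.index(
  { createdAt: 1 },
  { expireAfterSeconds: parseInt(EMAIL_TOKEN_LIFETIME, 10) }
);

const DemoToken = model('DemoToken', demoTokenSchema);

// Usage: create a token document; MongoDB removes it automatically once the lifetime elapses.
export const issueEmailToken = (email: string, token: string) =>
  DemoToken.create({ email, token });
```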
@ -5,28 +5,24 @@ import { requireAuth, validateRequest } from '../middleware';
|
||||
import { authController } from '../controllers';
|
||||
import { loginLimiter } from '../helpers/rateLimiter';
|
||||
|
||||
router.post('/token', validateRequest, authController.getNewToken);
|
||||
|
||||
router.post(
|
||||
'/token',
|
||||
validateRequest,
|
||||
authController.getNewToken
|
||||
'/login1',
|
||||
loginLimiter,
|
||||
body('email').exists().trim().notEmpty(),
|
||||
body('clientPublicKey').exists().trim().notEmpty(),
|
||||
validateRequest,
|
||||
authController.login1
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/login1',
|
||||
loginLimiter,
|
||||
body('email').exists().trim().notEmpty(),
|
||||
body('clientPublicKey').exists().trim().notEmpty(),
|
||||
validateRequest,
|
||||
authController.login1
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/login2',
|
||||
loginLimiter,
|
||||
body('email').exists().trim().notEmpty(),
|
||||
body('clientProof').exists().trim().notEmpty(),
|
||||
validateRequest,
|
||||
authController.login2
|
||||
'/login2',
|
||||
loginLimiter,
|
||||
body('email').exists().trim().notEmpty(),
|
||||
body('clientProof').exists().trim().notEmpty(),
|
||||
validateRequest,
|
||||
authController.login2
|
||||
);
|
||||
|
||||
router.post('/logout', requireAuth, authController.logout);
|
||||
|
38
backend/src/routes/bot.ts
Normal file
@ -0,0 +1,38 @@
|
||||
import express from 'express';
|
||||
const router = express.Router();
|
||||
import { body, param } from 'express-validator';
|
||||
import {
|
||||
requireAuth,
|
||||
requireBotAuth,
|
||||
requireWorkspaceAuth,
|
||||
validateRequest
|
||||
} from '../middleware';
|
||||
import { botController } from '../controllers';
|
||||
import { ADMIN, MEMBER, COMPLETED, GRANTED } from '../variables';
|
||||
|
||||
router.get(
|
||||
'/:workspaceId',
|
||||
requireAuth,
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
acceptedStatuses: [COMPLETED, GRANTED]
|
||||
}),
|
||||
param('workspaceId').exists().trim().notEmpty(),
|
||||
validateRequest,
|
||||
botController.getBotByWorkspaceId
|
||||
);
|
||||
|
||||
router.patch(
|
||||
'/:botId/active',
|
||||
requireAuth,
|
||||
requireBotAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
acceptedStatuses: [COMPLETED, GRANTED]
|
||||
}),
|
||||
body('isActive').isBoolean(),
|
||||
body('botKey'),
|
||||
validateRequest,
|
||||
botController.setBotActiveState
|
||||
);
|
||||
|
||||
export default router;
|
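The two bot routes above expose a read endpoint and an activation toggle. A hypothetical client-side sketch of calling them (the /api/v1/bot mount path and the bearer-token header are assumptions based on the route definitions, not confirmed client code):

```ts
// Hypothetical client sketch for the bot routes defined above.
const BOT_API = 'http://localhost:8080/api/v1/bot'; // assumed mount path

export const getBotByWorkspaceId = async (token: string, workspaceId: string) => {
  const res = await fetch(`${BOT_API}/${workspaceId}`, {
    headers: { Authorization: `Bearer ${token}` }
  });
  if (!res.ok) throw new Error(`Failed to fetch bot: ${res.status}`);
  return res.json();
};

export const setBotActiveState = async (
  token: string,
  botId: string,
  isActive: boolean,
  botKey?: unknown // only needed when activating the bot; shape not shown in the routes
) => {
  const res = await fetch(`${BOT_API}/${botId}/active`, {
    method: 'PATCH',
    headers: { Authorization: `Bearer ${token}`, 'Content-Type': 'application/json' },
    body: JSON.stringify({ isActive, botKey })
  });
  if (!res.ok) throw new Error(`Failed to update bot: ${res.status}`);
  return res.json();
};
```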
@ -1,4 +1,5 @@
import signup from './signup';
import bot from './bot';
import auth from './auth';
import user from './user';
import userAction from './userAction';
@ -18,6 +19,7 @@ import integrationAuth from './integrationAuth';
export {
  signup,
  auth,
  bot,
  user,
  userAction,
  organization,
@ -9,22 +9,6 @@ import { ADMIN, MEMBER, GRANTED } from '../variables';
|
||||
import { body, param } from 'express-validator';
|
||||
import { integrationController } from '../controllers';
|
||||
|
||||
router.get('/integrations', requireAuth, integrationController.getIntegrations);
|
||||
|
||||
router.post(
|
||||
'/:integrationId/sync',
|
||||
requireAuth,
|
||||
requireIntegrationAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
acceptedStatuses: [GRANTED]
|
||||
}),
|
||||
param('integrationId').exists().trim(),
|
||||
body('key').exists(),
|
||||
body('secrets').exists(),
|
||||
validateRequest,
|
||||
integrationController.syncIntegration
|
||||
);
|
||||
|
||||
router.patch(
|
||||
'/:integrationId',
|
||||
requireAuth,
|
||||
@ -32,10 +16,15 @@ router.patch(
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
acceptedStatuses: [GRANTED]
|
||||
}),
|
||||
param('integrationId'),
|
||||
body('update'),
|
||||
param('integrationId').exists().trim(),
|
||||
body('app').exists().trim(),
|
||||
body('environment').exists().trim(),
|
||||
body('isActive').exists().isBoolean(),
|
||||
body('target').exists(),
|
||||
body('context').exists(),
|
||||
body('siteId').exists(),
|
||||
validateRequest,
|
||||
integrationController.modifyIntegration
|
||||
integrationController.updateIntegration
|
||||
);
|
||||
|
||||
router.delete(
|
||||
@ -45,7 +34,7 @@ router.delete(
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
acceptedStatuses: [GRANTED]
|
||||
}),
|
||||
param('integrationId'),
|
||||
param('integrationId').exists().trim(),
|
||||
validateRequest,
|
||||
integrationController.deleteIntegration
|
||||
);
|
||||
|
@ -10,6 +10,12 @@ import {
|
||||
import { ADMIN, MEMBER, GRANTED } from '../variables';
|
||||
import { integrationAuthController } from '../controllers';
|
||||
|
||||
router.get(
|
||||
'/integration-options',
|
||||
requireAuth,
|
||||
integrationAuthController.getIntegrationOptions
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/oauth-token',
|
||||
requireAuth,
|
||||
@ -22,7 +28,7 @@ router.post(
|
||||
body('code').exists().trim().notEmpty(),
|
||||
body('integration').exists().trim().notEmpty(),
|
||||
validateRequest,
|
||||
integrationAuthController.integrationAuthOauthExchange
|
||||
integrationAuthController.oAuthExchange
|
||||
);
|
||||
|
||||
router.get(
|
||||
@ -42,7 +48,8 @@ router.delete(
|
||||
requireAuth,
|
||||
requireIntegrationAuthorizationAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
acceptedStatuses: [GRANTED]
|
||||
acceptedStatuses: [GRANTED],
|
||||
attachAccessToken: false
|
||||
}),
|
||||
param('integrationAuthId'),
|
||||
validateRequest,
|
||||
|
@ -1,7 +1,7 @@
|
||||
import express from 'express';
|
||||
const router = express.Router();
|
||||
import { body } from 'express-validator';
|
||||
import { requireAuth, validateRequest } from '../middleware';
|
||||
import { requireAuth, requireSignupAuth, validateRequest } from '../middleware';
|
||||
import { passwordController } from '../controllers';
|
||||
import { passwordLimiter } from '../helpers/rateLimiter';
|
||||
|
||||
@ -27,6 +27,30 @@ router.post(
|
||||
passwordController.changePassword
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/email/password-reset',
|
||||
passwordLimiter,
|
||||
body('email').exists().trim().notEmpty(),
|
||||
validateRequest,
|
||||
passwordController.emailPasswordReset
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/email/password-reset-verify',
|
||||
passwordLimiter,
|
||||
body('email').exists().trim().notEmpty().isEmail(),
|
||||
body('code').exists().trim().notEmpty(),
|
||||
validateRequest,
|
||||
passwordController.emailPasswordResetVerify
|
||||
);
|
||||
|
||||
router.get(
|
||||
'/backup-private-key',
|
||||
passwordLimiter,
|
||||
requireSignupAuth,
|
||||
passwordController.getBackupPrivateKey
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/backup-private-key',
|
||||
passwordLimiter,
|
||||
@ -41,4 +65,16 @@ router.post(
|
||||
passwordController.createBackupPrivateKey
|
||||
);
|
||||
|
||||
export default router;
|
||||
router.post(
|
||||
'/password-reset',
|
||||
requireSignupAuth,
|
||||
body('encryptedPrivateKey').exists().trim().notEmpty(), // private key encrypted under new pwd
|
||||
body('iv').exists().trim().notEmpty(), // new iv for private key
|
||||
body('tag').exists().trim().notEmpty(), // new tag for private key
|
||||
body('salt').exists().trim().notEmpty(), // part of new pwd
|
||||
body('verifier').exists().trim().notEmpty(), // part of new pwd
|
||||
validateRequest,
|
||||
passwordController.resetPassword
|
||||
);
|
||||
|
||||
export default router;
|
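The password routes above add an email-based reset flow: request a code, verify it, then submit the private key re-encrypted under the new password together with a fresh salt and verifier. A hedged client-side sketch of that sequence (the /api/v1/password prefix, the bearer header, and the verify response shape are assumptions, not confirmed client code):

```ts
const PASSWORD_API = 'http://localhost:8080/api/v1/password'; // assumed mount path

export const requestPasswordReset = (email: string) =>
  fetch(`${PASSWORD_API}/email/password-reset`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ email })
  });

export const verifyPasswordResetCode = async (email: string, code: string) => {
  const res = await fetch(`${PASSWORD_API}/email/password-reset-verify`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ email, code })
  });
  return res.json(); // assumed to yield a token accepted by requireSignupAuth
};

export const resetPassword = async (
  signupToken: string,
  payload: { encryptedPrivateKey: string; iv: string; tag: string; salt: string; verifier: string }
) => {
  const res = await fetch(`${PASSWORD_API}/password-reset`, {
    method: 'POST',
    headers: { Authorization: `Bearer ${signupToken}`, 'Content-Type': 'application/json' },
    body: JSON.stringify(payload)
  });
  return res.json();
};
```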
82
backend/src/services/BotService.ts
Normal file
@ -0,0 +1,82 @@
|
||||
import {
|
||||
getSecretsHelper,
|
||||
encryptSymmetricHelper,
|
||||
decryptSymmetricHelper
|
||||
} from '../helpers/bot';
|
||||
|
||||
/**
|
||||
* Class to handle bot actions
|
||||
*/
|
||||
class BotService {
|
||||
|
||||
/**
|
||||
* Return decrypted secrets for workspace with id [workspaceId] and
|
||||
* environment [environment] shared to the bot.
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.workspaceId - id of workspace of secrets
|
||||
* @param {String} obj.environment - environment for secrets
|
||||
* @returns {Object} secretObj - object where keys are secret keys and values are secret values
|
||||
*/
|
||||
static async getSecrets({
|
||||
workspaceId,
|
||||
environment
|
||||
}: {
|
||||
workspaceId: string;
|
||||
environment: string;
|
||||
}) {
|
||||
return await getSecretsHelper({
|
||||
workspaceId,
|
||||
environment
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return symmetrically encrypted [plaintext] using the
|
||||
* bot's copy of the workspace key for workspace with id [workspaceId]
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.workspaceId - id of workspace
|
||||
* @param {String} obj.plaintext - plaintext to encrypt
|
||||
*/
|
||||
static async encryptSymmetric({
|
||||
workspaceId,
|
||||
plaintext
|
||||
}: {
|
||||
workspaceId: string;
|
||||
plaintext: string;
|
||||
}) {
|
||||
return await encryptSymmetricHelper({
|
||||
workspaceId,
|
||||
plaintext
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return symmetrically decrypted [ciphertext] using the
|
||||
* bot's copy of the workspace key for workspace with id [workspaceId]
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.workspaceId - id of workspace
|
||||
* @param {String} obj.ciphertext - ciphertext to decrypt
|
||||
* @param {String} obj.iv - iv
|
||||
* @param {String} obj.tag - tag
|
||||
*/
|
||||
static async decryptSymmetric({
|
||||
workspaceId,
|
||||
ciphertext,
|
||||
iv,
|
||||
tag
|
||||
}: {
|
||||
workspaceId: string;
|
||||
ciphertext: string;
|
||||
iv: string;
|
||||
tag: string;
|
||||
}) {
|
||||
return await decryptSymmetricHelper({
|
||||
workspaceId,
|
||||
ciphertext,
|
||||
iv,
|
||||
tag
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export default BotService;
|
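A minimal usage sketch for BotService (the workspace id is a placeholder, and the `{ ciphertext, iv, tag }` return shape of `encryptSymmetric` is an assumption inferred from the `decryptSymmetric` parameters, not confirmed from the helper implementation):

```ts
import { BotService } from '../services';

const demo = async () => {
  const workspaceId = '<workspace-id>'; // hypothetical id

  // Encrypt a value under the bot's copy of the workspace key...
  const { ciphertext, iv, tag } = await BotService.encryptSymmetric({
    workspaceId,
    plaintext: 'super-secret-value'
  });

  // ...and decrypt it again.
  const plaintext = await BotService.decryptSymmetric({ workspaceId, ciphertext, iv, tag });

  // Pull all decrypted secrets shared to the bot for one environment.
  const secrets = await BotService.getSecrets({ workspaceId, environment: 'dev' });

  console.log(plaintext === 'super-secret-value', Object.keys(secrets).length);
};

demo();
```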
30
backend/src/services/EventService.ts
Normal file
@ -0,0 +1,30 @@
|
||||
import { Bot, IBot } from '../models';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import { handleEventHelper } from '../helpers/event';
|
||||
|
||||
interface Event {
|
||||
name: string;
|
||||
workspaceId: string;
|
||||
payload: any;
|
||||
}
|
||||
|
||||
/**
|
||||
* Class to handle events.
|
||||
*/
|
||||
class EventService {
|
||||
/**
|
||||
* Handle event [event]
|
||||
* @param {Object} obj
|
||||
* @param {Event} obj.event - an event
|
||||
* @param {String} obj.event.name - name of event
|
||||
* @param {String} obj.event.workspaceId - id of workspace that event is part of
|
||||
* @param {Object} obj.event.payload - payload of event (depends on event)
|
||||
*/
|
||||
static async handleEvent({ event }: { event: Event }): Promise<void> {
|
||||
await handleEventHelper({
|
||||
event
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export default EventService;
|
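A hedged sketch of firing a push-secrets event so EventService can hand it to `handleEventHelper` for whatever consumers exist (e.g. integration syncing). The payload contents are an assumption; the Event interface only fixes `name`, `workspaceId` and `payload`:

```ts
import { EventService } from '../services';
import { EVENT_PUSH_SECRETS } from '../variables';

const notifySecretsPushed = async (workspaceId: string) => {
  await EventService.handleEvent({
    event: {
      name: EVENT_PUSH_SECRETS,
      workspaceId,
      payload: {} // payload depends on the event; left empty in this sketch
    }
  });
};
```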
145
backend/src/services/IntegrationService.ts
Normal file
@ -0,0 +1,145 @@
|
||||
import * as Sentry from '@sentry/node';
|
||||
import {
|
||||
Integration
|
||||
} from '../models';
|
||||
import {
|
||||
handleOAuthExchangeHelper,
|
||||
syncIntegrationsHelper,
|
||||
getIntegrationAuthRefreshHelper,
|
||||
getIntegrationAuthAccessHelper,
|
||||
setIntegrationAuthRefreshHelper,
|
||||
setIntegrationAuthAccessHelper,
|
||||
} from '../helpers/integration';
|
||||
import { exchangeCode } from '../integrations';
|
||||
import {
|
||||
ENV_DEV,
|
||||
EVENT_PUSH_SECRETS
|
||||
} from '../variables';
|
||||
|
||||
// should sync stuff be here too? Probably.
|
||||
// TODO: move bot functions to IntegrationService.
|
||||
|
||||
/**
|
||||
* Class to handle integrations
|
||||
*/
|
||||
class IntegrationService {
|
||||
|
||||
/**
|
||||
* Perform OAuth2 code-token exchange for workspace with id [workspaceId] and integration
|
||||
* named [integration]
|
||||
* - Store integration access and refresh tokens returned from the OAuth2 code-token exchange
|
||||
* - Add placeholder inactive integration
|
||||
* - Create bot sequence for integration
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.workspaceId - id of workspace
|
||||
* @param {String} obj.integration - name of integration
|
||||
* @param {String} obj.code - code
|
||||
*/
|
||||
static async handleOAuthExchange({
|
||||
workspaceId,
|
||||
integration,
|
||||
code
|
||||
}: {
|
||||
workspaceId: string;
|
||||
integration: string;
|
||||
code: string;
|
||||
}) {
|
||||
await handleOAuthExchangeHelper({
|
||||
workspaceId,
|
||||
integration,
|
||||
code
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync/push environment variables in workspace with id [workspaceId] to
|
||||
* all associated integrations
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.workspaceId - id of workspace
|
||||
*/
|
||||
static async syncIntegrations({
|
||||
workspaceId
|
||||
}: {
|
||||
workspaceId: string;
|
||||
}) {
|
||||
return await syncIntegrationsHelper({
|
||||
workspaceId
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return decrypted refresh token for integration auth
|
||||
* with id [integrationAuthId]
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.integrationAuthId - id of integration auth
|
||||
* @returns {String} refreshToken - decrypted refresh token
|
||||
*/
|
||||
static async getIntegrationAuthRefresh({ integrationAuthId }: { integrationAuthId: string}) {
|
||||
return await getIntegrationAuthRefreshHelper({
|
||||
integrationAuthId
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return decrypted access token for integration auth
|
||||
* with id [integrationAuthId]
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.integrationAuthId - id of integration auth
|
||||
* @returns {String} accessToken - decrypted access token
|
||||
*/
|
||||
static async getIntegrationAuthAccess({ integrationAuthId }: { integrationAuthId: string}) {
|
||||
return await getIntegrationAuthAccessHelper({
|
||||
integrationAuthId
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Encrypt refresh token [refreshToken] using the bot's copy
|
||||
* of the workspace key for workspace belonging to integration auth
|
||||
* with id [integrationAuthId]
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.integrationAuthId - id of integration auth
|
||||
* @param {String} obj.refreshToken - refresh token
|
||||
* @returns {IntegrationAuth} integrationAuth - updated integration auth
|
||||
*/
|
||||
static async setIntegrationAuthRefresh({
|
||||
integrationAuthId,
|
||||
refreshToken
|
||||
}: {
|
||||
integrationAuthId: string;
|
||||
refreshToken: string;
|
||||
}) {
|
||||
return await setIntegrationAuthRefreshHelper({
|
||||
integrationAuthId,
|
||||
refreshToken
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Encrypt access token [accessToken] using the bot's copy
|
||||
* of the workspace key for workspace belonging to integration auth
|
||||
* with id [integrationAuthId]
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.integrationAuthId - id of integration auth
|
||||
* @param {String} obj.accessToken - access token
|
||||
* @param {String} obj.accessExpiresAt - expiration date of access token
|
||||
* @returns {IntegrationAuth} - updated integration auth
|
||||
*/
|
||||
static async setIntegrationAuthAccess({
|
||||
integrationAuthId,
|
||||
accessToken,
|
||||
accessExpiresAt
|
||||
}: {
|
||||
integrationAuthId: string;
|
||||
accessToken: string;
|
||||
accessExpiresAt: Date;
|
||||
}) {
|
||||
return await setIntegrationAuthAccessHelper({
|
||||
integrationAuthId,
|
||||
accessToken,
|
||||
accessExpiresAt
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export default IntegrationService;
|
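A hedged sketch of the intended call sequence for IntegrationService; the workspace id and OAuth code are placeholders supplied by the caller:

```ts
import { IntegrationService } from '../services';
import { INTEGRATION_HEROKU } from '../variables';

const connectAndSync = async (workspaceId: string, code: string) => {
  // 1. Exchange the OAuth2 code, store the returned tokens, and create the
  //    placeholder (inactive) integration plus the bot sequence.
  await IntegrationService.handleOAuthExchange({
    workspaceId,
    integration: INTEGRATION_HEROKU,
    code
  });

  // 2. Push the workspace's environment variables to every associated integration.
  await IntegrationService.syncIntegrations({ workspaceId });
};
```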
@ -1,15 +1,19 @@
|
||||
import { PostHog } from 'posthog-node';
|
||||
import { NODE_ENV, POSTHOG_HOST, POSTHOG_PROJECT_API_KEY, TELEMETRY_ENABLED } from '../config';
|
||||
import {
|
||||
NODE_ENV,
|
||||
POSTHOG_HOST,
|
||||
POSTHOG_PROJECT_API_KEY,
|
||||
TELEMETRY_ENABLED
|
||||
} from '../config';
|
||||
|
||||
console.log('TELEMETRY_ENABLED: ', TELEMETRY_ENABLED);
|
||||
|
||||
let postHogClient: any;
|
||||
if (
|
||||
NODE_ENV === 'production'
|
||||
&& TELEMETRY_ENABLED
|
||||
) {
|
||||
// case: enable opt-out telemetry in production
|
||||
postHogClient = new PostHog(POSTHOG_PROJECT_API_KEY, {
|
||||
host: POSTHOG_HOST
|
||||
});
|
||||
if (NODE_ENV === 'production' && TELEMETRY_ENABLED) {
|
||||
// case: enable opt-out telemetry in production
|
||||
postHogClient = new PostHog(POSTHOG_PROJECT_API_KEY, {
|
||||
host: POSTHOG_HOST
|
||||
});
|
||||
}
|
||||
|
||||
export default postHogClient;
|
||||
export default postHogClient;
|
||||
|
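A hedged usage sketch: `postHogClient` is only instantiated when `NODE_ENV === 'production'` and telemetry is enabled, so callers should guard against it being undefined. The event name and properties below are illustrative only:

```ts
import postHogClient from './PostHogClient';

const trackUserSignup = (email: string) => {
  if (postHogClient) {
    postHogClient.capture({
      distinctId: email,
      event: 'user signed up',
      properties: { source: 'backend' }
    });
  }
};
```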
@ -1,5 +1,11 @@
import postHogClient from './PostHogClient';
import BotService from './BotService';
import EventService from './EventService';
import IntegrationService from './IntegrationService';

export {
  postHogClient
  postHogClient,
  BotService,
  EventService,
  IntegrationService
}
@ -4,7 +4,7 @@
<head>
  <meta charset="utf-8">
  <meta http-equiv="x-ua-compatible" content="ie=edge">
  <title>Email Verification</title>
  <title>Organization Invitation</title>
</head>
<body>
  <h2>Infisical</h2>
15
backend/src/templates/passwordReset.handlebars
Normal file
@ -0,0 +1,15 @@
<!DOCTYPE html>
<html>
<head>
  <meta charset="utf-8">
  <meta http-equiv="x-ua-compatible" content="ie=edge">
  <title>Account Recovery</title>
</head>
<body>
  <h2>Infisical</h2>
  <h2>Reset your password</h2>
  <p>Someone requested a password reset.</p>
  <a href="{{callback_url}}?token={{token}}&to={{email}}">Reset password</a>
  <p>If you didn't initiate this request, please contact us immediately at team@infisical.com</p>
</body>
</html>
@ -3,7 +3,7 @@
<head>
  <meta charset="utf-8">
  <meta http-equiv="x-ua-compatible" content="ie=edge">
  <title>Email Verification</title>
  <title>Project Invitation</title>
</head>
<body>
  <h2>Infisical</h2>
1
backend/src/types/express/index.d.ts
vendored
@ -11,6 +11,7 @@ declare global {
    membershipOrg: any;
    integration: any;
    integrationAuth: any;
    bot: any;
    serviceToken: any;
    accessToken: any;
    query?: any;
@ -2,6 +2,21 @@ import nacl from 'tweetnacl';
|
||||
import util from 'tweetnacl-util';
|
||||
import AesGCM from './aes-gcm';
|
||||
|
||||
/**
|
||||
* Return new base64, NaCl, public-private key pair.
|
||||
* @returns {Object} obj
|
||||
* @returns {String} obj.publicKey - base64, NaCl, public key
|
||||
* @returns {String} obj.privateKey - base64, NaCl, private key
|
||||
*/
|
||||
const generateKeyPair = () => {
|
||||
const pair = nacl.box.keyPair();
|
||||
|
||||
return ({
|
||||
publicKey: util.encodeBase64(pair.publicKey),
|
||||
privateKey: util.encodeBase64(pair.secretKey)
|
||||
});
|
||||
}
|
||||
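A minimal sketch of calling `generateKeyPair` above (the import path is an assumption): both values are base64 strings, and the private key should be encrypted, e.g. under a key derived from the user's password, before it is ever stored.

```ts
import { generateKeyPair } from '../utils/crypto'; // assumed path

const { publicKey, privateKey } = generateKeyPair();
console.log(publicKey.length > 0 && privateKey.length > 0); // true
```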
|
||||
/**
|
||||
* Return asymmetrically encrypted [plaintext] using [publicKey] where
|
||||
* [publicKey] likely belongs to the recipient.
|
||||
@ -81,7 +96,7 @@ const decryptAsymmetric = ({
|
||||
* Return symmetrically encrypted [plaintext] using [key].
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.plaintext - plaintext to encrypt
|
||||
* @param {String} obj.key - 16-byte hex key
|
||||
* @param {String} obj.key - hex key
|
||||
*/
|
||||
const encryptSymmetric = ({
|
||||
plaintext,
|
||||
@ -114,7 +129,7 @@ const encryptSymmetric = ({
|
||||
* @param {String} obj.ciphertext - ciphertext to decrypt
|
||||
* @param {String} obj.iv - iv
|
||||
* @param {String} obj.tag - tag
|
||||
* @param {String} obj.key - 32-byte hex key
|
||||
* @param {String} obj.key - hex key
|
||||
*
|
||||
*/
|
||||
const decryptSymmetric = ({
|
||||
@ -139,6 +154,7 @@ const decryptSymmetric = ({
|
||||
};
|
||||
|
||||
export {
|
||||
generateKeyPair,
|
||||
encryptAsymmetric,
|
||||
decryptAsymmetric,
|
||||
encryptSymmetric,
|
||||
|
@ -1,60 +0,0 @@
|
||||
// membership roles
|
||||
const OWNER = 'owner';
|
||||
const ADMIN = 'admin';
|
||||
const MEMBER = 'member';
|
||||
|
||||
// membership statuses
|
||||
const INVITED = 'invited';
|
||||
|
||||
// -- organization
|
||||
const ACCEPTED = 'accepted';
|
||||
|
||||
// -- workspace
|
||||
const COMPLETED = 'completed';
|
||||
const GRANTED = 'granted';
|
||||
|
||||
// subscriptions
|
||||
const PLAN_STARTER = 'starter';
|
||||
const PLAN_PRO = 'pro';
|
||||
|
||||
// secrets
|
||||
const SECRET_SHARED = 'shared';
|
||||
const SECRET_PERSONAL = 'personal';
|
||||
|
||||
// environments
|
||||
const ENV_DEV = 'dev';
|
||||
const ENV_TESTING = 'test';
|
||||
const ENV_STAGING = 'staging';
|
||||
const ENV_PROD = 'prod';
|
||||
const ENV_SET = new Set([ENV_DEV, ENV_TESTING, ENV_STAGING, ENV_PROD]);
|
||||
|
||||
// integrations
|
||||
const INTEGRATION_HEROKU = 'heroku';
|
||||
const INTEGRATION_NETLIFY = 'netlify';
|
||||
const INTEGRATION_SET = new Set([INTEGRATION_HEROKU, INTEGRATION_NETLIFY]);
|
||||
|
||||
// integration types
|
||||
const INTEGRATION_OAUTH2 = 'oauth2';
|
||||
|
||||
export {
|
||||
OWNER,
|
||||
ADMIN,
|
||||
MEMBER,
|
||||
INVITED,
|
||||
ACCEPTED,
|
||||
COMPLETED,
|
||||
GRANTED,
|
||||
PLAN_STARTER,
|
||||
PLAN_PRO,
|
||||
SECRET_SHARED,
|
||||
SECRET_PERSONAL,
|
||||
ENV_DEV,
|
||||
ENV_TESTING,
|
||||
ENV_STAGING,
|
||||
ENV_PROD,
|
||||
ENV_SET,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_SET,
|
||||
INTEGRATION_OAUTH2
|
||||
};
|
5
backend/src/variables/action.ts
Normal file
@ -0,0 +1,5 @@
const ACTION_PUSH_TO_HEROKU = 'pushToHeroku';

export {
  ACTION_PUSH_TO_HEROKU
}
14
backend/src/variables/environment.ts
Normal file
@ -0,0 +1,14 @@
// environments
const ENV_DEV = 'dev';
const ENV_TESTING = 'test';
const ENV_STAGING = 'staging';
const ENV_PROD = 'prod';
const ENV_SET = new Set([ENV_DEV, ENV_TESTING, ENV_STAGING, ENV_PROD]);

export {
  ENV_DEV,
  ENV_TESTING,
  ENV_STAGING,
  ENV_PROD,
  ENV_SET
}
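A hedged sketch of how `ENV_SET` can be used to validate a requested environment, e.g. in a controller that pulls secrets (the error message is illustrative):

```ts
import { ENV_SET } from '../variables';

const assertValidEnvironment = (environment: string) => {
  if (!ENV_SET.has(environment)) {
    throw new Error(`Invalid environment: ${environment}`);
  }
};

assertValidEnvironment('dev'); // ok
```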
7
backend/src/variables/event.ts
Normal file
@ -0,0 +1,7 @@
const EVENT_PUSH_SECRETS = 'pushSecrets';
const EVENT_PULL_SECRETS = 'pullSecrets';

export {
  EVENT_PUSH_SECRETS,
  EVENT_PULL_SECRETS
}
79
backend/src/variables/index.ts
Normal file
@ -0,0 +1,79 @@
|
||||
import {
|
||||
ENV_DEV,
|
||||
ENV_TESTING,
|
||||
ENV_STAGING,
|
||||
ENV_PROD,
|
||||
ENV_SET
|
||||
} from './environment';
|
||||
import {
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_SET,
|
||||
INTEGRATION_OAUTH2,
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
INTEGRATION_VERCEL_TOKEN_URL,
|
||||
INTEGRATION_NETLIFY_TOKEN_URL,
|
||||
INTEGRATION_HEROKU_API_URL,
|
||||
INTEGRATION_VERCEL_API_URL,
|
||||
INTEGRATION_NETLIFY_API_URL,
|
||||
INTEGRATION_OPTIONS
|
||||
} from './integration';
|
||||
import {
|
||||
OWNER,
|
||||
ADMIN,
|
||||
MEMBER,
|
||||
INVITED,
|
||||
ACCEPTED,
|
||||
COMPLETED,
|
||||
GRANTED
|
||||
} from './organization';
|
||||
import {
|
||||
SECRET_SHARED,
|
||||
SECRET_PERSONAL
|
||||
} from './secret';
|
||||
import {
|
||||
PLAN_STARTER,
|
||||
PLAN_PRO
|
||||
} from './stripe';
|
||||
import {
|
||||
EVENT_PUSH_SECRETS,
|
||||
EVENT_PULL_SECRETS
|
||||
} from './event';
|
||||
import {
|
||||
ACTION_PUSH_TO_HEROKU
|
||||
} from './action';
|
||||
|
||||
export {
|
||||
OWNER,
|
||||
ADMIN,
|
||||
MEMBER,
|
||||
INVITED,
|
||||
ACCEPTED,
|
||||
COMPLETED,
|
||||
GRANTED,
|
||||
PLAN_STARTER,
|
||||
PLAN_PRO,
|
||||
SECRET_SHARED,
|
||||
SECRET_PERSONAL,
|
||||
ENV_DEV,
|
||||
ENV_TESTING,
|
||||
ENV_STAGING,
|
||||
ENV_PROD,
|
||||
ENV_SET,
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_SET,
|
||||
INTEGRATION_OAUTH2,
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
INTEGRATION_VERCEL_TOKEN_URL,
|
||||
INTEGRATION_NETLIFY_TOKEN_URL,
|
||||
INTEGRATION_HEROKU_API_URL,
|
||||
INTEGRATION_VERCEL_API_URL,
|
||||
INTEGRATION_NETLIFY_API_URL,
|
||||
EVENT_PUSH_SECRETS,
|
||||
EVENT_PULL_SECRETS,
|
||||
ACTION_PUSH_TO_HEROKU,
|
||||
INTEGRATION_OPTIONS
|
||||
};
|
119
backend/src/variables/integration.ts
Normal file
@ -0,0 +1,119 @@
|
||||
import {
|
||||
CLIENT_ID_HEROKU,
|
||||
CLIENT_ID_NETLIFY,
|
||||
CLIENT_SLUG_VERCEL
|
||||
} from '../config';
|
||||
|
||||
// integrations
|
||||
const INTEGRATION_HEROKU = 'heroku';
|
||||
const INTEGRATION_VERCEL = 'vercel';
|
||||
const INTEGRATION_NETLIFY = 'netlify';
|
||||
const INTEGRATION_SET = new Set([
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY
|
||||
]);
|
||||
|
||||
// integration types
|
||||
const INTEGRATION_OAUTH2 = 'oauth2';
|
||||
|
||||
// integration oauth endpoints
|
||||
const INTEGRATION_HEROKU_TOKEN_URL = 'https://id.heroku.com/oauth/token';
|
||||
const INTEGRATION_VERCEL_TOKEN_URL = 'https://api.vercel.com/v2/oauth/access_token';
|
||||
const INTEGRATION_NETLIFY_TOKEN_URL = 'https://api.netlify.com/oauth/token';
|
||||
|
||||
// integration apps endpoints
|
||||
const INTEGRATION_HEROKU_API_URL = 'https://api.heroku.com';
|
||||
const INTEGRATION_VERCEL_API_URL = 'https://api.vercel.com';
|
||||
const INTEGRATION_NETLIFY_API_URL = 'https://api.netlify.com';
|
||||
|
||||
const INTEGRATION_OPTIONS = [
|
||||
{
|
||||
name: 'Heroku',
|
||||
slug: 'heroku',
|
||||
image: 'Heroku',
|
||||
isAvailable: true,
|
||||
type: 'oauth2',
|
||||
clientId: CLIENT_ID_HEROKU,
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Vercel',
|
||||
slug: 'vercel',
|
||||
image: 'Vercel',
|
||||
isAvailable: true,
|
||||
type: 'vercel',
|
||||
clientId: '',
|
||||
clientSlug: CLIENT_SLUG_VERCEL,
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Netlify',
|
||||
slug: 'netlify',
|
||||
image: 'Netlify',
|
||||
isAvailable: false,
|
||||
type: 'oauth2',
|
||||
clientId: CLIENT_ID_NETLIFY,
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Google Cloud Platform',
|
||||
slug: 'gcp',
|
||||
image: 'Google Cloud Platform',
|
||||
isAvailable: false,
|
||||
type: '',
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Amazon Web Services',
|
||||
slug: 'aws',
|
||||
image: 'Amazon Web Services',
|
||||
isAvailable: false,
|
||||
type: '',
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Microsoft Azure',
|
||||
slug: 'azure',
|
||||
image: 'Microsoft Azure',
|
||||
isAvailable: false,
|
||||
type: '',
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Travis CI',
|
||||
slug: 'travisci',
|
||||
image: 'Travis CI',
|
||||
isAvailable: false,
|
||||
type: '',
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
},
|
||||
{
|
||||
name: 'Circle CI',
|
||||
slug: 'circleci',
|
||||
image: 'Circle CI',
|
||||
isAvailable: false,
|
||||
type: '',
|
||||
clientId: '',
|
||||
docsLink: ''
|
||||
}
|
||||
]
|
||||
|
||||
export {
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_SET,
|
||||
INTEGRATION_OAUTH2,
|
||||
INTEGRATION_HEROKU_TOKEN_URL,
|
||||
INTEGRATION_VERCEL_TOKEN_URL,
|
||||
INTEGRATION_NETLIFY_TOKEN_URL,
|
||||
INTEGRATION_HEROKU_API_URL,
|
||||
INTEGRATION_VERCEL_API_URL,
|
||||
INTEGRATION_NETLIFY_API_URL,
|
||||
INTEGRATION_OPTIONS
|
||||
}
|
24
backend/src/variables/organization.ts
Normal file
@ -0,0 +1,24 @@
// membership roles
const OWNER = 'owner';
const ADMIN = 'admin';
const MEMBER = 'member';

// membership statuses
const INVITED = 'invited';

// -- organization
const ACCEPTED = 'accepted';

// -- workspace
const COMPLETED = 'completed';
const GRANTED = 'granted';

export {
  OWNER,
  ADMIN,
  MEMBER,
  INVITED,
  ACCEPTED,
  COMPLETED,
  GRANTED
}
8
backend/src/variables/secret.ts
Normal file
@ -0,0 +1,8 @@
// secrets
const SECRET_SHARED = 'shared';
const SECRET_PERSONAL = 'personal';

export {
  SECRET_SHARED,
  SECRET_PERSONAL
}
7
backend/src/variables/stripe.ts
Normal file
@ -0,0 +1,7 @@
const PLAN_STARTER = 'starter';
const PLAN_PRO = 'pro';

export {
  PLAN_STARTER,
  PLAN_PRO
}
@ -19,6 +19,7 @@ const (
|
||||
FormatDotenv string = "dotenv"
|
||||
FormatJson string = "json"
|
||||
FormatCSV string = "csv"
|
||||
FormatYaml string = "yaml"
|
||||
)
|
||||
|
||||
// exportCmd represents the export command
|
||||
@ -101,8 +102,10 @@ func formatEnvs(envs []models.SingleEnvironmentVariable, format string) (string,
|
||||
return formatAsJson(envs), nil
|
||||
case FormatCSV:
|
||||
return formatAsCSV(envs), nil
|
||||
case FormatYaml:
|
||||
return formatAsYaml(envs), nil
|
||||
default:
|
||||
return "", fmt.Errorf("invalid format flag: %s", format)
|
||||
return "", fmt.Errorf("invalid format type: %s. Available format types are [%s]", format, []string{FormatDotenv, FormatJson, FormatCSV, FormatYaml})
|
||||
}
|
||||
}
|
||||
|
||||
@ -127,6 +130,14 @@ func formatAsDotEnv(envs []models.SingleEnvironmentVariable) string {
|
||||
return dotenv
|
||||
}
|
||||
|
||||
func formatAsYaml(envs []models.SingleEnvironmentVariable) string {
|
||||
var dotenv string
|
||||
for _, env := range envs {
|
||||
dotenv += fmt.Sprintf("%s: %s\n", env.Key, env.Value)
|
||||
}
|
||||
return dotenv
|
||||
}
|
||||
|
||||
// Format environment variables as a JSON file
|
||||
func formatAsJson(envs []models.SingleEnvironmentVariable) string {
|
||||
// Dump as a json array
|
||||
|
@ -36,7 +36,7 @@ var initCmd = &cobra.Command{
|
||||
return
|
||||
}
|
||||
|
||||
if util.WorkspaceConfigFileExists() {
|
||||
if util.WorkspaceConfigFileExistsInCurrentPath() {
|
||||
shouldOverride, err := shouldOverrideWorkspacePrompt()
|
||||
if err != nil {
|
||||
log.Errorln("Unable to parse your answer")
|
||||
|
@ -114,7 +114,7 @@ func init() {
|
||||
|
||||
func askForLoginCredentials() (email string, password string, err error) {
|
||||
validateEmail := func(input string) error {
|
||||
matched, err := regexp.MatchString("^\\S+@\\S+$", input)
|
||||
matched, err := regexp.MatchString("^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\\.[a-zA-Z0-9-.]+$", input)
|
||||
if err != nil || !matched {
|
||||
return errors.New("this doesn't look like an email address")
|
||||
}
|
||||
|
@ -15,7 +15,7 @@ var rootCmd = &cobra.Command{
|
||||
Short: "Infisical CLI is used to inject environment variables into any process",
|
||||
Long: `Infisical is a simple, end-to-end encrypted service that enables teams to sync and manage their environment variables across their development life cycle.`,
|
||||
CompletionOptions: cobra.CompletionOptions{DisableDefaultCmd: true},
|
||||
Version: "0.1.8",
|
||||
Version: "0.1.11",
|
||||
}
|
||||
|
||||
// Execute adds all child commands to the root command and sets flags appropriately.
|
||||
|
@ -8,6 +8,7 @@ import (
|
||||
"os"
|
||||
"os/exec"
|
||||
"os/signal"
|
||||
"runtime"
|
||||
"strings"
|
||||
"syscall"
|
||||
|
||||
@ -19,12 +20,38 @@ import (
|
||||
|
||||
// runCmd represents the run command
|
||||
var runCmd = &cobra.Command{
|
||||
Example: `
|
||||
infisical run --env=dev -- npm run dev
|
||||
infisical run --command "first-command && second-command; more-commands..."
|
||||
`,
|
||||
Use: "run [any infisical run command flags] -- [your application start command]",
|
||||
Short: "Used to inject environments variables into your application process",
|
||||
DisableFlagsInUseLine: true,
|
||||
Example: "infisical run --env=prod -- npm run dev",
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
PreRun: toggleDebug,
|
||||
Args: func(cmd *cobra.Command, args []string) error {
|
||||
// Check if the --command flag has been set
|
||||
commandFlagSet := cmd.Flags().Changed("command")
|
||||
|
||||
// If the --command flag has been set, check if a value was provided
|
||||
if commandFlagSet {
|
||||
command := cmd.Flag("command").Value.String()
|
||||
if command == "" {
|
||||
return fmt.Errorf("you need to provide a command after the flag --command")
|
||||
}
|
||||
|
||||
// If the --command flag has been set, args should not be provided
|
||||
if len(args) > 0 {
|
||||
return fmt.Errorf("you cannot set any arguments after --command flag. --command only takes a string command")
|
||||
}
|
||||
} else {
|
||||
// If the --command flag has not been set, at least one arg should be provided
|
||||
if len(args) == 0 {
|
||||
return fmt.Errorf("at least one argument is required after the run command, received %d", len(args))
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
envName, err := cmd.Flags().GetString("env")
|
||||
if err != nil {
|
||||
@ -47,18 +74,30 @@ var runCmd = &cobra.Command{
|
||||
return
|
||||
}
|
||||
|
||||
envsFromApi, err := util.GetAllEnvironmentVariables(projectId, envName)
|
||||
secrets, err := util.GetAllEnvironmentVariables(projectId, envName)
|
||||
if err != nil {
|
||||
log.Errorln("Something went wrong when pulling secrets using your Infisical token. Double check the token, project id or environment name (dev, prod, ect.)")
|
||||
log.Debugln(err)
|
||||
return
|
||||
}
|
||||
|
||||
if shouldExpandSecrets {
|
||||
substitutions := util.SubstituteSecrets(envsFromApi)
|
||||
execCmd(args[0], args[1:], substitutions)
|
||||
secrets = util.SubstituteSecrets(secrets)
|
||||
}
|
||||
|
||||
if cmd.Flags().Changed("command") {
|
||||
command := cmd.Flag("command").Value.String()
|
||||
err = executeMultipleCommandWithEnvs(command, secrets)
|
||||
if err != nil {
|
||||
log.Errorf("Something went wrong when executing your command [error=%s]", err)
|
||||
return
|
||||
}
|
||||
} else {
|
||||
execCmd(args[0], args[1:], envsFromApi)
|
||||
err = executeSingleCommandWithEnvs(args, secrets)
|
||||
if err != nil {
|
||||
log.Errorf("Something went wrong when executing your command [error=%s]", err)
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
},
|
||||
@ -69,19 +108,51 @@ func init() {
|
||||
runCmd.Flags().StringP("env", "e", "dev", "Set the environment (dev, prod, etc.) from which your secrets should be pulled from")
|
||||
runCmd.Flags().String("projectId", "", "The project ID from which your secrets should be pulled from")
|
||||
runCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
|
||||
runCmd.Flags().StringP("command", "c", "", "chained commands to execute (e.g. \"npm install && npm run dev; echo ...\")")
|
||||
}
|
||||
|
||||
// Credit: inspired by AWS Vault
|
||||
func execCmd(command string, args []string, envs []models.SingleEnvironmentVariable) error {
|
||||
log.Infof("\x1b[%dm%s\x1b[0m", 32, "\u2713 Injected Infisical secrets into your application process successfully")
|
||||
log.Debugln("Secrets to inject:", envs)
|
||||
log.Debugf("executing command: %s %s \n", command, strings.Join(args, " "))
|
||||
cmd := exec.Command(command, args...)
|
||||
// Will execute a single command and pass in the given secrets into the process
|
||||
func executeSingleCommandWithEnvs(args []string, secrets []models.SingleEnvironmentVariable) error {
|
||||
command := args[0]
|
||||
argsForCommand := args[1:]
|
||||
numberOfSecretsInjected := fmt.Sprintf("\u2713 Injected %v Infisical secrets into your application process successfully", len(secrets))
|
||||
log.Infof("\x1b[%dm%s\x1b[0m", 32, numberOfSecretsInjected)
|
||||
log.Debugf("executing command: %s %s \n", command, strings.Join(argsForCommand, " "))
|
||||
log.Debugln("Secrets injected:", secrets)
|
||||
|
||||
cmd := exec.Command(command, argsForCommand...)
|
||||
cmd.Stdin = os.Stdin
|
||||
cmd.Stdout = os.Stdout
|
||||
cmd.Stderr = os.Stderr
|
||||
cmd.Env = getAllEnvs(envs)
|
||||
cmd.Env = getAllEnvs(secrets)
|
||||
|
||||
return execCmd(cmd)
|
||||
}
|
||||
|
||||
func executeMultipleCommandWithEnvs(fullCommand string, secrets []models.SingleEnvironmentVariable) error {
|
||||
shell := [2]string{"sh", "-c"}
|
||||
if runtime.GOOS == "windows" {
|
||||
shell = [2]string{"cmd", "/C"}
|
||||
} else {
|
||||
shell[0] = os.Getenv("SHELL")
|
||||
}
|
||||
|
||||
cmd := exec.Command(shell[0], shell[1], fullCommand)
|
||||
cmd.Stdin = os.Stdin
|
||||
cmd.Stdout = os.Stdout
|
||||
cmd.Stderr = os.Stderr
|
||||
cmd.Env = getAllEnvs(secrets)
|
||||
|
||||
numberOfSecretsInjected := fmt.Sprintf("\u2713 Injected %v Infisical secrets into your application process successfully", len(secrets))
|
||||
log.Infof("\x1b[%dm%s\x1b[0m", 32, numberOfSecretsInjected)
|
||||
log.Debugf("executing command: %s %s %s \n", shell[0], shell[1], fullCommand)
|
||||
log.Debugln("Secrets injected:", secrets)
|
||||
|
||||
return execCmd(cmd)
|
||||
}
|
||||
|
||||
// Credit: inspired by AWS Vault
|
||||
func execCmd(cmd *exec.Cmd) error {
|
||||
sigChannel := make(chan os.Signal, 1)
|
||||
signal.Notify(sigChannel)
|
||||
|
||||
@ -98,7 +169,7 @@ func execCmd(command string, args []string, envs []models.SingleEnvironmentVaria
|
||||
|
||||
if err := cmd.Wait(); err != nil {
|
||||
_ = cmd.Process.Signal(os.Kill)
|
||||
return fmt.Errorf("Failed to wait for command termination: %v", err)
|
||||
return fmt.Errorf("failed to wait for command termination: %v", err)
|
||||
}
|
||||
|
||||
waitStatus := cmd.ProcessState.Sys().(syscall.WaitStatus)
|
||||
|
@ -56,7 +56,7 @@ func ConfigFileExists() bool {
|
||||
}
|
||||
}
|
||||
|
||||
func WorkspaceConfigFileExists() bool {
|
||||
func WorkspaceConfigFileExistsInCurrentPath() bool {
|
||||
if _, err := os.Stat(INFISICAL_WORKSPACE_CONFIG_FILE_NAME); err == nil {
|
||||
return true
|
||||
} else {
|
||||
@ -90,3 +90,65 @@ func GetFullConfigFilePath() (fullPathToFile string, fullPathToDirectory string,
|
||||
fullDirPath := fmt.Sprintf("%s/%s", homeDir, CONFIG_FOLDER_NAME)
|
||||
return fullPath, fullDirPath, err
|
||||
}
|
||||
|
||||
// Given a path to a workspace config, unmarshal workspace config
|
||||
func GetWorkspaceConfigByPath(path string) (workspaceConfig models.WorkspaceConfigFile, err error) {
|
||||
workspaceConfigFileAsBytes, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
return models.WorkspaceConfigFile{}, fmt.Errorf("GetWorkspaceConfigByPath: Unable to read workspace config file because [%s]", err)
|
||||
}
|
||||
|
||||
var workspaceConfigFile models.WorkspaceConfigFile
|
||||
err = json.Unmarshal(workspaceConfigFileAsBytes, &workspaceConfigFile)
|
||||
if err != nil {
|
||||
return models.WorkspaceConfigFile{}, fmt.Errorf("GetWorkspaceConfigByPath: Unable to unmarshal workspace config file because [%s]", err)
|
||||
}
|
||||
|
||||
return workspaceConfigFile, nil
|
||||
}
|
||||
|
||||
// Will get the list of .infisical.json files that are located
|
||||
// within the root of each sub folder from where the CLI is ran from
|
||||
func GetAllWorkSpaceConfigsStartingFromCurrentPath() (workspaces []models.WorkspaceConfigFile, err error) {
|
||||
currentDir, err := os.Getwd()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("GetAllProjectConfigs: unable to get the current directory because [%s]", err)
|
||||
}
|
||||
|
||||
files, err := os.ReadDir(currentDir)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("GetAllProjectConfigs: unable to read the contents of the current directory because [%s]", err)
|
||||
}
|
||||
|
||||
listOfWorkSpaceConfigs := []models.WorkspaceConfigFile{}
|
||||
for _, file := range files {
|
||||
if !file.IsDir() && file.Name() == INFISICAL_WORKSPACE_CONFIG_FILE_NAME {
|
||||
pathToWorkspaceConfigFile := currentDir + "/" + INFISICAL_WORKSPACE_CONFIG_FILE_NAME
|
||||
|
||||
workspaceConfig, err := GetWorkspaceConfigByPath(pathToWorkspaceConfigFile)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("GetAllProjectConfigs: Unable to get config file because [%s]", err)
|
||||
}
|
||||
|
||||
listOfWorkSpaceConfigs = append(listOfWorkSpaceConfigs, workspaceConfig)
|
||||
|
||||
} else if file.IsDir() {
|
||||
pathToSubFolder := currentDir + "/" + file.Name()
|
||||
pathToMaybeWorkspaceConfigFile := pathToSubFolder + "/" + INFISICAL_WORKSPACE_CONFIG_FILE_NAME
|
||||
|
||||
_, err := os.Stat(pathToMaybeWorkspaceConfigFile)
|
||||
if err != nil {
|
||||
continue // workspace config file doesn't exist
|
||||
}
|
||||
|
||||
workspaceConfig, err := GetWorkspaceConfigByPath(pathToMaybeWorkspaceConfigFile)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("GetAllProjectConfigs: Unable to get config file because [%s]", err)
|
||||
}
|
||||
|
||||
listOfWorkSpaceConfigs = append(listOfWorkSpaceConfigs, workspaceConfig)
|
||||
}
|
||||
}
|
||||
|
||||
return listOfWorkSpaceConfigs, nil
|
||||
}
|
||||
|
@ -3,12 +3,9 @@ package util
|
||||
import (
|
||||
"crypto/aes"
|
||||
"crypto/cipher"
|
||||
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
func DecryptSymmetric(key []byte, encryptedPrivateKey []byte, tag []byte, IV []byte) ([]byte, error) {
|
||||
log.Debugln("Key:", key, "encryptedPrivateKey", encryptedPrivateKey, "tag", tag, "IV", IV)
|
||||
block, err := aes.NewCipher(key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
@ -14,19 +14,7 @@ import (
|
||||
"golang.org/x/crypto/nacl/box"
|
||||
)
|
||||
|
||||
func GetSecretsFromAPIUsingCurrentLoggedInUser(envName string, userCreds models.UserCredentials) ([]models.SingleEnvironmentVariable, error) {
|
||||
log.Debugln("envName", envName, "userCreds", userCreds)
|
||||
// check if user has configured a workspace
|
||||
workspace, err := GetWorkSpaceFromFile()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Unable to read workspace file:", err)
|
||||
}
|
||||
|
||||
// create http client
|
||||
httpClient := resty.New().
|
||||
SetAuthToken(userCreds.JTWToken).
|
||||
SetHeader("Accept", "application/json")
|
||||
|
||||
func getSecretsByWorkspaceIdAndEnvName(httpClient resty.Client, envName string, workspace models.WorkspaceConfigFile, userCreds models.UserCredentials) (listOfSecrets []models.SingleEnvironmentVariable, err error) {
|
||||
var pullSecretsRequestResponse models.PullSecretsResponse
|
||||
response, err := httpClient.
|
||||
R().
|
||||
@ -35,14 +23,11 @@ func GetSecretsFromAPIUsingCurrentLoggedInUser(envName string, userCreds models.
|
||||
SetResult(&pullSecretsRequestResponse).
|
||||
Get(fmt.Sprintf("%v/v1/secret/%v", INFISICAL_URL, workspace.WorkspaceId)) // need to change workspace id
|
||||
|
||||
log.Debugln("Response from get secrets:", response)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if response.StatusCode() > 299 {
|
||||
log.Debugln(response)
|
||||
return nil, fmt.Errorf(response.Status())
|
||||
}
|
||||
|
||||
@ -67,7 +52,7 @@ func GetSecretsFromAPIUsingCurrentLoggedInUser(envName string, userCreds models.
|
||||
return nil, err
|
||||
}
|
||||
|
||||
log.Debugln("workspaceKey", workspaceKey, "nonce", nonce, "senderPublicKey", senderPublicKey, "currentUsersPrivateKey", currentUsersPrivateKey)
|
||||
// log.Debugln("workspaceKey", workspaceKey, "nonce", nonce, "senderPublicKey", senderPublicKey, "currentUsersPrivateKey", currentUsersPrivateKey)
|
||||
workspaceKeyInBytes, _ := box.Open(nil, workspaceKey, (*[24]byte)(nonce), (*[32]byte)(senderPublicKey), (*[32]byte)(currentUsersPrivateKey))
|
||||
var listOfEnv []models.SingleEnvironmentVariable
|
||||
|
||||
@ -101,6 +86,32 @@ func GetSecretsFromAPIUsingCurrentLoggedInUser(envName string, userCreds models.
|
||||
return listOfEnv, nil
|
||||
}
|
||||
|
||||
func GetSecretsFromAPIUsingCurrentLoggedInUser(envName string, userCreds models.UserCredentials) ([]models.SingleEnvironmentVariable, error) {
|
||||
log.Debugln("GetSecretsFromAPIUsingCurrentLoggedInUser", "envName", envName, "userCreds", userCreds)
|
||||
// check if user has configured a workspace
|
||||
workspaces, err := GetAllWorkSpaceConfigsStartingFromCurrentPath()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Unable to read workspace file(s):", err)
|
||||
}
|
||||
|
||||
// create http client
|
||||
httpClient := resty.New().
|
||||
SetAuthToken(userCreds.JTWToken).
|
||||
SetHeader("Accept", "application/json")
|
||||
|
||||
secrets := []models.SingleEnvironmentVariable{}
|
||||
for _, workspace := range workspaces {
|
||||
secretsFromAPI, err := getSecretsByWorkspaceIdAndEnvName(*httpClient, envName, workspace, userCreds)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("GetSecretsFromAPIUsingCurrentLoggedInUser: Unable to get secrets by workspace id and env name")
|
||||
}
|
||||
|
||||
secrets = append(secrets, secretsFromAPI...)
|
||||
}
|
||||
|
||||
return secrets, nil
|
||||
}
|
||||
|
||||
func GetSecretsFromAPIUsingInfisicalToken(infisicalToken string, envName string, projectId string) ([]models.SingleEnvironmentVariable, error) {
|
||||
if infisicalToken == "" || projectId == "" || envName == "" {
|
||||
return nil, errors.New("infisical token, project id and or environment name cannot be empty")
|
||||
@ -127,7 +138,6 @@ func GetSecretsFromAPIUsingInfisicalToken(infisicalToken string, envName string,
|
||||
}
|
||||
|
||||
if response.StatusCode() > 299 {
|
||||
log.Debugln(response)
|
||||
return nil, fmt.Errorf(response.Status())
|
||||
}
|
||||
|
||||
@ -186,49 +196,57 @@ func GetSecretsFromAPIUsingInfisicalToken(infisicalToken string, envName string,
|
||||
}
|
||||
|
||||
func GetAllEnvironmentVariables(projectId string, envName string) ([]models.SingleEnvironmentVariable, error) {
|
||||
var envsFromApi []models.SingleEnvironmentVariable
|
||||
infisicalToken := os.Getenv(INFISICAL_TOKEN_NAME)
|
||||
|
||||
if infisicalToken == "" {
|
||||
hasUserLoggedInbefore, loggedInUserEmail, err := IsUserLoggedIn()
|
||||
if err != nil {
|
||||
log.Info("Unexpected issue occurred while checking login status. To see more details, add flag --debug")
|
||||
log.Debugln(err)
|
||||
return envsFromApi, err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if !hasUserLoggedInbefore {
|
||||
log.Infoln("No logged in user. To login, please run command [infisical login]")
|
||||
return envsFromApi, fmt.Errorf("user not logged in")
|
||||
return nil, fmt.Errorf("user not logged in")
|
||||
}
|
||||
|
||||
userCreds, err := GetUserCredsFromKeyRing(loggedInUserEmail)
|
||||
if err != nil {
|
||||
log.Infoln("Unable to get user creds from key ring")
|
||||
log.Debug(err)
|
||||
return envsFromApi, err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if !WorkspaceConfigFileExists() {
|
||||
log.Infoln("Your project is not connected to a project yet. Run command [infisical init]")
|
||||
return envsFromApi, fmt.Errorf("project not initialized")
|
||||
workspaceConfigs, err := GetAllWorkSpaceConfigsStartingFromCurrentPath()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to check if you have a %s file in your current directory", INFISICAL_WORKSPACE_CONFIG_FILE_NAME)
|
||||
}
|
||||
|
||||
envsFromApi, err = GetSecretsFromAPIUsingCurrentLoggedInUser(envName, userCreds)
|
||||
if len(workspaceConfigs) == 0 {
|
||||
log.Infoln("Your local project is not connected to a Infisical project yet. Run command [infisical init]")
|
||||
return nil, fmt.Errorf("project not initialized")
|
||||
}
|
||||
|
||||
envsFromApi, err := GetSecretsFromAPIUsingCurrentLoggedInUser(envName, userCreds)
|
||||
if err != nil {
|
||||
log.Errorln("Something went wrong when pulling secrets using your logged in credentials. If the issue persists, double check your project id/try logging in again.")
|
||||
log.Debugln(err)
|
||||
return envsFromApi, err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return envsFromApi, nil
|
||||
|
||||
} else {
|
||||
envsFromApi, err := GetSecretsFromAPIUsingInfisicalToken(infisicalToken, envName, projectId)
|
||||
if err != nil {
|
||||
log.Errorln("Something went wrong when pulling secrets using your Infisical token. Double check the token, project id or environment name (dev, prod, ect.)")
|
||||
log.Debugln(err)
|
||||
return envsFromApi, err
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return envsFromApi, nil
|
||||
return envsFromApi, nil
|
||||
}
|
||||
}
|
||||
|
||||
func GetWorkSpacesFromAPI(userCreds models.UserCredentials) (workspaces []models.Workspace, err error) {
|
||||
|
@ -20,6 +20,7 @@ services:
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- mongo
|
||||
- smtp-server
|
||||
build:
|
||||
context: ./backend
|
||||
dockerfile: Dockerfile
|
||||
@ -33,7 +34,7 @@ services:
|
||||
- NODE_ENV=development
|
||||
networks:
|
||||
- infisical-dev
|
||||
|
||||
|
||||
frontend:
|
||||
container_name: infisical-dev-frontend
|
||||
restart: unless-stopped
|
||||
@ -50,6 +51,7 @@ services:
|
||||
env_file: .env
|
||||
environment:
|
||||
- NEXT_PUBLIC_ENV=development
|
||||
- INFISICAL_TELEMETRY_ENABLED=${TELEMETRY_ENABLED}
|
||||
- NEXT_PUBLIC_STRIPE_PRODUCT_PRO=${STRIPE_PRODUCT_PRO}
|
||||
- NEXT_PUBLIC_STRIPE_PRODUCT_STARTER=${STRIPE_PRODUCT_STARTER}
|
||||
networks:
|
||||
@ -84,6 +86,18 @@ services:
|
||||
networks:
|
||||
- infisical-dev
|
||||
|
||||
smtp-server:
|
||||
container_name: infisical-dev-smtp-server
|
||||
image: mailhog/mailhog
|
||||
restart: always
|
||||
logging:
|
||||
driver: 'none' # disable saving logs
|
||||
ports:
|
||||
- 1025:1025 # SMTP server
|
||||
- 8025:8025 # Web UI
|
||||
networks:
|
||||
- infisical-dev
|
||||
|
||||
volumes:
|
||||
mongo-data:
|
||||
driver: local
|
||||
|
@ -9,13 +9,13 @@ services:
|
||||
- 80:80
|
||||
- 443:443
|
||||
volumes:
|
||||
- ./nginx/default.conf:/etc/nginx/conf.d/default.conf:ro
|
||||
- ./nginx/default.dev.conf:/etc/nginx/conf.d/default.conf:ro
|
||||
depends_on:
|
||||
- frontend
|
||||
- backend
|
||||
networks:
|
||||
- infisical
|
||||
|
||||
|
||||
backend:
|
||||
container_name: infisical-backend
|
||||
restart: unless-stopped
|
||||
@ -28,7 +28,7 @@ services:
|
||||
- NODE_ENV=production
|
||||
networks:
|
||||
- infisical
|
||||
|
||||
|
||||
frontend:
|
||||
container_name: infisical-frontend
|
||||
restart: unless-stopped
|
||||
|
@ -15,7 +15,7 @@ Export environment variables from the platform into a file format.
|
||||
| Option | Description | Default value |
|
||||
| ------------- | -------------------------------------------------------------------------------------------------------------------------------------------- | ------------- |
|
||||
| `--env` | Used to set the environment that secrets are pulled from. Accepted values: `dev`, `staging`, `test`, `prod` | `dev` |
|
||||
| `--projectId` | Used to determine from which infisical project your secrets will be exported from (only required if injecting via the service token method). | `None` |
|
||||
| `--projectId` | Only required if injecting via the [service token method](../token). If you are not using a service token, the project id is retrieved automatically from the `.infisical.json` file at the root of your local project. | `None` |
|
||||
| `--expand` | Parse shell parameter expansions in your secrets (e.g., `${DOMAIN}`) | `true` |
|
||||
| `--format` | Format of the output file. Accepted values: `dotenv`, `csv` and `json` | `dotenv` |
|
||||
|
@ -2,9 +2,25 @@
|
||||
title: "infisical run"
|
||||
---
|
||||
|
||||
```bash
|
||||
infisical run [options] -- [your application start command]
|
||||
```
|
||||
<Tabs>
|
||||
<Tab title="Single command">
|
||||
```bash
|
||||
infisical run [options] -- [your application start command]
|
||||
|
||||
# Example
|
||||
infisical run [options] -- npm run dev
|
||||
```
|
||||
</Tab>
|
||||
|
||||
<Tab title="Chained commands">
|
||||
```bash
|
||||
infisical run [options] --command [string command]
|
||||
|
||||
# Example
|
||||
infisical run [options] --command "npm run bootstrap && npm run dev start; other-bash-command"
|
||||
```
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
## Description
|
||||
|
||||
@ -15,5 +31,6 @@ Inject environment variables from the platform into an application process.
|
||||
| Option | Description | Default value |
|
||||
| -------------- | ----------------------------------------------------------------------------------------------------------- | ------------- |
|
||||
| `--env` | Used to set the environment that secrets are pulled from. Accepted values: `dev`, `staging`, `test`, `prod` | `dev` |
|
||||
| `--projectId` | Used to link a local project to the platform (required only if injecting via the service token method) | `None` |
|
||||
| `--projectId` | Used to link a local project to the platform (required only if injecting via the service token method) | None |
|
||||
| `--expand` | Parse shell parameter expansions in your secrets (e.g., `${DOMAIN}`) | `true` |
|
||||
| `--command` | Pass secrets into chained commands (e.g., `"first-command && second-command; more-commands..."`) | None |
|
@ -1,5 +1,5 @@
|
||||
---
|
||||
title: "Overview"
|
||||
title: 'Install'
|
||||
---
|
||||
|
||||
Prerequisite: Set up an account with [Infisical Cloud](https://app.infisical.com) or via a [self-hosted installation](/self-hosting/overview).
|
||||
@ -13,11 +13,7 @@ The Infisical CLI provides a way to inject environment variables from the platfo
|
||||
Use [brew](https://brew.sh/) package manager
|
||||
|
||||
```bash
|
||||
# install
|
||||
brew install infisical/get-cli/infisical
|
||||
|
||||
# check version
|
||||
infisical --version
|
||||
```
|
||||
|
||||
## Updates
|
||||
@@ -31,14 +27,13 @@ The Infisical CLI provides a way to inject environment variables from the platform
Use [Scoop](https://scoop.sh/) package manager

```bash
# install
scoop bucket add org https://github.com/Infisical/scoop-infisical.git
scoop install infisical

# check version
infisical --version
```

```bash
scoop install infisical
```

## Updates

```bash
@@ -49,33 +44,33 @@ The Infisical CLI provides a way to inject environment variables from the platform
  <Tab title="Alpine">
Install prerequisite
```bash
$ sudo apk add --no-cache bash sudo
sudo apk add --no-cache bash sudo
```

Add Infisical repository
```bash
$ curl -1sLf \
curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' \
| sudo -E bash
```

Then install CLI
```bash
$ sudo apk update && sudo apk add infisical
sudo apk update && sudo apk add infisical
```

  </Tab>
  <Tab title="RedHat/CentOs/Amazon">
Add Infisical repository
```bash
$ curl -1sLf \
curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.rpm.sh' \
| sudo -E bash
```

Then install CLI
```bash
$ sudo yum install infisical
sudo yum install infisical
```

  </Tab>
@@ -83,14 +78,14 @@ The Infisical CLI provides a way to inject environment variables from the platform
Add Infisical repository

```bash
$ curl -1sLf \
curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' \
| sudo -E bash
```

Then install CLI
```bash
$ sudo apt-get update && sudo apt-get install -y infisical
sudo apt-get update && sudo apt-get install -y infisical
```

  </Tab>
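Whichever package manager you used, a quick sanity check mirrors the brew and Scoop snippets above:

```bash
# confirm the CLI is on your PATH and print its version
infisical --version
```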
@@ -1,11 +1,13 @@
---
title: "Frequently Asked Questions"
description: "Have any questions? [Join our Slack community](https://join.slack.com/t/infisical-users/shared_invite/zt-1kdbk07ro-RtoyEt_9E~fyzGo_xQYP6g)."
title: 'Frequently Asked Questions'
description: 'Have any questions? [Join our Slack community](https://join.slack.com/t/infisical-users/shared_invite/zt-1kdbk07ro-RtoyEt_9E~fyzGo_xQYP6g).'
---

## Problem with SMTP

If you opt for an actual SMTP server (not the local MailHog), you need to have the right environment variables set.

You can normally populate `SMTP_USERNAME` and `SMTP_PASSWORD` with your usual login and password (you could also create a 'burner' email). Sometimes, there are still problems.

You can go to your Gmail account settings > security and enable "less secure apps". This would allow Infisical to use your Gmail to send emails.
@@ -13,4 +15,4 @@ If it still doesn't work, [this](https://stackoverflow.com/questions/72547853/un

## `MONGO_URL` issues

Your `MONGO_URL` should be something like `mongodb://root:example@mongo:27017/?authSource=admin`. If you want to change it (not recommended), you should make sure that you keep this URL in line with `MONGO_USERNAME=root` and `MONGO_PASSWORD=example`.
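For example, here is a hedged sketch of a consistent set of values if you did change the credentials (the `mongo` hostname and `authSource=admin` query parameter are carried over from the default URL above; the username and password are placeholders):

```
MONGO_USERNAME=myuser
MONGO_PASSWORD=my-secure-password
MONGO_URL=mongodb://myuser:my-secure-password@mongo:27017/?authSource=admin
```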
@@ -1,6 +1,6 @@
---
title: "Developing"
description: "This guide will help you set up and run Infisical in development mode."
title: 'Developing'
description: 'This guide will help you set up and run Infisical in development mode.'
---

## Clone the repo
@@ -16,17 +16,65 @@ cd infisical

## Set up environment variables

Tweak the `.env` according to your preferences. Refer to the available [environment variables](/self-hosting/configuration/envars).
Before running docker-compose, we have to generate the `.env` file with the environment variables. You can create your own file or start from `.env.example` as a guide.

Mandatory variables in the `.env` file:

1. Keys and JWT variables



The `.env.example` has these variables empty; you can generate the JWT secrets and `ENCRYPTION_KEY` yourself with this [32-byte random hex string generator](https://www.browserling.com/tools/random-hex) (a local alternative is sketched at the end of this section).

For `PRIVATE_KEY` and `PUBLIC_KEY`, you can use the ones shown in the screenshot:

```bash
cp .env.example .env
```

```
PRIVATE_KEY='oGVv5rThrpZ7WLgQW27chY1cXngr4wLQIZnGfSKgHPk='
PUBLIC_KEY='ldr6JaC7AY+tun3omGLdE4SWpkJbtVBOI54KfUP53Xc='
```

2. Mongo variables and site URL



These variables are used to connect to MongoDB and to set the URL for localhost.

For development, you can use `root` for the `MONGO_USERNAME` and `example` for the `MONGO_PASSWORD`, as shown in the screenshot.

Keep in mind that if you use your own `MONGO_USERNAME` and `MONGO_PASSWORD`, you also have to update `MONGO_URL`, which embeds them as `MONGO_USERNAME:MONGO_PASSWORD` right after the `//` part of the URL.

3. Mail SMTP service variables



If you want to receive actual emails (e.g. to test what the email messages will look like), take note of the following.

For the `SMTP_USERNAME` variable, you will need an email account with 2-step verification enabled.

For the `SMTP_PASSWORD` variable, you will need to [generate an app password](https://support.google.com/mail/answer/185833?hl=en) with the email you used in the `SMTP_USERNAME` variable.

Otherwise, a local SMTP server (MailHog) is available for testing purposes. Set the following values to use it:

```
SMTP_HOST=smtp-server
SMTP_PORT=1025
SMTP_NAME=<whatever you like>
SMTP_USERNAME=team@infisical.com
SMTP_PASSWORD=
```

Make sure to leave `SMTP_PASSWORD` blank so the backend can connect to MailHog.

You can open `http://localhost:8025/` to browse the email messages sent by the backend.

With these environment variables set, you will be ready to run docker-compose.
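As a hedged aside, if you prefer to generate those random hex values locally rather than with the browser-based generator linked in step 1, something like the following would work (this assumes `openssl` is installed; adjust the byte count if your setup expects a different length):

```bash
# Prints 16 random bytes as a 32-character hex string.
# Run it once per empty secret in .env.example (e.g. ENCRYPTION_KEY and the JWT secrets).
openssl rand -hex 16
```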
## Docker for development

```bash
# build and start the services
docker-compose -f docker-compose.dev.yml up --build
docker-compose -f docker-compose.dev.yml up --build --force-recreate
```

Then browse http://localhost:8080
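When you are done, the usual docker-compose counterpart stops the stack (standard docker-compose usage rather than anything Infisical-specific):

```bash
# stop and remove the development containers
docker-compose -f docker-compose.dev.yml down
```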
@@ -4,8 +4,11 @@ title: "Integrations"

Integrations allow environment variables to be synced across your entire infrastructure, from local development to CI/CD and production.

We're still early with integrations, but expect more soon.

<Card title="View integrations documentation" icon="link" href="/integrations/overview">
  View all available integrations and their guides
</Card>



Check out our [integrations](/integrations/overview).
@@ -4,13 +4,16 @@ title: "Infisical Token"

An Infisical Token is needed to authenticate the CLI when there isn't an easy way to input your login credentials.

It's useful for the [Docker](/integrations/platforms/docker) and [Docker Compose](/integrations/platforms/docker-compose) integrations.
It's useful for your CI/CD environments and integrations such as [Docker](/integrations/platforms/docker) and [Docker Compose](/integrations/platforms/docker-compose).

To generate the token, head over to your project settings as shown below.

It's possible to generate the token in the settings of a project.



<Note>
  The token grants read-only access to a particular environment and project for
  a specified amount of time.
  a specified amount of time. Once the token has expired, the CLI using it will no longer be able to make
  requests with it.
</Note>
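As a hedged illustration of how such a token might be consumed, the sketch below assumes the token is handed to the CLI through an `INFISICAL_TOKEN` environment variable; treat that variable name as an assumption and check the token guide for the exact mechanism in your CLI version.

```bash
# Hypothetical CI/CD usage: expose the token generated in the project settings,
# then run the application with injected secrets.
export INFISICAL_TOKEN=<token from project settings>
infisical run --env=prod -- npm start
```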
Some files were not shown because too many files have changed in this diff.