Compare commits

2 commits

SHA1        Message                                  Date
e8213799c8  Add deny api/get envs api                2023-01-29 21:04:47 -08:00
967df7282e  add basic auth model for Organization    2023-01-24 23:16:08 -08:00

396 changed files with 3404 additions and 17456 deletions

View File

@ -64,7 +64,7 @@ POSTHOG_PROJECT_API_KEY=
STRIPE_SECRET_KEY=
STRIPE_PUBLISHABLE_KEY=
STRIPE_WEBHOOK_SECRET=
STRIPE_PRODUCT_STARTER=
STRIPE_PRODUCT_TEAM=
STRIPE_PRODUCT_CARD_AUTH=
STRIPE_PRODUCT_PRO=
STRIPE_PRODUCT_STARTER=
NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=

.github/values.yaml (vendored, 83 changed lines)
View File

@ -1,93 +1,36 @@
#####
# INFISICAL K8 DEFAULT VALUES FILE
# PLEASE REPLACE VALUES/EDIT AS REQUIRED
#####
nameOverride: ""
frontend:
name: frontend
podAnnotations: {}
deploymentAnnotations:
secrets.infisical.com/auto-reload: "true"
replicaCount: 2
replicaCount: 1
image:
repository: infisical/frontend
pullPolicy: Always
repository:
pullPolicy: Always
tag: "latest"
kubeSecretRef: managed-secret-frontend
service:
# type of the frontend service
type: ClusterIP
# define the nodePort if service type is NodePort
# nodePort:
annotations: {}
backend:
name: backend
podAnnotations: {}
deploymentAnnotations:
secrets.infisical.com/auto-reload: "true"
replicaCount: 2
replicaCount: 1
image:
repository: infisical/backend
repository:
pullPolicy: Always
tag: "latest"
kubeSecretRef: managed-backend-secret
service:
annotations: {}
mongodb:
name: mongodb
podAnnotations: {}
image:
repository: mongo
pullPolicy: IfNotPresent
tag: "latest"
service:
annotations: {}
# By default the backend will be connected to a Mongo instance in the cluster.
# However, it is recommended to add a managed document DB connection string because the DB instance in the cluster does not have persistence yet ( data will be deleted on next deploy).
# Learn about connection string type here https://www.mongodb.com/docs/manual/reference/connection-string/
mongodbConnection: {}
# externalMongoDBConnectionString: <>
ingress:
enabled: true
annotations:
kubernetes.io/ingress.class: "nginx"
hostName: gamma.infisical.com # replace with your domain
cert-manager.io/cluster-issuer: "letsencrypt-prod"
hostName: gamma.infisical.com
frontend:
path: /
pathType: Prefix
backend:
path: /api
pathType: Prefix
tls: []
tls:
- secretName: echo-tls
hosts:
- gamma.infisical.com
backendEnvironmentVariables:
## Complete Ingress example
# ingress:
# enabled: true
# annotations:
# kubernetes.io/ingress.class: "nginx"
# cert-manager.io/issuer: letsencrypt-nginx
# hostName: k8.infisical.com
# frontend:
# path: /
# pathType: Prefix
# backend:
# path: /api
# pathType: Prefix
# tls:
# - secretName: letsencrypt-nginx
# hosts:
# - k8.infisical.com
###
### YOU MUST FILL IN ALL SECRETS BELOW
###
backendEnvironmentVariables: {}
frontendEnvironmentVariables: {}
frontendEnvironmentVariables:

View File

@ -4,7 +4,7 @@ on:
push:
# run only against tags
tags:
- "v*"
- 'v*'
permissions:
contents: write
@ -18,16 +18,10 @@ jobs:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3
with:
go-version: ">=1.19.3"
go-version: '>=1.19.3'
cache: true
cache-dependency-path: cli/go.sum
- name: libssl1.1 => libssl1.0-dev for OSXCross
@ -39,18 +33,19 @@ jobs:
run: |
mkdir ../../osxcross
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v4
- uses: goreleaser/goreleaser-action@v2
with:
distribution: goreleaser
version: latest
args: release --clean
args: release --rm-dist
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- name: Publish to CloudSmith
- name: Publish to CloudSmith
run: sh cli/upload_to_cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

View File

@ -14,9 +14,6 @@ before:
builds:
- id: darwin-build
binary: infisical
ldflags: -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
flags:
- -trimpath
env:
- CGO_ENABLED=1
- CC=/home/runner/work/osxcross/target/bin/o64-clang
@ -27,14 +24,10 @@ builds:
- goos: darwin
goarch: "386"
dir: ./cli
- id: all-other-builds
env:
- CGO_ENABLED=0
binary: infisical
ldflags: -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
flags:
- -trimpath
goos:
- freebsd
- linux
@ -68,20 +61,18 @@ archives:
release:
replace_existing_draft: true
mode: "replace"
mode: 'replace'
checksum:
name_template: "checksums.txt"
name_template: 'checksums.txt'
snapshot:
name_template: "{{ incpatch .Version }}-devel"
name_template: "{{ incpatch .Version }}"
changelog:
sort: asc
filters:
exclude:
- "^docs:"
- "^test:"
- '^docs:'
- '^test:'
# publishers:
# - name: fury.io
@ -89,7 +80,6 @@ changelog:
# - infisical
# dir: "{{ dir .ArtifactPath }}"
# cmd: curl -F package=@{{ .ArtifactName }} https://{{ .Env.FURY_TOKEN }}@push.fury.io/infisical/
brews:
- name: infisical
tap:
@ -101,39 +91,31 @@ brews:
folder: Formula
homepage: "https://infisical.com"
description: "The official Infisical CLI"
install: |-
bin.install "infisical"
bash_completion.install "completions/infisical.bash" => "infisical"
zsh_completion.install "completions/infisical.zsh" => "_infisical"
fish_completion.install "completions/infisical.fish"
man1.install "manpages/infisical.1.gz"
nfpms:
- id: infisical
package_name: infisical
builds:
- all-other-builds
vendor: Infisical, Inc
homepage: https://infisical.com/
maintainer: Infisical, Inc
description: The offical Infisical CLI
license: MIT
formats:
- rpm
- deb
- apk
- archlinux
bindir: /usr/bin
contents:
- src: ./completions/infisical.bash
dst: /etc/bash_completion.d/infisical
- src: ./completions/infisical.fish
dst: /usr/share/fish/vendor_completions.d/infisical.fish
- src: ./completions/infisical.zsh
dst: /usr/share/zsh/site-functions/_infisical
- src: ./manpages/infisical.1.gz
dst: /usr/share/man/man1/infisical.1.gz
- id: infisical
package_name: infisical
builds:
- all-other-builds
vendor: Infisical, Inc
homepage: https://infisical.com/
maintainer: Infisical, Inc
description: The offical Infisical CLI
license: MIT
formats:
- rpm
- deb
- apk
- archlinux
bindir: /usr/bin
contents:
- src: ./completions/infisical.bash
dst: /etc/bash_completion.d/infisical
- src: ./completions/infisical.fish
dst: /usr/share/fish/vendor_completions.d/infisical.fish
- src: ./completions/infisical.zsh
dst: /usr/share/zsh/site-functions/_infisical
- src: ./manpages/infisical.1.gz
dst: /usr/share/man/man1/infisical.1.gz
scoop:
bucket:
owner: Infisical
@ -144,16 +126,16 @@ scoop:
homepage: "https://infisical.com"
description: "The official Infisical CLI"
license: MIT
aurs:
- name: infisical-bin
-
name: infisical-bin
homepage: "https://infisical.com"
description: "The official Infisical CLI"
maintainers:
- Infisical, Inc <support@infisical.com>
license: MIT
private_key: "{{ .Env.AUR_KEY }}"
git_url: "ssh://aur@aur.archlinux.org/infisical-bin.git"
private_key: '{{ .Env.AUR_KEY }}'
git_url: 'ssh://aur@aur.archlinux.org/infisical-bin.git'
package: |-
# bin
install -Dm755 "./infisical" "${pkgdir}/usr/bin/infisical"
@ -168,13 +150,19 @@ aurs:
install -Dm644 "./completions/infisical.fish" "${pkgdir}/usr/share/fish/vendor_completions.d/infisical.fish"
# man pages
install -Dm644 "./manpages/infisical.1.gz" "${pkgdir}/usr/share/man/man1/infisical.1.gz"
# dockers:
# - dockerfile: cli/docker/Dockerfile
# - dockerfile: goreleaser.dockerfile
# goos: linux
# goarch: amd64
# ids:
# - infisical
# image_templates:
# - "infisical/cli:{{ .Version }}"
# - "infisical/cli:{{ .Version }}"
# - "infisical/cli:{{ .Major }}.{{ .Minor }}"
# - "infisical/cli:{{ .Major }}"
# - "infisical/cli:latest"
# build_flag_templates:
# - "--label=org.label-schema.schema-version=1.0"
# - "--label=org.label-schema.version={{.Version}}"
# - "--label=org.label-schema.name={{.ProjectName}}"
# - "--platform=linux/amd64"

File diff suppressed because one or more lines are too long

View File

@ -3,10 +3,8 @@ export {};
declare global {
namespace NodeJS {
interface ProcessEnv {
PORT: string;
EMAIL_TOKEN_LIFETIME: string;
ENCRYPTION_KEY: string;
SALT_ROUNDS: string;
JWT_AUTH_LIFETIME: string;
JWT_AUTH_SECRET: string;
JWT_REFRESH_LIFETIME: string;
@ -21,31 +19,23 @@ declare global {
CLIENT_ID_HEROKU: string;
CLIENT_ID_VERCEL: string;
CLIENT_ID_NETLIFY: string;
CLIENT_ID_GITHUB: string;
CLIENT_SECRET_HEROKU: string;
CLIENT_SECRET_VERCEL: string;
CLIENT_SECRET_NETLIFY: string;
CLIENT_SECRET_GITHUB: string;
CLIENT_SLUG_VERCEL: string;
POSTHOG_HOST: string;
POSTHOG_PROJECT_API_KEY: string;
SENTRY_DSN: string;
SITE_URL: string;
SMTP_HOST: string;
SMTP_SECURE: string;
SMTP_PORT: string;
SMTP_USERNAME: string;
SMTP_NAME: string;
SMTP_PASSWORD: string;
SMTP_FROM_ADDRESS: string;
SMTP_FROM_NAME: string;
STRIPE_PRODUCT_STARTER: string;
STRIPE_PRODUCT_TEAM: string;
SMTP_USERNAME: string;
STRIPE_PRODUCT_CARD_AUTH: string;
STRIPE_PRODUCT_PRO: string;
STRIPE_PRODUCT_STARTER: string;
STRIPE_PUBLISHABLE_KEY: string;
STRIPE_SECRET_KEY: string;
STRIPE_WEBHOOK_SECRET: string;
TELEMETRY_ENABLED: string;
LICENSE_KEY: string;
}
}
}
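The file above augments NodeJS.ProcessEnv so that environment variables read by the backend are typed as plain strings. A minimal, self-contained sketch of the same pattern, assuming only two keys for illustration (the real declaration lists every variable shown in the diff):

// Sketch of the NodeJS.ProcessEnv augmentation pattern used in the file above.
// Only two keys are shown here; the actual interface covers many more.
export {};

declare global {
  namespace NodeJS {
    interface ProcessEnv {
      ENCRYPTION_KEY: string;
      JWT_AUTH_SECRET: string;
    }
  }
}

// With the augmentation in scope, reads are typed as string rather than
// string | undefined:
const encryptionKey: string = process.env.ENCRYPTION_KEY;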

backend/package-lock.json (generated, 3547 changed lines)

File diff suppressed because it is too large

View File

@ -1,48 +1,4 @@
{
"dependencies": {
"@aws-sdk/client-secrets-manager": "^3.267.0",
"@godaddy/terminus": "^4.11.2",
"@octokit/rest": "^19.0.5",
"@sentry/node": "^7.14.0",
"@sentry/tracing": "^7.19.0",
"@types/crypto-js": "^4.1.1",
"@types/libsodium-wrappers": "^0.7.10",
"await-to-js": "^3.0.0",
"aws-sdk": "^2.1311.0",
"axios": "^1.1.3",
"bcrypt": "^5.1.0",
"bigint-conversion": "^2.2.2",
"builder-pattern": "^2.2.0",
"cookie-parser": "^1.4.6",
"cors": "^2.8.5",
"crypto-js": "^4.1.1",
"dotenv": "^16.0.1",
"express": "^4.18.1",
"express-rate-limit": "^6.7.0",
"express-validator": "^6.14.2",
"handlebars": "^4.7.7",
"helmet": "^5.1.1",
"js-yaml": "^4.1.0",
"jsonwebtoken": "^9.0.0",
"jsrp": "^0.2.4",
"libsodium-wrappers": "^0.7.10",
"lodash": "^4.17.21",
"mongoose": "^6.7.2",
"nodemailer": "^6.8.0",
"posthog-node": "^2.2.2",
"query-string": "^7.1.3",
"request-ip": "^3.3.0",
"rimraf": "^3.0.2",
"stripe": "^10.7.0",
"swagger-autogen": "^2.22.0",
"swagger-ui-express": "^4.6.0",
"tweetnacl": "^1.0.3",
"tweetnacl-util": "^0.15.1",
"typescript": "^4.9.3",
"utility-types": "^3.10.0",
"winston": "^3.8.2",
"winston-loki": "^6.0.6"
},
"name": "infisical-api",
"version": "1.0.0",
"main": "src/index.js",
@ -118,5 +74,47 @@
"suiteNameTemplate": "{filepath}",
"classNameTemplate": "{classname}",
"titleTemplate": "{title}"
},
"dependencies": {
"@godaddy/terminus": "^4.11.2",
"@octokit/rest": "^19.0.5",
"@sentry/node": "^7.14.0",
"@sentry/tracing": "^7.19.0",
"@types/crypto-js": "^4.1.1",
"@types/libsodium-wrappers": "^0.7.10",
"await-to-js": "^3.0.0",
"axios": "^1.1.3",
"bcrypt": "^5.1.0",
"bigint-conversion": "^2.2.2",
"builder-pattern": "^2.2.0",
"cookie-parser": "^1.4.6",
"cors": "^2.8.5",
"crypto-js": "^4.1.1",
"dotenv": "^16.0.1",
"express": "^4.18.1",
"express-rate-limit": "^6.7.0",
"express-validator": "^6.14.2",
"handlebars": "^4.7.7",
"helmet": "^5.1.1",
"js-yaml": "^4.1.0",
"jsonwebtoken": "^9.0.0",
"jsrp": "^0.2.4",
"libsodium-wrappers": "^0.7.10",
"lodash": "^4.17.21",
"mongoose": "^6.7.2",
"nodemailer": "^6.8.0",
"posthog-node": "^2.2.2",
"query-string": "^7.1.3",
"request-ip": "^3.3.0",
"rimraf": "^3.0.2",
"stripe": "^10.7.0",
"swagger-autogen": "^2.22.0",
"swagger-ui-express": "^4.6.0",
"tweetnacl": "^1.0.3",
"tweetnacl-util": "^0.15.1",
"typescript": "^4.9.3",
"utility-types": "^3.10.0",
"winston": "^3.8.2",
"winston-loki": "^6.0.6"
}
}

View File

@ -50,7 +50,6 @@ import {
serviceTokenData as v2ServiceTokenDataRouter,
apiKeyData as v2APIKeyDataRouter,
environment as v2EnvironmentRouter,
tags as v2TagsRouter,
} from './routes/v2';
import { healthCheck } from './routes/status';
@ -113,7 +112,6 @@ app.use('/api/v1/integration-auth', v1IntegrationAuthRouter);
app.use('/api/v2/users', v2UsersRouter);
app.use('/api/v2/organizations', v2OrganizationsRouter);
app.use('/api/v2/workspace', v2EnvironmentRouter);
app.use('/api/v2/workspace', v2TagsRouter);
app.use('/api/v2/workspace', v2WorkspaceRouter);
app.use('/api/v2/secret', v2SecretRouter); // deprecated
app.use('/api/v2/secrets', v2SecretsRouter);

View File

@ -1,6 +1,5 @@
const PORT = process.env.PORT || 4000;
const EMAIL_TOKEN_LIFETIME = parseInt(process.env.EMAIL_TOKEN_LIFETIME! || '86400');
const INVITE_ONLY_SIGNUP = process.env.INVITE_ONLY_SIGNUP == undefined ? false : process.env.INVITE_ONLY_SIGNUP
const EMAIL_TOKEN_LIFETIME = process.env.EMAIL_TOKEN_LIFETIME! || '86400';
const ENCRYPTION_KEY = process.env.ENCRYPTION_KEY!;
const SALT_ROUNDS = parseInt(process.env.SALT_ROUNDS!) || 10;
const JWT_AUTH_LIFETIME = process.env.JWT_AUTH_LIFETIME! || '10d';
@ -14,18 +13,15 @@ const MONGO_URL = process.env.MONGO_URL!;
const NODE_ENV = process.env.NODE_ENV! || 'production';
const VERBOSE_ERROR_OUTPUT = process.env.VERBOSE_ERROR_OUTPUT! === 'true' && true;
const LOKI_HOST = process.env.LOKI_HOST || undefined;
const CLIENT_ID_AZURE = process.env.CLIENT_ID_AZURE!;
const TENANT_ID_AZURE = process.env.TENANT_ID_AZURE!;
const CLIENT_SECRET_HEROKU = process.env.CLIENT_SECRET_HEROKU!;
const CLIENT_ID_HEROKU = process.env.CLIENT_ID_HEROKU!;
const CLIENT_ID_VERCEL = process.env.CLIENT_ID_VERCEL!;
const CLIENT_ID_NETLIFY = process.env.CLIENT_ID_NETLIFY!;
const CLIENT_ID_GITHUB = process.env.CLIENT_ID_GITHUB!;
const CLIENT_SECRET_AZURE = process.env.CLIENT_SECRET_AZURE!;
const CLIENT_SECRET_HEROKU = process.env.CLIENT_SECRET_HEROKU!;
const CLIENT_SECRET_VERCEL = process.env.CLIENT_SECRET_VERCEL!;
const CLIENT_SECRET_NETLIFY = process.env.CLIENT_SECRET_NETLIFY!;
const CLIENT_SECRET_GITHUB = process.env.CLIENT_SECRET_GITHUB!;
const CLIENT_SLUG_VERCEL = process.env.CLIENT_SLUG_VERCEL!;
const CLIENT_SLUG_VERCEL= process.env.CLIENT_SLUG_VERCEL!;
const POSTHOG_HOST = process.env.POSTHOG_HOST! || 'https://app.posthog.com';
const POSTHOG_PROJECT_API_KEY =
process.env.POSTHOG_PROJECT_API_KEY! ||
@ -39,9 +35,9 @@ const SMTP_USERNAME = process.env.SMTP_USERNAME!;
const SMTP_PASSWORD = process.env.SMTP_PASSWORD!;
const SMTP_FROM_ADDRESS = process.env.SMTP_FROM_ADDRESS!;
const SMTP_FROM_NAME = process.env.SMTP_FROM_NAME! || 'Infisical';
const STRIPE_PRODUCT_STARTER = process.env.STRIPE_PRODUCT_STARTER!;
const STRIPE_PRODUCT_CARD_AUTH = process.env.STRIPE_PRODUCT_CARD_AUTH!;
const STRIPE_PRODUCT_PRO = process.env.STRIPE_PRODUCT_PRO!;
const STRIPE_PRODUCT_TEAM = process.env.STRIPE_PRODUCT_TEAM!;
const STRIPE_PRODUCT_STARTER = process.env.STRIPE_PRODUCT_STARTER!;
const STRIPE_PUBLISHABLE_KEY = process.env.STRIPE_PUBLISHABLE_KEY!;
const STRIPE_SECRET_KEY = process.env.STRIPE_SECRET_KEY!;
const STRIPE_WEBHOOK_SECRET = process.env.STRIPE_WEBHOOK_SECRET!;
@ -51,7 +47,6 @@ const LICENSE_KEY = process.env.LICENSE_KEY!;
export {
PORT,
EMAIL_TOKEN_LIFETIME,
INVITE_ONLY_SIGNUP,
ENCRYPTION_KEY,
SALT_ROUNDS,
JWT_AUTH_LIFETIME,
@ -65,13 +60,10 @@ export {
NODE_ENV,
VERBOSE_ERROR_OUTPUT,
LOKI_HOST,
CLIENT_ID_AZURE,
TENANT_ID_AZURE,
CLIENT_ID_HEROKU,
CLIENT_ID_VERCEL,
CLIENT_ID_NETLIFY,
CLIENT_ID_GITHUB,
CLIENT_SECRET_AZURE,
CLIENT_SECRET_HEROKU,
CLIENT_SECRET_VERCEL,
CLIENT_SECRET_NETLIFY,
@ -88,9 +80,9 @@ export {
SMTP_PASSWORD,
SMTP_FROM_ADDRESS,
SMTP_FROM_NAME,
STRIPE_PRODUCT_STARTER,
STRIPE_PRODUCT_TEAM,
STRIPE_PRODUCT_CARD_AUTH,
STRIPE_PRODUCT_PRO,
STRIPE_PRODUCT_STARTER,
STRIPE_PUBLISHABLE_KEY,
STRIPE_SECRET_KEY,
STRIPE_WEBHOOK_SECRET,

View File

@ -4,21 +4,14 @@ import jwt from 'jsonwebtoken';
import * as Sentry from '@sentry/node';
import * as bigintConversion from 'bigint-conversion';
const jsrp = require('jsrp');
import { User, LoginSRPDetail } from '../../models';
import { User } from '../../models';
import { createToken, issueTokens, clearTokens } from '../../helpers/auth';
import {
ACTION_LOGIN,
ACTION_LOGOUT
} from '../../variables';
import {
NODE_ENV,
JWT_AUTH_LIFETIME,
JWT_AUTH_SECRET,
JWT_REFRESH_SECRET
} from '../../config';
import { BadRequestError } from '../../utils/errors';
import { EELogService } from '../../ee/services';
import { getChannelFromUserAgent } from '../../utils/posthog'; // TODO: move this
declare module 'jsonwebtoken' {
export interface UserIDJwtPayload extends jwt.JwtPayload {
@ -26,6 +19,8 @@ declare module 'jsonwebtoken' {
}
}
const clientPublicKeys: any = {};
/**
* Log in user step 1: Return [salt] and [serverPublicKey] as part of step 1 of SRP protocol
* @param req
@ -51,15 +46,13 @@ export const login1 = async (req: Request, res: Response) => {
salt: user.salt,
verifier: user.verifier
},
async () => {
() => {
// generate server-side public key
const serverPublicKey = server.getPublicKey();
await LoginSRPDetail.findOneAndReplace({ email: email }, {
email: email,
clientPublicKey: clientPublicKey,
serverBInt: bigintConversion.bigintToBuf(server.bInt),
}, { upsert: true, returnNewDocument: false })
clientPublicKeys[email] = {
clientPublicKey,
serverBInt: bigintConversion.bigintToBuf(server.bInt)
};
return res.status(200).send({
serverPublicKey,
@ -92,21 +85,15 @@ export const login2 = async (req: Request, res: Response) => {
if (!user) throw new Error('Failed to find user');
const loginSRPDetailFromDB = await LoginSRPDetail.findOneAndDelete({ email: email })
if (!loginSRPDetailFromDB) {
return BadRequestError(Error("It looks like some details from the first login are not found. Please try login one again"))
}
const server = new jsrp.server();
server.init(
{
salt: user.salt,
verifier: user.verifier,
b: loginSRPDetailFromDB.serverBInt
b: clientPublicKeys[email].serverBInt
},
async () => {
server.setClientPublicKey(loginSRPDetailFromDB.clientPublicKey);
server.setClientPublicKey(clientPublicKeys[email].clientPublicKey);
// compare server and client shared keys
if (server.checkClientProof(clientProof)) {
@ -121,18 +108,6 @@ export const login2 = async (req: Request, res: Response) => {
secure: NODE_ENV === 'production' ? true : false
});
const loginAction = await EELogService.createAction({
name: ACTION_LOGIN,
userId: user._id
});
loginAction && await EELogService.createLog({
userId: user._id,
actions: [loginAction],
channel: getChannelFromUserAgent(req.headers['user-agent']),
ipAddress: req.ip
});
// return (access) token in response
return res.status(200).send({
token: tokens.token,
@ -176,19 +151,6 @@ export const logout = async (req: Request, res: Response) => {
sameSite: 'strict',
secure: NODE_ENV === 'production' ? true : false
});
const logoutAction = await EELogService.createAction({
name: ACTION_LOGOUT,
userId: req.user._id
});
logoutAction && await EELogService.createLog({
userId: req.user._id,
actions: [logoutAction],
channel: getChannelFromUserAgent(req.headers['user-agent']),
ipAddress: req.ip
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);

View File

@ -10,37 +10,15 @@ import { INTEGRATION_SET, INTEGRATION_OPTIONS } from '../../variables';
import { IntegrationService } from '../../services';
import { getApps, revokeAccess } from '../../integrations';
/***
* Return integration authorization with id [integrationAuthId]
*/
export const getIntegrationAuth = async (req: Request, res: Response) => {
let integrationAuth;
try {
const { integrationAuthId } = req.params;
integrationAuth = await IntegrationAuth.findById(integrationAuthId);
if (!integrationAuth) return res.status(400).send({
message: 'Failed to find integration authorization'
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to get integration authorization'
});
}
export const getIntegrationOptions = async (
req: Request,
res: Response
) => {
return res.status(200).send({
integrationAuth
integrationOptions: INTEGRATION_OPTIONS
});
}
export const getIntegrationOptions = async (req: Request, res: Response) => {
return res.status(200).send({
integrationOptions: INTEGRATION_OPTIONS,
});
};
/**
* Perform OAuth2 code-token exchange as part of integration [integration] for workspace with id [workspaceId]
* @param req
@ -53,6 +31,7 @@ export const oAuthExchange = async (
) => {
try {
const { workspaceId, code, integration } = req.body;
if (!INTEGRATION_SET.has(integration))
throw new Error('Failed to validate integration');
@ -61,16 +40,12 @@ export const oAuthExchange = async (
throw new Error("Failed to get environments")
}
const integrationAuth = await IntegrationService.handleOAuthExchange({
await IntegrationService.handleOAuthExchange({
workspaceId,
integration,
code,
environment: environments[0].slug,
});
return res.status(200).send({
integrationAuth
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
@ -78,42 +53,34 @@ export const oAuthExchange = async (
message: 'Failed to get OAuth2 code-token exchange'
});
}
return res.status(200).send({
message: 'Successfully enabled integration authorization'
});
};
/**
* Save integration access token and (optionally) access id as part of integration
* [integration] for workspace with id [workspaceId]
* Save integration access token as part of integration [integration] for workspace with id [workspaceId]
* @param req
* @param res
*/
export const saveIntegrationAccessToken = async (
req: Request,
res: Response
req: Request,
res: Response
) => {
// TODO: refactor
// TODO: check if access token is valid for each integration
let integrationAuth;
try {
const {
workspaceId,
accessId,
accessToken,
integration
}: {
workspaceId: string;
accessId: string | null;
accessToken: string;
integration: string;
} = req.body;
const bot = await Bot.findOne({
workspace: new Types.ObjectId(workspaceId),
isActive: true
});
if (!bot) throw new Error('Bot must be enabled to save integration access token');
integrationAuth = await IntegrationAuth.findOneAndUpdate({
workspace: new Types.ObjectId(workspaceId),
integration
@ -124,11 +91,17 @@ export const saveIntegrationAccessToken = async (
new: true,
upsert: true
});
const bot = await Bot.findOne({
workspace: new Types.ObjectId(workspaceId),
isActive: true
});
if (!bot) throw new Error('Bot must be enabled to save integration access token');
// encrypt and save integration access details
// encrypt and save integration access token
integrationAuth = await IntegrationService.setIntegrationAuthAccess({
integrationAuthId: integrationAuth._id.toString(),
accessId,
accessToken,
accessExpiresAt: undefined
});
@ -154,23 +127,23 @@ export const saveIntegrationAccessToken = async (
* @returns
*/
export const getIntegrationAuthApps = async (req: Request, res: Response) => {
let apps;
try {
apps = await getApps({
integrationAuth: req.integrationAuth,
accessToken: req.accessToken,
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to get integration authorization applications",
});
}
let apps;
try {
apps = await getApps({
integrationAuth: req.integrationAuth,
accessToken: req.accessToken
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to get integration authorization applications'
});
}
return res.status(200).send({
apps,
});
return res.status(200).send({
apps
});
};
/**
@ -180,21 +153,21 @@ export const getIntegrationAuthApps = async (req: Request, res: Response) => {
* @returns
*/
export const deleteIntegrationAuth = async (req: Request, res: Response) => {
let integrationAuth;
try {
integrationAuth = await revokeAccess({
integrationAuth: req.integrationAuth,
accessToken: req.accessToken,
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to delete integration authorization",
});
}
return res.status(200).send({
integrationAuth,
});
};
let integrationAuth;
try {
integrationAuth = await revokeAccess({
integrationAuth: req.integrationAuth,
accessToken: req.accessToken
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to delete integration authorization'
});
}
return res.status(200).send({
integrationAuth
});
}

View File

@ -1,5 +1,4 @@
import { Request, Response } from 'express';
import { Types } from 'mongoose';
import * as Sentry from '@sentry/node';
import {
Integration,
@ -12,41 +11,73 @@ import { eventPushSecrets } from '../../events';
/**
* Create/initialize an (empty) integration for integration authorization
* @param req
* @param res
* @returns
* @param req
* @param res
* @returns
*/
export const createIntegration = async (req: Request, res: Response) => {
let integration;
try {
const {
integrationAuthId,
app,
appId,
isActive,
sourceEnvironment,
targetEnvironment,
owner,
path,
region
} = req.body;
// TODO: validate [sourceEnvironment] and [targetEnvironment]
// initialize new integration after saving integration access token
integration = await new Integration({
workspace: req.integrationAuth.workspace._id,
environment: sourceEnvironment,
isActive,
app,
isActive: false,
app: null,
environment: req.integrationAuth.workspace?.environments[0].slug,
integration: req.integrationAuth.integration,
integrationAuth: req.integrationAuth._id
}).save();
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to create integration'
});
}
return res.status(200).send({
integration
});
}
/**
* Change environment or name of integration with id [integrationId]
* @param req
* @param res
* @returns
*/
export const updateIntegration = async (req: Request, res: Response) => {
let integration;
// TODO: add integration-specific validation to ensure that each
// integration has the correct fields populated in [Integration]
try {
const {
environment,
isActive,
app,
appId,
targetEnvironment,
owner,
path,
region,
integration: req.integrationAuth.integration,
integrationAuth: new Types.ObjectId(integrationAuthId)
}).save();
owner, // github-specific integration param
} = req.body;
integration = await Integration.findOneAndUpdate(
{
_id: req.integration._id
},
{
environment,
isActive,
app,
appId,
targetEnvironment,
owner
},
{
new: true
}
);
if (integration) {
// trigger event - push secrets
@ -56,78 +87,17 @@ export const createIntegration = async (req: Request, res: Response) => {
})
});
}
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to create integration'
message: 'Failed to update integration'
});
}
return res.status(200).send({
integration,
});
};
/**
* Change environment or name of integration with id [integrationId]
* @param req
* @param res
* @returns
*/
export const updateIntegration = async (req: Request, res: Response) => {
let integration;
// TODO: add integration-specific validation to ensure that each
// integration has the correct fields populated in [Integration]
try {
const {
environment,
isActive,
app,
appId,
targetEnvironment,
owner, // github-specific integration param
} = req.body;
integration = await Integration.findOneAndUpdate(
{
_id: req.integration._id,
},
{
environment,
isActive,
app,
appId,
targetEnvironment,
owner,
},
{
new: true,
}
);
if (integration) {
// trigger event - push secrets
EventService.handleEvent({
event: eventPushSecrets({
workspaceId: integration.workspace.toString(),
}),
});
}
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to update integration",
});
}
return res.status(200).send({
integration,
});
return res.status(200).send({
integration
});
};
/**
@ -138,24 +108,24 @@ export const updateIntegration = async (req: Request, res: Response) => {
* @returns
*/
export const deleteIntegration = async (req: Request, res: Response) => {
let integration;
try {
const { integrationId } = req.params;
let integration;
try {
const { integrationId } = req.params;
integration = await Integration.findOneAndDelete({
_id: integrationId,
});
if (!integration) throw new Error("Failed to find integration");
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to delete integration",
});
}
return res.status(200).send({
integration,
});
integration = await Integration.findOneAndDelete({
_id: integrationId
});
if (!integration) throw new Error('Failed to find integration');
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to delete integration'
});
}
return res.status(200).send({
integration
});
};
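One side of the createIntegration hunk above accepts a fully specified request body (integrationAuthId, app, appId, isActive, sourceEnvironment, targetEnvironment, owner, path, region), while the other initializes an empty integration. An illustrative payload for the fuller variant, where only the field names come from the destructuring in the hunk and every value is hypothetical:

// Hypothetical example body for createIntegration; values are made up.
const createIntegrationBody = {
  integrationAuthId: '63d1f9a0c1e4b2a5d8e7f601', // hypothetical ObjectId string
  app: 'sample-app',
  appId: null,
  isActive: true,
  sourceEnvironment: 'dev',      // hypothetical environment slug
  targetEnvironment: null,
  owner: null,                   // GitHub-specific per the updateIntegration comment
  path: null,                    // provider-specific (assumption)
  region: null                   // provider-specific (assumption)
};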

View File

@ -1,7 +1,7 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import crypto from 'crypto';
import { SITE_URL, JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET, EMAIL_TOKEN_LIFETIME } from '../../config';
import { SITE_URL, JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET } from '../../config';
import { MembershipOrg, Organization, User, Token } from '../../models';
import { deleteMembershipOrg as deleteMemberFromOrg } from '../../helpers/membershipOrg';
import { checkEmailVerification } from '../../helpers/signup';
@ -77,6 +77,8 @@ export const changeMembershipOrgRole = async (req: Request, res: Response) => {
// change role for (target) organization membership with id
// [membershipOrgId]
// TODO
let membershipToChangeRole;
// try {
// } catch (err) {
@ -113,14 +115,14 @@ export const inviteUserToOrganization = async (req: Request, res: Response) => {
if (!membershipOrg) {
throw new Error('Failed to validate organization membership');
}
invitee = await User.findOne({
email: inviteeEmail
}).select('+publicKey');
if (invitee) {
// case: invitee is an existing user
inviteeMembershipOrg = await MembershipOrg.findOne({
user: invitee._id,
organization: organizationId
@ -170,8 +172,7 @@ export const inviteUserToOrganization = async (req: Request, res: Response) => {
{
email: inviteeEmail,
token,
createdAt: new Date(),
ttl: Math.floor(+new Date() / 1000) + EMAIL_TOKEN_LIFETIME // time in seconds, i.e unix
createdAt: new Date()
},
{ upsert: true, new: true }
);
@ -242,7 +243,7 @@ export const verifyUserToOrganization = async (req: Request, res: Response) => {
message: 'Successfully verified email',
user,
});
}
}
if (!user) {
// initialize user account

View File

@ -2,7 +2,10 @@ import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import {
SITE_URL,
STRIPE_SECRET_KEY
STRIPE_SECRET_KEY,
STRIPE_PRODUCT_STARTER,
STRIPE_PRODUCT_PRO,
STRIPE_PRODUCT_CARD_AUTH
} from '../../config';
import Stripe from 'stripe';
@ -14,13 +17,17 @@ import {
MembershipOrg,
Organization,
Workspace,
IncidentContactOrg,
IMembershipOrg
IncidentContactOrg
} from '../../models';
import { createOrganization as create } from '../../helpers/organization';
import { addMembershipsOrg } from '../../helpers/membershipOrg';
import { OWNER, ACCEPTED } from '../../variables';
import _ from 'lodash';
const productToPriceMap = {
starter: STRIPE_PRODUCT_STARTER,
pro: STRIPE_PRODUCT_PRO,
cardAuth: STRIPE_PRODUCT_CARD_AUTH
};
export const getOrganizations = async (req: Request, res: Response) => {
let organizations;
@ -333,6 +340,7 @@ export const createOrganizationPortalSession = async (
if (paymentMethods.data.length < 1) {
// case: no payment method on file
productToPriceMap['cardAuth'];
session = await stripe.checkout.sessions.create({
customer: req.membershipOrg.organization.customerId,
mode: 'setup',
@ -384,44 +392,3 @@ export const getOrganizationSubscriptions = async (
subscriptions
});
};
/**
* Given a org id, return the projects each member of the org belongs to
* @param req
* @param res
* @returns
*/
export const getOrganizationMembersAndTheirWorkspaces = async (
req: Request,
res: Response
) => {
const { organizationId } = req.params;
const workspacesSet = (
await Workspace.find(
{
organization: organizationId
},
'_id'
)
).map((w) => w._id.toString());
const memberships = (
await Membership.find({
workspace: { $in: workspacesSet }
}).populate('workspace')
);
const userToWorkspaceIds: any = {};
memberships.forEach(membership => {
const user = membership.user.toString();
if (userToWorkspaceIds[user]) {
userToWorkspaceIds[user].push(membership.workspace);
} else {
userToWorkspaceIds[user] = [membership.workspace];
}
});
return res.json(userToWorkspaceIds);
};
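The handler above returns userToWorkspaceIds, a plain object mapping each member's user id to the populated workspaces that user belongs to. A minimal typed sketch of that response shape; the handler itself is untyped, and the concrete workspace fields shown are an assumption:

// Illustrative response shape for getOrganizationMembersAndTheirWorkspaces.
// Keys are user ids (stringified); values are populated workspace documents.
type OrgMembersWorkspaces = Record<string, Array<{ _id: string; name: string }>>;

// Hypothetical example:
const example: OrgMembersWorkspaces = {
  '63d1f9a0c1e4b2a5d8e7f601': [
    { _id: '63d1f9a0c1e4b2a5d8e7f6aa', name: 'sample-workspace' }
  ]
};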

View File

@ -4,12 +4,13 @@ import crypto from 'crypto';
// eslint-disable-next-line @typescript-eslint/no-var-requires
const jsrp = require('jsrp');
import * as bigintConversion from 'bigint-conversion';
import { User, Token, BackupPrivateKey, LoginSRPDetail } from '../../models';
import { User, Token, BackupPrivateKey } from '../../models';
import { checkEmailVerification } from '../../helpers/signup';
import { createToken } from '../../helpers/auth';
import { sendMail } from '../../helpers/nodemailer';
import { EMAIL_TOKEN_LIFETIME, JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET, SITE_URL } from '../../config';
import { BadRequestError } from '../../utils/errors';
import { JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET, SITE_URL } from '../../config';
const clientPublicKeys: any = {};
/**
* Password reset step 1: Send email verification link to email [email]
@ -31,7 +32,7 @@ export const emailPasswordReset = async (req: Request, res: Response) => {
error: 'Failed to send email verification for password reset'
});
}
const token = crypto.randomBytes(16).toString('hex');
await Token.findOneAndUpdate(
@ -39,12 +40,11 @@ export const emailPasswordReset = async (req: Request, res: Response) => {
{
email,
token,
createdAt: new Date(),
ttl: Math.floor(+new Date() / 1000) + EMAIL_TOKEN_LIFETIME // time in seconds, i.e unix
createdAt: new Date()
},
{ upsert: true, new: true }
);
await sendMail({
template: 'passwordReset.handlebars',
subjectLine: 'Infisical password reset',
@ -55,15 +55,15 @@ export const emailPasswordReset = async (req: Request, res: Response) => {
callback_url: SITE_URL + '/password-reset'
}
});
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to send email for account recovery'
});
});
}
return res.status(200).send({
message: `Sent an email for account recovery to ${email}`
});
@ -79,7 +79,7 @@ export const emailPasswordResetVerify = async (req: Request, res: Response) => {
let user, token;
try {
const { email, code } = req.body;
user = await User.findOne({ email }).select('+publicKey');
if (!user || !user?.publicKey) {
// case: user doesn't exist with email [email] or
@ -93,7 +93,7 @@ export const emailPasswordResetVerify = async (req: Request, res: Response) => {
email,
code
});
// generate temporary password-reset token
token = createToken({
payload: {
@ -107,7 +107,7 @@ export const emailPasswordResetVerify = async (req: Request, res: Response) => {
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed email verification for password reset'
});
});
}
return res.status(200).send({
@ -130,7 +130,7 @@ export const srp1 = async (req: Request, res: Response) => {
const user = await User.findOne({
email: req.user.email
}).select('+salt +verifier');
if (!user) throw new Error('Failed to find user');
const server = new jsrp.server();
@ -139,15 +139,13 @@ export const srp1 = async (req: Request, res: Response) => {
salt: user.salt,
verifier: user.verifier
},
async () => {
() => {
// generate server-side public key
const serverPublicKey = server.getPublicKey();
await LoginSRPDetail.findOneAndReplace({ email: req.user.email }, {
email: req.user.email,
clientPublicKey: clientPublicKey,
serverBInt: bigintConversion.bigintToBuf(server.bInt),
}, { upsert: true, returnNewDocument: false })
clientPublicKeys[req.user.email] = {
clientPublicKey,
serverBInt: bigintConversion.bigintToBuf(server.bInt)
};
return res.status(200).send({
serverPublicKey,
@ -182,21 +180,17 @@ export const changePassword = async (req: Request, res: Response) => {
if (!user) throw new Error('Failed to find user');
const loginSRPDetailFromDB = await LoginSRPDetail.findOneAndDelete({ email: req.user.email })
if (!loginSRPDetailFromDB) {
return BadRequestError(Error("It looks like some details from the first login are not found. Please try login one again"))
}
const server = new jsrp.server();
server.init(
{
salt: user.salt,
verifier: user.verifier,
b: loginSRPDetailFromDB.serverBInt
b: clientPublicKeys[req.user.email].serverBInt
},
async () => {
server.setClientPublicKey(loginSRPDetailFromDB.clientPublicKey);
server.setClientPublicKey(
clientPublicKeys[req.user.email].clientPublicKey
);
// compare server and client shared keys
if (server.checkClientProof(clientProof)) {
@ -255,22 +249,16 @@ export const createBackupPrivateKey = async (req: Request, res: Response) => {
if (!user) throw new Error('Failed to find user');
const loginSRPDetailFromDB = await LoginSRPDetail.findOneAndDelete({ email: req.user.email })
if (!loginSRPDetailFromDB) {
return BadRequestError(Error("It looks like some details from the first login are not found. Please try login one again"))
}
const server = new jsrp.server();
server.init(
{
salt: user.salt,
verifier: user.verifier,
b: loginSRPDetailFromDB.serverBInt
b: clientPublicKeys[req.user.email].serverBInt
},
async () => {
server.setClientPublicKey(
loginSRPDetailFromDB.clientPublicKey
clientPublicKeys[req.user.email].clientPublicKey
);
// compare server and client shared keys
@ -323,16 +311,16 @@ export const getBackupPrivateKey = async (req: Request, res: Response) => {
backupPrivateKey = await BackupPrivateKey.findOne({
user: req.user._id
}).select('+encryptedPrivateKey +iv +tag');
if (!backupPrivateKey) throw new Error('Failed to find backup private key');
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.setUser({ email: req.user.email});
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to get backup private key'
});
}
return res.status(200).send({
backupPrivateKey
});
@ -360,15 +348,15 @@ export const resetPassword = async (req: Request, res: Response) => {
{
new: true
}
);
);
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.setUser({ email: req.user.email});
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to get backup private key'
});
});
}
return res.status(200).send({
message: 'Successfully reset password'
});

View File

@ -1,6 +1,6 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import { NODE_ENV, JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET, INVITE_ONLY_SIGNUP } from '../../config';
import { NODE_ENV, JWT_SIGNUP_LIFETIME, JWT_SIGNUP_SECRET } from '../../config';
import { User, MembershipOrg } from '../../models';
import { completeAccount } from '../../helpers/user';
import {
@ -11,7 +11,6 @@ import {
import { issueTokens, createToken } from '../../helpers/auth';
import { INVITED, ACCEPTED } from '../../variables';
import axios from 'axios';
import { BadRequestError } from '../../utils/errors';
/**
* Signup step 1: Initialize account for user under email [email] and send a verification code
@ -25,14 +24,6 @@ export const beginEmailSignup = async (req: Request, res: Response) => {
try {
email = req.body.email;
if (INVITE_ONLY_SIGNUP) {
// Only one user can create an account without being invited. The rest need to be invited in order to make an account
const userCount = await User.countDocuments({})
if (userCount != 0) {
throw BadRequestError({ message: "New user sign ups are not allowed at this time. You must be invited to sign up." })
}
}
const user = await User.findOne({ email }).select('+publicKey');
if (user && user?.publicKey) {
// case: user has already completed account
@ -138,7 +129,7 @@ export const completeAccountSignup = async (req: Request, res: Response) => {
// get user
user = await User.findOne({ email });
if (!user || (user && user?.publicKey)) {
// case 1: user doesn't exist.
// case 2: user has already completed account

View File

@ -1,21 +1,21 @@
import { Request, Response } from "express";
import * as Sentry from "@sentry/node";
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import {
Workspace,
Membership,
MembershipOrg,
Integration,
IntegrationAuth,
IUser,
ServiceToken,
ServiceTokenData,
} from "../../models";
Workspace,
Membership,
MembershipOrg,
Integration,
IntegrationAuth,
IUser,
ServiceToken,
ServiceTokenData
} from '../../models';
import {
createWorkspace as create,
deleteWorkspace as deleteWork,
} from "../../helpers/workspace";
import { addMemberships } from "../../helpers/membership";
import { ADMIN } from "../../variables";
createWorkspace as create,
deleteWorkspace as deleteWork
} from '../../helpers/workspace';
import { addMemberships } from '../../helpers/membership';
import { ADMIN } from '../../variables';
/**
* Return public keys of members of workspace with id [workspaceId]
@ -24,31 +24,32 @@ import { ADMIN } from "../../variables";
* @returns
*/
export const getWorkspacePublicKeys = async (req: Request, res: Response) => {
let publicKeys;
try {
const { workspaceId } = req.params;
let publicKeys;
try {
const { workspaceId } = req.params;
publicKeys = (
await Membership.find({
workspace: workspaceId,
}).populate<{ user: IUser }>("user", "publicKey")
).map((member) => {
return {
publicKey: member.user.publicKey,
userId: member.user._id,
};
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to get workspace member public keys",
});
}
publicKeys = (
await Membership.find({
workspace: workspaceId
}).populate<{ user: IUser }>('user', 'publicKey')
)
.map((member) => {
return {
publicKey: member.user.publicKey,
userId: member.user._id
};
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to get workspace member public keys'
});
}
return res.status(200).send({
publicKeys,
});
return res.status(200).send({
publicKeys
});
};
/**
@ -58,24 +59,24 @@ export const getWorkspacePublicKeys = async (req: Request, res: Response) => {
* @returns
*/
export const getWorkspaceMemberships = async (req: Request, res: Response) => {
let users;
try {
const { workspaceId } = req.params;
let users;
try {
const { workspaceId } = req.params;
users = await Membership.find({
workspace: workspaceId,
}).populate("user", "+publicKey");
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to get workspace members",
});
}
users = await Membership.find({
workspace: workspaceId
}).populate('user', '+publicKey');
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to get workspace members'
});
}
return res.status(200).send({
users,
});
return res.status(200).send({
users
});
};
/**
@ -85,24 +86,24 @@ export const getWorkspaceMemberships = async (req: Request, res: Response) => {
* @returns
*/
export const getWorkspaces = async (req: Request, res: Response) => {
let workspaces;
try {
workspaces = (
await Membership.find({
user: req.user._id,
}).populate("workspace")
).map((m) => m.workspace);
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to get workspaces",
});
}
let workspaces;
try {
workspaces = (
await Membership.find({
user: req.user._id
}).populate('workspace')
).map((m) => m.workspace);
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to get workspaces'
});
}
return res.status(200).send({
workspaces,
});
return res.status(200).send({
workspaces
});
};
/**
@ -112,24 +113,24 @@ export const getWorkspaces = async (req: Request, res: Response) => {
* @returns
*/
export const getWorkspace = async (req: Request, res: Response) => {
let workspace;
try {
const { workspaceId } = req.params;
let workspace;
try {
const { workspaceId } = req.params;
workspace = await Workspace.findOne({
_id: workspaceId,
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to get workspace",
});
}
workspace = await Workspace.findOne({
_id: workspaceId
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to get workspace'
});
}
return res.status(200).send({
workspace,
});
return res.status(200).send({
workspace
});
};
/**
@ -140,46 +141,46 @@ export const getWorkspace = async (req: Request, res: Response) => {
* @returns
*/
export const createWorkspace = async (req: Request, res: Response) => {
let workspace;
try {
const { workspaceName, organizationId } = req.body;
let workspace;
try {
const { workspaceName, organizationId } = req.body;
// validate organization membership
const membershipOrg = await MembershipOrg.findOne({
user: req.user._id,
organization: organizationId,
});
// validate organization membership
const membershipOrg = await MembershipOrg.findOne({
user: req.user._id,
organization: organizationId
});
if (!membershipOrg) {
throw new Error("Failed to validate organization membership");
}
if (!membershipOrg) {
throw new Error('Failed to validate organization membership');
}
if (workspaceName.length < 1) {
throw new Error("Workspace names must be at least 1-character long");
}
if (workspaceName.length < 1) {
throw new Error('Workspace names must be at least 1-character long');
}
// create workspace and add user as member
workspace = await create({
name: workspaceName,
organizationId,
});
// create workspace and add user as member
workspace = await create({
name: workspaceName,
organizationId
});
await addMemberships({
userIds: [req.user._id],
workspaceId: workspace._id.toString(),
roles: [ADMIN],
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to create workspace",
});
}
await addMemberships({
userIds: [req.user._id],
workspaceId: workspace._id.toString(),
roles: [ADMIN]
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to create workspace'
});
}
return res.status(200).send({
workspace,
});
return res.status(200).send({
workspace
});
};
/**
@ -189,24 +190,24 @@ export const createWorkspace = async (req: Request, res: Response) => {
* @returns
*/
export const deleteWorkspace = async (req: Request, res: Response) => {
try {
const { workspaceId } = req.params;
try {
const { workspaceId } = req.params;
// delete workspace
await deleteWork({
id: workspaceId,
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to delete workspace",
});
}
// delete workspace
await deleteWork({
id: workspaceId
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to delete workspace'
});
}
return res.status(200).send({
message: "Successfully deleted workspace",
});
return res.status(200).send({
message: 'Successfully deleted workspace'
});
};
/**
@ -216,34 +217,34 @@ export const deleteWorkspace = async (req: Request, res: Response) => {
* @returns
*/
export const changeWorkspaceName = async (req: Request, res: Response) => {
let workspace;
try {
const { workspaceId } = req.params;
const { name } = req.body;
let workspace;
try {
const { workspaceId } = req.params;
const { name } = req.body;
workspace = await Workspace.findOneAndUpdate(
{
_id: workspaceId,
},
{
name,
},
{
new: true,
}
);
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to change workspace name",
});
}
workspace = await Workspace.findOneAndUpdate(
{
_id: workspaceId
},
{
name
},
{
new: true
}
);
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to change workspace name'
});
}
return res.status(200).send({
message: "Successfully changed workspace name",
workspace,
});
return res.status(200).send({
message: 'Successfully changed workspace name',
workspace
});
};
/**
@ -253,24 +254,24 @@ export const changeWorkspaceName = async (req: Request, res: Response) => {
* @returns
*/
export const getWorkspaceIntegrations = async (req: Request, res: Response) => {
let integrations;
try {
const { workspaceId } = req.params;
let integrations;
try {
const { workspaceId } = req.params;
integrations = await Integration.find({
workspace: workspaceId,
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to get workspace integrations",
});
}
integrations = await Integration.find({
workspace: workspaceId
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to get workspace integrations'
});
}
return res.status(200).send({
integrations,
});
return res.status(200).send({
integrations
});
};
/**
@ -280,56 +281,56 @@ export const getWorkspaceIntegrations = async (req: Request, res: Response) => {
* @returns
*/
export const getWorkspaceIntegrationAuthorizations = async (
req: Request,
res: Response
req: Request,
res: Response
) => {
let authorizations;
try {
const { workspaceId } = req.params;
let authorizations;
try {
const { workspaceId } = req.params;
authorizations = await IntegrationAuth.find({
workspace: workspaceId,
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to get workspace integration authorizations",
});
}
authorizations = await IntegrationAuth.find({
workspace: workspaceId
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to get workspace integration authorizations'
});
}
return res.status(200).send({
authorizations,
});
return res.status(200).send({
authorizations
});
};
/**
* Return service service tokens for workspace [workspaceId] belonging to user
* @param req
* @param res
* @returns
* @param req
* @param res
* @returns
*/
export const getWorkspaceServiceTokens = async (
req: Request,
res: Response
req: Request,
res: Response
) => {
let serviceTokens;
try {
const { workspaceId } = req.params;
// ?? FIX.
serviceTokens = await ServiceToken.find({
user: req.user._id,
workspace: workspaceId,
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: "Failed to get workspace service tokens",
});
}
return res.status(200).send({
serviceTokens,
});
};
let serviceTokens;
try {
const { workspaceId } = req.params;
// ?? FIX.
serviceTokens = await ServiceToken.find({
user: req.user._id,
workspace: workspaceId
});
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to get workspace service tokens'
});
}
return res.status(200).send({
serviceTokens
});
}

View File

@ -11,7 +11,7 @@ import {
import { SecretVersion } from '../../ee/models';
import { BadRequestError } from '../../utils/errors';
import _ from 'lodash';
import { ABILITY_READ, ABILITY_WRITE } from '../../variables/organization';
import { ABILITY_READ } from '../../variables/organization';
/**
* Create new workspace environment named [environmentName] under workspace with id
@ -236,7 +236,7 @@ export const getAllAccessibleEnvironmentsOfWorkspace = async (
throw BadRequestError()
}
const accessibleEnvironments: any = []
const accessibleEnvironments: { name: string; slug: string; }[] = []
const deniedPermission = workspacesUserIsMemberOf.deniedPermissions
const relatedWorkspace = await Workspace.findById(workspaceId)
@ -245,16 +245,11 @@ export const getAllAccessibleEnvironmentsOfWorkspace = async (
}
relatedWorkspace.environments.forEach(environment => {
const isReadBlocked = _.some(deniedPermission, { environmentSlug: environment.slug, ability: ABILITY_READ })
const isWriteBlocked = _.some(deniedPermission, { environmentSlug: environment.slug, ability: ABILITY_WRITE })
if (isReadBlocked && isWriteBlocked) {
// const isWriteBlocked = _.some(deniedPermission, { environmentSlug: environment.slug, ability: ABILITY_WRITE })
if (isReadBlocked) {
return
} else {
accessibleEnvironments.push({
name: environment.name,
slug: environment.slug,
isWriteDenied: isWriteBlocked,
isReadDenied: isReadBlocked
})
accessibleEnvironments.push(environment)
}
})

View File

@ -6,7 +6,6 @@ import * as apiKeyDataController from './apiKeyDataController';
import * as secretController from './secretController';
import * as secretsController from './secretsController';
import * as environmentController from './environmentController';
import * as tagController from './tagController';
export {
usersController,
@ -16,6 +15,5 @@ export {
apiKeyDataController,
secretController,
secretsController,
environmentController,
tagController
environmentController
}

View File

@ -17,9 +17,7 @@ import { EESecretService, EELogService } from '../../ee/services';
import { postHogClient } from '../../services';
import { getChannelFromUserAgent } from '../../utils/posthog';
import { ABILITY_READ, ABILITY_WRITE } from '../../variables/organization';
import { userHasNoAbility, userHasWorkspaceAccess, userHasWriteOnlyAbility } from '../../ee/helpers/checkMembershipPermissions';
import Tag from '../../models/tag';
import _ from 'lodash';
import { userHasWorkspaceAccess } from '../../ee/helpers/checkMembershipPermissions';
/**
* Create secret(s) for workspace with id [workspaceId] and environment [environment]
@ -81,38 +79,24 @@ export const createSecrets = async (req: Request, res: Response) => {
*/
const channel = getChannelFromUserAgent(req.headers['user-agent'])
const { workspaceId, environment }: { workspaceId: string, environment: string } = req.body;
const { workspaceId, environment } = req.body;
const hasAccess = await userHasWorkspaceAccess(req.user, workspaceId, environment, ABILITY_WRITE)
if (!hasAccess) {
throw UnauthorizedRequestError({ message: "You do not have the necessary permission(s) perform this action" })
}
let listOfSecretsToCreate;
let toAdd;
if (Array.isArray(req.body.secrets)) {
// case: create multiple secrets
listOfSecretsToCreate = req.body.secrets;
toAdd = req.body.secrets;
} else if (typeof req.body.secrets === 'object') {
// case: create 1 secret
listOfSecretsToCreate = [req.body.secrets];
toAdd = [req.body.secrets];
}
type secretsToCreateType = {
type: string;
secretKeyCiphertext: string;
secretKeyIV: string;
secretKeyTag: string;
secretValueCiphertext: string;
secretValueIV: string;
secretValueTag: string;
secretCommentCiphertext: string;
secretCommentIV: string;
secretCommentTag: string;
tags: string[]
}
const newlyCreatedSecrets = await Secret.insertMany(
listOfSecretsToCreate.map(({
const newSecrets = await Secret.insertMany(
toAdd.map(({
type,
secretKeyCiphertext,
secretKeyIV,
@ -120,29 +104,27 @@ export const createSecrets = async (req: Request, res: Response) => {
secretValueCiphertext,
secretValueIV,
secretValueTag,
secretCommentCiphertext,
secretCommentIV,
secretCommentTag,
tags
}: secretsToCreateType) => {
return ({
version: 1,
workspace: new Types.ObjectId(workspaceId),
type,
user: type === SECRET_PERSONAL ? req.user : undefined,
environment,
secretKeyCiphertext,
secretKeyIV,
secretKeyTag,
secretValueCiphertext,
secretValueIV,
secretValueTag,
secretCommentCiphertext,
secretCommentIV,
secretCommentTag,
tags
});
})
}: {
type: string;
secretKeyCiphertext: string;
secretKeyIV: string;
secretKeyTag: string;
secretValueCiphertext: string;
secretValueIV: string;
secretValueTag: string;
}) => ({
version: 1,
workspace: new Types.ObjectId(workspaceId),
type,
user: type === SECRET_PERSONAL ? req.user : undefined,
environment,
secretKeyCiphertext,
secretKeyIV,
secretKeyTag,
secretValueCiphertext,
secretValueIV,
secretValueTag
}))
);
setTimeout(async () => {
@ -156,7 +138,7 @@ export const createSecrets = async (req: Request, res: Response) => {
// (EE) add secret versions for new secrets
await EESecretService.addSecretVersions({
secretVersions: newlyCreatedSecrets.map(({
secretVersions: newSecrets.map(({
_id,
version,
workspace,
@ -170,11 +152,7 @@ export const createSecrets = async (req: Request, res: Response) => {
secretValueCiphertext,
secretValueIV,
secretValueTag,
secretValueHash,
secretCommentCiphertext,
secretCommentIV,
secretCommentTag,
tags
secretValueHash
}) => ({
_id: new Types.ObjectId(),
secret: _id,
@ -191,25 +169,21 @@ export const createSecrets = async (req: Request, res: Response) => {
secretValueCiphertext,
secretValueIV,
secretValueTag,
secretValueHash,
secretCommentCiphertext,
secretCommentIV,
secretCommentTag,
tags
secretValueHash
}))
});
const addAction = await EELogService.createAction({
const addAction = await EELogService.createActionSecret({
name: ACTION_ADD_SECRETS,
userId: req.user._id,
workspaceId: new Types.ObjectId(workspaceId),
secretIds: newlyCreatedSecrets.map((n) => n._id)
userId: req.user._id.toString(),
workspaceId,
secretIds: newSecrets.map((n) => n._id)
});
// (EE) create (audit) log
addAction && await EELogService.createLog({
userId: req.user._id.toString(),
workspaceId: new Types.ObjectId(workspaceId),
workspaceId,
actions: [addAction],
channel,
ipAddress: req.ip
@ -225,7 +199,7 @@ export const createSecrets = async (req: Request, res: Response) => {
event: 'secrets added',
distinctId: req.user.email,
properties: {
numberOfSecrets: listOfSecretsToCreate.length,
numberOfSecrets: toAdd.length,
environment,
workspaceId,
channel: channel,
@ -235,7 +209,7 @@ export const createSecrets = async (req: Request, res: Response) => {
}
return res.status(200).send({
secrets: newlyCreatedSecrets
secrets: newSecrets
});
}
@ -286,108 +260,6 @@ export const getSecrets = async (req: Request, res: Response) => {
}
}
*/
const { workspaceId, environment, tagSlugs } = req.query;
const tagNamesList = typeof tagSlugs === 'string' && tagSlugs !== '' ? tagSlugs.split(',') : [];
let userId = "" // used for getting personal secrets for user
let userEmail = "" // used for posthog
if (req.user) {
userId = req.user._id;
userEmail = req.user.email;
}
if (req.serviceTokenData) {
userId = req.serviceTokenData.user._id
userEmail = req.serviceTokenData.user.email;
}
// non-service-token case, as service tokens are already scoped to env and project
let hasWriteOnlyAccess
if (!req.serviceTokenData) {
hasWriteOnlyAccess = await userHasWriteOnlyAbility(userId, workspaceId, environment)
const hasNoAccess = await userHasNoAbility(userId, workspaceId, environment)
if (hasNoAccess) {
throw UnauthorizedRequestError({ message: "You do not have the necessary permission(s) to perform this action" })
}
}
let secrets: any
let secretQuery: any
if (tagNamesList != undefined && tagNamesList.length != 0) {
const workspaceFromDB = await Tag.find({ workspace: workspaceId })
const tagIds = _.map(tagNamesList, (tagName) => {
const tag = _.find(workspaceFromDB, { slug: tagName });
return tag ? tag.id : null;
});
secretQuery = {
workspace: workspaceId,
environment,
$or: [
{ user: userId },
{ user: { $exists: false } }
],
tags: { $in: tagIds },
type: { $in: [SECRET_SHARED, SECRET_PERSONAL] }
}
} else {
secretQuery = {
workspace: workspaceId,
environment,
$or: [
{ user: userId },
{ user: { $exists: false } }
],
type: { $in: [SECRET_SHARED, SECRET_PERSONAL] }
}
}
if (hasWriteOnlyAccess) {
secrets = await Secret.find(secretQuery).select("secretKeyCiphertext secretKeyIV secretKeyTag")
} else {
secrets = await Secret.find(secretQuery).populate("tags")
}
const channel = getChannelFromUserAgent(req.headers['user-agent'])
const readAction = await EELogService.createAction({
name: ACTION_READ_SECRETS,
userId: new Types.ObjectId(userId),
workspaceId: new Types.ObjectId(workspaceId as string),
secretIds: secrets.map((n: any) => n._id)
});
readAction && await EELogService.createLog({
userId: new Types.ObjectId(userId),
workspaceId: new Types.ObjectId(workspaceId as string),
actions: [readAction],
channel,
ipAddress: req.ip
});
if (postHogClient) {
postHogClient.capture({
event: 'secrets pulled',
distinctId: userEmail,
properties: {
numberOfSecrets: secrets.length,
environment,
workspaceId,
channel,
userAgent: req.headers?.['user-agent']
}
});
}
return res.status(200).send({
secrets
});
}
export const getOnlySecretKeys = async (req: Request, res: Response) => {
const { workspaceId, environment } = req.query;
let userId = "" // used for getting personal secrets for user
@ -410,7 +282,7 @@ export const getOnlySecretKeys = async (req: Request, res: Response) => {
}
}
const [err, secretKeys] = await to(Secret.find(
const [err, secrets] = await to(Secret.find(
{
workspace: workspaceId,
environment,
@ -420,22 +292,43 @@ export const getOnlySecretKeys = async (req: Request, res: Response) => {
],
type: { $in: [SECRET_SHARED, SECRET_PERSONAL] }
}
)
.select("secretKeyIV secretKeyTag secretKeyCiphertext")
.then())
).then())
if (err) throw ValidationError({ message: 'Failed to get secrets', stack: err.stack });
// readAction && await EELogService.createLog({
// userId: new Types.ObjectId(userId),
// workspaceId: new Types.ObjectId(workspaceId as string),
// actions: [readAction],
// channel,
// ipAddress: req.ip
// });
const channel = getChannelFromUserAgent(req.headers['user-agent'])
const readAction = await EELogService.createActionSecret({
name: ACTION_READ_SECRETS,
userId: userId,
workspaceId: workspaceId as string,
secretIds: secrets.map((n: any) => n._id)
});
readAction && await EELogService.createLog({
userId: userId,
workspaceId: workspaceId as string,
actions: [readAction],
channel,
ipAddress: req.ip
});
if (postHogClient) {
postHogClient.capture({
event: 'secrets pulled',
distinctId: userEmail,
properties: {
numberOfSecrets: secrets.length,
environment,
workspaceId,
channel,
userAgent: req.headers?.['user-agent']
}
});
}
return res.status(200).send({
secretKeys
secrets
});
}
@ -503,7 +396,6 @@ export const updateSecrets = async (req: Request, res: Response) => {
secretCommentCiphertext: string;
secretCommentIV: string;
secretCommentTag: string;
tags: string[]
}
const updateOperationsToPerform = req.body.secrets.map((secret: PatchSecret) => {
@ -516,8 +408,7 @@ export const updateSecrets = async (req: Request, res: Response) => {
secretValueTag,
secretCommentCiphertext,
secretCommentIV,
secretCommentTag,
tags
secretCommentTag
} = secret;
return ({
@ -533,9 +424,8 @@ export const updateSecrets = async (req: Request, res: Response) => {
secretValueCiphertext,
secretValueIV,
secretValueTag,
tags,
...((
secretCommentCiphertext !== undefined &&
secretCommentCiphertext &&
secretCommentIV &&
secretCommentTag
) ? {
@ -568,7 +458,6 @@ export const updateSecrets = async (req: Request, res: Response) => {
secretCommentCiphertext,
secretCommentIV,
secretCommentTag,
tags
} = secretModificationsBySecretId[secret._id.toString()]
return ({
@ -586,7 +475,6 @@ export const updateSecrets = async (req: Request, res: Response) => {
secretCommentCiphertext: secretCommentCiphertext ? secretCommentCiphertext : secret.secretCommentCiphertext,
secretCommentIV: secretCommentIV ? secretCommentIV : secret.secretCommentIV,
secretCommentTag: secretCommentTag ? secretCommentTag : secret.secretCommentTag,
tags: tags ? tags : secret.tags
});
})
}
@ -615,17 +503,17 @@ export const updateSecrets = async (req: Request, res: Response) => {
});
}, 10000);
const updateAction = await EELogService.createAction({
const updateAction = await EELogService.createActionSecret({
name: ACTION_UPDATE_SECRETS,
userId: req.user._id,
workspaceId: new Types.ObjectId(key),
userId: req.user._id.toString(),
workspaceId: key,
secretIds: workspaceSecretObj[key].map((secret: ISecret) => secret._id)
});
// (EE) create (audit) log
updateAction && await EELogService.createLog({
userId: req.user._id.toString(),
workspaceId: new Types.ObjectId(key),
workspaceId: key,
actions: [updateAction],
channel,
ipAddress: req.ip
@ -741,17 +629,17 @@ export const deleteSecrets = async (req: Request, res: Response) => {
workspaceId: key
})
});
const deleteAction = await EELogService.createAction({
const deleteAction = await EELogService.createActionSecret({
name: ACTION_DELETE_SECRETS,
userId: req.user._id,
workspaceId: new Types.ObjectId(key),
userId: req.user._id.toString(),
workspaceId: key,
secretIds: workspaceSecretObj[key].map((secret: ISecret) => secret._id)
});
// (EE) create (audit) log
deleteAction && await EELogService.createLog({
userId: req.user._id.toString(),
workspaceId: new Types.ObjectId(key),
workspaceId: key,
actions: [deleteAction],
channel,
ipAddress: req.ip
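The getSecrets handler above filters by tag slugs by first resolving them to Tag ids within the workspace and then constraining the Secret query with $in. A minimal standalone sketch of that lookup, assuming the Tag and Secret mongoose models shown in this diff (the helper name and the _.compact call are illustrative, not part of the codebase):

import _ from 'lodash';
import Tag from '../../models/tag';
import { Secret } from '../../models';

// Resolve comma-separated tag slugs (e.g. "backend,prod") to Tag ids scoped to
// the workspace, then fetch only secrets carrying at least one of those tags.
const findSecretsByTagSlugs = async (
  workspaceId: string,
  environment: string,
  userId: string,
  tagSlugs: string
) => {
  const slugs = tagSlugs !== '' ? tagSlugs.split(',') : [];
  const workspaceTags = await Tag.find({ workspace: workspaceId });
  const tagIds = _.compact(slugs.map((slug) => _.find(workspaceTags, { slug })?.id));

  return Secret.find({
    workspace: workspaceId,
    environment,
    $or: [{ user: userId }, { user: { $exists: false } }],
    ...(tagIds.length ? { tags: { $in: tagIds } } : {})
  }).populate('tags');
};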

View File

@ -8,8 +8,6 @@ import {
import {
SALT_ROUNDS
} from '../../config';
import { userHasWorkspaceAccess } from '../../ee/helpers/checkMembershipPermissions';
import { ABILITY_READ } from '../../variables/organization';
/**
* Return service token data associated with service token on request
@ -39,11 +37,6 @@ export const createServiceTokenData = async (req: Request, res: Response) => {
expiresIn
} = req.body;
const hasAccess = await userHasWorkspaceAccess(req.user, workspaceId, environment, ABILITY_READ)
if (!hasAccess) {
throw UnauthorizedRequestError({ message: "You do not have the necessary permission(s) to perform this action" })
}
const secret = crypto.randomBytes(16).toString('hex');
const secretHash = await bcrypt.hash(secret, SALT_ROUNDS);
@ -107,8 +100,4 @@ export const deleteServiceTokenData = async (req: Request, res: Response) => {
return res.status(200).send({
serviceTokenData
});
}
function UnauthorizedRequestError(arg0: { message: string; }) {
throw new Error('Function not implemented.');
}
}

View File

@ -1,72 +0,0 @@
import { Request, Response } from 'express';
import * as Sentry from '@sentry/node';
import { Types } from 'mongoose';
import {
Membership, Secret,
} from '../../models';
import Tag, { ITag } from '../../models/tag';
import { Builder } from "builder-pattern"
import to from 'await-to-js';
import { BadRequestError, UnauthorizedRequestError } from '../../utils/errors';
import { MongoError } from 'mongodb';
import { userHasWorkspaceAccess } from '../../ee/helpers/checkMembershipPermissions';
export const createWorkspaceTag = async (req: Request, res: Response) => {
const { workspaceId } = req.params
const { name, slug } = req.body
const sanitizedTagToCreate = Builder<ITag>()
.name(name)
.workspace(new Types.ObjectId(workspaceId))
.slug(slug)
.user(new Types.ObjectId(req.user._id))
.build();
const [err, createdTag] = await to(Tag.create(sanitizedTagToCreate))
if (err) {
if ((err as MongoError).code === 11000) {
throw BadRequestError({ message: "Tags must be unique in a workspace" })
}
throw err
}
res.json(createdTag)
}
export const deleteWorkspaceTag = async (req: Request, res: Response) => {
const { tagId } = req.params
const tagFromDB = await Tag.findById(tagId)
if (!tagFromDB) {
throw BadRequestError()
}
// can only delete if the request user is one that belongs to the same workspace as the tag
const membership = await Membership.findOne({
user: req.user,
workspace: tagFromDB.workspace
});
if (!membership) {
UnauthorizedRequestError({ message: 'Failed to validate membership' });
}
const result = await Tag.findByIdAndDelete(tagId);
// remove the tag from secrets
await Secret.updateMany(
{ tags: { $in: [tagId] } },
{ $pull: { tags: tagId } }
);
res.json(result);
}
export const getWorkspaceTags = async (req: Request, res: Response) => {
const { workspaceId } = req.params
const workspaceTags = await Tag.find({ workspace: workspaceId })
return res.json({
workspaceTags
})
}
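A sketch of how these handlers could be mounted, following the router patterns used elsewhere in this diff; the paths, middleware options, and validator chains here are assumptions rather than the repository's actual tags route file:

import express from 'express';
import { body, param } from 'express-validator';
import { requireAuth, requireWorkspaceAuth, validateRequest } from '../../middleware';
import { ADMIN, MEMBER } from '../../variables';
import { tagController } from '../../controllers/v2';

const router = express.Router();

// List tags in a workspace
router.get(
  '/:workspaceId/tags',
  requireAuth({ acceptedAuthModes: ['jwt'] }),
  requireWorkspaceAuth({ acceptedRoles: [ADMIN, MEMBER], location: 'params' }),
  param('workspaceId').exists().trim(),
  validateRequest,
  tagController.getWorkspaceTags
);

// Create a tag; duplicate slugs surface as the 11000 error handled in the controller above
router.post(
  '/:workspaceId/tags',
  requireAuth({ acceptedAuthModes: ['jwt'] }),
  requireWorkspaceAuth({ acceptedRoles: [ADMIN, MEMBER], location: 'params' }),
  param('workspaceId').exists().trim(),
  body('name').exists().trim(),
  body('slug').exists().trim(),
  validateRequest,
  tagController.createWorkspaceTag
);

export default router;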

View File

@ -467,42 +467,4 @@ export const deleteWorkspaceMembership = async (req: Request, res: Response) =>
return res.status(200).send({
membership
});
}
/**
* Change autoCapitalization rule of workspace
* @param req
* @param res
* @returns
*/
export const toggleAutoCapitalization = async (req: Request, res: Response) => {
let workspace;
try {
const { workspaceId } = req.params;
const { autoCapitalization } = req.body;
workspace = await Workspace.findOneAndUpdate(
{
_id: workspaceId
},
{
autoCapitalization
},
{
new: true
}
);
} catch (err) {
Sentry.setUser({ email: req.user.email });
Sentry.captureException(err);
return res.status(400).send({
message: 'Failed to change autoCapitalization setting'
});
}
return res.status(200).send({
message: 'Successfully changed autoCapitalization setting',
workspace
});
};
}

View File

@ -15,13 +15,7 @@ export const getSecretSnapshot = async (req: Request, res: Response) => {
secretSnapshot = await SecretSnapshot
.findById(secretSnapshotId)
.populate({
path: 'secretVersions',
populate: {
path: 'tags',
model: 'Tag',
}
});
.populate('secretVersions');
if (!secretSnapshot) throw new Error('Failed to find secret snapshot');

View File

@ -1,40 +1,39 @@
import * as Sentry from '@sentry/node';
import { Types } from 'mongoose';
import { Action } from '../models';
import { SecretVersion, Action } from '../models';
import {
getLatestSecretVersionIds,
getLatestNSecretSecretVersionIds
} from '../helpers/secretVersion';
import {
ACTION_LOGIN,
ACTION_LOGOUT,
ACTION_ADD_SECRETS,
ACTION_READ_SECRETS,
ACTION_DELETE_SECRETS,
ACTION_UPDATE_SECRETS,
} from '../../variables';
import { ACTION_UPDATE_SECRETS } from '../../variables';
/**
* Create an (audit) action for updating secrets
* Create an (audit) action for secrets including
* add, delete, update, and read actions.
* @param {Object} obj
* @param {String} obj.name - name of action
* @param {Types.ObjectId} obj.secretIds - ids of relevant secrets
* @param {ObjectId[]} obj.secretIds - ids of relevant secrets
* @returns {Action} action - new action
*/
const createActionUpdateSecret = async ({
const createActionSecretHelper = async ({
name,
userId,
workspaceId,
secretIds
}: {
name: string;
userId: Types.ObjectId;
workspaceId: Types.ObjectId;
userId: string;
workspaceId: string;
secretIds: Types.ObjectId[];
}) => {
let action;
let latestSecretVersions;
try {
const latestSecretVersions = (await getLatestNSecretSecretVersionIds({
if (name === ACTION_UPDATE_SECRETS) {
// case: action is updating secrets
// -> add old and new secret versions
latestSecretVersions = (await getLatestNSecretSecretVersionIds({
secretIds,
n: 2
}))
@ -42,7 +41,17 @@ const createActionUpdateSecret = async ({
oldSecretVersion: s.versions[0]._id,
newSecretVersion: s.versions[1]._id
}));
} else {
// case: action is adding, deleting, or reading secrets
// -> add new secret versions
latestSecretVersions = (await getLatestSecretVersionIds({
secretIds
}))
.map((s) => ({
newSecretVersion: s.versionId
}));
}
action = await new Action({
name,
user: userId,
@ -55,148 +64,10 @@ const createActionUpdateSecret = async ({
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed to create update secret action');
}
return action;
}
/**
* Create an (audit) action for creating, reading, and deleting
* secrets
* @param {Object} obj
* @param {String} obj.name - name of action
* @param {Types.ObjectId} obj.secretIds - ids of relevant secrets
* @returns {Action} action - new action
*/
const createActionSecret = async ({
name,
userId,
workspaceId,
secretIds
}: {
name: string;
userId: Types.ObjectId;
workspaceId: Types.ObjectId;
secretIds: Types.ObjectId[];
}) => {
let action;
try {
// case: action is adding, deleting, or reading secrets
// -> add new secret versions
const latestSecretVersions = (await getLatestSecretVersionIds({
secretIds
}))
.map((s) => ({
newSecretVersion: s.versionId
}));
action = await new Action({
name,
user: userId,
workspace: workspaceId,
payload: {
secretVersions: latestSecretVersions
}
}).save();
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed to create add/read/delete secret action');
}
return action;
}
/**
* Create an (audit) action for user with id [userId]
* @param {Object} obj
* @param {String} obj.name - name of action
* @param {String} obj.userId - id of user associated with action
* @returns
*/
const createActionUser = ({
name,
userId
}: {
name: string;
userId: Types.ObjectId;
}) => {
let action;
try {
action = new Action({
name,
user: userId
}).save();
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed to create user action');
}
return action;
}
/**
* Create an (audit) action.
* @param {Object} obj
* @param {Object} obj.name - name of action
* @param {Types.ObjectId} obj.userId - id of user associated with action
* @param {Types.ObjectId} obj.workspaceId - id of workspace associated with action
* @param {Types.ObjectId[]} obj.secretIds - ids of secrets associated with action
*/
const createActionHelper = async ({
name,
userId,
workspaceId,
secretIds,
}: {
name: string;
userId: Types.ObjectId;
workspaceId?: Types.ObjectId;
secretIds?: Types.ObjectId[];
}) => {
let action;
try {
switch (name) {
case ACTION_LOGIN:
case ACTION_LOGOUT:
action = await createActionUser({
name,
userId
});
break;
case ACTION_ADD_SECRETS:
case ACTION_READ_SECRETS:
case ACTION_DELETE_SECRETS:
if (!workspaceId || !secretIds) throw new Error('Missing required params workspace id or secret ids to create action secret');
action = await createActionSecret({
name,
userId,
workspaceId,
secretIds
});
break;
case ACTION_UPDATE_SECRETS:
if (!workspaceId || !secretIds) throw new Error('Missing required params workspace id or secret ids to create action secret');
action = await createActionUpdateSecret({
name,
userId,
workspaceId,
secretIds
});
break;
}
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed to create action');
}
return action;
}
export {
createActionHelper
};
export { createActionSecretHelper };
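A hedged usage sketch for the helper above: the service code in this diff reaches it through EELogService.createActionSecret, but a direct call would look roughly like this (the ids and import paths are placeholders):

import { Types } from 'mongoose';
import { createActionSecretHelper } from '../helpers/action';
import { ACTION_UPDATE_SECRETS } from '../../variables';

const recordSecretsUpdated = async (
  userId: string,
  workspaceId: string,
  secretIds: Types.ObjectId[]
) => {
  // For ACTION_UPDATE_SECRETS the helper pairs the two latest versions of each
  // secret (oldSecretVersion / newSecretVersion); for add, read, and delete it
  // records only the newest version id.
  return await createActionSecretHelper({
    name: ACTION_UPDATE_SECRETS,
    userId,
    workspaceId,
    secretIds
  });
};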

View File

@ -1,6 +1,5 @@
import _ from "lodash";
import { Membership } from "../../models";
import { ABILITY_READ, ABILITY_WRITE } from "../../variables/organization";
export const userHasWorkspaceAccess = async (userId: any, workspaceId: any, environment: any, action: any) => {
const membershipForWorkspace = await Membership.findOne({ workspace: workspaceId, user: userId })
@ -16,39 +15,4 @@ export const userHasWorkspaceAccess = async (userId: any, workspaceId: any, envi
}
return true
}
export const userHasWriteOnlyAbility = async (userId: any, workspaceId: any, environment: any) => {
const membershipForWorkspace = await Membership.findOne({ workspace: workspaceId, user: userId })
if (!membershipForWorkspace) {
return false
}
const deniedMembershipPermissions = membershipForWorkspace.deniedPermissions;
const isWriteDisallowed = _.some(deniedMembershipPermissions, { environmentSlug: environment, ability: ABILITY_WRITE });
const isReadDisallowed = _.some(deniedMembershipPermissions, { environmentSlug: environment, ability: ABILITY_READ });
// case: you have write only if read is blocked and write is not
if (isReadDisallowed && !isWriteDisallowed) {
return true
}
return false
}
export const userHasNoAbility = async (userId: any, workspaceId: any, environment: any) => {
const membershipForWorkspace = await Membership.findOne({ workspace: workspaceId, user: userId })
if (!membershipForWorkspace) {
return true
}
const deniedMembershipPermissions = membershipForWorkspace.deniedPermissions;
const isWriteDisallowed = _.some(deniedMembershipPermissions, { environmentSlug: environment, ability: ABILITY_WRITE });
const isReadBlocked = _.some(deniedMembershipPermissions, { environmentSlug: environment, ability: ABILITY_READ });
if (isReadBlocked && isWriteDisallowed) {
return true
}
return false
}
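The helpers in this file all reduce to the same idea: a membership carries a deniedPermissions list of { environmentSlug, ability } entries, and read-only, write-only, or no access is derived from which abilities are denied for the requested environment. A small illustrative classifier (not part of the codebase) that captures the rules above:

import _ from 'lodash';
import { ABILITY_READ, ABILITY_WRITE } from '../../variables/organization';

type DeniedPermission = { environmentSlug: string; ability: string };

const classifyAccess = (denied: DeniedPermission[], environment: string) => {
  const readDenied = _.some(denied, { environmentSlug: environment, ability: ABILITY_READ });
  const writeDenied = _.some(denied, { environmentSlug: environment, ability: ABILITY_WRITE });
  if (readDenied && writeDenied) return 'none';   // userHasNoAbility
  if (readDenied) return 'write-only';            // userHasWriteOnlyAbility
  if (writeDenied) return 'read-only';
  return 'read-write';
};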

View File

@ -1,19 +1,9 @@
import * as Sentry from '@sentry/node';
import { Types } from 'mongoose';
import {
Log,
IAction
} from '../models';
/**
* Create an (audit) log
* @param {Object} obj
* @param {Types.ObjectId} obj.userId - id of user associated with the log
* @param {Types.ObjectId} obj.workspaceId - id of workspace associated with the log
* @param {IAction[]} obj.actions - actions to include in log
* @param {String} obj.channel - channel (web/cli/auto) associated with the log
* @param {String} obj.ipAddress - ip address associated with the log
* @returns {Log} log - new audit log
*/
const createLogHelper = async ({
userId,
workspaceId,
@ -21,8 +11,8 @@ const createLogHelper = async ({
channel,
ipAddress
}: {
userId: Types.ObjectId;
workspaceId?: Types.ObjectId;
userId: string;
workspaceId: string;
actions: IAction[];
channel: string;
ipAddress: string;
@ -31,7 +21,7 @@ const createLogHelper = async ({
try {
log = await new Log({
user: userId,
workspace: workspaceId ?? undefined,
workspace: workspaceId,
actionNames: actions.map((a) => a.name),
actions,
channel,

View File

@ -1,18 +1,10 @@
import { Schema, model, Types } from 'mongoose';
import {
ACTION_LOGIN,
ACTION_LOGOUT,
ACTION_ADD_SECRETS,
ACTION_UPDATE_SECRETS,
ACTION_READ_SECRETS,
ACTION_DELETE_SECRETS
} from '../../variables';
export interface IAction {
name: string;
user?: Types.ObjectId,
workspace?: Types.ObjectId,
payload?: {
payload: {
secretVersions?: Types.ObjectId[]
}
}
@ -21,15 +13,7 @@ const actionSchema = new Schema<IAction>(
{
name: {
type: String,
required: true,
enum: [
ACTION_LOGIN,
ACTION_LOGOUT,
ACTION_ADD_SECRETS,
ACTION_UPDATE_SECRETS,
ACTION_READ_SECRETS,
ACTION_DELETE_SECRETS
]
required: true
},
user: {
type: Schema.Types.ObjectId,

View File

@ -1,7 +1,5 @@
import { Schema, model, Types } from 'mongoose';
import {
ACTION_LOGIN,
ACTION_LOGOUT,
ACTION_ADD_SECRETS,
ACTION_UPDATE_SECRETS,
ACTION_READ_SECRETS,
@ -31,8 +29,6 @@ const logSchema = new Schema<ILog>(
actionNames: {
type: [String],
enum: [
ACTION_LOGIN,
ACTION_LOGOUT,
ACTION_ADD_SECRETS,
ACTION_UPDATE_SECRETS,
ACTION_READ_SECRETS,

View File

@ -21,7 +21,6 @@ export interface ISecretVersion {
secretValueIV: string;
secretValueTag: string;
secretValueHash: string;
tags?: string[];
}
const secretVersionSchema = new Schema<ISecretVersion>(
@ -89,12 +88,7 @@ const secretVersionSchema = new Schema<ISecretVersion>(
},
secretValueHash: {
type: String
},
tags: {
ref: 'Tag',
type: [Schema.Types.ObjectId],
default: []
},
}
},
{
timestamps: true

View File

@ -1,12 +1,14 @@
import { Types } from 'mongoose';
import {
Log,
Action,
IAction
} from '../models';
import {
createLogHelper
} from '../helpers/log';
import {
createActionHelper
createActionSecretHelper
} from '../helpers/action';
import EELicenseService from './EELicenseService';
@ -31,8 +33,8 @@ class EELogService {
channel,
ipAddress
}: {
userId: Types.ObjectId;
workspaceId?: Types.ObjectId;
userId: string;
workspaceId: string;
actions: IAction[];
channel: string;
ipAddress: string;
@ -48,26 +50,26 @@ class EELogService {
}
/**
* Create an (audit) action
* Create an (audit) action for secrets including
* add, delete, update, and read actions.
* @param {Object} obj
* @param {String} obj.name - name of action
* @param {Types.ObjectId} obj.userId - id of user associated with the action
* @param {Types.ObjectId} obj.workspaceId - id of workspace associated with the action
* @param {ObjectId[]} obj.secretIds - ids of secrets associated with the action
* @param {ObjectId[]} obj.secretIds - secret ids
* @returns {Action} action - new action
*/
static async createAction({
static async createActionSecret({
name,
userId,
workspaceId,
secretIds
}: {
name: string;
userId: Types.ObjectId;
workspaceId?: Types.ObjectId;
secretIds?: Types.ObjectId[];
userId: string;
workspaceId: string;
secretIds: Types.ObjectId[];
}) {
return await createActionHelper({
if (!EELicenseService.isLicenseValid) return null;
return await createActionSecretHelper({
name,
userId,
workspaceId,

View File

@ -1,4 +1,5 @@
import mongoose from 'mongoose';
import { ISecret, Secret } from '../models';
import { EESecretService } from '../ee/services';
import { getLogger } from '../utils/logger';
@ -15,10 +16,6 @@ const initDatabaseHelper = async ({
}) => {
try {
await mongoose.connect(mongoURL);
// allow empty strings to pass the required validator
mongoose.Schema.Types.String.checkRequired(v => typeof v === 'string');
getLogger("database").info("Database connection established");
await EESecretService.initSecretVersioning();

View File

@ -30,7 +30,6 @@ interface Update {
* @param {String} obj.workspaceId - id of workspace
* @param {String} obj.integration - name of integration
* @param {String} obj.code - code
* @returns {IntegrationAuth} integrationAuth - integration auth after OAuth2 code-token exchange
*/
const handleOAuthExchangeHelper = async ({
workspaceId,
@ -43,6 +42,7 @@ const handleOAuthExchangeHelper = async ({
code: string;
environment: string;
}) => {
let action;
let integrationAuth;
try {
const bot = await Bot.findOne({
@ -94,18 +94,25 @@ const handleOAuthExchangeHelper = async ({
// set integration auth access token
await setIntegrationAuthAccessHelper({
integrationAuthId: integrationAuth._id.toString(),
accessId: null,
accessToken: res.accessToken,
accessExpiresAt: res.accessExpiresAt
});
}
// initialize new integration after exchange
await new Integration({
workspace: workspaceId,
isActive: false,
app: null,
environment,
integration,
integrationAuth: integrationAuth._id
}).save();
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed to handle OAuth2 code-token exchange')
}
return integrationAuth;
}
/**
* Sync/push environment variables in workspace with id [workspaceId] to
@ -139,7 +146,7 @@ const syncIntegrationsHelper = async ({
if (!integrationAuth) throw new Error('Failed to find integration auth');
// get integration auth access token
const access = await getIntegrationAuthAccessHelper({
const accessToken = await getIntegrationAuthAccessHelper({
integrationAuthId: integration.integrationAuth.toString()
});
@ -148,8 +155,7 @@ const syncIntegrationsHelper = async ({
integration,
integrationAuth,
secrets,
accessId: access.accessId,
accessToken: access.accessToken
accessToken
});
}
} catch (err) {
@ -205,12 +211,12 @@ const syncIntegrationsHelper = async ({
* @returns {String} accessToken - decrypted access token
*/
const getIntegrationAuthAccessHelper = async ({ integrationAuthId }: { integrationAuthId: string }) => {
let accessId;
let accessToken;
try {
const integrationAuth = await IntegrationAuth
.findById(integrationAuthId)
.select('workspace integration +accessCiphertext +accessIV +accessTag +accessExpiresAt + refreshCiphertext +accessIdCiphertext +accessIdIV +accessIdTag');
.select('workspace integration +accessCiphertext +accessIV +accessTag +accessExpiresAt + refreshCiphertext');
if (!integrationAuth) throw UnauthorizedRequestError({message: 'Failed to locate Integration Authentication credentials'});
@ -234,15 +240,6 @@ const getIntegrationAuthAccessHelper = async ({ integrationAuthId }: { integrati
});
}
}
if (integrationAuth?.accessIdCiphertext && integrationAuth?.accessIdIV && integrationAuth?.accessIdTag) {
accessId = await BotService.decryptSymmetric({
workspaceId: integrationAuth.workspace.toString(),
ciphertext: integrationAuth.accessIdCiphertext as string,
iv: integrationAuth.accessIdIV as string,
tag: integrationAuth.accessIdTag as string
});
}
} catch (err) {
Sentry.setUser(null);
@ -253,10 +250,7 @@ const getIntegrationAuthAccessHelper = async ({ integrationAuthId }: { integrati
throw new Error('Failed to get integration access token');
}
return ({
accessId,
accessToken
});
return accessToken;
}
/**
@ -306,9 +300,9 @@ const setIntegrationAuthRefreshHelper = async ({
}
/**
* Encrypt access token [accessToken] and (optionally) access id [accessId]
* using the bot's copy of the workspace key for workspace belonging to
* integration auth with id [integrationAuthId] and store it along with [accessExpiresAt]
* Encrypt access token [accessToken] using the bot's copy
* of the workspace key for workspace belonging to integration auth
* with id [integrationAuthId] and store it along with [accessExpiresAt]
* @param {Object} obj
* @param {String} obj.integrationAuthId - id of integration auth
* @param {String} obj.accessToken - access token
@ -316,12 +310,10 @@ const setIntegrationAuthRefreshHelper = async ({
*/
const setIntegrationAuthAccessHelper = async ({
integrationAuthId,
accessId,
accessToken,
accessExpiresAt
}: {
integrationAuthId: string;
accessId: string | null;
accessToken: string;
accessExpiresAt: Date | undefined;
}) => {
@ -331,28 +323,17 @@ const setIntegrationAuthAccessHelper = async ({
if (!integrationAuth) throw new Error('Failed to find integration auth');
const encryptedAccessTokenObj = await BotService.encryptSymmetric({
const obj = await BotService.encryptSymmetric({
workspaceId: integrationAuth.workspace.toString(),
plaintext: accessToken
});
let encryptedAccessIdObj;
if (accessId) {
encryptedAccessIdObj = await BotService.encryptSymmetric({
workspaceId: integrationAuth.workspace.toString(),
plaintext: accessId
});
}
integrationAuth = await IntegrationAuth.findOneAndUpdate({
_id: integrationAuthId
}, {
accessIdCiphertext: encryptedAccessIdObj?.ciphertext ?? undefined,
accessIdIV: encryptedAccessIdObj?.iv ?? undefined,
accessIdTag: encryptedAccessIdObj?.tag ?? undefined,
accessCiphertext: encryptedAccessTokenObj.ciphertext,
accessIV: encryptedAccessTokenObj.iv,
accessTag: encryptedAccessTokenObj.tag,
accessCiphertext: obj.ciphertext,
accessIV: obj.iv,
accessTag: obj.tag,
accessExpiresAt
}, {
new: true

View File

@ -3,7 +3,6 @@ import Stripe from 'stripe';
import {
STRIPE_SECRET_KEY,
STRIPE_PRODUCT_STARTER,
STRIPE_PRODUCT_TEAM,
STRIPE_PRODUCT_PRO
} from '../config';
const stripe = new Stripe(STRIPE_SECRET_KEY, {
@ -15,7 +14,6 @@ import { Organization, MembershipOrg } from '../models';
const productToPriceMap = {
starter: STRIPE_PRODUCT_STARTER,
team: STRIPE_PRODUCT_TEAM,
pro: STRIPE_PRODUCT_PRO
};
@ -57,7 +55,7 @@ const createOrganization = async ({
} catch (err) {
Sentry.setUser({ email });
Sentry.captureException(err);
throw new Error(`Failed to create organization [err=${err}]`);
throw new Error('Failed to create organization');
}
return organization;
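A hedged sketch of how productToPriceMap might be used when an organization changes plan, assuming the STRIPE_PRODUCT_* values are Stripe price ids and that the organization stores a Stripe customerId (neither is shown in this diff); the calls follow the public stripe-node API:

const upgradeToPro = async (customerId: string) => {
  const price = productToPriceMap['pro'];

  // Look up the customer's current subscription and swap its single item to the new price.
  const subscription = (await stripe.subscriptions.list({ customer: customerId })).data[0];
  return await stripe.subscriptions.update(subscription.id, {
    items: [{ id: subscription.items.data[0].id, price }]
  });
};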

View File

@ -406,10 +406,10 @@ const v2PushSecrets = async ({
secretIds: toDelete
});
const deleteAction = await EELogService.createAction({
const deleteAction = await EELogService.createActionSecret({
name: ACTION_DELETE_SECRETS,
userId: new Types.ObjectId(userId),
workspaceId: new Types.ObjectId(userId),
userId,
workspaceId,
secretIds: toDelete
});
@ -499,10 +499,10 @@ const v2PushSecrets = async ({
})
});
const updateAction = await EELogService.createAction({
const updateAction = await EELogService.createActionSecret({
name: ACTION_UPDATE_SECRETS,
userId: new Types.ObjectId(userId),
workspaceId: new Types.ObjectId(workspaceId),
userId,
workspaceId,
secretIds: toUpdate.map((u) => u._id)
});
@ -536,10 +536,10 @@ const v2PushSecrets = async ({
})
});
const addAction = await EELogService.createAction({
const addAction = await EELogService.createActionSecret({
name: ACTION_ADD_SECRETS,
userId: new Types.ObjectId(userId),
workspaceId: new Types.ObjectId(workspaceId),
userId,
workspaceId,
secretIds: newSecrets.map((n) => n._id)
});
addAction && actions.push(addAction);
@ -553,8 +553,8 @@ const v2PushSecrets = async ({
// (EE) create (audit) log
if (actions.length > 0) {
await EELogService.createLog({
userId: new Types.ObjectId(userId),
workspaceId: new Types.ObjectId(workspaceId),
userId,
workspaceId,
actions,
channel,
ipAddress
@ -645,16 +645,16 @@ const pullSecrets = async ({
environment
})
const readAction = await EELogService.createAction({
const readAction = await EELogService.createActionSecret({
name: ACTION_READ_SECRETS,
userId: new Types.ObjectId(userId),
workspaceId: new Types.ObjectId(workspaceId),
userId,
workspaceId,
secretIds: secrets.map((n: any) => n._id)
});
readAction && await EELogService.createLog({
userId: new Types.ObjectId(userId),
workspaceId: new Types.ObjectId(workspaceId),
userId,
workspaceId,
actions: [readAction],
channel,
ipAddress

View File

@ -7,7 +7,6 @@ import { createWorkspace } from './workspace';
import { addMemberships } from './membership';
import { OWNER, ADMIN, ACCEPTED } from '../variables';
import { sendMail } from '../helpers/nodemailer';
import { EMAIL_TOKEN_LIFETIME } from '../config';
/**
* Send magic link to verify email to [email]
@ -26,8 +25,7 @@ const sendEmailVerification = async ({ email }: { email: string }) => {
{
email,
token,
createdAt: new Date(),
ttl: Math.floor(+new Date() / 1000) + EMAIL_TOKEN_LIFETIME // time in seconds, i.e unix
createdAt: new Date()
},
{ upsert: true, new: true }
);
@ -64,20 +62,11 @@ const checkEmailVerification = async ({
code: string;
}) => {
try {
const token = await Token.findOne({
const token = await Token.findOneAndDelete({
email,
token: code
});
if (token && Math.floor(Date.now() / 1000) > token.ttl) {
await Token.deleteOne({
email,
token: code
});
throw new Error('Verification token has expired')
}
if (!token) throw new Error('Failed to find email verification token');
} catch (err) {
Sentry.setUser(null);
@ -127,7 +116,7 @@ const initializeDefaultOrg = async ({
roles: [ADMIN]
});
} catch (err) {
throw new Error(`Failed to initialize default organization and workspace [err=${err}]`);
throw new Error('Failed to initialize default organization and workspace');
}
};

View File

@ -1,25 +1,20 @@
import axios from "axios";
import * as Sentry from "@sentry/node";
import { Octokit } from "@octokit/rest";
import { IIntegrationAuth } from "../models";
import axios from 'axios';
import * as Sentry from '@sentry/node';
import { Octokit } from '@octokit/rest';
import { IIntegrationAuth } from '../models';
import {
INTEGRATION_AZURE_KEY_VAULT,
INTEGRATION_AWS_PARAMETER_STORE,
INTEGRATION_AWS_SECRET_MANAGER,
INTEGRATION_HEROKU,
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_RENDER,
INTEGRATION_FLYIO,
INTEGRATION_CIRCLECI,
INTEGRATION_HEROKU_API_URL,
INTEGRATION_VERCEL_API_URL,
INTEGRATION_NETLIFY_API_URL,
INTEGRATION_RENDER_API_URL,
INTEGRATION_FLYIO_API_URL,
INTEGRATION_CIRCLECI_API_URL,
} from "../variables";
INTEGRATION_FLYIO_API_URL
} from '../variables';
/**
* Return list of names of apps for integration named [integration]
@ -31,7 +26,7 @@ import {
*/
const getApps = async ({
integrationAuth,
accessToken,
accessToken
}: {
integrationAuth: IIntegrationAuth;
accessToken: string;
@ -45,56 +40,42 @@ const getApps = async ({
let apps: App[];
try {
switch (integrationAuth.integration) {
case INTEGRATION_AZURE_KEY_VAULT:
apps = [];
break;
case INTEGRATION_AWS_PARAMETER_STORE:
apps = [];
break;
case INTEGRATION_AWS_SECRET_MANAGER:
apps = [];
break;
case INTEGRATION_HEROKU:
apps = await getAppsHeroku({
accessToken,
accessToken
});
break;
case INTEGRATION_VERCEL:
apps = await getAppsVercel({
integrationAuth,
accessToken,
accessToken
});
break;
case INTEGRATION_NETLIFY:
apps = await getAppsNetlify({
accessToken,
accessToken
});
break;
case INTEGRATION_GITHUB:
apps = await getAppsGithub({
accessToken,
accessToken
});
break;
case INTEGRATION_RENDER:
apps = await getAppsRender({
accessToken,
accessToken
});
break;
case INTEGRATION_FLYIO:
apps = await getAppsFlyio({
accessToken,
});
break;
case INTEGRATION_CIRCLECI:
apps = await getAppsCircleCI({
accessToken,
accessToken
});
break;
}
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error("Failed to get integration apps");
throw new Error('Failed to get integration apps');
}
return apps;
@ -113,19 +94,19 @@ const getAppsHeroku = async ({ accessToken }: { accessToken: string }) => {
const res = (
await axios.get(`${INTEGRATION_HEROKU_API_URL}/apps`, {
headers: {
Accept: "application/vnd.heroku+json; version=3",
Authorization: `Bearer ${accessToken}`,
},
Accept: 'application/vnd.heroku+json; version=3',
Authorization: `Bearer ${accessToken}`
}
})
).data;
apps = res.map((a: any) => ({
name: a.name,
name: a.name
}));
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error("Failed to get Heroku integration apps");
throw new Error('Failed to get Heroku integration apps');
}
return apps;
@ -138,10 +119,10 @@ const getAppsHeroku = async ({ accessToken }: { accessToken: string }) => {
* @returns {Object[]} apps - names of Vercel apps
* @returns {String} apps.name - name of Vercel app
*/
const getAppsVercel = async ({
const getAppsVercel = async ({
integrationAuth,
accessToken,
}: {
accessToken
}: {
integrationAuth: IIntegrationAuth;
accessToken: string;
}) => {
@ -150,26 +131,23 @@ const getAppsVercel = async ({
const res = (
await axios.get(`${INTEGRATION_VERCEL_API_URL}/v9/projects`, {
headers: {
Authorization: `Bearer ${accessToken}`,
'Accept-Encoding': 'application/json'
Authorization: `Bearer ${accessToken}`
},
...(integrationAuth?.teamId
? {
params: {
teamId: integrationAuth.teamId,
},
}
: {}),
...( integrationAuth?.teamId ? {
params: {
teamId: integrationAuth.teamId
}
} : {})
})
).data;
apps = res.projects.map((a: any) => ({
name: a.name,
name: a.name
}));
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error("Failed to get Vercel integration apps");
throw new Error('Failed to get Vercel integration apps');
}
return apps;
@ -182,26 +160,29 @@ const getAppsVercel = async ({
* @returns {Object[]} apps - names of Netlify sites
* @returns {String} apps.name - name of Netlify site
*/
const getAppsNetlify = async ({ accessToken }: { accessToken: string }) => {
const getAppsNetlify = async ({
accessToken
}: {
accessToken: string;
}) => {
let apps;
try {
const res = (
await axios.get(`${INTEGRATION_NETLIFY_API_URL}/api/v1/sites`, {
headers: {
Authorization: `Bearer ${accessToken}`,
'Accept-Encoding': 'application/json'
Authorization: `Bearer ${accessToken}`
}
})
).data;
apps = res.map((a: any) => ({
name: a.name,
appId: a.site_id,
appId: a.site_id
}));
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error("Failed to get Netlify integration apps");
throw new Error('Failed to get Netlify integration apps');
}
return apps;
@ -214,32 +195,35 @@ const getAppsNetlify = async ({ accessToken }: { accessToken: string }) => {
* @returns {Object[]} apps - names of Github repos
* @returns {String} apps.name - name of Github repo
*/
const getAppsGithub = async ({ accessToken }: { accessToken: string }) => {
const getAppsGithub = async ({
accessToken
}: {
accessToken: string;
}) => {
let apps;
try {
const octokit = new Octokit({
auth: accessToken,
auth: accessToken
});
const repos = (
await octokit.request(
"GET /user/repos{?visibility,affiliation,type,sort,direction,per_page,page,since,before}",
{
per_page: 100,
}
)
).data;
const repos = (await octokit.request(
'GET /user/repos{?visibility,affiliation,type,sort,direction,per_page,page,since,before}',
{
per_page: 100
}
)).data;
apps = repos
.filter((a: any) => a.permissions.admin === true)
.filter((a:any) => a.permissions.admin === true)
.map((a: any) => ({
name: a.name,
owner: a.owner.login,
}));
name: a.name,
owner: a.owner.login
})
);
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error("Failed to get Github repos");
throw new Error('Failed to get Github repos');
}
return apps;
@ -253,16 +237,18 @@ const getAppsGithub = async ({ accessToken }: { accessToken: string }) => {
* @returns {String} apps.name - name of Render service
* @returns {String} apps.appId - id of Render service
*/
const getAppsRender = async ({ accessToken }: { accessToken: string }) => {
const getAppsRender = async ({
accessToken
}: {
accessToken: string;
}) => {
let apps: any;
try {
const res = (
await axios.get(`${INTEGRATION_RENDER_API_URL}/v1/services`, {
headers: {
Authorization: `Bearer ${accessToken}`,
Accept: 'application/json',
'Accept-Encoding': 'application/json',
},
Authorization: `Bearer ${accessToken}`
}
})
).data;
@ -271,15 +257,14 @@ const getAppsRender = async ({ accessToken }: { accessToken: string }) => {
name: a.service.name,
appId: a.service.id
}));
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error("Failed to get Render services");
throw new Error('Failed to get Render services');
}
return apps;
};
}
/**
* Return list of apps for Fly.io integration
@ -288,7 +273,11 @@ const getAppsRender = async ({ accessToken }: { accessToken: string }) => {
* @returns {Object[]} apps - names and ids of Fly.io apps
* @returns {String} apps.name - name of Fly.io apps
*/
const getAppsFlyio = async ({ accessToken }: { accessToken: string }) => {
const getAppsFlyio = async ({
accessToken
}: {
accessToken: string;
}) => {
let apps;
try {
const query = `
@ -302,71 +291,32 @@ const getAppsFlyio = async ({ accessToken }: { accessToken: string }) => {
}
}
`;
const res = (
await axios({
url: INTEGRATION_FLYIO_API_URL,
method: "post",
headers: {
Authorization: "Bearer " + accessToken,
'Accept': 'application/json',
'Accept-Encoding': 'application/json',
},
data: {
query,
variables: {
role: null,
},
},
})
).data.data.apps.nodes;
apps = res.map((a: any) => ({
name: a.name,
}));
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error("Failed to get Fly.io apps");
}
return apps;
};
/**
* Return list of projects for CircleCI integration
* @param {Object} obj
* @param {String} obj.accessToken - access token for CircleCI API
* @returns {Object[]} apps - names of CircleCI projects
* @returns {String} apps.name - name of CircleCI apps
*/
const getAppsCircleCI = async ({ accessToken }: { accessToken: string }) => {
let apps: any;
try {
const res = (
await axios.get(
`${INTEGRATION_CIRCLECI_API_URL}/v1.1/projects`,
{
headers: {
"Circle-Token": accessToken,
"Accept-Encoding": "application/json",
},
const res = (await axios({
url: INTEGRATION_FLYIO_API_URL,
method: 'post',
headers: {
'Authorization': 'Bearer ' + accessToken
},
data: {
query,
variables: {
role: null
}
)
).data
apps = res?.map((a: any) => {
return {
name: a?.reponame
}
});
})).data.data.apps.nodes;
apps = res
.map((a: any) => ({
name: a.name
}));
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error("Failed to get CircleCI projects");
throw new Error('Failed to get Fly.io apps');
}
return apps;
};
}
export { getApps };
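A hedged usage sketch: in this diff the integration auth document and access token are attached to the request by requireIntegrationAuthorizationAuth, so a controller built on getApps would look roughly like this (the import path, handler name, and request-field type augmentation are assumptions):

import { Request, Response } from 'express';
import { getApps } from '../../integrations';

export const getIntegrationAuthApps = async (req: Request, res: Response) => {
  // req.integrationAuth and req.accessToken are set by the
  // requireIntegrationAuthorizationAuth middleware shown later in this diff.
  const apps = await getApps({
    integrationAuth: req.integrationAuth,
    accessToken: req.accessToken
  });

  return res.status(200).send({ apps });
};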

View File

@ -1,12 +1,10 @@
import axios from 'axios';
import * as Sentry from '@sentry/node';
import {
INTEGRATION_AZURE_KEY_VAULT,
INTEGRATION_HEROKU,
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_AZURE_TOKEN_URL,
INTEGRATION_HEROKU_TOKEN_URL,
INTEGRATION_VERCEL_TOKEN_URL,
INTEGRATION_NETLIFY_TOKEN_URL,
@ -14,27 +12,15 @@ import {
} from '../variables';
import {
SITE_URL,
CLIENT_ID_AZURE,
CLIENT_ID_VERCEL,
CLIENT_ID_NETLIFY,
CLIENT_ID_GITHUB,
CLIENT_SECRET_AZURE,
CLIENT_SECRET_HEROKU,
CLIENT_SECRET_VERCEL,
CLIENT_SECRET_NETLIFY,
CLIENT_SECRET_GITHUB
} from '../config';
interface ExchangeCodeAzureResponse {
token_type: string;
scope: string;
expires_in: number;
ext_expires_in: number;
access_token: string;
refresh_token: string;
id_token: string;
}
interface ExchangeCodeHerokuResponse {
token_type: string;
access_token: string;
@ -89,11 +75,6 @@ const exchangeCode = async ({
try {
switch (integration) {
case INTEGRATION_AZURE_KEY_VAULT:
obj = await exchangeCodeAzure({
code
});
break;
case INTEGRATION_HEROKU:
obj = await exchangeCodeHeroku({
code
@ -124,46 +105,6 @@ const exchangeCode = async ({
return obj;
};
/**
* Return [accessToken] for Azure OAuth2 code-token exchange
* @param param0
*/
const exchangeCodeAzure = async ({
code
}: {
code: string;
}) => {
const accessExpiresAt = new Date();
let res: ExchangeCodeAzureResponse;
try {
res = (await axios.post(
INTEGRATION_AZURE_TOKEN_URL,
new URLSearchParams({
grant_type: 'authorization_code',
code: code,
scope: 'https://vault.azure.net/.default openid offline_access',
client_id: CLIENT_ID_AZURE,
client_secret: CLIENT_SECRET_AZURE,
redirect_uri: `${SITE_URL}/integrations/azure-key-vault/oauth2/callback`
} as any)
)).data;
accessExpiresAt.setSeconds(
accessExpiresAt.getSeconds() + res.expires_in
);
} catch (err: any) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed OAuth2 code-token exchange with Azure');
}
return ({
accessToken: res.access_token,
refreshToken: res.refresh_token,
accessExpiresAt
});
}
/**
* Return [accessToken], [accessExpiresAt], and [refreshToken] for Heroku
* OAuth2 code-token exchange
@ -227,7 +168,7 @@ const exchangeCodeVercel = async ({ code }: { code: string }) => {
code: code,
client_id: CLIENT_ID_VERCEL,
client_secret: CLIENT_SECRET_VERCEL,
redirect_uri: `${SITE_URL}/integrations/vercel/oauth2/callback`
redirect_uri: `${SITE_URL}/vercel`
} as any)
)
).data;
@ -267,7 +208,7 @@ const exchangeCodeNetlify = async ({ code }: { code: string }) => {
code: code,
client_id: CLIENT_ID_NETLIFY,
client_secret: CLIENT_SECRET_NETLIFY,
redirect_uri: `${SITE_URL}/integrations/netlify/oauth2/callback`
redirect_uri: `${SITE_URL}/netlify`
} as any)
)
).data;
@ -319,11 +260,10 @@ const exchangeCodeGithub = async ({ code }: { code: string }) => {
client_id: CLIENT_ID_GITHUB,
client_secret: CLIENT_SECRET_GITHUB,
code: code,
redirect_uri: `${SITE_URL}/integrations/github/oauth2/callback`
redirect_uri: `${SITE_URL}/github`
},
headers: {
'Accept': 'application/json',
'Accept-Encoding': 'application/json'
Accept: 'application/json'
}
})
).data;

View File

@ -1,26 +1,13 @@
import axios from 'axios';
import * as Sentry from '@sentry/node';
import { INTEGRATION_AZURE_KEY_VAULT, INTEGRATION_HEROKU } from '../variables';
import { INTEGRATION_HEROKU } from '../variables';
import {
SITE_URL,
CLIENT_ID_AZURE,
CLIENT_SECRET_AZURE,
CLIENT_SECRET_HEROKU
CLIENT_SECRET_HEROKU
} from '../config';
import {
INTEGRATION_AZURE_TOKEN_URL,
INTEGRATION_HEROKU_TOKEN_URL
INTEGRATION_HEROKU_TOKEN_URL
} from '../variables';
interface RefreshTokenAzureResponse {
token_type: string;
scope: string;
expires_in: number;
ext_expires_in: 4871;
access_token: string;
refresh_token: string;
}
/**
* Return new access token by exchanging refresh token [refreshToken] for integration
* named [integration]
@ -38,11 +25,6 @@ const exchangeRefresh = async ({
let accessToken;
try {
switch (integration) {
case INTEGRATION_AZURE_KEY_VAULT:
accessToken = await exchangeRefreshAzure({
refreshToken
});
break;
case INTEGRATION_HEROKU:
accessToken = await exchangeRefreshHeroku({
refreshToken
@ -58,38 +40,6 @@ const exchangeRefresh = async ({
return accessToken;
};
/**
* Return new access token by exchanging refresh token [refreshToken] for the
* Azure integration
* @param {Object} obj
* @param {String} obj.refreshToken - refresh token to use to get new access token for Azure
* @returns
*/
const exchangeRefreshAzure = async ({
refreshToken
}: {
refreshToken: string;
}) => {
try {
const res: RefreshTokenAzureResponse = (await axios.post(
INTEGRATION_AZURE_TOKEN_URL,
new URLSearchParams({
client_id: CLIENT_ID_AZURE,
scope: 'openid offline_access',
refresh_token: refreshToken,
grant_type: 'refresh_token',
client_secret: CLIENT_SECRET_AZURE
} as any)
)).data;
return res.access_token;
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed to get refresh OAuth2 access token for Azure');
}
}
/**
* Return new access token by exchanging refresh token [refreshToken] for the
* Heroku integration
@ -102,23 +52,23 @@ const exchangeRefreshHeroku = async ({
}: {
refreshToken: string;
}) => {
let accessToken;
try {
const res = await axios.post(
INTEGRATION_HEROKU_TOKEN_URL,
new URLSearchParams({
grant_type: 'refresh_token',
refresh_token: refreshToken,
client_secret: CLIENT_SECRET_HEROKU
} as any)
);
let accessToken;
//TODO: refactor to use RequestError; new error types could provide more detailed errors
try {
const res = await axios.post(
INTEGRATION_HEROKU_TOKEN_URL,
new URLSearchParams({
grant_type: 'refresh_token',
refresh_token: refreshToken,
client_secret: CLIENT_SECRET_HEROKU
} as any)
);
accessToken = res.data.access_token;
} catch (err) {
Sentry.setUser(null);
Sentry.captureException(err);
throw new Error('Failed to refresh OAuth2 access token for Heroku');
throw new Error('Failed to get new OAuth2 access token for Heroku');
}
return accessToken;

File diff suppressed because it is too large

View File

@ -45,10 +45,9 @@ const requireIntegrationAuthorizationAuth = ({
req.integrationAuth = integrationAuth;
if (attachAccessToken) {
const access = await IntegrationService.getIntegrationAuthAccess({
req.accessToken = await IntegrationService.getIntegrationAuthAccess({
integrationAuthId: integrationAuth._id.toString()
});
req.accessToken = access.accessToken;
}
return next();

View File

@ -16,7 +16,6 @@ import UserAction, { IUserAction } from './userAction';
import Workspace, { IWorkspace } from './workspace';
import ServiceTokenData, { IServiceTokenData } from './serviceTokenData';
import APIKeyData, { IAPIKeyData } from './apiKeyData';
import LoginSRPDetail, { ILoginSRPDetail } from './loginSRPDetail';
export {
BackupPrivateKey,
@ -54,7 +53,5 @@ export {
ServiceTokenData,
IServiceTokenData,
APIKeyData,
IAPIKeyData,
LoginSRPDetail,
ILoginSRPDetail
IAPIKeyData
};

View File

@ -1,16 +1,12 @@
import { Schema, model, Types } from "mongoose";
import { Schema, model, Types } from 'mongoose';
import {
INTEGRATION_AZURE_KEY_VAULT,
INTEGRATION_AWS_PARAMETER_STORE,
INTEGRATION_AWS_SECRET_MANAGER,
INTEGRATION_HEROKU,
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_RENDER,
INTEGRATION_FLYIO,
INTEGRATION_CIRCLECI,
} from "../variables";
INTEGRATION_FLYIO
} from '../variables';
export interface IIntegration {
_id: Types.ObjectId;
@ -21,96 +17,68 @@ export interface IIntegration {
owner: string;
targetEnvironment: string;
appId: string;
path: string;
region: string;
integration:
| 'azure-key-vault'
| 'aws-parameter-store'
| 'aws-secret-manager'
| 'heroku'
| 'vercel'
| 'netlify'
| 'github'
| 'render'
| 'flyio'
| 'circleci';
integration: 'heroku' | 'vercel' | 'netlify' | 'github' | 'render' | 'flyio';
integrationAuth: Types.ObjectId;
}
const integrationSchema = new Schema<IIntegration>(
{
workspace: {
type: Schema.Types.ObjectId,
ref: "Workspace",
required: true,
type: Schema.Types.ObjectId,
ref: 'Workspace',
required: true
},
environment: {
type: String,
required: true,
required: true
},
isActive: {
type: Boolean,
required: true,
required: true
},
app: {
// name of app in provider
type: String,
default: null,
default: null
},
appId: {
// (new)
appId: { // (new)
// id of app in provider
type: String,
default: null,
},
targetEnvironment: {
// (new)
// target environment
type: String,
default: null,
},
owner: {
// github-specific repo owner-login
type: String,
default: null,
},
path: {
// aws-parameter-store-specific path
type: String,
default: null
},
region: {
// aws-parameter-store-specific path
targetEnvironment: { // (new)
// target environment
type: String,
default: null
},
owner: {
// github-specific repo owner-login
type: String,
default: null
},
integration: {
type: String,
enum: [
INTEGRATION_AZURE_KEY_VAULT,
INTEGRATION_AWS_PARAMETER_STORE,
INTEGRATION_AWS_SECRET_MANAGER,
INTEGRATION_HEROKU,
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_RENDER,
INTEGRATION_FLYIO,
INTEGRATION_CIRCLECI,
INTEGRATION_FLYIO
],
required: true,
required: true
},
integrationAuth: {
type: Schema.Types.ObjectId,
ref: "IntegrationAuth",
required: true,
},
ref: 'IntegrationAuth',
required: true
}
},
{
timestamps: true,
timestamps: true
}
);
const Integration = model<IIntegration>("Integration", integrationSchema);
const Integration = model<IIntegration>('Integration', integrationSchema);
export default Integration;

View File

@ -1,29 +1,20 @@
import { Schema, model, Types } from "mongoose";
import { Schema, model, Types } from 'mongoose';
import {
INTEGRATION_AZURE_KEY_VAULT,
INTEGRATION_AWS_PARAMETER_STORE,
INTEGRATION_AWS_SECRET_MANAGER,
INTEGRATION_HEROKU,
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_RENDER,
INTEGRATION_FLYIO,
INTEGRATION_CIRCLECI,
} from "../variables";
INTEGRATION_GITHUB
} from '../variables';
export interface IIntegrationAuth {
_id: Types.ObjectId;
workspace: Types.ObjectId;
integration: 'heroku' | 'vercel' | 'netlify' | 'github' | 'render' | 'flyio' | 'azure-key-vault' | 'circleci' | 'aws-parameter-store' | 'aws-secret-manager';
integration: 'heroku' | 'vercel' | 'netlify' | 'github' | 'render' | 'flyio';
teamId: string;
accountId: string;
refreshCiphertext?: string;
refreshIV?: string;
refreshTag?: string;
accessIdCiphertext?: string; // new
accessIdIV?: string; // new
accessIdTag?: string; // new
accessCiphertext?: string;
accessIV?: string;
accessTag?: string;
@ -33,82 +24,64 @@ export interface IIntegrationAuth {
const integrationAuthSchema = new Schema<IIntegrationAuth>(
{
workspace: {
type: Schema.Types.ObjectId,
ref: "Workspace",
required: true,
type: Schema.Types.ObjectId,
ref: 'Workspace',
required: true
},
integration: {
type: String,
enum: [
INTEGRATION_AZURE_KEY_VAULT,
INTEGRATION_AWS_PARAMETER_STORE,
INTEGRATION_AWS_SECRET_MANAGER,
INTEGRATION_HEROKU,
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_RENDER,
INTEGRATION_FLYIO,
INTEGRATION_CIRCLECI,
INTEGRATION_GITHUB
],
required: true,
required: true
},
teamId: {
// vercel-specific integration param
type: String,
type: String
},
accountId: {
// netlify-specific integration param
type: String,
type: String
},
refreshCiphertext: {
type: String,
select: false,
select: false
},
refreshIV: {
type: String,
select: false,
select: false
},
refreshTag: {
type: String,
select: false,
},
accessIdCiphertext: {
type: String,
select: false
},
accessIdIV: {
type: String,
select: false
},
accessIdTag: {
type: String,
select: false
},
accessCiphertext: {
type: String,
select: false,
select: false
},
accessIV: {
type: String,
select: false,
select: false
},
accessTag: {
type: String,
select: false,
select: false
},
accessExpiresAt: {
type: Date,
select: false,
},
select: false
}
},
{
timestamps: true,
timestamps: true
}
);
const IntegrationAuth = model<IIntegrationAuth>(
"IntegrationAuth",
'IntegrationAuth',
integrationAuthSchema
);

View File

@ -1,29 +0,0 @@
import mongoose, { Schema, model, Types } from 'mongoose';
export interface ILoginSRPDetail {
_id: Types.ObjectId;
clientPublicKey: string;
email: string;
serverBInt: mongoose.Schema.Types.Buffer;
expireAt: Date;
}
const loginSRPDetailSchema = new Schema<ILoginSRPDetail>(
{
clientPublicKey: {
type: String,
required: true
},
email: {
type: String,
required: true,
unique: true
},
serverBInt: { type: mongoose.Schema.Types.Buffer },
expireAt: { type: Date }
}
);
const LoginSRPDetail = model('LoginSRPDetail', loginSRPDetailSchema);
export default LoginSRPDetail;

View File

@ -23,7 +23,6 @@ export interface ISecret {
secretCommentIV?: string;
secretCommentTag?: string;
secretCommentHash?: string;
tags?: string[];
}
const secretSchema = new Schema<ISecret>(
@ -48,11 +47,6 @@ const secretSchema = new Schema<ISecret>(
type: Schema.Types.ObjectId,
ref: 'User'
},
tags: {
ref: 'Tag',
type: [Schema.Types.ObjectId],
default: []
},
environment: {
type: String,
required: true
@ -109,9 +103,6 @@ const secretSchema = new Schema<ISecret>(
}
);
secretSchema.index({ tags: 1 }, { background: true })
const Secret = model<ISecret>('Secret', secretSchema);
export default Secret;

View File

@ -1,83 +0,0 @@
import mongoose, { Schema, model } from 'mongoose';
import Secret, { ISecret } from './secret';
interface ISecretApprovalRequest {
secret: mongoose.Types.ObjectId;
requestedChanges: ISecret;
requestedBy: mongoose.Types.ObjectId;
approvers: IApprover[];
status: ApprovalStatus;
timestamp: Date;
requestType: RequestType;
requestId: string;
}
interface IApprover {
userId: mongoose.Types.ObjectId;
status: ApprovalStatus;
}
export enum ApprovalStatus {
PENDING = 'pending',
APPROVED = 'approved',
REJECTED = 'rejected'
}
export enum RequestType {
UPDATE = 'update',
DELETE = 'delete',
CREATE = 'create'
}
const approverSchema = new mongoose.Schema({
user: {
type: mongoose.Schema.Types.ObjectId,
ref: 'User',
required: true
},
status: {
type: String,
enum: [ApprovalStatus],
default: ApprovalStatus.PENDING
}
});
const secretApprovalRequestSchema = new Schema<ISecretApprovalRequest>(
{
secret: {
type: mongoose.Schema.Types.ObjectId,
ref: 'Secret'
},
requestedChanges: Secret,
requestedBy: {
type: mongoose.Schema.Types.ObjectId,
ref: 'User'
},
approvers: [approverSchema],
status: {
type: String,
enum: ApprovalStatus,
default: ApprovalStatus.PENDING
},
timestamp: {
type: Date,
default: Date.now
},
requestType: {
type: String,
enum: RequestType,
required: true
},
requestId: {
type: String,
required: false
}
},
{
timestamps: true
}
);
const SecretApprovalRequest = model<ISecretApprovalRequest>('SecretApprovalRequest', secretApprovalRequestSchema);
export default SecretApprovalRequest;

View File

@ -1,49 +0,0 @@
import { Schema, model, Types } from 'mongoose';
export interface ITag {
_id: Types.ObjectId;
name: string;
slug: string;
user: Types.ObjectId;
workspace: Types.ObjectId;
}
const tagSchema = new Schema<ITag>(
{
name: {
type: String,
required: true,
trim: true,
},
slug: {
type: String,
required: true,
trim: true,
lowercase: true,
validate: [
function (value: any) {
return value.indexOf(' ') === -1;
},
'slug cannot contain spaces'
]
},
user: {
type: Schema.Types.ObjectId,
ref: 'User'
},
workspace: {
type: Schema.Types.ObjectId,
ref: 'Workspace'
},
},
{
timestamps: true
}
);
tagSchema.index({ slug: 1, workspace: 1 }, { unique: true })
tagSchema.index({ workspace: 1 })
const Tag = model<ITag>('Tag', tagSchema);
export default Tag;
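
The `Tag` model above, together with the `tags` field and `{ tags: 1 }` index removed from the secret schema earlier in this diff, exists so that secrets can be filtered by tag slugs (the `tagSlugs` query parameter on the v2 secrets route). The following is a hypothetical Mongoose sketch of that lookup; the import paths and function name are illustrative, not taken from the codebase:

```typescript
import { Types } from 'mongoose';
import Tag from '../models/tag';       // illustrative import paths
import Secret from '../models/secret';

// Resolve comma-separated tag slugs (e.g. "backend,ci") to tag ids scoped to a
// workspace, then return only the secrets carrying at least one of those tags.
const getSecretsByTagSlugs = async (workspaceId: string, environment: string, tagSlugs: string) => {
  const slugs = tagSlugs
    .split(',')
    .map((slug) => slug.trim().toLowerCase())
    .filter(Boolean);

  const tags = await Tag.find({
    workspace: new Types.ObjectId(workspaceId),
    slug: { $in: slugs }
  });

  return Secret.find({
    workspace: new Types.ObjectId(workspaceId),
    environment,
    tags: { $in: tags.map((tag) => tag._id) } // served by the { tags: 1 } index
  });
};
```

The compound `{ slug: 1, workspace: 1 }` unique index above is what keeps slugs unique per workspace, so a list of slugs is enough to identify the tags to filter on.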

View File

@ -5,7 +5,6 @@ export interface IToken {
email: string;
token: string;
createdAt: Date;
ttl: Number;
}
const tokenSchema = new Schema<IToken>({
@ -20,13 +19,14 @@ const tokenSchema = new Schema<IToken>({
createdAt: {
type: Date,
default: Date.now
},
ttl: {
type: Number,
}
});
tokenSchema.index({ email: 1 });
tokenSchema.index({
createdAt: 1
}, {
expireAfterSeconds: parseInt(EMAIL_TOKEN_LIFETIME)
});
const Token = model<IToken>('Token', tokenSchema);

View File

@ -12,7 +12,6 @@ export interface IUser {
salt?: string;
verifier?: string;
refreshVersion?: number;
seenIps: [string];
}
const userSchema = new Schema<IUser>(
@ -55,8 +54,7 @@ const userSchema = new Schema<IUser>(
type: Number,
default: 0,
select: false
},
seenIps: [String]
}
},
{
timestamps: true

View File

@ -8,7 +8,6 @@ export interface IWorkspace {
name: string;
slug: string;
}>;
autoCapitalization: boolean;
}
const workspaceSchema = new Schema<IWorkspace>({
@ -16,10 +15,6 @@ const workspaceSchema = new Schema<IWorkspace>({
type: String,
required: true
},
autoCapitalization: {
type: Boolean,
default: true,
},
organization: {
type: Schema.Types.ObjectId,
ref: 'Organization',

View File

@ -10,7 +10,7 @@ import { ADMIN, MEMBER } from '../../variables';
import { body, param } from 'express-validator';
import { integrationController } from '../../controllers/v1';
router.post( // new: add new integration for integration auth
router.post( // new: add new integration
'/',
requireAuth({
acceptedAuthModes: ['jwt', 'apiKey']
@ -19,15 +19,7 @@ router.post( // new: add new integration for integration auth
acceptedRoles: [ADMIN, MEMBER],
location: 'body'
}),
body('integrationAuthId').exists().isString().trim(),
body('app').trim(),
body('isActive').exists().isBoolean(),
body('appId').trim(),
body('sourceEnvironment').trim(),
body('targetEnvironment').trim(),
body('owner').trim(),
body('path').trim(),
body('region').trim(),
body('integrationAuthId').exists().trim(),
validateRequest,
integrationController.createIntegration
);

View File

@ -18,19 +18,6 @@ router.get(
integrationAuthController.getIntegrationOptions
);
router.get(
'/:integrationAuthId',
requireAuth({
acceptedAuthModes: ['jwt']
}),
requireIntegrationAuthorizationAuth({
acceptedRoles: [ADMIN, MEMBER]
}),
param('integrationAuthId'),
validateRequest,
integrationAuthController.getIntegrationAuth
);
router.post(
'/oauth-token',
requireAuth({
@ -57,7 +44,6 @@ router.post(
location: 'body'
}),
body('workspaceId').exists().trim().notEmpty(),
body('accessId').trim(),
body('accessToken').exists().trim().notEmpty(),
body('integration').exists().trim().notEmpty(),
validateRequest,

View File

@ -156,19 +156,4 @@ router.get(
organizationController.getOrganizationSubscriptions
);
router.get(
'/:organizationId/workspace-memberships',
requireAuth({
acceptedAuthModes: ['jwt']
}),
requireOrganizationAuth({
acceptedRoles: [OWNER, ADMIN, MEMBER],
acceptedStatuses: [ACCEPTED]
}),
param('organizationId').exists().trim(),
validateRequest,
organizationController.getOrganizationMembersAndTheirWorkspaces
);
export default router;

View File

@ -6,7 +6,6 @@ import secrets from './secrets';
import serviceTokenData from './serviceTokenData';
import apiKeyData from './apiKeyData';
import environment from "./environment"
import tags from "./tags"
export {
users,
@ -16,6 +15,5 @@ export {
secrets,
serviceTokenData,
apiKeyData,
environment,
tags
environment
}

View File

@ -30,7 +30,7 @@ router.patch(
'/:organizationId/memberships/:membershipId',
param('organizationId').exists().trim(),
param('membershipId').exists().trim(),
body('role').exists().isString().trim().isIn([OWNER, ADMIN, MEMBER]),
body('role').exists().isString().trim().isIn([ADMIN, MEMBER]),
validateRequest,
requireAuth({
acceptedAuthModes: ['jwt', 'apiKey']

View File

@ -32,7 +32,7 @@ router.post(
!secret.secretKeyCiphertext ||
!secret.secretKeyIV ||
!secret.secretKeyTag ||
(typeof secret.secretValueCiphertext !== 'string') ||
!secret.secretValueCiphertext ||
!secret.secretValueIV ||
!secret.secretValueTag
) {
@ -74,7 +74,6 @@ router.get(
'/',
query('workspaceId').exists().trim(),
query('environment').exists().trim(),
query('tagSlugs'),
validateRequest,
requireAuth({
acceptedAuthModes: ['jwt', 'apiKey', 'serviceToken']

View File

@ -1,50 +0,0 @@
import express, { Response, Request } from 'express';
const router = express.Router();
import { body, param } from 'express-validator';
import { tagController } from '../../controllers/v2';
import {
requireAuth,
requireWorkspaceAuth,
validateRequest,
} from '../../middleware';
import { ADMIN, MEMBER } from '../../variables';
router.get(
'/:workspaceId/tags',
requireAuth({
acceptedAuthModes: ['jwt'],
}),
requireWorkspaceAuth({
acceptedRoles: [MEMBER, ADMIN],
}),
param('workspaceId').exists().trim(),
validateRequest,
tagController.getWorkspaceTags
);
router.delete(
'/tags/:tagId',
requireAuth({
acceptedAuthModes: ['jwt'],
}),
param('tagId').exists().trim(),
validateRequest,
tagController.deleteWorkspaceTag
);
router.post(
'/:workspaceId/tags',
requireAuth({
acceptedAuthModes: ['jwt'],
}),
requireWorkspaceAuth({
acceptedRoles: [MEMBER, ADMIN],
}),
param('workspaceId').exists().trim(),
body('name').exists().trim(),
body('slug').exists().trim(),
validateRequest,
tagController.createWorkspaceTag
);
export default router;

View File

@ -118,19 +118,4 @@ router.delete( // TODO - rewire dashboard to this route
workspaceController.deleteWorkspaceMembership
);
router.patch(
'/:workspaceId/auto-capitalization',
requireAuth({
acceptedAuthModes: ['jwt']
}),
requireWorkspaceAuth({
acceptedRoles: [ADMIN, MEMBER]
}),
param('workspaceId').exists().trim(),
body('autoCapitalization').exists().trim().notEmpty(),
validateRequest,
workspaceController.toggleAutoCapitalization
);
export default router;

View File

@ -1,3 +1,7 @@
import * as Sentry from '@sentry/node';
import {
Integration
} from '../models';
import {
handleOAuthExchangeHelper,
syncIntegrationsHelper,
@ -6,6 +10,7 @@ import {
setIntegrationAuthRefreshHelper,
setIntegrationAuthAccessHelper,
} from '../helpers/integration';
import { exchangeCode } from '../integrations';
// should sync stuff be here too? Probably.
// TODO: move bot functions to IntegrationService.
@ -21,12 +26,11 @@ class IntegrationService {
* - Store integration access and refresh tokens returned from the OAuth2 code-token exchange
* - Add placeholder inactive integration
* - Create bot sequence for integration
* @param {Object} obj1
* @param {String} obj1.workspaceId - id of workspace
* @param {String} obj1.environment - workspace environment
* @param {String} obj1.integration - name of integration
* @param {String} obj1.code - code
* @returns {IntegrationAuth} integrationAuth - integration authorization after OAuth2 code-token exchange
* @param {Object} obj
* @param {String} obj.workspaceId - id of workspace
* @param {String} obj.environment - workspace environment
* @param {String} obj.integration - name of integration
* @param {String} obj.code - code
*/
static async handleOAuthExchange({
workspaceId,
@ -39,7 +43,7 @@ class IntegrationService {
code: string;
environment: string;
}) {
return await handleOAuthExchangeHelper({
await handleOAuthExchangeHelper({
workspaceId,
integration,
code,
@ -112,30 +116,26 @@ class IntegrationService {
}
/**
* Encrypt access token [accessToken] and (optionally) access id using the
* bot's copy of the workspace key for workspace belonging to integration auth
* Encrypt access token [accessToken] using the bot's copy
* of the workspace key for workspace belonging to integration auth
* with id [integrationAuthId]
* @param {Object} obj
* @param {String} obj.integrationAuthId - id of integration auth
* @param {String} obj.accessId - access id
* @param {String} obj.accessToken - access token
* @param {Date} obj.accessExpiresAt - expiration date of access token
* @returns {IntegrationAuth} - updated integration auth
*/
static async setIntegrationAuthAccess({
integrationAuthId,
accessId,
accessToken,
accessExpiresAt
}: {
integrationAuthId: string;
accessId: string | null;
accessToken: string;
accessExpiresAt: Date | undefined;
}) {
return await setIntegrationAuthAccessHelper({
integrationAuthId,
accessId,
accessToken,
accessExpiresAt
});
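
The doc-comment changes above describe how these two service methods are meant to be called; for orientation, here is a brief hypothetical usage sketch written against the new signatures shown in this hunk. The import path and wrapper function names are illustrative only:

```typescript
import IntegrationService from '../services/IntegrationService'; // illustrative import path

// OAuth2-based integrations (e.g. Heroku, Vercel): exchange the OAuth2 code for
// tokens; the service stores them and creates the bot sequence for the workspace.
const completeOAuthExchange = async (workspaceId: string, integration: string, code: string, environment: string) =>
  IntegrationService.handleOAuthExchange({
    workspaceId,
    integration,
    code,
    environment
  });

// Token-based integrations (e.g. a pasted personal access token): encrypt and
// store the access token (and optional access id) for an existing integration auth.
const saveIntegrationAccessToken = async (integrationAuthId: string, accessToken: string, accessId: string | null = null) =>
  IntegrationService.setIntegrationAuthAccess({
    integrationAuthId,
    accessId,
    accessToken,
    accessExpiresAt: undefined
  });
```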

View File

@ -6,9 +6,10 @@
<title>Email Verification</title>
</head>
<body>
<h2>Infisical</h2>
<h2>Confirm your email address</h2>
<p>Your confirmation code is below — enter it in the browser window where you've started signing up for Infisical.</p>
<h1>{{code}}</h1>
<h2>{{code}}</h2>
<p>Questions about setting up Infisical? Email us at support@infisical.com</p>
</body>
</html>

View File

@ -7,10 +7,12 @@
<title>Organization Invitation</title>
</head>
<body>
<h2>Join your organization on Infisical</h2>
<p>{{inviterFirstName}} ({{inviterEmail}}) has invited you to their Infisical organization — {{organizationName}}</p>
<h2>Infisical</h2>
<h2>Join your team on Infisical</h2>
<p>{{inviterFirstName}}({{inviterEmail}}) has invited you to their Infisical organization — {{organizationName}}</p>
<a href="{{callback_url}}?token={{token}}&to={{email}}">Join now</a>
<h3>What is Infisical?</h3>
<p>Infisical is an easy-to-use end-to-end encrypted tool that enables developers to sync and manage their secrets and configs.</p>
<p>Infisical is a simple end-to-end encrypted solution that enables teams to sync and manage their environment
variables.</p>
</body>
</html>

View File

@ -6,6 +6,7 @@
<title>Account Recovery</title>
</head>
<body>
<h2>Infisical</h2>
<h2>Reset your password</h2>
<p>Someone requested a password reset.</p>
<a href="{{callback_url}}?token={{token}}&to={{email}}">Reset password</a>

View File

@ -6,10 +6,11 @@
<title>Project Invitation</title>
</head>
<body>
<h2>Infisical</h2>
<h2>Join your team on Infisical</h2>
<p>{{inviterFirstName}} ({{inviterEmail}}) has invited you to their Infisical project — {{workspaceName}}</p>
<p>{{inviterFirstName}}({{inviterEmail}}) has invited you to their Infisical workspace{{workspaceName}}</p>
<a href="{{callback_url}}">Join now</a>
<h3>What is Infisical?</h3>
<p>Infisical is an easy-to-use end-to-end encrypted tool that enables developers to sync and manage their secrets and configs.</p>
<p>Infisical is a simple end-to-end encrypted solution that enables teams to sync and manage their environment variables.</p>
</body>
</html>

View File

@ -1,13 +1,9 @@
const ACTION_LOGIN = 'login';
const ACTION_LOGOUT = 'logout';
const ACTION_ADD_SECRETS = 'addSecrets';
const ACTION_DELETE_SECRETS = 'deleteSecrets';
const ACTION_UPDATE_SECRETS = 'updateSecrets';
const ACTION_READ_SECRETS = 'readSecrets';
export {
ACTION_LOGIN,
ACTION_LOGOUT,
ACTION_ADD_SECRETS,
ACTION_DELETE_SECRETS,
ACTION_UPDATE_SECRETS,

View File

@ -3,22 +3,17 @@ import {
ENV_TESTING,
ENV_STAGING,
ENV_PROD,
ENV_SET,
} from "./environment";
ENV_SET
} from './environment';
import {
INTEGRATION_AZURE_KEY_VAULT,
INTEGRATION_AWS_PARAMETER_STORE,
INTEGRATION_AWS_SECRET_MANAGER,
INTEGRATION_HEROKU,
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_RENDER,
INTEGRATION_FLYIO,
INTEGRATION_CIRCLECI,
INTEGRATION_SET,
INTEGRATION_OAUTH2,
INTEGRATION_AZURE_TOKEN_URL,
INTEGRATION_HEROKU_TOKEN_URL,
INTEGRATION_VERCEL_TOKEN_URL,
INTEGRATION_NETLIFY_TOKEN_URL,
@ -28,22 +23,25 @@ import {
INTEGRATION_NETLIFY_API_URL,
INTEGRATION_RENDER_API_URL,
INTEGRATION_FLYIO_API_URL,
INTEGRATION_CIRCLECI_API_URL,
INTEGRATION_OPTIONS,
} from "./integration";
import { OWNER, ADMIN, MEMBER, INVITED, ACCEPTED } from "./organization";
import { SECRET_SHARED, SECRET_PERSONAL } from "./secret";
import { EVENT_PUSH_SECRETS, EVENT_PULL_SECRETS } from "./event";
INTEGRATION_OPTIONS
} from './integration';
import {
OWNER,
ADMIN,
MEMBER,
INVITED,
ACCEPTED,
} from './organization';
import { SECRET_SHARED, SECRET_PERSONAL } from './secret';
import { EVENT_PUSH_SECRETS, EVENT_PULL_SECRETS } from './event';
import {
ACTION_LOGIN,
ACTION_LOGOUT,
ACTION_ADD_SECRETS,
ACTION_UPDATE_SECRETS,
ACTION_DELETE_SECRETS,
ACTION_READ_SECRETS,
} from "./action";
import { SMTP_HOST_SENDGRID, SMTP_HOST_MAILGUN } from "./smtp";
import { PLAN_STARTER, PLAN_PRO } from "./stripe";
ACTION_READ_SECRETS
} from './action';
import { SMTP_HOST_SENDGRID, SMTP_HOST_MAILGUN } from './smtp';
import { PLAN_STARTER, PLAN_PRO } from './stripe';
export {
OWNER,
@ -58,19 +56,14 @@ export {
ENV_STAGING,
ENV_PROD,
ENV_SET,
INTEGRATION_AZURE_KEY_VAULT,
INTEGRATION_AWS_PARAMETER_STORE,
INTEGRATION_AWS_SECRET_MANAGER,
INTEGRATION_HEROKU,
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_RENDER,
INTEGRATION_FLYIO,
INTEGRATION_CIRCLECI,
INTEGRATION_SET,
INTEGRATION_OAUTH2,
INTEGRATION_AZURE_TOKEN_URL,
INTEGRATION_HEROKU_TOKEN_URL,
INTEGRATION_VERCEL_TOKEN_URL,
INTEGRATION_NETLIFY_TOKEN_URL,
@ -80,11 +73,8 @@ export {
INTEGRATION_NETLIFY_API_URL,
INTEGRATION_RENDER_API_URL,
INTEGRATION_FLYIO_API_URL,
INTEGRATION_CIRCLECI_API_URL,
EVENT_PUSH_SECRETS,
EVENT_PULL_SECRETS,
ACTION_LOGIN,
ACTION_LOGOUT,
ACTION_ADD_SECRETS,
ACTION_UPDATE_SECRETS,
ACTION_DELETE_SECRETS,

View File

@ -1,55 +1,43 @@
import {
CLIENT_ID_AZURE,
TENANT_ID_AZURE
CLIENT_ID_HEROKU,
CLIENT_ID_NETLIFY,
CLIENT_ID_GITHUB,
CLIENT_SLUG_VERCEL
} from '../config';
import {
CLIENT_ID_HEROKU,
CLIENT_ID_NETLIFY,
CLIENT_ID_GITHUB,
CLIENT_SLUG_VERCEL,
} from "../config";
// integrations
const INTEGRATION_AZURE_KEY_VAULT = 'azure-key-vault';
const INTEGRATION_AWS_PARAMETER_STORE = 'aws-parameter-store';
const INTEGRATION_AWS_SECRET_MANAGER = 'aws-secret-manager';
const INTEGRATION_HEROKU = "heroku";
const INTEGRATION_VERCEL = "vercel";
const INTEGRATION_NETLIFY = "netlify";
const INTEGRATION_GITHUB = "github";
const INTEGRATION_RENDER = "render";
const INTEGRATION_FLYIO = "flyio";
const INTEGRATION_CIRCLECI = "circleci";
const INTEGRATION_HEROKU = 'heroku';
const INTEGRATION_VERCEL = 'vercel';
const INTEGRATION_NETLIFY = 'netlify';
const INTEGRATION_GITHUB = 'github';
const INTEGRATION_RENDER = 'render';
const INTEGRATION_FLYIO = 'flyio';
const INTEGRATION_SET = new Set([
INTEGRATION_AZURE_KEY_VAULT,
INTEGRATION_HEROKU,
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_RENDER,
INTEGRATION_FLYIO,
INTEGRATION_CIRCLECI,
INTEGRATION_HEROKU,
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_RENDER,
INTEGRATION_FLYIO
]);
// integration types
const INTEGRATION_OAUTH2 = "oauth2";
const INTEGRATION_OAUTH2 = 'oauth2';
// integration oauth endpoints
const INTEGRATION_AZURE_TOKEN_URL = `https://login.microsoftonline.com/${TENANT_ID_AZURE}/oauth2/v2.0/token`;
const INTEGRATION_HEROKU_TOKEN_URL = 'https://id.heroku.com/oauth/token';
const INTEGRATION_VERCEL_TOKEN_URL =
"https://api.vercel.com/v2/oauth/access_token";
const INTEGRATION_NETLIFY_TOKEN_URL = "https://api.netlify.com/oauth/token";
'https://api.vercel.com/v2/oauth/access_token';
const INTEGRATION_NETLIFY_TOKEN_URL = 'https://api.netlify.com/oauth/token';
const INTEGRATION_GITHUB_TOKEN_URL =
"https://github.com/login/oauth/access_token";
'https://github.com/login/oauth/access_token';
// integration apps endpoints
const INTEGRATION_HEROKU_API_URL = "https://api.heroku.com";
const INTEGRATION_VERCEL_API_URL = "https://api.vercel.com";
const INTEGRATION_NETLIFY_API_URL = "https://api.netlify.com";
const INTEGRATION_RENDER_API_URL = "https://api.render.com";
const INTEGRATION_FLYIO_API_URL = "https://api.fly.io/graphql";
const INTEGRATION_CIRCLECI_API_URL = "https://circleci.com/api";
const INTEGRATION_HEROKU_API_URL = 'https://api.heroku.com';
const INTEGRATION_VERCEL_API_URL = 'https://api.vercel.com';
const INTEGRATION_NETLIFY_API_URL = 'https://api.netlify.com';
const INTEGRATION_RENDER_API_URL = 'https://api.render.com';
const INTEGRATION_FLYIO_API_URL = 'https://api.fly.io/graphql';
const INTEGRATION_OPTIONS = [
{
@ -102,46 +90,9 @@ const INTEGRATION_OPTIONS = [
name: 'Fly.io',
slug: 'flyio',
image: 'Flyio.svg',
isAvailable: true,
type: 'pat',
clientId: '',
docsLink: ''
},
{
name: 'AWS Parameter Store',
slug: 'aws-parameter-store',
image: 'Amazon Web Services.png',
isAvailable: true,
type: 'custom',
clientId: '',
docsLink: ''
},
{
name: 'AWS Secret Manager',
slug: 'aws-secret-manager',
image: 'Amazon Web Services.png',
isAvailable: true,
type: 'custom',
clientId: '',
docsLink: ''
},
{
name: 'Circle CI',
slug: 'circleci',
image: 'Circle CI.png',
isAvailable: true,
type: 'pat',
clientId: '',
docsLink: ''
},
{
name: 'Azure Key Vault',
slug: 'azure-key-vault',
image: 'Microsoft Azure.png',
isAvailable: false,
type: 'oauth',
clientId: CLIENT_ID_AZURE,
tenantId: TENANT_ID_AZURE,
type: 'pat',
clientId: '',
docsLink: ''
},
{
@ -153,6 +104,24 @@ const INTEGRATION_OPTIONS = [
clientId: '',
docsLink: ''
},
{
name: 'Amazon Web Services',
slug: 'aws',
image: 'Amazon Web Services.png',
isAvailable: false,
type: '',
clientId: '',
docsLink: ''
},
{
name: 'Microsoft Azure',
slug: 'azure',
image: 'Microsoft Azure.png',
isAvailable: false,
type: '',
clientId: '',
docsLink: ''
},
{
name: 'Travis CI',
slug: 'travisci',
@ -161,32 +130,35 @@ const INTEGRATION_OPTIONS = [
type: '',
clientId: '',
docsLink: ''
},
{
name: 'Circle CI',
slug: 'circleci',
image: 'Circle CI.png',
isAvailable: false,
type: '',
clientId: '',
docsLink: ''
}
]
export {
INTEGRATION_AZURE_KEY_VAULT,
INTEGRATION_AWS_PARAMETER_STORE,
INTEGRATION_AWS_SECRET_MANAGER,
INTEGRATION_HEROKU,
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_RENDER,
INTEGRATION_FLYIO,
INTEGRATION_CIRCLECI,
INTEGRATION_SET,
INTEGRATION_OAUTH2,
INTEGRATION_AZURE_TOKEN_URL,
INTEGRATION_HEROKU_TOKEN_URL,
INTEGRATION_VERCEL_TOKEN_URL,
INTEGRATION_NETLIFY_TOKEN_URL,
INTEGRATION_GITHUB_TOKEN_URL,
INTEGRATION_HEROKU_API_URL,
INTEGRATION_VERCEL_API_URL,
INTEGRATION_NETLIFY_API_URL,
INTEGRATION_RENDER_API_URL,
INTEGRATION_FLYIO_API_URL,
INTEGRATION_CIRCLECI_API_URL,
INTEGRATION_OPTIONS,
INTEGRATION_HEROKU,
INTEGRATION_VERCEL,
INTEGRATION_NETLIFY,
INTEGRATION_GITHUB,
INTEGRATION_RENDER,
INTEGRATION_FLYIO,
INTEGRATION_SET,
INTEGRATION_OAUTH2,
INTEGRATION_HEROKU_TOKEN_URL,
INTEGRATION_VERCEL_TOKEN_URL,
INTEGRATION_NETLIFY_TOKEN_URL,
INTEGRATION_GITHUB_TOKEN_URL,
INTEGRATION_HEROKU_API_URL,
INTEGRATION_VERCEL_API_URL,
INTEGRATION_NETLIFY_API_URL,
INTEGRATION_RENDER_API_URL,
INTEGRATION_FLYIO_API_URL,
INTEGRATION_OPTIONS
};

View File

@ -1,16 +1,24 @@
// membership roles
const OWNER = "owner";
const ADMIN = "admin";
const MEMBER = "member";
const OWNER = 'owner';
const ADMIN = 'admin';
const MEMBER = 'member';
// membership statuses
const INVITED = "invited";
const INVITED = 'invited';
// membership permissions ability
const ABILITY_READ = "read";
const ABILITY_WRITE = "write";
const ABILITY_READ = 'read';
const ABILITY_WRITE = 'write';
// -- organization
const ACCEPTED = "accepted";
const ACCEPTED = 'accepted';
export { OWNER, ADMIN, MEMBER, INVITED, ACCEPTED, ABILITY_READ, ABILITY_WRITE };
export {
OWNER,
ADMIN,
MEMBER,
INVITED,
ACCEPTED,
ABILITY_READ,
ABILITY_WRITE
}

View File

@ -1,4 +0,0 @@
FROM alpine
RUN apk add --no-cache tini
COPY infisical /bin/infisical
ENTRYPOINT ["/sbin/tini", "--", "/bin/infisical"]

View File

@ -1,5 +1,5 @@
/*
Copyright (c) 2023 Infisical Inc.
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
*/
package main

View File

@ -5,7 +5,6 @@ import (
"github.com/Infisical/infisical-merge/packages/config"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
const USER_AGENT = "cli"
@ -114,7 +113,6 @@ func CallGetSecretsV2(httpClient *resty.Client, request GetEncryptedSecretsV2Req
SetHeader("User-Agent", USER_AGENT).
SetQueryParam("environment", request.Environment).
SetQueryParam("workspaceId", request.WorkspaceId).
SetQueryParam("tagSlugs", request.TagSlugs).
Get(fmt.Sprintf("%v/v2/secrets", config.INFISICAL_URL))
if err != nil {
@ -146,42 +144,3 @@ func CallGetAllWorkSpacesUserBelongsTo(httpClient *resty.Client) (GetWorkSpacesR
return workSpacesResponse, nil
}
func CallIsAuthenticated(httpClient *resty.Client) bool {
var workSpacesResponse GetWorkSpacesResponse
response, err := httpClient.
R().
SetResult(&workSpacesResponse).
SetHeader("User-Agent", USER_AGENT).
Post(fmt.Sprintf("%v/v1/auth/checkAuth", config.INFISICAL_URL))
if err != nil {
return false
}
if response.IsError() {
log.Debugln(fmt.Errorf("CallIsAuthenticated: Unsuccessful response: [response=%v]", response))
return false
}
return true
}
func CallGetAccessibleEnvironments(httpClient *resty.Client, request GetAccessibleEnvironmentsRequest) (GetAccessibleEnvironmentsResponse, error) {
var accessibleEnvironmentsResponse GetAccessibleEnvironmentsResponse
response, err := httpClient.
R().
SetResult(&accessibleEnvironmentsResponse).
SetHeader("User-Agent", USER_AGENT).
Get(fmt.Sprintf("%v/v2/workspace/%s/environments", config.INFISICAL_URL, request.WorkspaceId))
if err != nil {
return GetAccessibleEnvironmentsResponse{}, err
}
if response.IsError() {
return GetAccessibleEnvironmentsResponse{}, fmt.Errorf("CallGetAccessibleEnvironments: Unsuccessful response: [response=%v]", response)
}
return accessibleEnvironmentsResponse, nil
}

View File

@ -197,35 +197,25 @@ type GetSecretsByWorkspaceIdAndEnvironmentRequest struct {
type GetEncryptedSecretsV2Request struct {
Environment string `json:"environment"`
WorkspaceId string `json:"workspaceId"`
TagSlugs string `json:"tagSlugs"`
}
type GetEncryptedSecretsV2Response struct {
Secrets []struct {
ID string `json:"_id"`
Version int `json:"version"`
Workspace string `json:"workspace"`
Type string `json:"type"`
Environment string `json:"environment"`
SecretKeyCiphertext string `json:"secretKeyCiphertext"`
SecretKeyIV string `json:"secretKeyIV"`
SecretKeyTag string `json:"secretKeyTag"`
SecretValueCiphertext string `json:"secretValueCiphertext"`
SecretValueIV string `json:"secretValueIV"`
SecretValueTag string `json:"secretValueTag"`
SecretCommentCiphertext string `json:"secretCommentCiphertext"`
SecretCommentIV string `json:"secretCommentIV"`
SecretCommentTag string `json:"secretCommentTag"`
V int `json:"__v"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
User string `json:"user,omitempty"`
Tags []struct {
ID string `json:"_id"`
Name string `json:"name"`
Slug string `json:"slug"`
Workspace string `json:"workspace"`
} `json:"tags"`
ID string `json:"_id"`
Version int `json:"version"`
Workspace string `json:"workspace"`
Type string `json:"type"`
Environment string `json:"environment"`
SecretKeyCiphertext string `json:"secretKeyCiphertext"`
SecretKeyIV string `json:"secretKeyIV"`
SecretKeyTag string `json:"secretKeyTag"`
SecretValueCiphertext string `json:"secretValueCiphertext"`
SecretValueIV string `json:"secretValueIV"`
SecretValueTag string `json:"secretValueTag"`
V int `json:"__v"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
User string `json:"user,omitempty"`
} `json:"secrets"`
}
@ -251,15 +241,3 @@ type GetServiceTokenDetailsResponse struct {
UpdatedAt time.Time `json:"updatedAt"`
V int `json:"__v"`
}
type GetAccessibleEnvironmentsRequest struct {
WorkspaceId string `json:"workspaceId"`
}
type GetAccessibleEnvironmentsResponse struct {
AccessibleEnvironments []struct {
Name string `json:"name"`
Slug string `json:"slug"`
IsWriteDenied bool `json:"isWriteDenied"`
} `json:"accessibleEnvironments"`
}

View File

@ -1,5 +1,5 @@
/*
Copyright (c) 2023 Infisical Inc.
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
*/
package cmd
@ -56,17 +56,7 @@ var exportCmd = &cobra.Command{
util.HandleError(err, "Unable to parse flag")
}
infisicalToken, err := cmd.Flags().GetString("token")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
tagSlugs, err := cmd.Flags().GetString("tags")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: envName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs})
secrets, err := util.GetAllEnvironmentVariables(envName)
if err != nil {
util.HandleError(err, "Unable to fetch secrets")
}
@ -101,8 +91,6 @@ func init() {
exportCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
exportCmd.Flags().StringP("format", "f", "dotenv", "Set the format of the output file (dotenv, json, csv)")
exportCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
exportCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
exportCmd.Flags().StringP("tags", "t", "", "filter secrets by tag slugs")
}
// Format according to the format flag

View File

@ -1,5 +1,5 @@
/*
Copyright (c) 2023 Infisical Inc.
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
*/
package cmd

View File

@ -1,5 +1,5 @@
/*
Copyright (c) 2023 Infisical Inc.
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
*/
package cmd
@ -33,13 +33,13 @@ var loginCmd = &cobra.Command{
PreRun: toggleDebug,
Run: func(cmd *cobra.Command, args []string) {
currentLoggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
if err != nil && (strings.Contains(err.Error(), "The specified item could not be found in the keyring") || strings.Contains(err.Error(), "unable to get key from Keyring")) { // if the key can't be found allow them to override
if err != nil && strings.Contains(err.Error(), "The specified item could not be found in the keyring") { // if the key can't be found allow them to override
log.Debug(err)
} else if err != nil {
util.HandleError(err)
}
if currentLoggedInUserDetails.IsUserLoggedIn && !currentLoggedInUserDetails.LoginExpired { // if you are logged in but not expired
if currentLoggedInUserDetails.IsUserLoggedIn {
shouldOverride, err := shouldOverrideLoginPrompt(currentLoggedInUserDetails.UserCredentials.Email)
if err != nil {
util.HandleError(err)
@ -101,9 +101,6 @@ var loginCmd = &cobra.Command{
util.HandleError(err, "Unable to write write to Infisical Config file. Please try again")
}
// clear backed up secrets from prev account
util.DeleteBackupSecrets()
color.Green("Nice! You are logged in as: %v", email)
},

View File

@ -1,5 +1,5 @@
/*
Copyright (c) 2023 Infisical Inc.
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
*/
package cmd

View File

@ -1,45 +0,0 @@
/*
Copyright (c) 2023 Infisical Inc.
*/
package cmd
import (
"os"
"github.com/Infisical/infisical-merge/packages/util"
"github.com/spf13/cobra"
)
var resetCmd = &cobra.Command{
Use: "reset",
Short: "Used to delete all Infisical-related data on your machine",
DisableFlagsInUseLine: true,
Example: "infisical reset",
Args: cobra.NoArgs,
PreRun: func(cmd *cobra.Command, args []string) {
toggleDebug(cmd, args)
},
Run: func(cmd *cobra.Command, args []string) {
// delete config
_, pathToDir, err := util.GetFullConfigFilePath()
if err != nil {
util.HandleError(err)
}
os.RemoveAll(pathToDir)
// delete keyring
keyringInstance, err := util.GetKeyRing()
if err != nil {
util.HandleError(err)
}
keyringInstance.Remove(util.KEYRING_SERVICE_NAME)
util.PrintSuccessMessage("Reset successful")
},
}
func init() {
rootCmd.AddCommand(resetCmd)
}

View File

@ -1,5 +1,5 @@
/*
Copyright (c) 2023 Infisical Inc.
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
*/
package cmd

View File

@ -1,5 +1,5 @@
/*
Copyright (c) 2023 Infisical Inc.
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
*/
package cmd
@ -12,7 +12,6 @@ import (
"strings"
"syscall"
"github.com/Infisical/infisical-merge/packages/models"
"github.com/Infisical/infisical-merge/packages/util"
"github.com/fatih/color"
log "github.com/sirupsen/logrus"
@ -59,9 +58,8 @@ var runCmd = &cobra.Command{
util.HandleError(err, "Unable to parse flag")
}
infisicalToken, err := cmd.Flags().GetString("token")
if err != nil {
util.HandleError(err, "Unable to parse flag")
if !util.IsSecretEnvironmentValid(envName) {
util.PrintMessageAndExit("Invalid environment name passed. Environment names can only be prod, dev, test or staging")
}
secretOverriding, err := cmd.Flags().GetBool("secret-overriding")
@ -74,13 +72,7 @@ var runCmd = &cobra.Command{
util.HandleError(err, "Unable to parse flag")
}
tagSlugs, err := cmd.Flags().GetString("tags")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: envName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs})
secrets, err := util.GetAllEnvironmentVariables(envName)
if err != nil {
util.HandleError(err, "Could not fetch secrets", "If you are using a service token to fetch secrets, please ensure it is valid")
}
@ -148,12 +140,10 @@ var runCmd = &cobra.Command{
func init() {
rootCmd.AddCommand(runCmd)
runCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
runCmd.Flags().StringP("env", "e", "dev", "Set the environment (dev, prod, etc.) from which your secrets should be pulled from")
runCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
runCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
runCmd.Flags().StringP("command", "c", "", "chained commands to execute (e.g. \"npm install && npm run dev; echo ...\")")
runCmd.Flags().StringP("tags", "t", "", "filter secrets by tag slugs ")
}
// Will execute a single command and pass in the given secrets into the process

View File

@ -1,13 +1,11 @@
/*
Copyright (c) 2023 Infisical Inc.
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
*/
package cmd
import (
"encoding/base64"
"fmt"
"regexp"
"sort"
"strings"
"unicode"
@ -24,7 +22,7 @@ import (
)
var secretsCmd = &cobra.Command{
Example: `infisical secrets`,
Example: `infisical secrets"`,
Short: "Used to create, read update and delete secrets",
Use: "secrets",
DisableFlagsInUseLine: true,
@ -36,22 +34,12 @@ var secretsCmd = &cobra.Command{
util.HandleError(err)
}
infisicalToken, err := cmd.Flags().GetString("token")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
shouldExpandSecrets, err := cmd.Flags().GetBool("expand")
if err != nil {
util.HandleError(err)
}
tagSlugs, err := cmd.Flags().GetString("tags")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs})
secrets, err := util.GetAllEnvironmentVariables(environmentName)
if err != nil {
util.HandleError(err)
}
@ -74,16 +62,6 @@ var secretsGetCmd = &cobra.Command{
Run: getSecretsByNames,
}
var secretsGenerateExampleEnvCmd = &cobra.Command{
Example: `secrets generate-example-env > .example-env`,
Short: "Used to generate a example .env file",
Use: "generate-example-env",
DisableFlagsInUseLine: true,
Args: cobra.NoArgs,
PreRun: toggleDebug,
Run: generateExampleEnv,
}
var secretsSetCmd = &cobra.Command{
Example: `secrets set <secretName=secretValue> <secretName=secretValue>..."`,
Short: "Used set secrets",
@ -133,7 +111,7 @@ var secretsSetCmd = &cobra.Command{
plainTextEncryptionKey := crypto.DecryptAsymmetric(encryptedWorkspaceKey, encryptedWorkspaceKeyNonce, encryptedWorkspaceKeySenderPublicKey, currentUsersPrivateKey)
// pull current secrets
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName})
secrets, err := util.GetAllEnvironmentVariables(environmentName)
if err != nil {
util.HandleError(err, "unable to retrieve secrets")
}
@ -289,7 +267,7 @@ var secretsDeleteCmd = &cobra.Command{
util.HandleError(err, "Unable to get local project details")
}
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName})
secrets, err := util.GetAllEnvironmentVariables(environmentName)
if err != nil {
util.HandleError(err, "Unable to fetch secrets")
}
@ -331,6 +309,30 @@ var secretsDeleteCmd = &cobra.Command{
},
}
func init() {
secretsCmd.AddCommand(secretsGetCmd)
secretsGetCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
util.RequireLogin()
util.RequireLocalWorkspaceFile()
}
secretsCmd.AddCommand(secretsSetCmd)
secretsSetCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
util.RequireLogin()
util.RequireLocalWorkspaceFile()
}
secretsCmd.AddCommand(secretsDeleteCmd)
secretsDeleteCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
util.RequireLogin()
util.RequireLocalWorkspaceFile()
}
secretsCmd.PersistentFlags().String("env", "dev", "Used to select the environment name on which actions should be taken on")
secretsCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
rootCmd.AddCommand(secretsCmd)
}
func getSecretsByNames(cmd *cobra.Command, args []string) {
environmentName, err := cmd.Flags().GetString("env")
if err != nil {
@ -342,17 +344,7 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
util.HandleError(err, "Unable to parse flag")
}
infisicalToken, err := cmd.Flags().GetString("token")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
tagSlugs, err := cmd.Flags().GetString("tags")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs})
secrets, err := util.GetAllEnvironmentVariables(environmentName)
if err != nil {
util.HandleError(err, "To fetch all secrets")
}
@ -379,176 +371,6 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
visualize.PrintAllSecretDetails(requestedSecrets)
}
func generateExampleEnv(cmd *cobra.Command, args []string) {
environmentName, err := cmd.Flags().GetString("env")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
workspaceFileExists := util.WorkspaceConfigFileExistsInCurrentPath()
if !workspaceFileExists {
util.HandleError(err, "Unable to parse flag")
}
infisicalToken, err := cmd.Flags().GetString("token")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
tagSlugs, err := cmd.Flags().GetString("tags")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs})
if err != nil {
util.HandleError(err, "To fetch all secrets")
}
tagsHashToSecretKey := make(map[string]int)
type TagsAndSecrets struct {
Secrets []models.SingleEnvironmentVariable
Tags []struct {
ID string `json:"_id"`
Name string `json:"name"`
Slug string `json:"slug"`
Workspace string `json:"workspace"`
}
}
// sort secrets by associated tags (most number of tags to least tags)
sort.Slice(secrets, func(i, j int) bool {
return len(secrets[i].Tags) > len(secrets[j].Tags)
})
for _, secret := range secrets {
listOfTagSlugs := []string{}
for _, tag := range secret.Tags {
listOfTagSlugs = append(listOfTagSlugs, tag.Slug)
}
sort.Strings(listOfTagSlugs)
tagsHash := util.GetHashFromStringList(listOfTagSlugs)
tagsHashToSecretKey[tagsHash] += 1
}
finalTagHashToSecretKey := make(map[string]TagsAndSecrets)
for _, secret := range secrets {
listOfTagSlugs := []string{}
for _, tag := range secret.Tags {
listOfTagSlugs = append(listOfTagSlugs, tag.Slug)
}
// sort the slug so we get the same hash each time
sort.Strings(listOfTagSlugs)
tagsHash := util.GetHashFromStringList(listOfTagSlugs)
occurrence, exists := tagsHashToSecretKey[tagsHash]
if exists && occurrence > 0 {
value, exists2 := finalTagHashToSecretKey[tagsHash]
allSecretsForTags := append(value.Secrets, secret)
// sort the secrets by keys so that they can later be sorted by the first item in the secrets array
sort.Slice(allSecretsForTags, func(i, j int) bool {
return allSecretsForTags[i].Key < allSecretsForTags[j].Key
})
if exists2 {
finalTagHashToSecretKey[tagsHash] = TagsAndSecrets{
Tags: secret.Tags,
Secrets: allSecretsForTags,
}
} else {
finalTagHashToSecretKey[tagsHash] = TagsAndSecrets{
Tags: secret.Tags,
Secrets: []models.SingleEnvironmentVariable{secret},
}
}
tagsHashToSecretKey[tagsHash] -= 1
}
}
// sort the final result by secret key for consistent print order
listOfsecretDetails := make([]TagsAndSecrets, 0, len(finalTagHashToSecretKey))
for _, secretDetails := range finalTagHashToSecretKey {
listOfsecretDetails = append(listOfsecretDetails, secretDetails)
}
// sort the order of the headings by the order of the secrets
sort.Slice(listOfsecretDetails, func(i, j int) bool {
return len(listOfsecretDetails[i].Tags) < len(listOfsecretDetails[j].Tags)
})
for _, secretDetails := range listOfsecretDetails {
listOfKeyValue := []string{}
for _, secret := range secretDetails.Secrets {
re := regexp.MustCompile(`(?s)(.*)DEFAULT:(.*)`)
match := re.FindStringSubmatch(secret.Comment)
defaultValue := ""
comment := secret.Comment
// Case: Only has default value
if len(match) == 2 {
defaultValue = strings.TrimSpace(match[1])
}
// Case: has a comment and a default value
if len(match) == 3 {
comment = match[1]
defaultValue = match[2]
}
row := ""
if comment != "" {
comment = addHash(comment)
row = fmt.Sprintf("%s \n%s=%s", strings.TrimSpace(comment), strings.TrimSpace(secret.Key), strings.TrimSpace(defaultValue))
} else {
row = fmt.Sprintf("%s=%s", strings.TrimSpace(secret.Key), strings.TrimSpace(defaultValue))
}
// each secret row to be added to the file
listOfKeyValue = append(listOfKeyValue, row)
}
listOfTagNames := []string{}
for _, tag := range secretDetails.Tags {
listOfTagNames = append(listOfTagNames, tag.Name)
}
heading := CenterString(strings.Join(listOfTagNames, " & "), 80)
if len(listOfTagNames) == 0 {
fmt.Printf("\n%s \n", strings.Join(listOfKeyValue, "\n \n"))
} else {
fmt.Printf("\n\n\n%s \n%s \n", heading, strings.Join(listOfKeyValue, "\n \n"))
}
}
}
func CenterString(s string, numStars int) string {
stars := strings.Repeat("*", numStars)
padding := (numStars - len(s)) / 2
cenetredTextWithStar := stars[:padding] + " " + strings.ToUpper(s) + " " + stars[padding:]
hashes := strings.Repeat("#", len(cenetredTextWithStar)+2)
return fmt.Sprintf("%s \n# %s \n%s", hashes, cenetredTextWithStar, hashes)
}
func addHash(input string) string {
lines := strings.Split(input, "\n")
for i, line := range lines {
lines[i] = "# " + line
}
return strings.Join(lines, "\n")
}
func getSecretsByKeys(secrets []models.SingleEnvironmentVariable) map[string]models.SingleEnvironmentVariable {
secretMapByName := make(map[string]models.SingleEnvironmentVariable)
@ -558,30 +380,3 @@ func getSecretsByKeys(secrets []models.SingleEnvironmentVariable) map[string]mod
return secretMapByName
}
func init() {
secretsGenerateExampleEnvCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
secretsCmd.AddCommand(secretsGenerateExampleEnvCmd)
secretsGetCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
secretsCmd.AddCommand(secretsGetCmd)
secretsCmd.AddCommand(secretsSetCmd)
secretsSetCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
util.RequireLogin()
util.RequireLocalWorkspaceFile()
}
secretsCmd.AddCommand(secretsDeleteCmd)
secretsDeleteCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
util.RequireLogin()
util.RequireLocalWorkspaceFile()
}
secretsCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
secretsCmd.PersistentFlags().String("env", "dev", "Used to select the environment name on which actions should be taken on")
secretsCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
secretsCmd.PersistentFlags().StringP("tags", "t", "", "filter secrets by tag slugs")
rootCmd.AddCommand(secretsCmd)
}

View File

@ -1,5 +1,5 @@
/*
Copyright (c) 2023 Infisical Inc.
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
*/
package cmd

View File

@ -12,11 +12,6 @@ import (
// will decrypt cipher text to plain text using iv and tag
func DecryptSymmetric(key []byte, cipherText []byte, tag []byte, iv []byte) ([]byte, error) {
// Case: empty string
if len(cipherText) == 0 && len(tag) == 0 && len(iv) == 0 {
return []byte{}, nil
}
block, err := aes.NewCipher(key)
if err != nil {
return nil, err
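
The hunk above trims the empty-input guard from the CLI's Go `DecryptSymmetric` helper but cuts off before the actual cipher call. To illustrate the scheme it implements — AES-GCM with a detached auth tag — here is a self-contained TypeScript sketch using Node's `crypto` module (kept in TypeScript for consistency with the other examples in this document); the 32-byte key size and the detached-tag layout are assumptions based on the signature, not a verbatim port:

```typescript
import crypto from 'crypto';

// Same input layout as the Go helper: key, ciphertext, detached auth tag, and IV.
// Empty inputs decrypt to an empty value — the case the removed guard handled.
const decryptSymmetric = (key: Buffer, cipherText: Buffer, tag: Buffer, iv: Buffer): Buffer => {
  if (cipherText.length === 0 && tag.length === 0 && iv.length === 0) {
    return Buffer.alloc(0);
  }
  const decipher = crypto.createDecipheriv('aes-256-gcm', key, iv);
  decipher.setAuthTag(tag); // verified when final() is called
  return Buffer.concat([decipher.update(cipherText), decipher.final()]);
};

// Round trip showing the shape of the inputs (illustrative values).
const key = crypto.randomBytes(32); // AES-256
const iv = crypto.randomBytes(12);  // standard GCM nonce size
const cipher = crypto.createCipheriv('aes-256-gcm', key, iv);
const cipherText = Buffer.concat([cipher.update('SECRET_VALUE', 'utf8'), cipher.final()]);
const tag = cipher.getAuthTag();

console.log(decryptSymmetric(key, cipherText, tag, iv).toString()); // SECRET_VALUE
```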

View File

@ -1,8 +1,6 @@
package models
import (
"github.com/99designs/keyring"
)
import "github.com/99designs/keyring"
type UserCredentials struct {
Email string `json:"email"`
@ -21,13 +19,6 @@ type SingleEnvironmentVariable struct {
Value string `json:"value"`
Type string `json:"type"`
ID string `json:"_id"`
Tags []struct {
ID string `json:"_id"`
Name string `json:"name"`
Slug string `json:"slug"`
Workspace string `json:"workspace"`
} `json:"tags"`
Comment string `json:"comment"`
}
type Workspace struct {
@ -43,13 +34,7 @@ type WorkspaceConfigFile struct {
}
type SymmetricEncryptionResult struct {
CipherText []byte `json:"CipherText"`
Nonce []byte `json:"Nonce"`
AuthTag []byte `json:"AuthTag"`
}
type GetAllSecretsParameters struct {
Environment string
InfisicalToken string
TagSlugs string
CipherText []byte
Nonce []byte
AuthTag []byte
}

View File

@ -2,14 +2,13 @@ package util
import (
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"net/http"
)
func CheckForUpdate() {
latestVersion, err := getLatestTag("Infisical", "infisical")
latestVersion, err := getLatestTag("infisical", "infisical")
if err != nil {
// do nothing and continue
return
@ -25,9 +24,6 @@ func getLatestTag(repoOwner string, repoName string) (string, error) {
if err != nil {
return "", err
}
if resp.StatusCode != 200 {
return "", errors.New(fmt.Sprintf("GitHub API returned status code %d", resp.StatusCode))
}
defer resp.Body.Close()
@ -42,5 +38,5 @@ func getLatestTag(repoOwner string, repoName string) (string, error) {
json.Unmarshal(body, &tags)
return tags[0].Name[1:], nil
return tags[0].Name, nil
}

View File

@ -2,7 +2,6 @@ package util
import (
"fmt"
"net/http"
"os"
)
@ -20,11 +19,3 @@ func WriteToFile(fileName string, dataToWrite []byte, filePerm os.FileMode) erro
return nil
}
func CheckIsConnectedToInternet() (ok bool) {
_, err := http.Get("http://clients3.google.com/generate_204")
if err != nil {
return false
}
return true
}

View File

@ -11,8 +11,5 @@ const (
KEYRING_SERVICE_NAME = "infisical"
PERSONAL_SECRET_TYPE_NAME = "personal"
SHARED_SECRET_TYPE_NAME = "shared"
)
var (
CLI_VERSION = "devel"
CLI_VERSION = "v0.2.7"
)

View File

@ -5,7 +5,7 @@ import (
"fmt"
"github.com/99designs/keyring"
"github.com/Infisical/infisical-merge/packages/api"
"github.com/Infisical/infisical-merge/packages/config"
"github.com/Infisical/infisical-merge/packages/models"
"github.com/go-resty/resty/v2"
)
@ -87,10 +87,17 @@ func GetCurrentLoggedInUserDetails() (LoggedInUserDetails, error) {
SetAuthToken(userCreds.JTWToken).
SetHeader("Accept", "application/json")
isAuthenticated := api.CallIsAuthenticated(httpClient)
if !isAuthenticated {
response, err := httpClient.
R().
Post(fmt.Sprintf("%v/v1/auth/checkAuth", config.INFISICAL_URL))
if err != nil {
return LoggedInUserDetails{}, err
}
if response.StatusCode() > 299 {
return LoggedInUserDetails{
IsUserLoggedIn: true, // was logged in
IsUserLoggedIn: true,
LoginExpired: true,
UserCredentials: userCreds,
}, nil

View File

@ -1,7 +1,6 @@
package util
import (
"crypto/sha256"
"encoding/base64"
"fmt"
"os"
@ -99,14 +98,3 @@ func RequireLocalWorkspaceFile() {
PrintMessageAndExit("Your project id is missing in your local config file. Please add it or run again [infisical init]")
}
}
func GetHashFromStringList(list []string) string {
hash := sha256.New()
for _, item := range list {
hash.Write([]byte(item))
}
sum := sha256.Sum256(hash.Sum(nil))
return fmt.Sprintf("%x", sum)
}

View File

@ -24,11 +24,7 @@ func PrintErrorAndExit(exitCode int, err error, messages ...string) {
}
func PrintWarning(message string) {
color.New(color.FgYellow).Fprintf(os.Stderr, "Warning: %v \n", message)
}
func PrintSuccessMessage(message string) {
color.New(color.FgGreen).Println(message)
color.Yellow("Warning: %v", message)
}
func PrintMessageAndExit(messages ...string) {

View File

@ -2,8 +2,6 @@ package util
import (
"encoding/base64"
"encoding/json"
"errors"
"fmt"
"os"
"regexp"
@ -62,7 +60,7 @@ func GetPlainTextSecretsViaServiceToken(fullServiceToken string) ([]models.Singl
return plainTextSecrets, nil
}
func GetPlainTextSecretsViaJTW(JTWToken string, receiversPrivateKey string, workspaceId string, environmentName string, tagSlugs string) ([]models.SingleEnvironmentVariable, error) {
func GetPlainTextSecretsViaJTW(JTWToken string, receiversPrivateKey string, workspaceId string, environmentName string) ([]models.SingleEnvironmentVariable, error) {
httpClient := resty.New()
httpClient.SetAuthToken(JTWToken).
SetHeader("Accept", "application/json")
@ -85,7 +83,6 @@ func GetPlainTextSecretsViaJTW(JTWToken string, receiversPrivateKey string, work
encryptedSecrets, err := api.CallGetSecretsV2(httpClient, api.GetEncryptedSecretsV2Request{
WorkspaceId: workspaceId,
Environment: environmentName,
TagSlugs: tagSlugs,
})
if err != nil {
@ -100,26 +97,13 @@ func GetPlainTextSecretsViaJTW(JTWToken string, receiversPrivateKey string, work
return plainTextSecrets, nil
}
func GetAllEnvironmentVariables(params models.GetAllSecretsParameters) ([]models.SingleEnvironmentVariable, error) {
var infisicalToken string
if params.InfisicalToken == "" {
infisicalToken = os.Getenv(INFISICAL_TOKEN_NAME)
} else {
infisicalToken = params.InfisicalToken
}
isConnected := CheckIsConnectedToInternet()
var secretsToReturn []models.SingleEnvironmentVariable
var errorToReturn error
func GetAllEnvironmentVariables(envName string) ([]models.SingleEnvironmentVariable, error) {
infisicalToken := os.Getenv(INFISICAL_TOKEN_NAME)
if infisicalToken == "" {
if isConnected {
log.Debug("GetAllEnvironmentVariables: Connected to internet, checking logged in creds")
RequireLocalWorkspaceFile()
RequireLogin()
}
log.Debug("GetAllEnvironmentVariables: Trying to fetch secrets using logged in details")
RequireLocalWorkspaceFile()
RequireLogin()
log.Debug("Trying to fetch secrets using logged in details")
loggedInUserDetails, err := GetCurrentLoggedInUserDetails()
if err != nil {
@ -131,63 +115,13 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters) ([]models
return nil, err
}
// Verify environment
err = ValidateEnvironmentName(params.Environment, workspaceFile.WorkspaceId, loggedInUserDetails.UserCredentials)
if err != nil {
return nil, fmt.Errorf("unable to validate environment name because [err=%s]", err)
}
secretsToReturn, errorToReturn = GetPlainTextSecretsViaJTW(loggedInUserDetails.UserCredentials.JTWToken, loggedInUserDetails.UserCredentials.PrivateKey, workspaceFile.WorkspaceId, params.Environment, params.TagSlugs)
log.Debugf("GetAllEnvironmentVariables: Trying to fetch secrets JTW token [err=%s]", errorToReturn)
backupSecretsEncryptionKey := []byte(loggedInUserDetails.UserCredentials.PrivateKey)[0:32]
if errorToReturn == nil {
WriteBackupSecrets(workspaceFile.WorkspaceId, params.Environment, backupSecretsEncryptionKey, secretsToReturn)
}
// only attempt to serve cached secrets if no internet connection and if at least one secret cached
if !isConnected {
backedSecrets, err := ReadBackupSecrets(workspaceFile.WorkspaceId, params.Environment, backupSecretsEncryptionKey)
if len(backedSecrets) > 0 {
PrintWarning("Unable to fetch latest secret(s) due to connection error, serving secrets from last successful fetch. For more info, run with --debug")
secretsToReturn = backedSecrets
errorToReturn = err
}
}
secrets, err := GetPlainTextSecretsViaJTW(loggedInUserDetails.UserCredentials.JTWToken, loggedInUserDetails.UserCredentials.PrivateKey, workspaceFile.WorkspaceId, envName)
return secrets, err
} else {
log.Debug("Trying to fetch secrets using service token")
secretsToReturn, errorToReturn = GetPlainTextSecretsViaServiceToken(infisicalToken)
return GetPlainTextSecretsViaServiceToken(infisicalToken)
}
return secretsToReturn, errorToReturn
}
func ValidateEnvironmentName(environmentName string, workspaceId string, userLoggedInDetails models.UserCredentials) error {
httpClient := resty.New()
httpClient.SetAuthToken(userLoggedInDetails.JTWToken).
SetHeader("Accept", "application/json")
response, err := api.CallGetAccessibleEnvironments(httpClient, api.GetAccessibleEnvironmentsRequest{WorkspaceId: workspaceId})
if err != nil {
return err
}
listOfEnvSlugs := []string{}
mapOfEnvSlugs := make(map[string]interface{})
for _, environment := range response.AccessibleEnvironments {
listOfEnvSlugs = append(listOfEnvSlugs, environment.Slug)
mapOfEnvSlugs[environment.Slug] = environment
}
_, exists := mapOfEnvSlugs[environmentName]
if !exists {
HandleError(fmt.Errorf("the environment [%s] does not exist in project with [id=%s]. Only [%s] are available", environmentName, workspaceId, strings.Join(listOfEnvSlugs, ",")))
}
return nil
}
func getExpandedEnvVariable(secrets []models.SingleEnvironmentVariable, variableWeAreLookingFor string, hashMapOfCompleteVariables map[string]string, hashMapOfSelfRefs map[string]string) string {
@ -349,34 +283,11 @@ func GetPlainTextSecrets(key []byte, encryptedSecrets api.GetEncryptedSecretsV2R
return nil, fmt.Errorf("unable to symmetrically decrypt secret value")
}
// Decrypt comment
comment_iv, err := base64.StdEncoding.DecodeString(secret.SecretCommentIV)
if err != nil {
return nil, fmt.Errorf("unable to decode secret IV for secret value")
}
comment_tag, err := base64.StdEncoding.DecodeString(secret.SecretCommentTag)
if err != nil {
return nil, fmt.Errorf("unable to decode secret authentication tag for secret value")
}
comment_ciphertext, _ := base64.StdEncoding.DecodeString(secret.SecretCommentCiphertext)
if err != nil {
return nil, fmt.Errorf("unable to decode secret cipher text for secret key")
}
plainTextComment, err := crypto.DecryptSymmetric(key, comment_ciphertext, comment_tag, comment_iv)
if err != nil {
return nil, fmt.Errorf("unable to symmetrically decrypt secret comment")
}
plainTextSecret := models.SingleEnvironmentVariable{
Key: string(plainTextKey),
Value: string(plainTextValue),
Type: string(secret.Type),
ID: secret.ID,
Tags: secret.Tags,
Comment: string(plainTextComment),
Key: string(plainTextKey),
Value: string(plainTextValue),
Type: string(secret.Type),
ID: secret.ID,
}
plainTextSecrets = append(plainTextSecrets, plainTextSecret)
@ -384,100 +295,3 @@ func GetPlainTextSecrets(key []byte, encryptedSecrets api.GetEncryptedSecretsV2R
return plainTextSecrets, nil
}
func WriteBackupSecrets(workspace string, environment string, encryptionKey []byte, secrets []models.SingleEnvironmentVariable) error {
fileName := fmt.Sprintf("secrets_%s_%s", workspace, environment)
secrets_backup_folder_name := "secrets-backup"
_, fullConfigFileDirPath, err := GetFullConfigFilePath()
if err != nil {
return fmt.Errorf("WriteBackupSecrets: unable to get full config folder path [err=%s]", err)
}
// create secrets backup directory
fullPathToSecretsBackupFolder := fmt.Sprintf("%s/%s", fullConfigFileDirPath, secrets_backup_folder_name)
if _, err := os.Stat(fullPathToSecretsBackupFolder); errors.Is(err, os.ErrNotExist) {
err := os.Mkdir(fullPathToSecretsBackupFolder, os.ModePerm)
if err != nil {
return err
}
}
var encryptedSecrets []models.SymmetricEncryptionResult
for _, secret := range secrets {
marshaledSecrets, _ := json.Marshal(secret)
result, err := crypto.EncryptSymmetric(marshaledSecrets, encryptionKey)
if err != nil {
return err
}
encryptedSecrets = append(encryptedSecrets, result)
}
listOfSecretsMarshalled, _ := json.Marshal(encryptedSecrets)
err = os.WriteFile(fmt.Sprintf("%s/%s", fullPathToSecretsBackupFolder, fileName), listOfSecretsMarshalled, os.ModePerm)
if err != nil {
return fmt.Errorf("WriteBackupSecrets: Unable to write backup secrets to file [err=%s]", err)
}
return nil
}
func ReadBackupSecrets(workspace string, environment string, encryptionKey []byte) ([]models.SingleEnvironmentVariable, error) {
fileName := fmt.Sprintf("secrets_%s_%s", workspace, environment)
secrets_backup_folder_name := "secrets-backup"
_, fullConfigFileDirPath, err := GetFullConfigFilePath()
if err != nil {
return nil, fmt.Errorf("ReadBackupSecrets: unable to write config file because an error occurred when getting config file path [err=%s]", err)
}
fullPathToSecretsBackupFolder := fmt.Sprintf("%s/%s", fullConfigFileDirPath, secrets_backup_folder_name)
if _, err := os.Stat(fullPathToSecretsBackupFolder); errors.Is(err, os.ErrNotExist) {
return nil, nil
}
encryptedBackupSecretsFilePath := fmt.Sprintf("%s/%s", fullPathToSecretsBackupFolder, fileName)
encryptedBackupSecretsAsBytes, err := os.ReadFile(encryptedBackupSecretsFilePath)
if err != nil {
return nil, err
}
var listOfEncryptedBackupSecrets []models.SymmetricEncryptionResult
_ = json.Unmarshal(encryptedBackupSecretsAsBytes, &listOfEncryptedBackupSecrets)
var plainTextSecrets []models.SingleEnvironmentVariable
for _, encryptedSecret := range listOfEncryptedBackupSecrets {
result, err := crypto.DecryptSymmetric(encryptionKey, encryptedSecret.CipherText, encryptedSecret.AuthTag, encryptedSecret.Nonce)
if err != nil {
return nil, err
}
var plainTextSecret models.SingleEnvironmentVariable
err = json.Unmarshal(result, &plainTextSecret)
if err != nil {
return nil, err
}
plainTextSecrets = append(plainTextSecrets, plainTextSecret)
}
return plainTextSecrets, nil
}
func DeleteBackupSecrets() error {
secrets_backup_folder_name := "secrets-backup"
_, fullConfigFileDirPath, err := GetFullConfigFilePath()
if err != nil {
return fmt.Errorf("ReadBackupSecrets: unable to write config file because an error occurred when getting config file path [err=%s]", err)
}
fullPathToSecretsBackupFolder := fmt.Sprintf("%s/%s", fullConfigFileDirPath, secrets_backup_folder_name)
return os.RemoveAll(fullPathToSecretsBackupFolder)
}

View File

@ -57,7 +57,7 @@ func fileKeyringPassphrasePrompt(prompt string) (string, error) {
if password, ok := os.LookupEnv("INFISICAL_VAULT_FILE_PASSPHRASE"); ok {
return password, nil
} else {
fmt.Println("You may set the environment variable `INFISICAL_VAULT_FILE_PASSPHRASE` with your password to avoid typing it")
fmt.Println("You may set the `INFISICAL_VAULT_FILE_PASSPHRASE` environment variable to avoid typing password")
}
fmt.Fprintf(os.Stderr, "%s:", prompt)
@ -65,7 +65,6 @@ func fileKeyringPassphrasePrompt(prompt string) (string, error) {
if err != nil {
return "", err
}
fmt.Println("")
return string(b), nil
}

View File

@ -46,7 +46,11 @@ services:
context: ./frontend
dockerfile: Dockerfile.dev
volumes:
- ./frontend/src:/app/src/ # mount the whole src directory so hot reload picks up newly added files
- ./frontend/src/pages:/app/src/pages
- ./frontend/src/components:/app/src/components
- ./frontend/src/ee:/app/src/ee
- ./frontend/src/locales:/app/src/locales
- ./frontend/src/styles:/app/src/styles
- ./frontend/public:/app/public
- ./frontend/next-i18next.config.js:/app/next-i18next.config.js
env_file: .env
@ -54,7 +58,6 @@ services:
- NEXT_PUBLIC_ENV=development
- INFISICAL_TELEMETRY_ENABLED=${TELEMETRY_ENABLED}
- NEXT_PUBLIC_STRIPE_PRODUCT_PRO=${STRIPE_PRODUCT_PRO}
- NEXT_PUBLIC_STRIPE_PRODUCT_TEAM=${STRIPE_PRODUCT_TEAM}
- NEXT_PUBLIC_STRIPE_PRODUCT_STARTER=${STRIPE_PRODUCT_STARTER}
networks:
- infisical-dev

View File

@ -40,7 +40,6 @@ services:
# - NEXT_PUBLIC_POSTHOG_API_KEY=${POSTHOG_PROJECT_API_KEY}
- INFISICAL_TELEMETRY_ENABLED=${TELEMETRY_ENABLED}
- NEXT_PUBLIC_STRIPE_PRODUCT_PRO=${STRIPE_PRODUCT_PRO}
- NEXT_PUBLIC_STRIPE_PRODUCT_TEAM=${STRIPE_PRODUCT_TEAM}
- NEXT_PUBLIC_STRIPE_PRODUCT_STARTER=${STRIPE_PRODUCT_STARTER}
networks:
- infisical

View File

@ -1,6 +1,5 @@
---
title: "Commands"
description: "Infisical CLI command overview"
---
## Commands

View File

@ -1,6 +1,5 @@
---
title: "infisical export"
description: "Export Infisical secrets from CLI into different file formats"
---
```bash

View File

@ -1,6 +1,5 @@
---
title: "infisical init"
description: "Switch between Infisical projects within CLI"
---
```bash
@ -9,8 +8,6 @@ infisical init
## Description
Link a local project to your Infisical project. Once connected, you can then access the secrets locally from the connected Infisical project.
Link a local project to the platform
<Info>
This command creates a `infisical.json` file containing your Project ID.
</Info>
The command creates a `infisical.json` file containing your Project ID.

View File

@ -1,6 +1,5 @@
---
title: "infisical login"
description: "Login into Infisical from the CLI"
---
```bash

View File

@ -1,11 +0,0 @@
---
title: "infisical reset"
description: "Reset Infisical"
---
```bash
infisical reset
```
## Description
This command provides a way to clear all Infisical-generated configuration data, effectively resetting the software to its default settings. This can be an effective way to address any persistent issues that arise while using the CLI.

View File

@ -1,6 +1,5 @@
---
title: "infisical run"
description: "The command that injects your secrets into local environment"
---
<Tabs>
@ -25,58 +24,13 @@ description: "The command that injects your secrets into local environment"
## Description
Inject secrets from Infisical into your application process.
Inject environment variables from the platform into an application process.
## Options
## Subcommands & flags
<Accordion title="infisical run" defaultOpen="true">
Use this command to inject secrets into your applications process
```bash
$ infisical run -- <your application command>
# Example
$ infisical run -- npm run dev
```
### flags
<Accordion title="--command">
Pass secrets into multiple commands at once
```bash
# Example
infisical run --command="npm run build && npm run dev; more-commands..."
```
</Accordion>
<Accordion title="--token">
If you are using a [service token](../../getting-started/dashboard/token) to authenticate, you can pass the token as a flag
```bash
# Example
infisical run --token="st.63e03c4a97cb4a747186c71e.ed5b46a34c078a8f94e8228f4ab0ff97.4f7f38034811995997d72badf44b42ec" -- npm run start
```
You may also expose the token to the CLI by setting the environment variable `INFISICAL_TOKEN` before executing the run command. This has the same effect as passing the token with the `--token` flag
</Accordion>
<Accordion title="--expand">
Turn on or off the shell parameter expansion in your secrets. If you have used shell parameters in your secret(s), activating this feature will populate them before injecting them into your application process.
Default value: `true`
</Accordion>
<Accordion title="--env">
This is used to specify the environment from which secrets should be retrieved. The accepted values are the environment slugs defined for your project, such as `dev`, `staging`, `test`, and `prod`.
Default value: `dev`
</Accordion>
<Accordion title="--secret-overriding">
Prioritizes personal secrets with the same name over shared secrets
Default value: `true`
</Accordion>
</Accordion>
| Option | Description | Default value |
| -------------- | ----------------------------------------------------------------------------------------------------------- | ------------- |
| `--env` | Used to set the environment that secrets are pulled from. Accepted values: `dev`, `staging`, `test`, `prod` | `dev` |
| `--expand` | Parse shell parameter expansions in your secrets (e.g., `${DOMAIN}`) | `true` |
| `--command` | Pass secrets into chained commands (e.g., `"first-command && second-command; more-commands..."`) | None |
| `--secret-overriding`| Prioritizes personal secrets with the same name over shared secrets | `true` |
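
To make the `--expand` flag in the table above concrete: with expansion on, `${VAR}` references inside secret values are resolved against the other fetched secrets before injection. The sketch below is illustrative only (written in TypeScript for consistency with the other examples in this document; the CLI's real logic lives in its Go code):

```typescript
// Secrets as fetched from Infisical (illustrative values).
const secrets: Record<string, string> = {
  DOMAIN: 'example.com',
  API_BASE_URL: 'https://api.${DOMAIN}/v1'
};

// Replace ${NAME} references with the referenced secret's value, if it exists.
const expand = (value: string): string =>
  value.replace(/\$\{([A-Za-z0-9_]+)\}/g, (match, name) => secrets[name] ?? match);

const expanded = Object.fromEntries(
  Object.entries(secrets).map(([key, value]) => [key, expand(value)])
);

console.log(expanded.API_BASE_URL); // https://api.example.com/v1
```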

Some files were not shown because too many files have changed in this diff.