Mirror of https://github.com/Infisical/infisical.git (synced 2025-03-27 09:40:45 +00:00)
chore: resolve merge conflicts
@@ -47,11 +47,13 @@ CLIENT_ID_VERCEL=
CLIENT_ID_NETLIFY=
CLIENT_ID_GITHUB=
CLIENT_ID_GITLAB=
CLIENT_ID_BITBUCKET=
CLIENT_SECRET_HEROKU=
CLIENT_SECRET_VERCEL=
CLIENT_SECRET_NETLIFY=
CLIENT_SECRET_GITHUB=
CLIENT_SECRET_GITLAB=
CLIENT_SECRET_BITBUCKET=
CLIENT_SLUG_VERCEL=

# Sentry (optional) for monitoring errors
@@ -1,11 +1,17 @@
name: Release standalone docker image
on: [workflow_dispatch]
on:
  push:
    tags:
      - "infisical/v*.*.*"

jobs:
  infisical-standalone:
    name: Build infisical standalone image
    runs-on: ubuntu-latest
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
        with:
@@ -64,5 +70,6 @@ jobs:
          tags: |
            infisical/infisical:latest
            infisical/infisical:${{ steps.commit.outputs.short }}
            infisical/infisical:${{ steps.extract_version.outputs.version }}
          platforms: linux/amd64,linux/arm64
          file: Dockerfile.standalone-infisical
@@ -1,10 +1,16 @@
name: Release Docker image for K8 operator
on: [workflow_dispatch]
name: Release Docker image for K8 operator
on:
  push:
    tags:
      - "infisical-k8-operator/v*.*.*"

jobs:
  release:
    runs-on: ubuntu-latest
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"
      - uses: actions/checkout@v2

      - name: 🔧 Set up QEMU
@@ -26,4 +32,6 @@ jobs:
          context: k8-operator
          push: true
          platforms: linux/amd64,linux/arm64
          tags: infisical/kubernetes-operator:latest
          tags: |
            infisical/kubernetes-operator:latest
            infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
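Both workflows derive the published version by stripping the tag prefix with shell parameter expansion: `${GITHUB_REF_NAME#infisical/}` removes the shortest leading match, leaving only the `v*.*.*` portion. A minimal TypeScript sketch of the same transformation (the sample tag names are illustrative, not taken from the commit):

// Mirrors shell `${var#pattern}`: drop the shortest matching prefix,
// or leave the value unchanged when the pattern does not match.
function stripTagPrefix(refName: string, prefix: string): string {
  return refName.startsWith(prefix) ? refName.slice(prefix.length) : refName;
}

stripTagPrefix("infisical/v1.2.3", "infisical/");                         // "v1.2.3"
stripTagPrefix("infisical-k8-operator/v0.4.0", "infisical-k8-operator/"); // "v0.4.0"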
README.md (10 changed lines)
@@ -7,7 +7,7 @@
</p>

<h4 align="center">
  <a href="https://join.slack.com/t/infisical-users/shared_invite/zt-1ye0tm8ab-899qZ6ZbpfESuo6TEikyOQ">Slack</a> |
  <a href="https://infisical.com/slack">Slack</a> |
  <a href="https://infisical.com/">Infisical Cloud</a> |
  <a href="https://infisical.com/docs/self-hosting/overview">Self-Hosting</a> |
  <a href="https://infisical.com/docs/documentation/getting-started/introduction">Docs</a> |
@@ -36,7 +36,7 @@
<a href="https://cloudsmith.io/~infisical/repos/">
  <img src="https://img.shields.io/badge/Downloads-395.8k-orange" alt="Cloudsmith downloads" />
</a>
<a href="https://join.slack.com/t/infisical-users/shared_invite/zt-1wehzfnzn-1aMo5JcGENJiNAC2SD8Jlg">
<a href="https://infisical.com/slack">
  <img src="https://img.shields.io/badge/chat-on%20Slack-blueviolet" alt="Slack community channel" />
</a>
<a href="https://twitter.com/infisical">
@@ -135,15 +135,15 @@ Whether it's big or small, we love contributions. Check out our guide to see how
Not sure where to get started? You can:

- [Book a free, non-pressure pairing session / code walkthrough with one of our teammates](https://cal.com/tony-infisical/30-min-meeting-contributing)!
- Join our <a href="https://join.slack.com/t/infisical-users/shared_invite/zt-1wehzfnzn-1aMo5JcGENJiNAC2SD8Jlg">Slack</a>, and ask us any questions there.
- Join our <a href="https://infisical.com/slack">Slack</a>, and ask us any questions there.

## Resources

- [Docs](https://infisical.com/docs/documentation/getting-started/introduction) for comprehensive documentation and guides
- [Slack](https://join.slack.com/t/infisical-users/shared_invite/zt-1wehzfnzn-1aMo5JcGENJiNAC2SD8Jlg) for discussion with the community and Infisical team.
- [Slack](https://infisical.com/slack) for discussion with the community and Infisical team.
- [GitHub](https://github.com/Infisical/infisical) for code, issues, and pull requests
- [Twitter](https://twitter.com/infisical) for fast news
- [YouTube](https://www.youtube.com/@infisical_od) for videos on secret management
- [YouTube](https://www.youtube.com/@infisical_os) for videos on secret management
- [Blog](https://infisical.com/blog) for secret management insights, articles, tutorials, and updates
- [Roadmap](https://www.notion.so/infisical/be2d2585a6694e40889b03aef96ea36b?v=5b19a8127d1a4060b54769567a8785fa) for planned features
@@ -10,6 +10,7 @@
    "plugin:@typescript-eslint/recommended"
  ],
  "rules": {
    "no-empty-function": "off",
    "@typescript-eslint/no-empty-function": "off",
    "no-console": 2,
    "quotes": [
@@ -34,11 +35,6 @@
        "argsIgnorePattern": "^_"
      }
    ],
    "sort-imports": [
      "error",
      {
        "ignoreDeclarationSort": true
      }
    ]
    "sort-imports": 1
  }
}
@@ -19,6 +19,10 @@ RUN npm ci --only-production

COPY --from=build /app .

RUN apk add --no-cache bash curl && curl -1sLf \
  'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
  && apk add infisical=0.8.1

HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
  CMD node healthcheck.js
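The added `HEALTHCHECK` runs `node healthcheck.js` every 10 seconds and marks the container unhealthy once the script exits non-zero. The script itself is not part of this diff; a minimal TypeScript sketch of such a probe, assuming the API listens on port 4000 and exposes a `/healthcheck` route (both assumptions, not taken from the repository):

import http from "http";

// Probe the local API: exit 0 on a 200 so Docker reports "healthy",
// exit 1 on any error or timeout so the container is flagged "unhealthy".
const request = http.get({ host: "127.0.0.1", port: 4000, path: "/healthcheck" }, (res) => {
  process.exit(res.statusCode === 200 ? 0 : 1);
});

request.on("error", () => process.exit(1));
request.setTimeout(2500, () => {
  request.destroy();
  process.exit(1);
});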
backend/package-lock.json (generated, 9235 changed lines): diff suppressed because it is too large.
@@ -2,6 +2,7 @@
  "dependencies": {
    "@aws-sdk/client-secrets-manager": "^3.319.0",
    "@godaddy/terminus": "^4.12.0",
    "@node-saml/passport-saml": "^4.0.4",
    "@octokit/rest": "^19.0.5",
    "@sentry/node": "^7.49.0",
    "@sentry/tracing": "^7.48.0",
@@ -36,6 +37,7 @@
    "passport": "^0.6.0",
    "passport-google-oauth20": "^2.0.0",
    "posthog-node": "^2.6.0",
    "probot": "^12.3.1",
    "query-string": "^7.1.3",
    "rate-limit-mongo": "^2.3.2",
    "rimraf": "^3.0.2",
@@ -103,6 +105,7 @@
    "jest-junit": "^15.0.0",
    "nodemon": "^2.0.19",
    "npm": "^8.19.3",
    "smee-client": "^1.2.3",
    "supertest": "^6.3.3",
    "ts-jest": "^29.0.3",
    "ts-node": "^10.9.1"
@@ -10,7 +10,7 @@ export const getEncryptionKey = async () => {
  return secretValue === "" ? undefined : secretValue;
}
export const getRootEncryptionKey = async () => {
  const secretValue = (await client.getSecret("ROOT_ENCRYPTION_KEY")).secretValue;
  const secretValue = (await client.getSecret("ROOT_ENCRYPTION_KEY")).secretValue;
  return secretValue === "" ? undefined : secretValue;
}
export const getInviteOnlySignup = async () => (await client.getSecret("INVITE_ONLY_SIGNUP")).secretValue === "true"
@@ -37,6 +37,7 @@ export const getClientIdNetlify = async () => (await client.getSecret("CLIENT_ID
export const getClientIdGitHub = async () => (await client.getSecret("CLIENT_ID_GITHUB")).secretValue;
export const getClientIdGitLab = async () => (await client.getSecret("CLIENT_ID_GITLAB")).secretValue;
export const getClientIdGoogle = async () => (await client.getSecret("CLIENT_ID_GOOGLE")).secretValue;
export const getClientIdBitBucket = async () => (await client.getSecret("CLIENT_ID_BITBUCKET")).secretValue;
export const getClientSecretAzure = async () => (await client.getSecret("CLIENT_SECRET_AZURE")).secretValue;
export const getClientSecretHeroku = async () => (await client.getSecret("CLIENT_SECRET_HEROKU")).secretValue;
export const getClientSecretVercel = async () => (await client.getSecret("CLIENT_SECRET_VERCEL")).secretValue;
@@ -44,6 +45,7 @@ export const getClientSecretNetlify = async () => (await client.getSecret("CLIEN
export const getClientSecretGitHub = async () => (await client.getSecret("CLIENT_SECRET_GITHUB")).secretValue;
export const getClientSecretGitLab = async () => (await client.getSecret("CLIENT_SECRET_GITLAB")).secretValue;
export const getClientSecretGoogle = async () => (await client.getSecret("CLIENT_SECRET_GOOGLE")).secretValue;
export const getClientSecretBitBucket = async () => (await client.getSecret("CLIENT_SECRET_BITBUCKET")).secretValue;
export const getClientSlugVercel = async () => (await client.getSecret("CLIENT_SLUG_VERCEL")).secretValue;
export const getPostHogHost = async () => (await client.getSecret("POSTHOG_HOST")).secretValue || "https://app.posthog.com";
export const getPostHogProjectApiKey = async () => (await client.getSecret("POSTHOG_PROJECT_API_KEY")).secretValue || "phc_nSin8j5q2zdhpFDI1ETmFNUIuTG4DwKVyIigrY10XiE";
@@ -57,6 +59,11 @@ export const getSmtpPassword = async () => (await client.getSecret("SMTP_PASSWOR
export const getSmtpFromAddress = async () => (await client.getSecret("SMTP_FROM_ADDRESS")).secretValue;
export const getSmtpFromName = async () => (await client.getSecret("SMTP_FROM_NAME")).secretValue || "Infisical";

export const getSecretScanningWebhookProxy = async () => (await client.getSecret("SECRET_SCANNING_WEBHOOK_PROXY")).secretValue;
export const getSecretScanningWebhookSecret = async () => (await client.getSecret("SECRET_SCANNING_WEBHOOK_SECRET")).secretValue;
export const getSecretScanningGitAppId = async () => (await client.getSecret("SECRET_SCANNING_GIT_APP_ID")).secretValue;
export const getSecretScanningPrivateKey = async () => (await client.getSecret("SECRET_SCANNING_PRIVATE_KEY")).secretValue;

export const getLicenseKey = async () => {
  const secretValue = (await client.getSecret("LICENSE_KEY")).secretValue;
  return secretValue === "" ? undefined : secretValue;
@@ -13,21 +13,27 @@ import * as signupController from "./signupController";
import * as userActionController from "./userActionController";
import * as userController from "./userController";
import * as workspaceController from "./workspaceController";
import * as secretScanningController from "./secretScanningController";
import * as webhookController from "./webhookController";
import * as secretImportController from "./secretImportController";

export {
  authController,
  botController,
  integrationAuthController,
  integrationController,
  keyController,
  membershipController,
  membershipOrgController,
  organizationController,
  passwordController,
  secretController,
  serviceTokenController,
  signupController,
  userActionController,
  userController,
  workspaceController,
  authController,
  botController,
  integrationAuthController,
  integrationController,
  keyController,
  membershipController,
  membershipOrgController,
  organizationController,
  passwordController,
  secretController,
  serviceTokenController,
  signupController,
  userActionController,
  userController,
  workspaceController,
  secretScanningController,
  webhookController,
  secretImportController
};
@ -7,6 +7,7 @@ import { IntegrationService } from "../../services";
|
||||
import {
|
||||
ALGORITHM_AES_256_GCM,
|
||||
ENCODING_SCHEME_UTF8,
|
||||
INTEGRATION_BITBUCKET_API_URL,
|
||||
INTEGRATION_RAILWAY_API_URL,
|
||||
INTEGRATION_SET,
|
||||
INTEGRATION_VERCEL_API_URL,
|
||||
@ -141,12 +142,14 @@ export const saveIntegrationAccessToken = async (req: Request, res: Response) =>
|
||||
*/
|
||||
export const getIntegrationAuthApps = async (req: Request, res: Response) => {
|
||||
const teamId = req.query.teamId as string;
|
||||
const workspaceSlug = req.query.workspaceSlug as string;
|
||||
|
||||
const apps = await getApps({
|
||||
integrationAuth: req.integrationAuth,
|
||||
accessToken: req.accessToken,
|
||||
accessId: req.accessId,
|
||||
...(teamId && { teamId })
|
||||
...(teamId && { teamId }),
|
||||
...(workspaceSlug && { workspaceSlug })
|
||||
});
|
||||
|
||||
return res.status(200).send({
|
||||
@ -382,6 +385,66 @@ export const getIntegrationAuthRailwayServices = async (req: Request, res: Respo
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Return list of workspaces allowed for Bitbucket integration
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const getIntegrationAuthBitBucketWorkspaces = async (req: Request, res: Response) => {
|
||||
|
||||
interface WorkspaceResponse {
|
||||
size: number;
|
||||
page: number;
|
||||
pageLen: number;
|
||||
next: string;
|
||||
previous: string;
|
||||
values: Array<Workspace>;
|
||||
}
|
||||
|
||||
interface Workspace {
|
||||
type: string;
|
||||
uuid: string;
|
||||
name: string;
|
||||
slug: string;
|
||||
is_private: boolean;
|
||||
created_on: string;
|
||||
updated_on: string;
|
||||
}
|
||||
|
||||
const workspaces: Workspace[] = [];
|
||||
let hasNextPage = true;
|
||||
let workspaceUrl = `${INTEGRATION_BITBUCKET_API_URL}/2.0/workspaces`
|
||||
|
||||
while (hasNextPage) {
|
||||
const { data }: { data: WorkspaceResponse } = await standardRequest.get(
|
||||
workspaceUrl,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${req.accessToken}`,
|
||||
"Accept-Encoding": "application/json"
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
if (data?.values.length > 0) {
|
||||
data.values.forEach((workspace) => {
|
||||
workspaces.push(workspace)
|
||||
})
|
||||
}
|
||||
|
||||
if (data.next) {
|
||||
workspaceUrl = data.next
|
||||
} else {
|
||||
hasNextPage = false
|
||||
}
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
workspaces
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Delete integration authorization with id [integrationAuthId]
|
||||
* @param req
|
||||
|
@ -2,7 +2,7 @@ import { Request, Response } from "express";
|
||||
import { Types } from "mongoose";
|
||||
import { Integration } from "../../models";
|
||||
import { EventService } from "../../services";
|
||||
import { eventPushSecrets } from "../../events";
|
||||
import { eventStartIntegration } from "../../events";
|
||||
import Folder from "../../models/folder";
|
||||
import { getFolderByPath } from "../../services/FolderService";
|
||||
import { BadRequestError } from "../../utils/errors";
|
||||
@ -27,19 +27,19 @@ export const createIntegration = async (req: Request, res: Response) => {
|
||||
owner,
|
||||
path,
|
||||
region,
|
||||
secretPath,
|
||||
secretPath
|
||||
} = req.body;
|
||||
|
||||
const folders = await Folder.findOne({
|
||||
workspace: req.integrationAuth.workspace._id,
|
||||
environment: sourceEnvironment,
|
||||
environment: sourceEnvironment
|
||||
});
|
||||
|
||||
if (folders) {
|
||||
const folder = getFolderByPath(folders.nodes, secretPath);
|
||||
if (!folder) {
|
||||
throw BadRequestError({
|
||||
message: "Path for service token does not exist",
|
||||
message: "Path for service token does not exist"
|
||||
});
|
||||
}
|
||||
}
|
||||
@ -62,21 +62,21 @@ export const createIntegration = async (req: Request, res: Response) => {
|
||||
region,
|
||||
secretPath,
|
||||
integration: req.integrationAuth.integration,
|
||||
integrationAuth: new Types.ObjectId(integrationAuthId),
|
||||
integrationAuth: new Types.ObjectId(integrationAuthId)
|
||||
}).save();
|
||||
|
||||
if (integration) {
|
||||
// trigger event - push secrets
|
||||
EventService.handleEvent({
|
||||
event: eventPushSecrets({
|
||||
event: eventStartIntegration({
|
||||
workspaceId: integration.workspace,
|
||||
environment: sourceEnvironment,
|
||||
}),
|
||||
environment: sourceEnvironment
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integration,
|
||||
integration
|
||||
});
|
||||
};
|
||||
|
||||
@ -97,26 +97,26 @@ export const updateIntegration = async (req: Request, res: Response) => {
|
||||
appId,
|
||||
targetEnvironment,
|
||||
owner, // github-specific integration param
|
||||
secretPath,
|
||||
secretPath
|
||||
} = req.body;
|
||||
|
||||
const folders = await Folder.findOne({
|
||||
workspace: req.integration.workspace,
|
||||
environment,
|
||||
environment
|
||||
});
|
||||
|
||||
if (folders) {
|
||||
const folder = getFolderByPath(folders.nodes, secretPath);
|
||||
if (!folder) {
|
||||
throw BadRequestError({
|
||||
message: "Path for service token does not exist",
|
||||
message: "Path for service token does not exist"
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const integration = await Integration.findOneAndUpdate(
|
||||
{
|
||||
_id: req.integration._id,
|
||||
_id: req.integration._id
|
||||
},
|
||||
{
|
||||
environment,
|
||||
@ -125,25 +125,25 @@ export const updateIntegration = async (req: Request, res: Response) => {
|
||||
appId,
|
||||
targetEnvironment,
|
||||
owner,
|
||||
secretPath,
|
||||
secretPath
|
||||
},
|
||||
{
|
||||
new: true,
|
||||
new: true
|
||||
}
|
||||
);
|
||||
|
||||
if (integration) {
|
||||
// trigger event - push secrets
|
||||
EventService.handleEvent({
|
||||
event: eventPushSecrets({
|
||||
event: eventStartIntegration({
|
||||
workspaceId: integration.workspace,
|
||||
environment,
|
||||
}),
|
||||
environment
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
integration,
|
||||
integration
|
||||
});
|
||||
};
|
||||
|
||||
@ -158,12 +158,12 @@ export const deleteIntegration = async (req: Request, res: Response) => {
|
||||
const { integrationId } = req.params;
|
||||
|
||||
const integration = await Integration.findOneAndDelete({
|
||||
_id: integrationId,
|
||||
_id: integrationId
|
||||
});
|
||||
|
||||
if (!integration) throw new Error("Failed to find integration");
|
||||
|
||||
return res.status(200).send({
|
||||
integration,
|
||||
integration
|
||||
});
|
||||
};
|
||||
|
@ -9,7 +9,7 @@ import {
|
||||
import { createOrganization as create } from "../../helpers/organization";
|
||||
import { addMembershipsOrg } from "../../helpers/membershipOrg";
|
||||
import { ACCEPTED, OWNER } from "../../variables";
|
||||
import { getSiteURL, getLicenseServerUrl } from "../../config";
|
||||
import { getLicenseServerUrl, getSiteURL } from "../../config";
|
||||
import { licenseServerKeyRequest } from "../../config/request";
|
||||
|
||||
export const getOrganizations = async (req: Request, res: Response) => {
|
||||
|
@ -80,7 +80,8 @@ export const pushSecrets = async (req: Request, res: Response) => {
|
||||
EventService.handleEvent({
|
||||
event: eventPushSecrets({
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
environment
|
||||
environment,
|
||||
secretPath: "/"
|
||||
})
|
||||
});
|
||||
|
||||
|
backend/src/controllers/v1/secretImportController.ts (new file, 117 lines)
@ -0,0 +1,117 @@
|
||||
import { Request, Response } from "express";
|
||||
import { validateMembership } from "../../helpers";
|
||||
import SecretImport from "../../models/secretImports";
|
||||
import { getAllImportedSecrets } from "../../services/SecretImportService";
|
||||
import { BadRequestError } from "../../utils/errors";
|
||||
import { ADMIN, MEMBER } from "../../variables";
|
||||
|
||||
export const createSecretImport = async (req: Request, res: Response) => {
|
||||
const { workspaceId, environment, folderId, secretImport } = req.body;
|
||||
const importSecDoc = await SecretImport.findOne({
|
||||
workspace: workspaceId,
|
||||
environment,
|
||||
folderId
|
||||
});
|
||||
|
||||
if (!importSecDoc) {
|
||||
const doc = new SecretImport({
|
||||
workspace: workspaceId,
|
||||
environment,
|
||||
folderId,
|
||||
imports: [{ environment: secretImport.environment, secretPath: secretImport.secretPath }]
|
||||
});
|
||||
await doc.save();
|
||||
return res.status(200).json({ message: "successfully created secret import" });
|
||||
}
|
||||
|
||||
const doesImportExist = importSecDoc.imports.find(
|
||||
(el) => el.environment === secretImport.environment && el.secretPath === secretImport.secretPath
|
||||
);
|
||||
if (doesImportExist) {
|
||||
throw BadRequestError({ message: "Secret import already exist" });
|
||||
}
|
||||
|
||||
importSecDoc.imports.push({
|
||||
environment: secretImport.environment,
|
||||
secretPath: secretImport.secretPath
|
||||
});
|
||||
await importSecDoc.save();
|
||||
return res.status(200).json({ message: "successfully created secret import" });
|
||||
};
|
||||
|
||||
// to keep the ordering, you must pass all the imports in here not the only updated one
|
||||
// this is because the order decide which import gets overriden
|
||||
export const updateSecretImport = async (req: Request, res: Response) => {
|
||||
const { id } = req.params;
|
||||
const { secretImports } = req.body;
|
||||
const importSecDoc = await SecretImport.findById(id);
|
||||
if (!importSecDoc) {
|
||||
throw BadRequestError({ message: "Import not found" });
|
||||
}
|
||||
|
||||
await validateMembership({
|
||||
userId: req.user._id.toString(),
|
||||
workspaceId: importSecDoc.workspace,
|
||||
acceptedRoles: [ADMIN, MEMBER]
|
||||
});
|
||||
|
||||
importSecDoc.imports = secretImports;
|
||||
await importSecDoc.save();
|
||||
return res.status(200).json({ message: "successfully updated secret import" });
|
||||
};
|
||||
|
||||
export const deleteSecretImport = async (req: Request, res: Response) => {
|
||||
const { id } = req.params;
|
||||
const { secretImportEnv, secretImportPath } = req.body;
|
||||
const importSecDoc = await SecretImport.findById(id);
|
||||
if (!importSecDoc) {
|
||||
throw BadRequestError({ message: "Import not found" });
|
||||
}
|
||||
|
||||
await validateMembership({
|
||||
userId: req.user._id.toString(),
|
||||
workspaceId: importSecDoc.workspace,
|
||||
acceptedRoles: [ADMIN, MEMBER]
|
||||
});
|
||||
importSecDoc.imports = importSecDoc.imports.filter(
|
||||
({ environment, secretPath }) =>
|
||||
!(environment === secretImportEnv && secretPath === secretImportPath)
|
||||
);
|
||||
await importSecDoc.save();
|
||||
return res.status(200).json({ message: "successfully delete secret import" });
|
||||
};
|
||||
|
||||
export const getSecretImports = async (req: Request, res: Response) => {
|
||||
const { workspaceId, environment, folderId } = req.query;
|
||||
const importSecDoc = await SecretImport.findOne({
|
||||
workspace: workspaceId,
|
||||
environment,
|
||||
folderId
|
||||
});
|
||||
|
||||
if (!importSecDoc) {
|
||||
return res.status(200).json({ secretImport: {} });
|
||||
}
|
||||
|
||||
return res.status(200).json({ secretImport: importSecDoc });
|
||||
};
|
||||
|
||||
export const getAllSecretsFromImport = async (req: Request, res: Response) => {
|
||||
const { workspaceId, environment, folderId } = req.query as {
|
||||
workspaceId: string;
|
||||
environment: string;
|
||||
folderId: string;
|
||||
};
|
||||
const importSecDoc = await SecretImport.findOne({
|
||||
workspace: workspaceId,
|
||||
environment,
|
||||
folderId
|
||||
});
|
||||
|
||||
if (!importSecDoc) {
|
||||
return res.status(200).json({ secrets: {} });
|
||||
}
|
||||
|
||||
const secrets = await getAllImportedSecrets(workspaceId, environment, folderId);
|
||||
return res.status(200).json({ secrets });
|
||||
};
|
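The comment above `updateSecretImport` is the load-bearing detail in this new controller: `imports` is an ordered array, and that order decides which imported source wins when two entries define the same secret, which is why updates must send the whole reordered list rather than a single item. A minimal TypeScript sketch of that idea, assuming entries earlier in the array take precedence (the actual precedence rule lives in `getAllImportedSecrets`, which is not shown in this diff):

interface SecretImportEntry {
  environment: string;
  secretPath: string;
}

// Sketch: fold ordered import entries into one key -> value map, keeping the
// first value seen for a key, so reordering the array changes which source "wins".
function mergeImportedSecrets(
  entries: SecretImportEntry[],
  fetchSecrets: (entry: SecretImportEntry) => Record<string, string>
): Record<string, string> {
  const merged: Record<string, string> = {};
  for (const entry of entries) {
    for (const [key, value] of Object.entries(fetchSecrets(entry))) {
      if (!(key in merged)) merged[key] = value;
    }
  }
  return merged;
}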
backend/src/controllers/v1/secretScanningController.ts (new file, 91 lines)
@ -0,0 +1,91 @@
|
||||
import { Request, Response } from "express";
|
||||
import GitAppInstallationSession from "../../models/gitAppInstallationSession";
|
||||
import crypto from "crypto";
|
||||
import { Types } from "mongoose";
|
||||
import { UnauthorizedRequestError } from "../../utils/errors";
|
||||
import GitAppOrganizationInstallation from "../../models/gitAppOrganizationInstallation";
|
||||
import { MembershipOrg } from "../../models";
|
||||
import GitRisks, { STATUS_RESOLVED_FALSE_POSITIVE, STATUS_RESOLVED_NOT_REVOKED, STATUS_RESOLVED_REVOKED } from "../../models/gitRisks";
|
||||
|
||||
export const createInstallationSession = async (req: Request, res: Response) => {
|
||||
const sessionId = crypto.randomBytes(16).toString("hex");
|
||||
await GitAppInstallationSession.findByIdAndUpdate(
|
||||
req.organization,
|
||||
{
|
||||
organization: new Types.ObjectId(req.organization),
|
||||
sessionId: sessionId,
|
||||
user: new Types.ObjectId(req.user._id)
|
||||
},
|
||||
{ upsert: true }
|
||||
).lean();
|
||||
|
||||
res.send({
|
||||
sessionId: sessionId
|
||||
})
|
||||
}
|
||||
|
||||
export const linkInstallationToOrganization = async (req: Request, res: Response) => {
|
||||
const { installationId, sessionId } = req.body
|
||||
|
||||
const installationSession = await GitAppInstallationSession.findOneAndDelete({ sessionId: sessionId })
|
||||
if (!installationSession) {
|
||||
throw UnauthorizedRequestError()
|
||||
}
|
||||
|
||||
const userMembership = await MembershipOrg.find({ user: req.user._id, organization: installationSession.organization })
|
||||
if (!userMembership) {
|
||||
throw UnauthorizedRequestError()
|
||||
}
|
||||
|
||||
const installationLink = await GitAppOrganizationInstallation.findOneAndUpdate({
|
||||
organizationId: installationSession.organization,
|
||||
}, {
|
||||
installationId: installationId,
|
||||
organizationId: installationSession.organization,
|
||||
user: installationSession.user
|
||||
}, {
|
||||
upsert: true
|
||||
}).lean()
|
||||
|
||||
res.json(installationLink)
|
||||
}
|
||||
|
||||
export const getCurrentOrganizationInstallationStatus = async (req: Request, res: Response) => {
|
||||
const { organizationId } = req.params
|
||||
try {
|
||||
const appInstallation = await GitAppOrganizationInstallation.findOne({ organizationId: organizationId }).lean()
|
||||
if (!appInstallation) {
|
||||
res.json({
|
||||
appInstallationComplete: false
|
||||
})
|
||||
}
|
||||
|
||||
res.json({
|
||||
appInstallationComplete: true
|
||||
})
|
||||
} catch {
|
||||
res.json({
|
||||
appInstallationComplete: false
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export const getRisksForOrganization = async (req: Request, res: Response) => {
|
||||
const { organizationId } = req.params
|
||||
const risks = await GitRisks.find({ organization: organizationId }).sort({ createdAt: -1 }).lean()
|
||||
res.json({
|
||||
risks: risks
|
||||
})
|
||||
}
|
||||
|
||||
export const updateRisksStatus = async (req: Request, res: Response) => {
|
||||
const { riskId } = req.params
|
||||
const { status } = req.body
|
||||
const isRiskResolved = status == STATUS_RESOLVED_FALSE_POSITIVE || status == STATUS_RESOLVED_REVOKED || status == STATUS_RESOLVED_NOT_REVOKED ? true : false
|
||||
const risk = await GitRisks.findByIdAndUpdate(riskId, {
|
||||
status: status,
|
||||
isResolved: isRiskResolved
|
||||
}).lean()
|
||||
|
||||
res.json(risk)
|
||||
}
|
backend/src/controllers/v1/webhookController.ts (new file, 140 lines)
@ -0,0 +1,140 @@
|
||||
import { Request, Response } from "express";
|
||||
import { Types } from "mongoose";
|
||||
import { client, getRootEncryptionKey } from "../../config";
|
||||
import { validateMembership } from "../../helpers";
|
||||
import Webhook from "../../models/webhooks";
|
||||
import { getWebhookPayload, triggerWebhookRequest } from "../../services/WebhookService";
|
||||
import { BadRequestError } from "../../utils/errors";
|
||||
import { ADMIN, ALGORITHM_AES_256_GCM, ENCODING_SCHEME_BASE64, MEMBER } from "../../variables";
|
||||
|
||||
export const createWebhook = async (req: Request, res: Response) => {
|
||||
const { webhookUrl, webhookSecretKey, environment, workspaceId, secretPath } = req.body;
|
||||
const webhook = new Webhook({
|
||||
workspace: workspaceId,
|
||||
environment,
|
||||
secretPath,
|
||||
url: webhookUrl,
|
||||
algorithm: ALGORITHM_AES_256_GCM,
|
||||
keyEncoding: ENCODING_SCHEME_BASE64
|
||||
});
|
||||
|
||||
if (webhookSecretKey) {
|
||||
const rootEncryptionKey = await getRootEncryptionKey();
|
||||
const { ciphertext, iv, tag } = client.encryptSymmetric(webhookSecretKey, rootEncryptionKey);
|
||||
webhook.iv = iv;
|
||||
webhook.tag = tag;
|
||||
webhook.encryptedSecretKey = ciphertext;
|
||||
}
|
||||
|
||||
await webhook.save();
|
||||
|
||||
return res.status(200).send({
|
||||
webhook,
|
||||
message: "successfully created webhook"
|
||||
});
|
||||
};
|
||||
|
||||
export const updateWebhook = async (req: Request, res: Response) => {
|
||||
const { webhookId } = req.params;
|
||||
const { isDisabled } = req.body;
|
||||
const webhook = await Webhook.findById(webhookId);
|
||||
if (!webhook) {
|
||||
throw BadRequestError({ message: "Webhook not found!!" });
|
||||
}
|
||||
|
||||
// check that user is a member of the workspace
|
||||
await validateMembership({
|
||||
userId: req.user._id.toString(),
|
||||
workspaceId: webhook.workspace,
|
||||
acceptedRoles: [ADMIN, MEMBER]
|
||||
});
|
||||
|
||||
if (typeof isDisabled !== undefined) {
|
||||
webhook.isDisabled = isDisabled;
|
||||
}
|
||||
await webhook.save();
|
||||
|
||||
return res.status(200).send({
|
||||
webhook,
|
||||
message: "successfully updated webhook"
|
||||
});
|
||||
};
|
||||
|
||||
export const deleteWebhook = async (req: Request, res: Response) => {
|
||||
const { webhookId } = req.params;
|
||||
const webhook = await Webhook.findById(webhookId);
|
||||
if (!webhook) {
|
||||
throw BadRequestError({ message: "Webhook not found!!" });
|
||||
}
|
||||
|
||||
await validateMembership({
|
||||
userId: req.user._id.toString(),
|
||||
workspaceId: webhook.workspace,
|
||||
acceptedRoles: [ADMIN, MEMBER]
|
||||
});
|
||||
await webhook.remove();
|
||||
|
||||
return res.status(200).send({
|
||||
message: "successfully removed webhook"
|
||||
});
|
||||
};
|
||||
|
||||
export const testWebhook = async (req: Request, res: Response) => {
|
||||
const { webhookId } = req.params;
|
||||
const webhook = await Webhook.findById(webhookId);
|
||||
if (!webhook) {
|
||||
throw BadRequestError({ message: "Webhook not found!!" });
|
||||
}
|
||||
|
||||
await validateMembership({
|
||||
userId: req.user._id.toString(),
|
||||
workspaceId: webhook.workspace,
|
||||
acceptedRoles: [ADMIN, MEMBER]
|
||||
});
|
||||
|
||||
try {
|
||||
await triggerWebhookRequest(
|
||||
webhook,
|
||||
getWebhookPayload(
|
||||
"test",
|
||||
webhook.workspace.toString(),
|
||||
webhook.environment,
|
||||
webhook.secretPath
|
||||
)
|
||||
);
|
||||
await Webhook.findByIdAndUpdate(webhookId, {
|
||||
lastStatus: "success",
|
||||
lastRunErrorMessage: null
|
||||
});
|
||||
} catch (err) {
|
||||
await Webhook.findByIdAndUpdate(webhookId, {
|
||||
lastStatus: "failed",
|
||||
lastRunErrorMessage: (err as Error).message
|
||||
});
|
||||
return res.status(400).send({
|
||||
message: "Failed to receive response",
|
||||
error: (err as Error).message
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
message: "Successfully received response"
|
||||
});
|
||||
};
|
||||
|
||||
export const listWebhooks = async (req: Request, res: Response) => {
|
||||
const { environment, workspaceId, secretPath } = req.query;
|
||||
|
||||
const optionalFilters: Record<string, string> = {};
|
||||
if (environment) optionalFilters.environment = environment as string;
|
||||
if (secretPath) optionalFilters.secretPath = secretPath as string;
|
||||
|
||||
const webhooks = await Webhook.find({
|
||||
workspace: new Types.ObjectId(workspaceId as string),
|
||||
...optionalFilters
|
||||
});
|
||||
|
||||
return res.status(200).send({
|
||||
webhooks
|
||||
});
|
||||
};
|
@ -27,16 +27,16 @@ export const createWorkspaceEnvironment = async (
|
||||
const { workspaceId } = req.params;
|
||||
const { environmentName, environmentSlug } = req.body;
|
||||
const workspace = await Workspace.findById(workspaceId).exec();
|
||||
|
||||
|
||||
if (!workspace) throw WorkspaceNotFoundError();
|
||||
|
||||
|
||||
const plan = await EELicenseService.getPlan(workspace.organization.toString());
|
||||
|
||||
|
||||
if (plan.environmentLimit !== null) {
|
||||
// case: limit imposed on number of environments allowed
|
||||
if (workspace.environments.length >= plan.environmentLimit) {
|
||||
// case: number of environments used exceeds the number of environments allowed
|
||||
|
||||
|
||||
return res.status(400).send({
|
||||
message: "Failed to create environment due to environment limit reached. Upgrade plan to create more environments.",
|
||||
});
|
||||
@ -191,14 +191,21 @@ export const deleteWorkspaceEnvironment = async (
|
||||
workspace: workspaceId,
|
||||
environment: environmentSlug,
|
||||
});
|
||||
await ServiceToken.deleteMany({
|
||||
workspace: workspaceId,
|
||||
environment: environmentSlug,
|
||||
});
|
||||
await ServiceTokenData.deleteMany({
|
||||
workspace: workspaceId,
|
||||
environment: environmentSlug,
|
||||
});
|
||||
|
||||
// await ServiceToken.deleteMany({
|
||||
// workspace: workspaceId,
|
||||
// environment: environmentSlug,
|
||||
// });
|
||||
|
||||
const result = await ServiceTokenData.updateMany(
|
||||
{ workspace: workspaceId },
|
||||
{ $pull: { scopes: { environment: environmentSlug } } }
|
||||
);
|
||||
|
||||
if (result.modifiedCount > 0) {
|
||||
await ServiceTokenData.deleteMany({ workspace: workspaceId, scopes: { $size: 0 } });
|
||||
}
|
||||
|
||||
await Integration.deleteMany({
|
||||
workspace: workspaceId,
|
||||
environment: environmentSlug,
|
||||
|
@ -30,9 +30,12 @@ import Folder from "../../models/folder";
|
||||
import {
|
||||
getFolderByPath,
|
||||
getFolderIdFromServiceToken,
|
||||
searchByFolderId
|
||||
searchByFolderId,
|
||||
searchByFolderIdWithDir
|
||||
} from "../../services/FolderService";
|
||||
import { isValidScope } from "../../helpers/secrets";
|
||||
import path from "path";
|
||||
import { getAllImportedSecrets } from "../../services/SecretImportService";
|
||||
|
||||
/**
|
||||
* Peform a batch of any specified CUD secret operations
|
||||
@ -47,14 +50,13 @@ export const batchSecrets = async (req: Request, res: Response) => {
|
||||
const {
|
||||
workspaceId,
|
||||
environment,
|
||||
requests,
|
||||
secretPath
|
||||
requests
|
||||
}: {
|
||||
workspaceId: string;
|
||||
environment: string;
|
||||
requests: BatchSecretRequest[];
|
||||
secretPath: string;
|
||||
} = req.body;
|
||||
let secretPath = req.body.secretPath as string;
|
||||
let folderId = req.body.folderId as string;
|
||||
|
||||
const createSecrets: BatchSecret[] = [];
|
||||
@ -68,10 +70,6 @@ export const batchSecrets = async (req: Request, res: Response) => {
|
||||
});
|
||||
|
||||
const folders = await Folder.findOne({ workspace: workspaceId, environment });
|
||||
if (folders && folderId !== "root") {
|
||||
const folder = searchByFolderId(folders.nodes, folderId as string);
|
||||
if (!folder) throw BadRequestError({ message: "Folder not found" });
|
||||
}
|
||||
|
||||
if (req.authData.authPayload instanceof ServiceTokenData) {
|
||||
const isValidScopeAccess = isValidScope(req.authData.authPayload, environment, secretPath);
|
||||
@ -87,6 +85,15 @@ export const batchSecrets = async (req: Request, res: Response) => {
|
||||
folderId = await getFolderIdFromServiceToken(workspaceId, environment, secretPath);
|
||||
}
|
||||
|
||||
if (folders && folderId !== "root") {
|
||||
const folder = searchByFolderIdWithDir(folders.nodes, folderId as string);
|
||||
if (!folder?.folder) throw BadRequestError({ message: "Folder not found" });
|
||||
secretPath = path.join(
|
||||
"/",
|
||||
...folder.dir.map(({ name }) => name).filter((name) => name !== "root")
|
||||
);
|
||||
}
|
||||
|
||||
for await (const request of requests) {
|
||||
// do a validation
|
||||
|
||||
@ -319,7 +326,10 @@ export const batchSecrets = async (req: Request, res: Response) => {
|
||||
// // trigger event - push secrets
|
||||
await EventService.handleEvent({
|
||||
event: eventPushSecrets({
|
||||
workspaceId: new Types.ObjectId(workspaceId)
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
environment,
|
||||
// root condition else this will be filled according to the path or folderid
|
||||
secretPath: secretPath || "/"
|
||||
})
|
||||
});
|
||||
|
||||
@ -535,7 +545,9 @@ export const createSecrets = async (req: Request, res: Response) => {
|
||||
// trigger event - push secrets
|
||||
await EventService.handleEvent({
|
||||
event: eventPushSecrets({
|
||||
workspaceId: new Types.ObjectId(workspaceId)
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
environment,
|
||||
secretPath: secretPath || "/"
|
||||
})
|
||||
});
|
||||
}, 5000);
|
||||
@ -679,7 +691,7 @@ export const getSecrets = async (req: Request, res: Response) => {
|
||||
}
|
||||
*/
|
||||
|
||||
const { tagSlugs, secretPath } = req.query;
|
||||
const { tagSlugs, secretPath, include_imports } = req.query;
|
||||
let { folderId } = req.query;
|
||||
const workspaceId = req.query.workspaceId as string;
|
||||
const environment = req.query.environment as string;
|
||||
@ -816,6 +828,12 @@ export const getSecrets = async (req: Request, res: Response) => {
|
||||
secrets = await Secret.find(secretQuery).populate("tags");
|
||||
}
|
||||
|
||||
// TODO(akhilmhdh) - secret-imp change this to org type
|
||||
let importedSecrets: any[] = [];
|
||||
if (include_imports) {
|
||||
importedSecrets = await getAllImportedSecrets(workspaceId, environment, folderId as string);
|
||||
}
|
||||
|
||||
const channel = getChannelFromUserAgent(req.headers["user-agent"]);
|
||||
|
||||
const readAction = await EELogService.createAction({
|
||||
@ -857,7 +875,8 @@ export const getSecrets = async (req: Request, res: Response) => {
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
secrets
|
||||
secrets,
|
||||
...(include_imports && { imports: importedSecrets })
|
||||
});
|
||||
};
|
||||
|
||||
@ -1033,13 +1052,16 @@ export const updateSecrets = async (req: Request, res: Response) => {
|
||||
|
||||
Object.keys(workspaceSecretObj).forEach(async (key) => {
|
||||
// trigger event - push secrets
|
||||
setTimeout(async () => {
|
||||
await EventService.handleEvent({
|
||||
event: eventPushSecrets({
|
||||
workspaceId: new Types.ObjectId(key)
|
||||
})
|
||||
});
|
||||
}, 10000);
|
||||
// This route is not used anymore thus keep it commented out as it does not expose environment
|
||||
// it will end up creating a lot of requests from the server
|
||||
// setTimeout(async () => {
|
||||
// await EventService.handleEvent({
|
||||
// event: eventPushSecrets({
|
||||
// workspaceId: new Types.ObjectId(key),
|
||||
// environment,
|
||||
// })
|
||||
// });
|
||||
// }, 10000);
|
||||
|
||||
const updateAction = await EELogService.createAction({
|
||||
name: ACTION_UPDATE_SECRETS,
|
||||
@ -1174,11 +1196,13 @@ export const deleteSecrets = async (req: Request, res: Response) => {
|
||||
|
||||
Object.keys(workspaceSecretObj).forEach(async (key) => {
|
||||
// trigger event - push secrets
|
||||
await EventService.handleEvent({
|
||||
event: eventPushSecrets({
|
||||
workspaceId: new Types.ObjectId(key)
|
||||
})
|
||||
});
|
||||
// DEPRECIATED(akhilmhdh): as this would cause server to send so many request
|
||||
// and this route is not used anymore thus like snapshot keeping it commented out
|
||||
// await EventService.handleEvent({
|
||||
// event: eventPushSecrets({
|
||||
// workspaceId: new Types.ObjectId(key)
|
||||
// })
|
||||
// });
|
||||
const deleteAction = await EELogService.createAction({
|
||||
name: ACTION_DELETE_SECRETS,
|
||||
userId: req.user?._id,
|
||||
|
@ -18,7 +18,7 @@ import { updateSubscriptionOrgQuantity } from "../../helpers/organization";
|
||||
* @returns
|
||||
*/
|
||||
export const completeAccountSignup = async (req: Request, res: Response) => {
|
||||
let user, token, refreshToken;
|
||||
let user;
|
||||
const {
|
||||
email,
|
||||
firstName,
|
||||
@ -119,7 +119,7 @@ export const completeAccountSignup = async (req: Request, res: Response) => {
|
||||
userAgent: req.headers["user-agent"] ?? "",
|
||||
});
|
||||
|
||||
token = tokens.token;
|
||||
const token = tokens.token;
|
||||
|
||||
// sending a welcome email to new users
|
||||
if (await getLoopsApiKey()) {
|
||||
@ -159,7 +159,7 @@ export const completeAccountSignup = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const completeAccountInvite = async (req: Request, res: Response) => {
|
||||
let user, token, refreshToken;
|
||||
let user;
|
||||
const {
|
||||
email,
|
||||
firstName,
|
||||
@ -244,7 +244,7 @@ export const completeAccountInvite = async (req: Request, res: Response) => {
|
||||
userAgent: req.headers["user-agent"] ?? "",
|
||||
});
|
||||
|
||||
token = tokens.token;
|
||||
const token = tokens.token;
|
||||
|
||||
// store (refresh) token in httpOnly cookie
|
||||
res.cookie("jid", tokens.refreshToken, {
|
||||
|
@ -3,10 +3,10 @@ import { Types } from "mongoose";
|
||||
import crypto from "crypto";
|
||||
import bcrypt from "bcrypt";
|
||||
import {
|
||||
MembershipOrg,
|
||||
User,
|
||||
APIKeyData,
|
||||
TokenVersion
|
||||
MembershipOrg,
|
||||
TokenVersion,
|
||||
User
|
||||
} from "../../models";
|
||||
import { getSaltRounds } from "../../config";
|
||||
|
||||
@ -80,6 +80,63 @@ export const updateMyMfaEnabled = async (req: Request, res: Response) => {
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Update name of the current user to [firstName, lastName].
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const updateName = async (req: Request, res: Response) => {
|
||||
const {
|
||||
firstName,
|
||||
lastName
|
||||
}: {
|
||||
firstName: string;
|
||||
lastName: string;
|
||||
} = req.body;
|
||||
|
||||
const user = await User.findByIdAndUpdate(
|
||||
req.user._id.toString(),
|
||||
{
|
||||
firstName,
|
||||
lastName: lastName ?? ""
|
||||
},
|
||||
{
|
||||
new: true
|
||||
}
|
||||
);
|
||||
|
||||
return res.status(200).send({
|
||||
user,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Update auth provider of the current user to [authProvider]
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const updateAuthProvider = async (req: Request, res: Response) => {
|
||||
const {
|
||||
authProvider
|
||||
} = req.body;
|
||||
|
||||
const user = await User.findByIdAndUpdate(
|
||||
req.user._id.toString(),
|
||||
{
|
||||
authProvider
|
||||
},
|
||||
{
|
||||
new: true
|
||||
}
|
||||
);
|
||||
|
||||
return res.status(200).send({
|
||||
user
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return organizations that the current user is part of.
|
||||
* @param req
|
||||
|
@ -1,34 +1,29 @@
|
||||
import { Request, Response } from "express";
|
||||
import { Types } from "mongoose";
|
||||
import { Key, Membership, ServiceTokenData, Workspace } from "../../models";
|
||||
import {
|
||||
Key,
|
||||
Membership,
|
||||
ServiceTokenData,
|
||||
Workspace,
|
||||
} from "../../models";
|
||||
import {
|
||||
pullSecrets as pull,
|
||||
v2PushSecrets as push,
|
||||
reformatPullSecrets,
|
||||
pullSecrets as pull,
|
||||
v2PushSecrets as push,
|
||||
reformatPullSecrets
|
||||
} from "../../helpers/secret";
|
||||
import { pushKeys } from "../../helpers/key";
|
||||
import { EventService, TelemetryService } from "../../services";
|
||||
import { eventPushSecrets } from "../../events";
|
||||
|
||||
interface V2PushSecret {
|
||||
type: string; // personal or shared
|
||||
secretKeyCiphertext: string;
|
||||
secretKeyIV: string;
|
||||
secretKeyTag: string;
|
||||
secretKeyHash: string;
|
||||
secretValueCiphertext: string;
|
||||
secretValueIV: string;
|
||||
secretValueTag: string;
|
||||
secretValueHash: string;
|
||||
secretCommentCiphertext?: string;
|
||||
secretCommentIV?: string;
|
||||
secretCommentTag?: string;
|
||||
secretCommentHash?: string;
|
||||
type: string; // personal or shared
|
||||
secretKeyCiphertext: string;
|
||||
secretKeyIV: string;
|
||||
secretKeyTag: string;
|
||||
secretKeyHash: string;
|
||||
secretValueCiphertext: string;
|
||||
secretValueIV: string;
|
||||
secretValueTag: string;
|
||||
secretValueHash: string;
|
||||
secretCommentCiphertext?: string;
|
||||
secretCommentIV?: string;
|
||||
secretCommentTag?: string;
|
||||
secretCommentHash?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -39,7 +34,7 @@ interface V2PushSecret {
|
||||
* @returns
|
||||
*/
|
||||
export const pushWorkspaceSecrets = async (req: Request, res: Response) => {
|
||||
// upload (encrypted) secrets to workspace with id [workspaceId]
|
||||
// upload (encrypted) secrets to workspace with id [workspaceId]
|
||||
const postHogClient = await TelemetryService.getPostHogClient();
|
||||
let { secrets }: { secrets: V2PushSecret[] } = req.body;
|
||||
const { keys, environment, channel } = req.body;
|
||||
@ -62,13 +57,13 @@ export const pushWorkspaceSecrets = async (req: Request, res: Response) => {
|
||||
environment,
|
||||
secrets,
|
||||
channel: channel ? channel : "cli",
|
||||
ipAddress: req.realIP,
|
||||
ipAddress: req.realIP
|
||||
});
|
||||
|
||||
await pushKeys({
|
||||
userId: req.user._id,
|
||||
workspaceId,
|
||||
keys,
|
||||
keys
|
||||
});
|
||||
|
||||
if (postHogClient) {
|
||||
@ -79,8 +74,8 @@ export const pushWorkspaceSecrets = async (req: Request, res: Response) => {
|
||||
numberOfSecrets: secrets.length,
|
||||
environment,
|
||||
workspaceId,
|
||||
channel: channel ? channel : "cli",
|
||||
},
|
||||
channel: channel ? channel : "cli"
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@ -89,12 +84,13 @@ export const pushWorkspaceSecrets = async (req: Request, res: Response) => {
|
||||
event: eventPushSecrets({
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
environment,
|
||||
}),
|
||||
secretPath: "/"
|
||||
})
|
||||
});
|
||||
|
||||
return res.status(200).send({
|
||||
message: "Successfully uploaded workspace secrets",
|
||||
});
|
||||
return res.status(200).send({
|
||||
message: "Successfully uploaded workspace secrets"
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@ -105,7 +101,7 @@ export const pushWorkspaceSecrets = async (req: Request, res: Response) => {
|
||||
* @returns
|
||||
*/
|
||||
export const pullSecrets = async (req: Request, res: Response) => {
|
||||
let secrets;
|
||||
let secrets;
|
||||
const postHogClient = await TelemetryService.getPostHogClient();
|
||||
const environment: string = req.query.environment as string;
|
||||
const channel: string = req.query.channel as string;
|
||||
@ -128,7 +124,7 @@ export const pullSecrets = async (req: Request, res: Response) => {
|
||||
workspaceId,
|
||||
environment,
|
||||
channel: channel ? channel : "cli",
|
||||
ipAddress: req.realIP,
|
||||
ipAddress: req.realIP
|
||||
});
|
||||
|
||||
if (channel !== "cli") {
|
||||
@ -144,18 +140,18 @@ export const pullSecrets = async (req: Request, res: Response) => {
|
||||
numberOfSecrets: secrets.length,
|
||||
environment,
|
||||
workspaceId,
|
||||
channel: channel ? channel : "cli",
|
||||
},
|
||||
channel: channel ? channel : "cli"
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
secrets,
|
||||
});
|
||||
return res.status(200).send({
|
||||
secrets
|
||||
});
|
||||
};
|
||||
|
||||
export const getWorkspaceKey = async (req: Request, res: Response) => {
|
||||
/*
|
||||
/*
|
||||
#swagger.summary = 'Return encrypted project key'
|
||||
#swagger.description = 'Return encrypted project key'
|
||||
|
||||
@ -183,43 +179,37 @@ export const getWorkspaceKey = async (req: Request, res: Response) => {
|
||||
}
|
||||
}
|
||||
*/
|
||||
let key;
|
||||
const { workspaceId } = req.params;
|
||||
|
||||
key = await Key.findOne({
|
||||
const key = await Key.findOne({
|
||||
workspace: workspaceId,
|
||||
receiver: req.user._id,
|
||||
receiver: req.user._id
|
||||
}).populate("sender", "+publicKey");
|
||||
|
||||
if (!key) throw new Error("Failed to find workspace key");
|
||||
|
||||
return res.status(200).json(key);
|
||||
}
|
||||
export const getWorkspaceServiceTokenData = async (
|
||||
req: Request,
|
||||
res: Response
|
||||
) => {
|
||||
return res.status(200).json(key);
|
||||
};
|
||||
export const getWorkspaceServiceTokenData = async (req: Request, res: Response) => {
|
||||
const { workspaceId } = req.params;
|
||||
|
||||
const serviceTokenData = await ServiceTokenData
|
||||
.find({
|
||||
workspace: workspaceId,
|
||||
})
|
||||
.select("+encryptedKey +iv +tag");
|
||||
const serviceTokenData = await ServiceTokenData.find({
|
||||
workspace: workspaceId
|
||||
}).select("+encryptedKey +iv +tag");
|
||||
|
||||
return res.status(200).send({
|
||||
serviceTokenData,
|
||||
});
|
||||
}
|
||||
return res.status(200).send({
|
||||
serviceTokenData
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Return memberships for workspace with id [workspaceId]
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const getWorkspaceMemberships = async (req: Request, res: Response) => {
|
||||
/*
|
||||
/*
|
||||
#swagger.summary = 'Return project memberships'
|
||||
#swagger.description = 'Return project memberships'
|
||||
|
||||
@ -255,22 +245,22 @@ export const getWorkspaceMemberships = async (req: Request, res: Response) => {
|
||||
const { workspaceId } = req.params;
|
||||
|
||||
const memberships = await Membership.find({
|
||||
workspace: workspaceId,
|
||||
workspace: workspaceId
|
||||
}).populate("user", "+publicKey");
|
||||
|
||||
return res.status(200).send({
|
||||
memberships,
|
||||
});
|
||||
}
|
||||
return res.status(200).send({
|
||||
memberships
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Update role of membership with id [membershipId] to role [role]
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const updateWorkspaceMembership = async (req: Request, res: Response) => {
|
||||
/*
|
||||
/*
|
||||
#swagger.summary = 'Update project membership'
|
||||
#swagger.description = 'Update project membership'
|
||||
|
||||
@ -323,33 +313,32 @@ export const updateWorkspaceMembership = async (req: Request, res: Response) =>
|
||||
}
|
||||
}
|
||||
*/
|
||||
const {
|
||||
membershipId,
|
||||
} = req.params;
|
||||
const { membershipId } = req.params;
|
||||
const { role } = req.body;
|
||||
|
||||
|
||||
const membership = await Membership.findByIdAndUpdate(
|
||||
membershipId,
|
||||
{
|
||||
role,
|
||||
}, {
|
||||
new: true,
|
||||
role
|
||||
},
|
||||
{
|
||||
new: true
|
||||
}
|
||||
);
|
||||
|
||||
return res.status(200).send({
|
||||
membership,
|
||||
});
|
||||
}
|
||||
return res.status(200).send({
|
||||
membership
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Delete workspace membership with id [membershipId]
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const deleteWorkspaceMembership = async (req: Request, res: Response) => {
|
||||
/*
|
||||
/*
|
||||
#swagger.summary = 'Delete project membership'
|
||||
#swagger.description = 'Delete project membership'
|
||||
|
||||
@ -385,23 +374,21 @@ export const deleteWorkspaceMembership = async (req: Request, res: Response) =>
|
||||
}
|
||||
}
|
||||
*/
|
||||
const {
|
||||
membershipId,
|
||||
} = req.params;
|
||||
|
||||
const { membershipId } = req.params;
|
||||
|
||||
const membership = await Membership.findByIdAndDelete(membershipId);
|
||||
|
||||
|
||||
if (!membership) throw new Error("Failed to delete workspace membership");
|
||||
|
||||
|
||||
await Key.deleteMany({
|
||||
receiver: membership.user,
|
||||
workspace: membership.workspace,
|
||||
workspace: membership.workspace
|
||||
});
|
||||
|
||||
return res.status(200).send({
|
||||
membership,
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
membership
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Change autoCapitilzation Rule of workspace
|
||||
@ -415,18 +402,18 @@ export const toggleAutoCapitalization = async (req: Request, res: Response) => {
|
||||
|
||||
const workspace = await Workspace.findOneAndUpdate(
|
||||
{
|
||||
_id: workspaceId,
|
||||
_id: workspaceId
|
||||
},
|
||||
{
|
||||
autoCapitalization,
|
||||
autoCapitalization
|
||||
},
|
||||
{
|
||||
new: true,
|
||||
new: true
|
||||
}
|
||||
);
|
||||
|
||||
return res.status(200).send({
|
||||
message: "Successfully changed autoCapitalization setting",
|
||||
workspace,
|
||||
});
|
||||
return res.status(200).send({
|
||||
message: "Successfully changed autoCapitalization setting",
|
||||
workspace
|
||||
});
|
||||
};
|
||||
|
@ -56,7 +56,7 @@ export const login1 = async (req: Request, res: Response) => {
|
||||
|
||||
if (!user) throw new Error("Failed to find user");
|
||||
|
||||
if (user.authProvider) {
|
||||
if (user.authProvider && user.authProvider !== AuthProvider.EMAIL) {
|
||||
await validateProviderAuthToken({
|
||||
email,
|
||||
user,
|
||||
@ -117,7 +117,7 @@ export const login2 = async (req: Request, res: Response) => {
|
||||
|
||||
if (!user) throw new Error("Failed to find user");
|
||||
|
||||
if (user.authProvider) {
|
||||
if (user.authProvider && user.authProvider !== AuthProvider.EMAIL) {
|
||||
await validateProviderAuthToken({
|
||||
email,
|
||||
user,
|
||||
|
@ -5,6 +5,10 @@ import { eventPushSecrets } from "../../events";
|
||||
import { BotService } from "../../services";
|
||||
import { repackageSecretToRaw } from "../../helpers/secrets";
|
||||
import { encryptSymmetric128BitHexKeyUTF8 } from "../../utils/crypto";
|
||||
import { getAllImportedSecrets } from "../../services/SecretImportService";
|
||||
import Folder from "../../models/folder";
|
||||
import { getFolderByPath } from "../../services/FolderService";
|
||||
import { BadRequestError } from "../../utils/errors";
|
||||
|
||||
/**
|
||||
* Return secrets for workspace with id [workspaceId] and environment
|
||||
@ -16,27 +20,53 @@ export const getSecretsRaw = async (req: Request, res: Response) => {
|
||||
const workspaceId = req.query.workspaceId as string;
|
||||
const environment = req.query.environment as string;
|
||||
const secretPath = req.query.secretPath as string;
|
||||
const includeImports = req.query.include_imports as string;
|
||||
|
||||
const secrets = await SecretService.getSecrets({
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
environment,
|
||||
secretPath,
|
||||
authData: req.authData,
|
||||
authData: req.authData
|
||||
});
|
||||
|
||||
const key = await BotService.getWorkspaceKeyWithBot({
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
workspaceId: new Types.ObjectId(workspaceId)
|
||||
});
|
||||
|
||||
if (includeImports) {
|
||||
const folders = await Folder.findOne({ workspace: workspaceId, environment });
|
||||
let folderId = "root";
|
||||
// if folder exist get it and replace folderid with new one
|
||||
if (folders) {
|
||||
const folder = getFolderByPath(folders.nodes, secretPath as string);
|
||||
if (!folder) {
|
||||
throw BadRequestError({ message: "Folder not found" });
|
||||
}
|
||||
folderId = folder.id;
|
||||
}
|
||||
const importedSecrets = await getAllImportedSecrets(workspaceId, environment, folderId);
|
||||
return res.status(200).send({
|
||||
secrets: secrets.map((secret) =>
|
||||
repackageSecretToRaw({
|
||||
secret,
|
||||
key
|
||||
})
|
||||
),
|
||||
imports: importedSecrets.map((el) => ({
|
||||
...el,
|
||||
secrets: el.secrets.map((secret) => repackageSecretToRaw({ secret, key }))
|
||||
}))
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
secrets: secrets.map((secret) => {
|
||||
const rep = repackageSecretToRaw({
|
||||
secret,
|
||||
key,
|
||||
key
|
||||
});
|
||||
|
||||
return rep;
|
||||
}),
|
||||
})
|
||||
});
|
||||
};
|
||||
|
||||
@ -58,54 +88,47 @@ export const getSecretByNameRaw = async (req: Request, res: Response) => {
|
||||
environment,
|
||||
type,
|
||||
secretPath,
|
||||
authData: req.authData,
|
||||
authData: req.authData
|
||||
});
|
||||
|
||||
const key = await BotService.getWorkspaceKeyWithBot({
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
workspaceId: new Types.ObjectId(workspaceId)
|
||||
});
|
||||
|
||||
return res.status(200).send({
|
||||
secret: repackageSecretToRaw({
|
||||
secret,
|
||||
key,
|
||||
}),
|
||||
key
|
||||
})
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Create secret with name [secretName] in plaintext
|
||||
* @param req
|
||||
* @param res
|
||||
* @param res
|
||||
*/
|
||||
export const createSecretRaw = async (req: Request, res: Response) => {
|
||||
const { secretName } = req.params;
|
||||
const {
|
||||
workspaceId,
|
||||
environment,
|
||||
type,
|
||||
secretValue,
|
||||
secretComment,
|
||||
secretPath = "/",
|
||||
} = req.body;
|
||||
const { workspaceId, environment, type, secretValue, secretComment, secretPath = "/" } = req.body;
|
||||
|
||||
const key = await BotService.getWorkspaceKeyWithBot({
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
workspaceId: new Types.ObjectId(workspaceId)
|
||||
});
|
||||
|
||||
const secretKeyEncrypted = encryptSymmetric128BitHexKeyUTF8({
|
||||
plaintext: secretName,
|
||||
key,
|
||||
key
|
||||
});
|
||||
|
||||
const secretValueEncrypted = encryptSymmetric128BitHexKeyUTF8({
|
||||
plaintext: secretValue,
|
||||
key,
|
||||
key
|
||||
});
|
||||
|
||||
const secretCommentEncrypted = encryptSymmetric128BitHexKeyUTF8({
|
||||
plaintext: secretComment,
|
||||
key,
|
||||
key
|
||||
});
|
||||
|
||||
const secret = await SecretService.createSecret({
|
||||
@ -123,14 +146,15 @@ export const createSecretRaw = async (req: Request, res: Response) => {
|
||||
secretPath,
|
||||
secretCommentCiphertext: secretCommentEncrypted.ciphertext,
|
||||
secretCommentIV: secretCommentEncrypted.iv,
|
||||
secretCommentTag: secretCommentEncrypted.tag,
|
||||
secretCommentTag: secretCommentEncrypted.tag
|
||||
});
|
||||
|
||||
await EventService.handleEvent({
|
||||
event: eventPushSecrets({
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
environment,
|
||||
}),
|
||||
secretPath
|
||||
})
|
||||
});
|
||||
|
||||
const secretWithoutBlindIndex = secret.toObject();
|
||||
@ -139,10 +163,10 @@ export const createSecretRaw = async (req: Request, res: Response) => {
|
||||
return res.status(200).send({
|
||||
secret: repackageSecretToRaw({
|
||||
secret: secretWithoutBlindIndex,
|
||||
key,
|
||||
}),
|
||||
key
|
||||
})
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Update secret with name [secretName]
|
||||
@ -151,21 +175,15 @@ export const createSecretRaw = async (req: Request, res: Response) => {
|
||||
*/
|
||||
export const updateSecretByNameRaw = async (req: Request, res: Response) => {
|
||||
const { secretName } = req.params;
|
||||
const {
|
||||
workspaceId,
|
||||
environment,
|
||||
type,
|
||||
secretValue,
|
||||
secretPath = "/",
|
||||
} = req.body;
|
||||
const { workspaceId, environment, type, secretValue, secretPath = "/" } = req.body;
|
||||
|
||||
const key = await BotService.getWorkspaceKeyWithBot({
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
workspaceId: new Types.ObjectId(workspaceId)
|
||||
});
|
||||
|
||||
const secretValueEncrypted = encryptSymmetric128BitHexKeyUTF8({
|
||||
plaintext: secretValue,
|
||||
key,
|
||||
key
|
||||
});
|
||||
|
||||
const secret = await SecretService.updateSecret({
|
||||
@ -177,21 +195,22 @@ export const updateSecretByNameRaw = async (req: Request, res: Response) => {
|
||||
secretValueCiphertext: secretValueEncrypted.ciphertext,
|
||||
secretValueIV: secretValueEncrypted.iv,
|
||||
secretValueTag: secretValueEncrypted.tag,
|
||||
secretPath,
|
||||
secretPath
|
||||
});
|
||||
|
||||
await EventService.handleEvent({
|
||||
event: eventPushSecrets({
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
environment,
|
||||
}),
|
||||
secretPath
|
||||
})
|
||||
});
|
||||
|
||||
return res.status(200).send({
|
||||
secret: repackageSecretToRaw({
|
||||
secret,
|
||||
key,
|
||||
}),
|
||||
key
|
||||
})
|
||||
});
|
||||
};
|
||||
|
||||
@ -202,12 +221,7 @@ export const updateSecretByNameRaw = async (req: Request, res: Response) => {
|
||||
*/
|
||||
export const deleteSecretByNameRaw = async (req: Request, res: Response) => {
|
||||
const { secretName } = req.params;
|
||||
const {
|
||||
workspaceId,
|
||||
environment,
|
||||
type,
|
||||
secretPath = "/",
|
||||
} = req.body;
|
||||
const { workspaceId, environment, type, secretPath = "/" } = req.body;
|
||||
|
||||
const { secret } = await SecretService.deleteSecret({
|
||||
secretName,
|
||||
@ -215,25 +229,26 @@ export const deleteSecretByNameRaw = async (req: Request, res: Response) => {
|
||||
environment,
|
||||
type,
|
||||
authData: req.authData,
|
||||
secretPath,
|
||||
secretPath
|
||||
});
|
||||
|
||||
await EventService.handleEvent({
|
||||
event: eventPushSecrets({
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
environment,
|
||||
}),
|
||||
secretPath
|
||||
})
|
||||
});
|
||||
|
||||
const key = await BotService.getWorkspaceKeyWithBot({
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
workspaceId: new Types.ObjectId(workspaceId)
|
||||
});
|
||||
|
||||
return res.status(200).send({
|
||||
secret: repackageSecretToRaw({
|
||||
secret,
|
||||
key,
|
||||
}),
|
||||
key
|
||||
})
|
||||
});
|
||||
};
|
||||
|
||||
@ -247,16 +262,35 @@ export const getSecrets = async (req: Request, res: Response) => {
|
||||
const workspaceId = req.query.workspaceId as string;
|
||||
const environment = req.query.environment as string;
|
||||
const secretPath = req.query.secretPath as string;
|
||||
const includeImports = req.query.include_imports as string;
|
||||
|
||||
const secrets = await SecretService.getSecrets({
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
environment,
|
||||
secretPath,
|
||||
authData: req.authData,
|
||||
authData: req.authData
|
||||
});
|
||||
|
||||
if (includeImports) {
|
||||
const folders = await Folder.findOne({ workspace: workspaceId, environment });
|
||||
let folderId = "root";
|
||||
// if folder exist get it and replace folderid with new one
|
||||
if (folders) {
|
||||
const folder = getFolderByPath(folders.nodes, secretPath as string);
|
||||
if (!folder) {
|
||||
throw BadRequestError({ message: "Folder not found" });
|
||||
}
|
||||
folderId = folder.id;
|
||||
}
|
||||
const importedSecrets = await getAllImportedSecrets(workspaceId, environment, folderId);
|
||||
return res.status(200).send({
|
||||
secrets,
|
||||
imports: importedSecrets
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).send({
|
||||
secrets,
|
||||
secrets
|
||||
});
|
||||
};
|
||||
|
||||
@ -278,11 +312,11 @@ export const getSecretByName = async (req: Request, res: Response) => {
|
||||
environment,
|
||||
type,
|
||||
secretPath,
|
||||
authData: req.authData,
|
||||
authData: req.authData
|
||||
});
|
||||
|
||||
return res.status(200).send({
|
||||
secret,
|
||||
secret
|
||||
});
|
||||
};
|
||||
|
||||
@ -306,7 +340,7 @@ export const createSecret = async (req: Request, res: Response) => {
|
||||
secretCommentCiphertext,
|
||||
secretCommentIV,
|
||||
secretCommentTag,
|
||||
secretPath = "/",
|
||||
secretPath = "/"
|
||||
} = req.body;
|
||||
|
||||
const secret = await SecretService.createSecret({
|
||||
@ -324,25 +358,25 @@ export const createSecret = async (req: Request, res: Response) => {
|
||||
secretPath,
|
||||
secretCommentCiphertext,
|
||||
secretCommentIV,
|
||||
secretCommentTag,
|
||||
secretCommentTag
|
||||
});
|
||||
|
||||
await EventService.handleEvent({
|
||||
event: eventPushSecrets({
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
environment,
|
||||
}),
|
||||
secretPath
|
||||
})
|
||||
});
|
||||
|
||||
const secretWithoutBlindIndex = secret.toObject();
|
||||
delete secretWithoutBlindIndex.secretBlindIndex;
|
||||
|
||||
return res.status(200).send({
|
||||
secret: secretWithoutBlindIndex,
|
||||
secret: secretWithoutBlindIndex
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Update secret with name [secretName]
|
||||
* @param req
|
||||
@ -357,7 +391,7 @@ export const updateSecretByName = async (req: Request, res: Response) => {
|
||||
secretValueCiphertext,
|
||||
secretValueIV,
|
||||
secretValueTag,
|
||||
secretPath = "/",
|
||||
secretPath = "/"
|
||||
} = req.body;
|
||||
|
||||
const secret = await SecretService.updateSecret({
|
||||
@ -369,18 +403,19 @@ export const updateSecretByName = async (req: Request, res: Response) => {
|
||||
secretValueCiphertext,
|
||||
secretValueIV,
|
||||
secretValueTag,
|
||||
secretPath,
|
||||
secretPath
|
||||
});
|
||||
|
||||
await EventService.handleEvent({
|
||||
event: eventPushSecrets({
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
environment,
|
||||
}),
|
||||
secretPath
|
||||
})
|
||||
});
|
||||
|
||||
return res.status(200).send({
|
||||
secret,
|
||||
secret
|
||||
});
|
||||
};
|
||||
|
||||
@ -391,12 +426,7 @@ export const updateSecretByName = async (req: Request, res: Response) => {
|
||||
*/
|
||||
export const deleteSecretByName = async (req: Request, res: Response) => {
|
||||
const { secretName } = req.params;
|
||||
const {
|
||||
workspaceId,
|
||||
environment,
|
||||
type,
|
||||
secretPath = "/",
|
||||
} = req.body;
|
||||
const { workspaceId, environment, type, secretPath = "/" } = req.body;
|
||||
|
||||
const { secret } = await SecretService.deleteSecret({
|
||||
secretName,
|
||||
@ -404,17 +434,18 @@ export const deleteSecretByName = async (req: Request, res: Response) => {
|
||||
environment,
|
||||
type,
|
||||
authData: req.authData,
|
||||
secretPath,
|
||||
secretPath
|
||||
});
|
||||
|
||||
await EventService.handleEvent({
|
||||
event: eventPushSecrets({
|
||||
workspaceId: new Types.ObjectId(workspaceId),
|
||||
environment,
|
||||
}),
|
||||
secretPath
|
||||
})
|
||||
});
|
||||
|
||||
return res.status(200).send({
|
||||
secret,
|
||||
secret
|
||||
});
|
||||
};
|
||||
|
@ -12,6 +12,7 @@ import { standardRequest } from "../../config/request";
|
||||
import { getHttpsEnabled, getJwtSignupSecret, getLoopsApiKey } from "../../config";
|
||||
import { BadRequestError } from "../../utils/errors";
|
||||
import { TelemetryService } from "../../services";
|
||||
import { AuthProvider } from "../../models";
|
||||
|
||||
/**
|
||||
* Complete setting up user by adding their personal and auth information as part of the
|
||||
@ -116,11 +117,13 @@ export const completeAccountSignup = async (req: Request, res: Response) => {
|
||||
if (!user)
|
||||
throw new Error("Failed to complete account for non-existent user"); // ensure user is non-null
|
||||
|
||||
// initialize default organization and workspace
|
||||
await initializeDefaultOrg({
|
||||
organizationName,
|
||||
user,
|
||||
});
|
||||
if (user.authProvider !== AuthProvider.OKTA_SAML) {
|
||||
// initialize default organization and workspace
|
||||
await initializeDefaultOrg({
|
||||
organizationName,
|
||||
user,
|
||||
});
|
||||
}
|
||||
|
||||
// update organization membership statuses that are
|
||||
// invited to completed with user attached
|
||||
@ -174,7 +177,7 @@ export const completeAccountSignup = async (req: Request, res: Response) => {
|
||||
distinctId: email,
|
||||
properties: {
|
||||
email,
|
||||
attributionSource,
|
||||
...(attributionSource ? { attributionSource } : {})
|
||||
},
|
||||
});
|
||||
}
|
||||
|
@ -1,6 +1,7 @@
|
||||
import * as secretController from "./secretController";
|
||||
import * as secretSnapshotController from "./secretSnapshotController";
|
||||
import * as organizationsController from "./organizationsController";
|
||||
import * as ssoController from "./ssoController";
|
||||
import * as workspaceController from "./workspaceController";
|
||||
import * as actionController from "./actionController";
|
||||
import * as membershipController from "./membershipController";
|
||||
@ -10,6 +11,7 @@ export {
|
||||
secretController,
|
||||
secretSnapshotController,
|
||||
organizationsController,
|
||||
ssoController,
|
||||
workspaceController,
|
||||
actionController,
|
||||
membershipController,
|
||||
|
@ -27,6 +27,30 @@ export const getOrganizationPlan = async (req: Request, res: Response) => {
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return checkout url for pro trial
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const startOrganizationTrial = async (req: Request, res: Response) => {
|
||||
const { organizationId } = req.params;
|
||||
const { success_url } = req.body;
|
||||
|
||||
const { data: { url } } = await licenseServerKeyRequest.post(
|
||||
`${await getLicenseServerUrl()}/api/license-server/v1/customers/${req.organization.customerId}/session/trial`,
|
||||
{
|
||||
success_url
|
||||
}
|
||||
);
|
||||
|
||||
EELicenseService.delPlan(organizationId);
|
||||
|
||||
return res.status(200).send({
|
||||
url
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the organization's current plan's billing info
|
||||
* @param req
|
||||
@ -154,6 +178,12 @@ export const addOrganizationTaxId = async (req: Request, res: Response) => {
|
||||
return res.status(200).send(data);
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete tax id with id [taxId] from organization tax ids on file
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const deleteOrganizationTaxId = async (req: Request, res: Response) => {
|
||||
const { taxId } = req.params;
|
||||
|
||||
@ -164,6 +194,12 @@ export const deleteOrganizationTaxId = async (req: Request, res: Response) => {
|
||||
return res.status(200).send(data);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return organization's invoices on file
|
||||
* @param req
|
||||
* @param res
|
||||
* @returns
|
||||
*/
|
||||
export const getOrganizationInvoices = async (req: Request, res: Response) => {
|
||||
const { data: { invoices } } = await licenseServerKeyRequest.get(
|
||||
`${await getLicenseServerUrl()}/api/license-server/v1/customers/${req.organization.customerId}/invoices`
|
||||
|
254
backend/src/ee/controllers/v1/ssoController.ts
Normal file
@ -0,0 +1,254 @@
import { Request, Response } from "express";
import { Types } from "mongoose";
import { BotOrgService } from "../../../services";
import { SSOConfig } from "../../models";
import {
  MembershipOrg,
  User
} from "../../../models";
import { getSSOConfigHelper } from "../../helpers/organizations";
import { client } from "../../../config";
import { ResourceNotFoundError } from "../../../utils/errors";
import { getSiteURL } from "../../../config";

/**
 * Redirect user to appropriate SSO endpoint after successful authentication
 * to finish inputting their master key for logging in or signing up
 * @param req
 * @param res
 * @returns
 */
export const redirectSSO = async (req: Request, res: Response) => {
  if (req.isUserCompleted) {
    return res.redirect(`${await getSiteURL()}/login/sso?token=${encodeURIComponent(req.providerAuthToken)}`);
  }

  return res.redirect(`${await getSiteURL()}/signup/sso?token=${encodeURIComponent(req.providerAuthToken)}`);
}

/**
 * Return organization SAML SSO configuration
 * @param req
 * @param res
 * @returns
 */
export const getSSOConfig = async (req: Request, res: Response) => {
  const organizationId = req.query.organizationId as string;

  const data = await getSSOConfigHelper({
    organizationId: new Types.ObjectId(organizationId)
  });

  return res.status(200).send(data);
}

/**
 * Update organization SAML SSO configuration
 * @param req
 * @param res
 * @returns
 */
export const updateSSOConfig = async (req: Request, res: Response) => {
  const {
    organizationId,
    authProvider,
    isActive,
    entryPoint,
    issuer,
    cert,
    audience
  } = req.body;

  interface PatchUpdate {
    authProvider?: string;
    isActive?: boolean;
    encryptedEntryPoint?: string;
    entryPointIV?: string;
    entryPointTag?: string;
    encryptedIssuer?: string;
    issuerIV?: string;
    issuerTag?: string;
    encryptedCert?: string;
    certIV?: string;
    certTag?: string;
    encryptedAudience?: string;
    audienceIV?: string;
    audienceTag?: string;
  }

  const update: PatchUpdate = {};

  if (authProvider) {
    update.authProvider = authProvider;
  }

  if (isActive !== undefined) {
    update.isActive = isActive;
  }

  const key = await BotOrgService.getSymmetricKey(
    new Types.ObjectId(organizationId)
  );

  if (entryPoint) {
    const {
      ciphertext: encryptedEntryPoint,
      iv: entryPointIV,
      tag: entryPointTag
    } = client.encryptSymmetric(entryPoint, key);

    update.encryptedEntryPoint = encryptedEntryPoint;
    update.entryPointIV = entryPointIV;
    update.entryPointTag = entryPointTag;
  }

  if (issuer) {
    const {
      ciphertext: encryptedIssuer,
      iv: issuerIV,
      tag: issuerTag
    } = client.encryptSymmetric(issuer, key);

    update.encryptedIssuer = encryptedIssuer;
    update.issuerIV = issuerIV;
    update.issuerTag = issuerTag;
  }

  if (cert) {
    const {
      ciphertext: encryptedCert,
      iv: certIV,
      tag: certTag
    } = client.encryptSymmetric(cert, key);

    update.encryptedCert = encryptedCert;
    update.certIV = certIV;
    update.certTag = certTag;
  }

  if (audience) {
    const {
      ciphertext: encryptedAudience,
      iv: audienceIV,
      tag: audienceTag
    } = client.encryptSymmetric(audience, key);

    update.encryptedAudience = encryptedAudience;
    update.audienceIV = audienceIV;
    update.audienceTag = audienceTag;
  }

  const ssoConfig = await SSOConfig.findOneAndUpdate(
    {
      organization: new Types.ObjectId(organizationId)
    },
    update,
    {
      new: true
    }
  );

  if (!ssoConfig) throw ResourceNotFoundError({
    message: "Failed to find SSO config to update"
  });

  if (update.isActive !== undefined) {
    const membershipOrgs = await MembershipOrg.find({
      organization: new Types.ObjectId(organizationId)
    }).select("user");

    if (update.isActive) {
      await User.updateMany(
        {
          _id: {
            $in: membershipOrgs.map((membershipOrg) => membershipOrg.user)
          }
        },
        {
          authProvider: ssoConfig.authProvider
        }
      );
    } else {
      await User.updateMany(
        {
          _id: {
            $in: membershipOrgs.map((membershipOrg) => membershipOrg.user)
          }
        },
        {
          $unset: {
            authProvider: 1
          }
        }
      );
    }
  }

  return res.status(200).send(ssoConfig);
}

/**
 * Create organization SAML SSO configuration
 * @param req
 * @param res
 * @returns
 */
export const createSSOConfig = async (req: Request, res: Response) => {
  const {
    organizationId,
    authProvider,
    isActive,
    entryPoint,
    issuer,
    cert,
    audience
  } = req.body;

  const key = await BotOrgService.getSymmetricKey(
    new Types.ObjectId(organizationId)
  );

  const {
    ciphertext: encryptedEntryPoint,
    iv: entryPointIV,
    tag: entryPointTag
  } = client.encryptSymmetric(entryPoint, key);

  const {
    ciphertext: encryptedIssuer,
    iv: issuerIV,
    tag: issuerTag
  } = client.encryptSymmetric(issuer, key);

  const {
    ciphertext: encryptedCert,
    iv: certIV,
    tag: certTag
  } = client.encryptSymmetric(cert, key);

  const {
    ciphertext: encryptedAudience,
    iv: audienceIV,
    tag: audienceTag
  } = client.encryptSymmetric(audience, key);

  const ssoConfig = await new SSOConfig({
    organization: new Types.ObjectId(organizationId),
    authProvider,
    isActive,
    encryptedEntryPoint,
    entryPointIV,
    entryPointTag,
    encryptedIssuer,
    issuerIV,
    issuerTag,
    encryptedCert,
    certIV,
    certTag,
    encryptedAudience,
    audienceIV,
    audienceTag
  }).save();

  return res.status(200).send(ssoConfig);
}
72
backend/src/ee/helpers/organizations.ts
Normal file
@ -0,0 +1,72 @@
import { Types } from "mongoose";
import {
  SSOConfig
} from "../models";
import {
  BotOrgService
} from "../../services";
import { client } from "../../config";
import { ValidationError } from "../../utils/errors";

export const getSSOConfigHelper = async ({
  organizationId,
  ssoConfigId
}: {
  organizationId?: Types.ObjectId;
  ssoConfigId?: Types.ObjectId;
}) => {

  if (!organizationId && !ssoConfigId) throw ValidationError({
    message: "Getting SSO data requires either id of organization or SSO data"
  });

  const ssoConfig = await SSOConfig.findOne({
    ...(organizationId ? { organization: organizationId } : {}),
    ...(ssoConfigId ? { _id: ssoConfigId } : {})
  });

  if (!ssoConfig) throw new Error("Failed to find organization SSO data");

  const key = await BotOrgService.getSymmetricKey(
    ssoConfig.organization
  );

  const entryPoint = client.decryptSymmetric(
    ssoConfig.encryptedEntryPoint,
    key,
    ssoConfig.entryPointIV,
    ssoConfig.entryPointTag
  );

  const issuer = client.decryptSymmetric(
    ssoConfig.encryptedIssuer,
    key,
    ssoConfig.issuerIV,
    ssoConfig.issuerTag
  );

  const cert = client.decryptSymmetric(
    ssoConfig.encryptedCert,
    key,
    ssoConfig.certIV,
    ssoConfig.certTag
  );

  const audience = client.decryptSymmetric(
    ssoConfig.encryptedAudience,
    key,
    ssoConfig.audienceIV,
    ssoConfig.audienceTag
  );

  return ({
    _id: ssoConfig._id,
    organization: ssoConfig.organization,
    authProvider: ssoConfig.authProvider,
    isActive: ssoConfig.isActive,
    entryPoint,
    issuer,
    cert,
    audience
  });
}
@ -1,7 +1,5 @@
|
||||
import requireLicenseAuth from "./requireLicenseAuth";
|
||||
import requireSecretSnapshotAuth from "./requireSecretSnapshotAuth";
|
||||
|
||||
export {
|
||||
requireLicenseAuth,
|
||||
requireSecretSnapshotAuth,
|
||||
}
|
@ -1,23 +0,0 @@
|
||||
import { NextFunction, Request, Response } from "express";
|
||||
|
||||
/**
|
||||
* Validate if organization hosting meets license requirements to
|
||||
* access a license-specific route.
|
||||
* @param {Object} obj
|
||||
* @param {String[]} obj.acceptedTiers
|
||||
*/
|
||||
const requireLicenseAuth = ({
|
||||
acceptedTiers,
|
||||
}: {
|
||||
acceptedTiers: string[];
|
||||
}) => {
|
||||
return async (req: Request, res: Response, next: NextFunction) => {
|
||||
try {
|
||||
|
||||
} catch (err) {
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default requireLicenseAuth;
|
@ -3,6 +3,7 @@ import SecretVersion, { ISecretVersion } from "./secretVersion";
|
||||
import FolderVersion, { TFolderRootVersionSchema } from "./folderVersion";
|
||||
import Log, { ILog } from "./log";
|
||||
import Action, { IAction } from "./action";
|
||||
import SSOConfig, { ISSOConfig } from "./ssoConfig";
|
||||
|
||||
export {
|
||||
SecretSnapshot,
|
||||
@ -15,4 +16,6 @@ export {
|
||||
ILog,
|
||||
Action,
|
||||
IAction,
|
||||
SSOConfig,
|
||||
ISSOConfig
|
||||
};
|
||||
|
@ -63,9 +63,10 @@ const logSchema = new Schema<ILog>(
|
||||
ipAddress: {
|
||||
type: String,
|
||||
},
|
||||
}, {
|
||||
timestamps: true,
|
||||
}
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
}
|
||||
);
|
||||
|
||||
const Log = model<ILog>("Log", logSchema);
|
||||
|
82
backend/src/ee/models/ssoConfig.ts
Normal file
@ -0,0 +1,82 @@
import { Schema, Types, model } from "mongoose";

export interface ISSOConfig {
  organization: Types.ObjectId;
  authProvider: "okta-saml"
  isActive: boolean;
  encryptedEntryPoint: string;
  entryPointIV: string;
  entryPointTag: string;
  encryptedIssuer: string;
  issuerIV: string;
  issuerTag: string;
  encryptedCert: string;
  certIV: string;
  certTag: string;
  encryptedAudience: string;
  audienceIV: string;
  audienceTag: string;
}

const ssoConfigSchema = new Schema<ISSOConfig>(
  {
    organization: {
      type: Schema.Types.ObjectId,
      ref: "Organization"
    },
    authProvider: {
      type: String,
      enum: [
        "okta-saml"
      ],
      required: true
    },
    isActive: {
      type: Boolean,
      required: true
    },
    encryptedEntryPoint: {
      type: String
    },
    entryPointIV: {
      type: String
    },
    entryPointTag: {
      type: String
    },
    encryptedIssuer: {
      type: String
    },
    issuerIV: {
      type: String
    },
    issuerTag: {
      type: String
    },
    encryptedCert: {
      type: String
    },
    certIV: {
      type: String
    },
    certTag: {
      type: String
    },
    encryptedAudience: {
      type: String
    },
    audienceIV: {
      type: String
    },
    audienceTag: {
      type: String
    }
  },
  {
    timestamps: true
  }
);

const SSOConfig = model<ISSOConfig>("SSOConfig", ssoConfigSchema);

export default SSOConfig;
@ -1,6 +1,7 @@
|
||||
import secret from "./secret";
|
||||
import secretSnapshot from "./secretSnapshot";
|
||||
import organizations from "./organizations";
|
||||
import sso from "./sso";
|
||||
import workspace from "./workspace";
|
||||
import action from "./action";
|
||||
import cloudProducts from "./cloudProducts";
|
||||
@ -9,6 +10,7 @@ export {
|
||||
secret,
|
||||
secretSnapshot,
|
||||
organizations,
|
||||
sso,
|
||||
workspace,
|
||||
action,
|
||||
cloudProducts,
|
||||
|
@ -41,6 +41,21 @@ router.get(
|
||||
organizationsController.getOrganizationPlan
|
||||
);
|
||||
|
||||
router.post(
|
||||
"/:organizationId/session/trial",
|
||||
requireAuth({
|
||||
acceptedAuthModes: ["jwt"],
|
||||
}),
|
||||
requireOrganizationAuth({
|
||||
acceptedRoles: [OWNER, ADMIN, MEMBER],
|
||||
acceptedStatuses: [ACCEPTED],
|
||||
}),
|
||||
param("organizationId").exists().trim(),
|
||||
body("success_url").exists().trim(),
|
||||
validateRequest,
|
||||
organizationsController.startOrganizationTrial
|
||||
);
|
||||
|
||||
router.get(
|
||||
"/:organizationId/plan/billing",
|
||||
requireAuth({
|
||||
|
110
backend/src/ee/routes/v1/sso.ts
Normal file
@ -0,0 +1,110 @@
import express from "express";
const router = express.Router();
import passport from "passport";
import {
  requireAuth,
  requireOrganizationAuth,
  validateRequest,
} from "../../../middleware";
import { body, query } from "express-validator";
import { ssoController } from "../../controllers/v1";
import { authLimiter } from "../../../helpers/rateLimiter";
import {
  ACCEPTED,
  ADMIN,
  OWNER
} from "../../../variables";

router.get(
  "/redirect/google",
  authLimiter,
  passport.authenticate("google", {
    scope: ["profile", "email"],
    session: false,
  })
);

router.get(
  "/google",
  passport.authenticate("google", {
    failureRedirect: "/login/provider/error",
    session: false
  }),
  ssoController.redirectSSO
);

router.get(
  "/redirect/saml2/:ssoIdentifier",
  authLimiter,
  passport.authenticate("saml", {
    failureRedirect: "/login/fail"
  })
);

router.post("/saml2/:ssoIdentifier",
  passport.authenticate("saml", {
    failureRedirect: "/login/provider/error",
    failureFlash: true,
    session: false
  }),
  ssoController.redirectSSO
);

router.get(
  "/config",
  requireAuth({
    acceptedAuthModes: ["jwt"],
  }),
  requireOrganizationAuth({
    acceptedRoles: [OWNER, ADMIN],
    acceptedStatuses: [ACCEPTED],
    locationOrganizationId: "query"
  }),
  query("organizationId").exists().trim(),
  validateRequest,
  ssoController.getSSOConfig
);

router.post(
  "/config",
  requireAuth({
    acceptedAuthModes: ["jwt"],
  }),
  requireOrganizationAuth({
    acceptedRoles: [OWNER, ADMIN],
    acceptedStatuses: [ACCEPTED],
    locationOrganizationId: "body"
  }),
  body("organizationId").exists().trim(),
  body("authProvider").exists().isString(),
  body("isActive").exists().isBoolean(),
  body("entryPoint").exists().isString(),
  body("issuer").exists().isString(),
  body("cert").exists().isString(),
  body("audience").exists().isString(),
  validateRequest,
  ssoController.createSSOConfig
);

router.patch(
  "/config",
  requireAuth({
    acceptedAuthModes: ["jwt"],
  }),
  requireOrganizationAuth({
    acceptedRoles: [OWNER, ADMIN],
    acceptedStatuses: [ACCEPTED],
    locationOrganizationId: "body"
  }),
  body("organizationId").exists().trim(),
  body("authProvider").optional().isString(),
  body("isActive").optional().isBoolean(),
  body("entryPoint").optional().isString(),
  body("issuer").optional().isString(),
  body("cert").optional().isString(),
  body("audience").optional().isString(),
  validateRequest,
  ssoController.updateSSOConfig
);

export default router;
@ -30,8 +30,10 @@ interface FeatureSet {
|
||||
customRateLimits: boolean;
|
||||
customAlerts: boolean;
|
||||
auditLogs: boolean;
|
||||
status: 'incomplete' | 'incomplete_expired' | 'trialing' | 'active' | 'past_due' | 'canceled' | 'unpaid' | null;
|
||||
samlSSO: boolean;
|
||||
status: "incomplete" | "incomplete_expired" | "trialing" | "active" | "past_due" | "canceled" | "unpaid" | null;
|
||||
trial_end: number | null;
|
||||
has_used_trial: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -62,8 +64,10 @@ class EELicenseService {
|
||||
customRateLimits: true,
|
||||
customAlerts: true,
|
||||
auditLogs: false,
|
||||
samlSSO: false,
|
||||
status: null,
|
||||
trial_end: null
|
||||
trial_end: null,
|
||||
has_used_trial: true
|
||||
}
|
||||
|
||||
public localFeatureSet: NodeCache;
|
||||
@ -71,7 +75,7 @@ class EELicenseService {
|
||||
constructor() {
|
||||
this._isLicenseValid = true;
|
||||
this.localFeatureSet = new NodeCache({
|
||||
stdTTL: 300,
|
||||
stdTTL: 60,
|
||||
});
|
||||
}
|
||||
|
||||
@ -112,6 +116,12 @@ class EELicenseService {
|
||||
await this.getPlan(organizationId, workspaceId);
|
||||
}
|
||||
}
|
||||
|
||||
public async delPlan(organizationId: string) {
|
||||
if (this.instanceType === "cloud") {
|
||||
this.localFeatureSet.del(`${organizationId}-`);
|
||||
}
|
||||
}
|
||||
|
||||
public async initGlobalFeatureSet() {
|
||||
const licenseServerKey = await getLicenseServerKey();
|
||||
|
@ -1,5 +1,4 @@
|
||||
import { eventPushSecrets } from "./secret"
|
||||
import { eventPushSecrets } from "./secret";
|
||||
import { eventStartIntegration } from "./integration";
|
||||
|
||||
export {
|
||||
eventPushSecrets,
|
||||
}
|
||||
export { eventPushSecrets, eventStartIntegration };
|
||||
|
23
backend/src/events/integration.ts
Normal file
@ -0,0 +1,23 @@
import { Types } from "mongoose";
import { EVENT_START_INTEGRATION } from "../variables";

/*
 * Return event for starting integrations
 * @param {Object} obj
 * @param {String} obj.workspaceId - id of workspace to push secrets to
 * @returns
 */
export const eventStartIntegration = ({
  workspaceId,
  environment
}: {
  workspaceId: Types.ObjectId;
  environment: string;
}) => {
  return {
    name: EVENT_START_INTEGRATION,
    workspaceId,
    environment,
    payload: {}
  };
};
@ -1,64 +1,54 @@
|
||||
import { Types } from "mongoose";
|
||||
import {
|
||||
EVENT_PULL_SECRETS,
|
||||
EVENT_PUSH_SECRETS,
|
||||
} from "../variables";
|
||||
import { EVENT_PULL_SECRETS, EVENT_PUSH_SECRETS } from "../variables";
|
||||
|
||||
interface PushSecret {
|
||||
ciphertextKey: string;
|
||||
ivKey: string;
|
||||
tagKey: string;
|
||||
hashKey: string;
|
||||
ciphertextValue: string;
|
||||
ivValue: string;
|
||||
tagValue: string;
|
||||
hashValue: string;
|
||||
type: "shared" | "personal";
|
||||
ciphertextKey: string;
|
||||
ivKey: string;
|
||||
tagKey: string;
|
||||
hashKey: string;
|
||||
ciphertextValue: string;
|
||||
ivValue: string;
|
||||
tagValue: string;
|
||||
hashValue: string;
|
||||
type: "shared" | "personal";
|
||||
}
|
||||
|
||||
/**
|
||||
* Return event for pushing secrets
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.workspaceId - id of workspace to push secrets to
|
||||
* @returns
|
||||
* @returns
|
||||
*/
|
||||
const eventPushSecrets = ({
|
||||
workspaceId,
|
||||
environment,
|
||||
secretPath
|
||||
}: {
|
||||
workspaceId: Types.ObjectId;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
}) => {
|
||||
return {
|
||||
name: EVENT_PUSH_SECRETS,
|
||||
workspaceId,
|
||||
environment,
|
||||
}: {
|
||||
workspaceId: Types.ObjectId;
|
||||
environment?: string;
|
||||
}) => {
|
||||
return ({
|
||||
name: EVENT_PUSH_SECRETS,
|
||||
workspaceId,
|
||||
environment,
|
||||
payload: {
|
||||
|
||||
},
|
||||
});
|
||||
}
|
||||
secretPath,
|
||||
payload: {}
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Return event for pulling secrets
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.workspaceId - id of workspace to pull secrets from
|
||||
* @returns
|
||||
* @returns
|
||||
*/
|
||||
const eventPullSecrets = ({
|
||||
const eventPullSecrets = ({ workspaceId }: { workspaceId: string }) => {
|
||||
return {
|
||||
name: EVENT_PULL_SECRETS,
|
||||
workspaceId,
|
||||
}: {
|
||||
workspaceId: string;
|
||||
}) => {
|
||||
return ({
|
||||
name: EVENT_PULL_SECRETS,
|
||||
workspaceId,
|
||||
payload: {
|
||||
payload: {}
|
||||
};
|
||||
};
|
||||
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
export {
|
||||
eventPushSecrets,
|
||||
}
|
||||
export { eventPushSecrets };
|
||||
|
46
backend/src/helpers/botOrg.ts
Normal file
@ -0,0 +1,46 @@
import { Types } from "mongoose";
import { client, getEncryptionKey, getRootEncryptionKey } from "../config";
import { BotOrg } from "../models";
import { decryptSymmetric128BitHexKeyUTF8 } from "../utils/crypto";
import {
  ENCODING_SCHEME_BASE64,
  ENCODING_SCHEME_UTF8
} from "../variables";
import { InternalServerError } from "../utils/errors";

// TODO: DOCstrings

export const getSymmetricKeyHelper = async (organizationId: Types.ObjectId) => {
  const rootEncryptionKey = await getRootEncryptionKey();
  const encryptionKey = await getEncryptionKey();

  const botOrg = await BotOrg.findOne({
    organization: organizationId
  });

  if (!botOrg) throw new Error("Failed to find organization bot");

  if (rootEncryptionKey && botOrg.symmetricKeyKeyEncoding == ENCODING_SCHEME_BASE64) {
    const key = client.decryptSymmetric(
      botOrg.encryptedSymmetricKey,
      rootEncryptionKey,
      botOrg.symmetricKeyIV,
      botOrg.symmetricKeyTag
    );

    return key;
  } else if (encryptionKey && botOrg.symmetricKeyKeyEncoding === ENCODING_SCHEME_UTF8) {
    const key = decryptSymmetric128BitHexKeyUTF8({
      ciphertext: botOrg.encryptedSymmetricKey,
      iv: botOrg.symmetricKeyIV,
      tag: botOrg.symmetricKeyTag,
      key: encryptionKey
    });

    return key;
  }

  throw InternalServerError({
    message: "Failed to match encryption key with organization bot symmetric key encoding"
  });
}
@ -1,12 +1,14 @@
|
||||
import { Types } from "mongoose";
|
||||
import { Bot } from "../models";
|
||||
import { EVENT_PUSH_SECRETS } from "../variables";
|
||||
import { EVENT_PUSH_SECRETS, EVENT_START_INTEGRATION } from "../variables";
|
||||
import { IntegrationService } from "../services";
|
||||
import { triggerWebhook } from "../services/WebhookService";
|
||||
|
||||
interface Event {
|
||||
name: string;
|
||||
workspaceId: Types.ObjectId;
|
||||
environment?: string;
|
||||
secretPath?: string;
|
||||
payload: any;
|
||||
}
|
||||
|
||||
@ -19,22 +21,31 @@ interface Event {
|
||||
* @param {Object} obj.event.payload - payload of event (depends on event)
|
||||
*/
|
||||
export const handleEventHelper = async ({ event }: { event: Event }) => {
|
||||
const { workspaceId, environment } = event;
|
||||
const { workspaceId, environment, secretPath } = event;
|
||||
|
||||
// TODO: modularize bot check into separate function
|
||||
const bot = await Bot.findOne({
|
||||
workspace: workspaceId,
|
||||
isActive: true,
|
||||
isActive: true
|
||||
});
|
||||
|
||||
if (!bot) return;
|
||||
|
||||
switch (event.name) {
|
||||
case EVENT_PUSH_SECRETS:
|
||||
IntegrationService.syncIntegrations({
|
||||
workspaceId,
|
||||
environment,
|
||||
});
|
||||
if (bot) {
|
||||
await IntegrationService.syncIntegrations({
|
||||
workspaceId,
|
||||
environment
|
||||
});
|
||||
}
|
||||
triggerWebhook(workspaceId.toString(), environment || "", secretPath || "");
|
||||
break;
|
||||
case EVENT_START_INTEGRATION:
|
||||
if (bot) {
|
||||
IntegrationService.syncIntegrations({
|
||||
workspaceId,
|
||||
environment
|
||||
});
|
||||
}
|
||||
break;
|
||||
}
|
||||
};
|
||||
};
|
||||
|
@ -9,6 +9,7 @@ import {
|
||||
INTEGRATION_VERCEL,
|
||||
} from "../variables";
|
||||
import { UnauthorizedRequestError } from "../utils/errors";
|
||||
import * as Sentry from "@sentry/node";
|
||||
|
||||
interface Update {
|
||||
workspace: string;
|
||||
@ -115,46 +116,52 @@ export const syncIntegrationsHelper = async ({
|
||||
workspaceId: Types.ObjectId;
|
||||
environment?: string;
|
||||
}) => {
|
||||
const integrations = await Integration.find({
|
||||
workspace: workspaceId,
|
||||
...(environment
|
||||
? {
|
||||
try {
|
||||
const integrations = await Integration.find({
|
||||
workspace: workspaceId,
|
||||
...(environment
|
||||
? {
|
||||
environment,
|
||||
}
|
||||
: {}),
|
||||
isActive: true,
|
||||
app: { $ne: null },
|
||||
});
|
||||
|
||||
// for each workspace integration, sync/push secrets
|
||||
// to that integration
|
||||
for await (const integration of integrations) {
|
||||
// get workspace, environment (shared) secrets
|
||||
const secrets = await BotService.getSecrets({
|
||||
// issue here?
|
||||
workspaceId: integration.workspace,
|
||||
environment: integration.environment,
|
||||
secretPath: integration.secretPath,
|
||||
: {}),
|
||||
isActive: true,
|
||||
app: { $ne: null },
|
||||
});
|
||||
|
||||
const integrationAuth = await IntegrationAuth.findById(
|
||||
integration.integrationAuth
|
||||
);
|
||||
if (!integrationAuth) throw new Error("Failed to find integration auth");
|
||||
// for each workspace integration, sync/push secrets
|
||||
// to that integration
|
||||
for await (const integration of integrations) {
|
||||
// get workspace, environment (shared) secrets
|
||||
const secrets = await BotService.getSecrets({
|
||||
// issue here?
|
||||
workspaceId: integration.workspace,
|
||||
environment: integration.environment,
|
||||
secretPath: integration.secretPath,
|
||||
});
|
||||
|
||||
// get integration auth access token
|
||||
const access = await getIntegrationAuthAccessHelper({
|
||||
integrationAuthId: integration.integrationAuth,
|
||||
});
|
||||
const integrationAuth = await IntegrationAuth.findById(
|
||||
integration.integrationAuth
|
||||
);
|
||||
if (!integrationAuth) throw new Error("Failed to find integration auth");
|
||||
|
||||
// sync secrets to integration
|
||||
await syncSecrets({
|
||||
integration,
|
||||
integrationAuth,
|
||||
secrets,
|
||||
accessId: access.accessId === undefined ? null : access.accessId,
|
||||
accessToken: access.accessToken,
|
||||
});
|
||||
// get integration auth access token
|
||||
const access = await getIntegrationAuthAccessHelper({
|
||||
integrationAuthId: integration.integrationAuth,
|
||||
});
|
||||
|
||||
// sync secrets to integration
|
||||
await syncSecrets({
|
||||
integration,
|
||||
integrationAuth,
|
||||
secrets,
|
||||
accessId: access.accessId === undefined ? null : access.accessId,
|
||||
accessToken: access.accessToken,
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.captureException(err);
|
||||
console.log(`syncIntegrationsHelper: failed with [workspaceId=${workspaceId}] [environment=${environment}]`, err) // eslint-disable-line no-use-before-define
|
||||
throw err
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -5,11 +5,12 @@ import express from "express";
|
||||
require("express-async-errors");
|
||||
import helmet from "helmet";
|
||||
import cors from "cors";
|
||||
import { DatabaseService } from "./services";
|
||||
import { DatabaseService, GithubSecretScanningService } from "./services";
|
||||
import { EELicenseService } from "./ee/services";
|
||||
import { setUpHealthEndpoint } from "./services/health";
|
||||
import cookieParser from "cookie-parser";
|
||||
import swaggerUi = require("swagger-ui-express");
|
||||
import { Probot, createNodeMiddleware } from "probot";
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const swaggerFile = require("../spec.json");
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
@ -18,9 +19,10 @@ import {
|
||||
action as eeActionRouter,
|
||||
cloudProducts as eeCloudProductsRouter,
|
||||
organizations as eeOrganizationsRouter,
|
||||
sso as eeSSORouter,
|
||||
secret as eeSecretRouter,
|
||||
secretSnapshot as eeSecretSnapshotRouter,
|
||||
workspace as eeWorkspaceRouter,
|
||||
workspace as eeWorkspaceRouter
|
||||
} from "./ee/routes/v1";
|
||||
import {
|
||||
auth as v1AuthRouter,
|
||||
@ -33,41 +35,46 @@ import {
|
||||
membership as v1MembershipRouter,
|
||||
organization as v1OrganizationRouter,
|
||||
password as v1PasswordRouter,
|
||||
secretImport as v1SecretImportRouter,
|
||||
secret as v1SecretRouter,
|
||||
secretScanning as v1SecretScanningRouter,
|
||||
secretsFolder as v1SecretsFolder,
|
||||
serviceToken as v1ServiceTokenRouter,
|
||||
signup as v1SignupRouter,
|
||||
userAction as v1UserActionRouter,
|
||||
user as v1UserRouter,
|
||||
workspace as v1WorkspaceRouter,
|
||||
webhooks as v1WebhooksRouter,
|
||||
workspace as v1WorkspaceRouter
|
||||
} from "./routes/v1";
|
||||
import {
|
||||
signup as v2SignupRouter,
|
||||
auth as v2AuthRouter,
|
||||
users as v2UsersRouter,
|
||||
environment as v2EnvironmentRouter,
|
||||
organizations as v2OrganizationsRouter,
|
||||
workspace as v2WorkspaceRouter,
|
||||
secret as v2SecretRouter, // begin to phase out
|
||||
secrets as v2SecretsRouter,
|
||||
serviceTokenData as v2ServiceTokenDataRouter,
|
||||
serviceAccounts as v2ServiceAccountsRouter,
|
||||
environment as v2EnvironmentRouter,
|
||||
serviceTokenData as v2ServiceTokenDataRouter,
|
||||
signup as v2SignupRouter,
|
||||
tags as v2TagsRouter,
|
||||
users as v2UsersRouter,
|
||||
workspace as v2WorkspaceRouter,
|
||||
} from "./routes/v2";
|
||||
import {
|
||||
auth as v3AuthRouter,
|
||||
secrets as v3SecretsRouter,
|
||||
signup as v3SignupRouter,
|
||||
workspaces as v3WorkspacesRouter,
|
||||
workspaces as v3WorkspacesRouter
|
||||
} from "./routes/v3";
|
||||
import { healthCheck } from "./routes/status";
|
||||
import { getLogger } from "./utils/logger";
|
||||
import { RouteNotFoundError } from "./utils/errors";
|
||||
import { requestErrorHandler } from "./middleware/requestErrorHandler";
|
||||
import { getNodeEnv, getPort, getSiteURL } from "./config";
|
||||
import { getNodeEnv, getPort, getSecretScanningGitAppId, getSecretScanningPrivateKey, getSecretScanningWebhookProxy, getSecretScanningWebhookSecret, getSiteURL } from "./config";
|
||||
import { setup } from "./utils/setup";
|
||||
const SmeeClient = require('smee-client') // eslint-disable-line
|
||||
|
||||
const main = async () => {
|
||||
|
||||
await setup();
|
||||
|
||||
await EELicenseService.initGlobalFeatureSet();
|
||||
@ -75,14 +82,35 @@ const main = async () => {
|
||||
const app = express();
|
||||
app.enable("trust proxy");
|
||||
app.use(express.json());
|
||||
app.use(express.urlencoded({ extended: false }));
|
||||
app.use(cookieParser());
|
||||
app.use(
|
||||
cors({
|
||||
credentials: true,
|
||||
origin: await getSiteURL(),
|
||||
origin: await getSiteURL()
|
||||
})
|
||||
);
|
||||
|
||||
if (await getSecretScanningGitAppId() && await getSecretScanningWebhookSecret() && await getSecretScanningPrivateKey()) {
|
||||
const probot = new Probot({
|
||||
appId: await getSecretScanningGitAppId(),
|
||||
privateKey: await getSecretScanningPrivateKey(),
|
||||
secret: await getSecretScanningWebhookSecret(),
|
||||
});
|
||||
|
||||
if ((await getNodeEnv()) != "production") {
|
||||
const smee = new SmeeClient({
|
||||
source: await getSecretScanningWebhookProxy(),
|
||||
target: "http://backend:4000/ss-webhook",
|
||||
logger: console
|
||||
})
|
||||
|
||||
smee.start()
|
||||
}
|
||||
|
||||
app.use(createNodeMiddleware(GithubSecretScanningService, { probot, webhooksPath: "/ss-webhook" })); // secret scanning webhook
|
||||
}
|
||||
|
||||
if ((await getNodeEnv()) === "production") {
|
||||
// enable app-wide rate-limiting + helmet security
|
||||
// in production
|
||||
@ -104,6 +132,7 @@ const main = async () => {
|
||||
app.use("/api/v1/workspace", eeWorkspaceRouter);
|
||||
app.use("/api/v1/action", eeActionRouter);
|
||||
app.use("/api/v1/organizations", eeOrganizationsRouter);
|
||||
app.use("/api/v1/sso", eeSSORouter);
|
||||
app.use("/api/v1/cloud-products", eeCloudProductsRouter);
|
||||
|
||||
// v1 routes (default)
|
||||
@ -124,6 +153,9 @@ const main = async () => {
|
||||
app.use("/api/v1/integration", v1IntegrationRouter);
|
||||
app.use("/api/v1/integration-auth", v1IntegrationAuthRouter);
|
||||
app.use("/api/v1/folders", v1SecretsFolder);
|
||||
app.use("/api/v1/secret-scanning", v1SecretScanningRouter);
|
||||
app.use("/api/v1/webhooks", v1WebhooksRouter);
|
||||
app.use("/api/v1/secret-imports", v1SecretImportRouter);
|
||||
|
||||
// v2 routes (improvements)
|
||||
app.use("/api/v2/signup", v2SignupRouter);
|
||||
@ -155,7 +187,7 @@ const main = async () => {
|
||||
if (res.headersSent) return next();
|
||||
next(
|
||||
RouteNotFoundError({
|
||||
message: `The requested source '(${req.method})${req.url}' was not found`,
|
||||
message: `The requested source '(${req.method})${req.url}' was not found`
|
||||
})
|
||||
);
|
||||
});
|
||||
@ -163,9 +195,7 @@ const main = async () => {
|
||||
app.use(requestErrorHandler);
|
||||
|
||||
const server = app.listen(await getPort(), async () => {
|
||||
(await getLogger("backend-main")).info(
|
||||
`Server started listening at port ${await getPort()}`
|
||||
);
|
||||
(await getLogger("backend-main")).info(`Server started listening at port ${await getPort()}`);
|
||||
});
|
||||
|
||||
// await createTestUserForDevelopment();
|
||||
|
@ -5,12 +5,16 @@ import {
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_BITBUCKET,
|
||||
INTEGRATION_BITBUCKET_API_URL,
|
||||
INTEGRATION_CHECKLY,
|
||||
INTEGRATION_CHECKLY_API_URL,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_CIRCLECI_API_URL,
|
||||
INTEGRATION_CLOUDFLARE_PAGES,
|
||||
INTEGRATION_CLOUDFLARE_PAGES_API_URL,
|
||||
INTEGRATION_CODEFRESH,
|
||||
INTEGRATION_CODEFRESH_API_URL,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_FLYIO_API_URL,
|
||||
INTEGRATION_GITHUB,
|
||||
@ -56,11 +60,13 @@ const getApps = async ({
|
||||
accessToken,
|
||||
accessId,
|
||||
teamId,
|
||||
workspaceSlug,
|
||||
}: {
|
||||
integrationAuth: IIntegrationAuth;
|
||||
accessToken: string;
|
||||
accessId?: string;
|
||||
teamId?: string;
|
||||
workspaceSlug?: string;
|
||||
}) => {
|
||||
let apps: App[] = [];
|
||||
switch (integrationAuth.integration) {
|
||||
@ -152,6 +158,17 @@ const getApps = async ({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_BITBUCKET:
|
||||
apps = await getAppsBitBucket({
|
||||
accessToken,
|
||||
workspaceSlug
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_CODEFRESH:
|
||||
apps = await getAppsCodefresh({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
return apps;
|
||||
@ -203,10 +220,10 @@ const getAppsVercel = async ({
|
||||
},
|
||||
...(integrationAuth?.teamId
|
||||
? {
|
||||
params: {
|
||||
teamId: integrationAuth.teamId,
|
||||
},
|
||||
}
|
||||
params: {
|
||||
teamId: integrationAuth.teamId,
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
})
|
||||
).data;
|
||||
@ -702,15 +719,76 @@ const getAppsCheckly = async ({ accessToken }: { accessToken: string }) => {
|
||||
* @returns {Object[]} apps - Cloudflare Pages projects
|
||||
* @returns {String} apps.name - name of Cloudflare Pages project
|
||||
*/
|
||||
const getAppsCloudflarePages = async ({
|
||||
accessToken,
|
||||
accountId
|
||||
const getAppsCloudflarePages = async ({
|
||||
accessToken,
|
||||
accountId
|
||||
}: {
|
||||
accessToken: string;
|
||||
accountId?: string;
|
||||
accessToken: string;
|
||||
accountId?: string;
|
||||
}) => {
|
||||
const { data } = await standardRequest.get(
|
||||
`${INTEGRATION_CLOUDFLARE_PAGES_API_URL}/client/v4/accounts/${accountId}/pages/projects`,
|
||||
const { data } = await standardRequest.get(
|
||||
`${INTEGRATION_CLOUDFLARE_PAGES_API_URL}/client/v4/accounts/${accountId}/pages/projects`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept": "application/json",
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
const apps = data.result.map((a: any) => {
|
||||
return {
|
||||
name: a.name,
|
||||
appId: a.id,
|
||||
};
|
||||
});
|
||||
return apps;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return list of repositories for the BitBucket integration based on provided BitBucket workspace
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.accessToken - access token for BitBucket API
|
||||
* @param {String} obj.workspaceSlug - Workspace identifier for fetching BitBucket repositories
|
||||
* @returns {Object[]} apps - BitBucket repositories
|
||||
* @returns {String} apps.name - name of BitBucket repository
|
||||
*/
|
||||
const getAppsBitBucket = async ({
|
||||
accessToken,
|
||||
workspaceSlug,
|
||||
}: {
|
||||
accessToken: string;
|
||||
workspaceSlug?: string;
|
||||
}) => {
|
||||
interface RepositoriesResponse {
|
||||
size: number;
|
||||
page: number;
|
||||
pageLen: number;
|
||||
next: string;
|
||||
previous: string;
|
||||
values: Array<Repository>;
|
||||
}
|
||||
|
||||
interface Repository {
|
||||
type: string;
|
||||
uuid: string;
|
||||
name: string;
|
||||
is_private: boolean;
|
||||
created_on: string;
|
||||
updated_on: string;
|
||||
}
|
||||
|
||||
if (!workspaceSlug) {
|
||||
return []
|
||||
}
|
||||
|
||||
const repositories: Repository[] = [];
|
||||
let hasNextPage = true;
|
||||
let repositoriesUrl = `${INTEGRATION_BITBUCKET_API_URL}/2.0/repositories/${workspaceSlug}`
|
||||
|
||||
while (hasNextPage) {
|
||||
const { data }: { data: RepositoriesResponse } = await standardRequest.get(
|
||||
repositoriesUrl,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
@ -719,13 +797,26 @@ const getAppsCloudflarePages = async ({
|
||||
}
|
||||
);
|
||||
|
||||
const apps = data.result.map((a: any) => {
|
||||
return {
|
||||
name: a.name,
|
||||
appId: a.id,
|
||||
};
|
||||
});
|
||||
return apps;
|
||||
if (data?.values.length > 0) {
|
||||
data.values.forEach((repository) => {
|
||||
repositories.push(repository)
|
||||
})
|
||||
}
|
||||
|
||||
if (data.next) {
|
||||
repositoriesUrl = data.next
|
||||
} else {
|
||||
hasNextPage = false
|
||||
}
|
||||
}
|
||||
|
||||
const apps = repositories.map((repository) => {
|
||||
return {
|
||||
name: repository.name,
|
||||
appId: repository.uuid,
|
||||
};
|
||||
});
|
||||
return apps;
|
||||
}
|
||||
/* Return list of projects for Northflank integration
|
||||
* @param {Object} obj
|
||||
@ -786,4 +877,34 @@ const getAppsNorthflank = async ({ accessToken }: { accessToken: string }) => {
|
||||
return apps;
|
||||
};
|
||||
|
||||
/**
|
||||
* Return list of projects for Supabase integration
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.accessToken - access token for Supabase API
|
||||
* @returns {Object[]} apps - names of Supabase apps
|
||||
* @returns {String} apps.name - name of Supabase app
|
||||
*/
|
||||
|
||||
const getAppsCodefresh = async ({
|
||||
accessToken,
|
||||
}: {
|
||||
accessToken: string;
|
||||
}) => {
|
||||
const res = (
|
||||
await standardRequest.get(`${INTEGRATION_CODEFRESH_API_URL}/projects`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept-Encoding": "application/json",
|
||||
},
|
||||
})
|
||||
).data;
|
||||
|
||||
const apps = res.projects.map((a: any) => ({
|
||||
name: a.projectName,
|
||||
appId: a.id,
|
||||
}));
|
||||
|
||||
return apps;
|
||||
|
||||
};
|
||||
export { getApps };
|
||||
|
@ -2,6 +2,8 @@ import { standardRequest } from "../config/request";
import {
  INTEGRATION_AZURE_KEY_VAULT,
  INTEGRATION_AZURE_TOKEN_URL,
  INTEGRATION_BITBUCKET,
  INTEGRATION_BITBUCKET_TOKEN_URL,
  INTEGRATION_GITHUB,
  INTEGRATION_GITHUB_TOKEN_URL,
  INTEGRATION_GITLAB,
@ -15,11 +17,13 @@ import {
} from "../variables";
import {
  getClientIdAzure,
  getClientIdBitBucket,
  getClientIdGitHub,
  getClientIdGitLab,
  getClientIdNetlify,
  getClientIdVercel,
  getClientSecretAzure,
  getClientSecretBitBucket,
  getClientSecretGitHub,
  getClientSecretGitLab,
  getClientSecretHeroku,
@ -78,6 +82,15 @@ interface ExchangeCodeGitlabResponse {
  created_at: number;
}

interface ExchangeCodeBitBucketResponse {
  access_token: string;
  token_type: string;
  expires_in: number;
  refresh_token: string;
  scopes: string;
  state: string;
}

/**
 * Return [accessToken], [accessExpiresAt], and [refreshToken] for OAuth2
 * code-token exchange for integration named [integration]
@ -129,6 +142,12 @@ const exchangeCode = async ({
      obj = await exchangeCodeGitlab({
        code,
      });
      break;
    case INTEGRATION_BITBUCKET:
      obj = await exchangeCodeBitBucket({
        code,
      });
      break;
  }

  return obj;
@ -347,4 +366,43 @@ const exchangeCodeGitlab = async ({ code }: { code: string }) => {
  };
};

/**
 * Return [accessToken], [accessExpiresAt], and [refreshToken] for BitBucket
 * code-token exchange
 * @param {Object} obj1
 * @param {String} obj1.code - code for code-token exchange
 * @returns {Object} obj2
 * @returns {String} obj2.accessToken - access token for BitBucket API
 * @returns {String} obj2.refreshToken - refresh token for BitBucket API
 * @returns {Date} obj2.accessExpiresAt - date of expiration for access token
 */
const exchangeCodeBitBucket = async ({ code }: { code: string }) => {
  const accessExpiresAt = new Date();
  const res: ExchangeCodeBitBucketResponse = (
    await standardRequest.post(
      INTEGRATION_BITBUCKET_TOKEN_URL,
      new URLSearchParams({
        grant_type: "authorization_code",
        code: code,
        client_id: await getClientIdBitBucket(),
        client_secret: await getClientSecretBitBucket(),
        redirect_uri: `${await getSiteURL()}/integrations/bitbucket/oauth2/callback`,
      } as any),
      {
        headers: {
          "Accept-Encoding": "application/json",
        },
      }
    )
  ).data;

  accessExpiresAt.setSeconds(accessExpiresAt.getSeconds() + res.expires_in);

  return {
    accessToken: res.access_token,
    refreshToken: res.refresh_token,
    accessExpiresAt,
  };
};

export { exchangeCode };
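For orientation, a minimal usage sketch of the new BitBucket branch of the code-token exchange above; the call site, variable names, and where the returned tokens end up are assumptions for illustration, not part of this diff.

// Illustrative only: exchangeCode is the function exported above; `code` is the
// authorization code handed back to the /integrations/bitbucket/oauth2/callback redirect.
const { accessToken, refreshToken, accessExpiresAt } = await exchangeCode({
  integration: INTEGRATION_BITBUCKET,
  code,
});
// The caller would persist refreshToken and accessExpiresAt so the refresh
// flow in the next file can rotate the access token before it expires.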
|
||||
|
@ -2,6 +2,8 @@ import { standardRequest } from "../config/request";
|
||||
import { IIntegrationAuth } from "../models";
|
||||
import {
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_BITBUCKET,
|
||||
INTEGRATION_BITBUCKET_TOKEN_URL,
|
||||
INTEGRATION_GITLAB,
|
||||
INTEGRATION_HEROKU,
|
||||
} from "../variables";
|
||||
@ -13,8 +15,10 @@ import {
|
||||
import { IntegrationService } from "../services";
|
||||
import {
|
||||
getClientIdAzure,
|
||||
getClientIdBitBucket,
|
||||
getClientIdGitLab,
|
||||
getClientSecretAzure,
|
||||
getClientSecretBitBucket,
|
||||
getClientSecretGitLab,
|
||||
getClientSecretHeroku,
|
||||
getSiteURL,
|
||||
@ -46,6 +50,15 @@ interface RefreshTokenGitLabResponse {
|
||||
created_at: number;
|
||||
}
|
||||
|
||||
interface RefreshTokenBitBucketResponse {
|
||||
access_token: string;
|
||||
token_type: string;
|
||||
expires_in: number;
|
||||
refresh_token: string;
|
||||
scopes: string;
|
||||
state: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return new access token by exchanging refresh token [refreshToken] for integration
|
||||
* named [integration]
|
||||
@ -83,6 +96,11 @@ const exchangeRefresh = async ({
|
||||
refreshToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_BITBUCKET:
|
||||
tokenDetails = await exchangeRefreshBitBucket({
|
||||
refreshToken,
|
||||
});
|
||||
break;
|
||||
default:
|
||||
throw new Error("Failed to exchange token for incompatible integration");
|
||||
}
|
||||
@ -218,4 +236,46 @@ const exchangeRefreshGitLab = async ({
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Return new access token by exchanging refresh token [refreshToken] for the
|
||||
* BitBucket integration
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.refreshToken - refresh token to use to get new access token for BitBucket
|
||||
* @returns
|
||||
*/
|
||||
const exchangeRefreshBitBucket = async ({
|
||||
refreshToken,
|
||||
}: {
|
||||
refreshToken: string;
|
||||
}) => {
|
||||
const accessExpiresAt = new Date();
|
||||
const {
|
||||
data,
|
||||
}: {
|
||||
data: RefreshTokenBitBucketResponse;
|
||||
} = await standardRequest.post(
|
||||
INTEGRATION_BITBUCKET_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: "refresh_token",
|
||||
refresh_token: refreshToken,
|
||||
client_id: await getClientIdBitBucket(),
|
||||
client_secret: await getClientSecretBitBucket(),
|
||||
redirect_uri: `${await getSiteURL()}/integrations/bitbucket/oauth2/callback`,
|
||||
} as any),
|
||||
{
|
||||
headers: {
|
||||
"Accept-Encoding": "application/json",
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
accessExpiresAt.setSeconds(accessExpiresAt.getSeconds() + data.expires_in);
|
||||
|
||||
return {
|
||||
accessToken: data.access_token,
|
||||
refreshToken: data.refresh_token,
|
||||
accessExpiresAt,
|
||||
};
|
||||
};
|
||||
|
||||
export { exchangeRefresh };
|
||||
|
@ -18,7 +18,6 @@ const revokeAccess = async ({
  integrationAuth: IIntegrationAuth;
  accessToken: string;
}) => {
  let deletedIntegrationAuth;
  // add any integration-specific revocation logic
  switch (integrationAuth.integration) {
    case INTEGRATION_HEROKU:
@ -33,7 +32,7 @@ const revokeAccess = async ({
      break;
  }

  deletedIntegrationAuth = await IntegrationAuth.findOneAndDelete({
  const deletedIntegrationAuth = await IntegrationAuth.findOneAndDelete({
    _id: integrationAuth._id,
  });
|
||||
|
||||
|
@ -1,7 +1,7 @@
import _ from "lodash";
import AWS from "aws-sdk";
import {
  CreateSecretCommand,
import {
  CreateSecretCommand,
  GetSecretValueCommand,
  ResourceNotFoundException,
  SecretsManagerClient,
|
||||
@ -14,12 +14,16 @@ import {
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_BITBUCKET,
|
||||
INTEGRATION_BITBUCKET_API_URL,
|
||||
INTEGRATION_CHECKLY,
|
||||
INTEGRATION_CHECKLY_API_URL,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_CIRCLECI_API_URL,
|
||||
INTEGRATION_CLOUDFLARE_PAGES,
|
||||
INTEGRATION_CLOUDFLARE_PAGES_API_URL,
|
||||
INTEGRATION_CODEFRESH,
|
||||
INTEGRATION_CODEFRESH_API_URL,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_FLYIO_API_URL,
|
||||
INTEGRATION_GITHUB,
|
||||
@ -43,9 +47,9 @@ import {
|
||||
INTEGRATION_TRAVISCI,
|
||||
INTEGRATION_TRAVISCI_API_URL,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_VERCEL_API_URL,
|
||||
INTEGRATION_VERCEL_API_URL
|
||||
} from "../variables";
|
||||
import { standardRequest} from "../config/request";
|
||||
import { standardRequest } from "../config/request";
|
||||
|
||||
/**
|
||||
* Sync/push [secrets] to [app] in integration named [integration]
|
||||
@ -175,11 +179,11 @@ const syncSecrets = async ({
|
||||
break;
|
||||
case INTEGRATION_SUPABASE:
|
||||
await syncSecretsSupabase({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
integration,
|
||||
secrets,
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_CHECKLY:
|
||||
await syncSecretsCheckly({
|
||||
integration,
|
||||
@ -198,10 +202,24 @@ const syncSecrets = async ({
|
||||
break;
|
||||
case INTEGRATION_CLOUDFLARE_PAGES:
|
||||
await syncSecretsCloudflarePages({
|
||||
integration,
|
||||
secrets,
|
||||
accessId,
|
||||
accessToken
|
||||
integration,
|
||||
secrets,
|
||||
accessId,
|
||||
accessToken
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_CODEFRESH:
|
||||
await syncSecretsCodefresh({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_BITBUCKET:
|
||||
await syncSecretsBitBucket({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_NORTHFLANK:
|
||||
@ -211,7 +229,7 @@ const syncSecrets = async ({
|
||||
accessToken
|
||||
});
|
||||
break;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
@ -240,11 +258,11 @@ const syncSecretsAzureKeyVault = async ({
|
||||
recoverableDays: number;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
interface AzureKeyVaultSecret extends GetAzureKeyVaultSecret {
|
||||
key: string;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Return all secrets from Azure Key Vault by paginating through URL [url]
|
||||
* @param {String} url - pagination URL to get next set of secrets from Azure Key Vault
|
||||
@ -258,23 +276,23 @@ const syncSecretsAzureKeyVault = async ({
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
result = result.concat(res.data.value);
|
||||
|
||||
|
||||
url = res.data.nextLink;
|
||||
}
|
||||
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
const getAzureKeyVaultSecrets = await paginateAzureKeyVaultSecrets(`${integration.app}/secrets?api-version=7.3`);
|
||||
|
||||
|
||||
let lastSlashIndex: number;
|
||||
const res = (await Promise.all(getAzureKeyVaultSecrets.map(async (getAzureKeyVaultSecret) => {
|
||||
if (!lastSlashIndex) {
|
||||
lastSlashIndex = getAzureKeyVaultSecret.id.lastIndexOf("/");
|
||||
}
|
||||
|
||||
|
||||
const azureKeyVaultSecret = await standardRequest.get(`${getAzureKeyVaultSecret.id}?api-version=7.3`, {
|
||||
headers: {
|
||||
"Authorization": `Bearer ${accessToken}`,
|
||||
@ -286,11 +304,11 @@ const syncSecretsAzureKeyVault = async ({
|
||||
key: getAzureKeyVaultSecret.id.substring(lastSlashIndex + 1),
|
||||
});
|
||||
})))
|
||||
.reduce((obj: any, secret: any) => ({
|
||||
.reduce((obj: any, secret: any) => ({
|
||||
...obj,
|
||||
[secret.key]: secret,
|
||||
}), {});
|
||||
|
||||
}), {});
|
||||
|
||||
const setSecrets: {
|
||||
key: string;
|
||||
value: string;
|
||||
@ -314,9 +332,9 @@ const syncSecretsAzureKeyVault = async ({
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
const deleteSecrets: AzureKeyVaultSecret[] = [];
|
||||
|
||||
|
||||
Object.keys(res).forEach((key) => {
|
||||
const underscoredKey = key.replace(/-/g, "_");
|
||||
if (!(underscoredKey in secrets)) {
|
||||
@ -337,7 +355,7 @@ const syncSecretsAzureKeyVault = async ({
|
||||
}) => {
|
||||
let isSecretSet = false;
|
||||
let maxTries = 6;
|
||||
|
||||
|
||||
while (!isSecretSet && maxTries > 0) {
|
||||
// try to set secret
|
||||
try {
|
||||
@ -354,7 +372,7 @@ const syncSecretsAzureKeyVault = async ({
|
||||
);
|
||||
|
||||
isSecretSet = true;
|
||||
|
||||
|
||||
} catch (err) {
|
||||
const error: any = err;
|
||||
if (error?.response?.data?.error?.innererror?.code === "ObjectIsDeletedButRecoverable") {
|
||||
@ -374,7 +392,7 @@ const syncSecretsAzureKeyVault = async ({
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Sync/push set secrets
|
||||
for await (const setSecret of setSecrets) {
|
||||
const { key, value } = setSecret;
|
||||
@ -385,7 +403,7 @@ const syncSecretsAzureKeyVault = async ({
|
||||
accessToken,
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
for await (const deleteSecret of deleteSecrets) {
|
||||
const { key } = deleteSecret;
|
||||
await standardRequest.delete(`${integration.app}/secrets/${key}?api-version=7.3`, {
|
||||
@ -427,7 +445,7 @@ const syncSecretsAWSParameterStore = async ({
|
||||
apiVersion: "2014-11-06",
|
||||
region: integration.region,
|
||||
});
|
||||
|
||||
|
||||
const params = {
|
||||
Path: integration.path,
|
||||
Recursive: true,
|
||||
@ -435,61 +453,61 @@ const syncSecretsAWSParameterStore = async ({
|
||||
};
|
||||
|
||||
const parameterList = (await ssm.getParametersByPath(params).promise()).Parameters
|
||||
|
||||
|
||||
let awsParameterStoreSecretsObj: {
|
||||
[key: string]: any // TODO: fix type
|
||||
} = {};
|
||||
|
||||
if (parameterList) {
|
||||
awsParameterStoreSecretsObj = parameterList.reduce((obj: any, secret: any) => ({
|
||||
...obj,
|
||||
[secret.Name.split("/").pop()]: secret,
|
||||
...obj,
|
||||
[secret.Name.split("/").pop()]: secret,
|
||||
}), {});
|
||||
}
|
||||
|
||||
// Identify secrets to create
|
||||
Object.keys(secrets).map(async (key) => {
|
||||
if (!(key in awsParameterStoreSecretsObj)) {
|
||||
// case: secret does not exist in AWS parameter store
|
||||
// -> create secret
|
||||
if (!(key in awsParameterStoreSecretsObj)) {
|
||||
// case: secret does not exist in AWS parameter store
|
||||
// -> create secret
|
||||
await ssm.putParameter({
|
||||
Name: `${integration.path}${key}`,
|
||||
Type: "SecureString",
|
||||
Value: secrets[key],
|
||||
Overwrite: true,
|
||||
}).promise();
|
||||
} else {
|
||||
// case: secret exists in AWS parameter store
|
||||
|
||||
if (awsParameterStoreSecretsObj[key].Value !== secrets[key]) {
|
||||
// case: secret value doesn't match one in AWS parameter store
|
||||
// -> update secret
|
||||
await ssm.putParameter({
|
||||
Name: `${integration.path}${key}`,
|
||||
Type: "SecureString",
|
||||
Value: secrets[key],
|
||||
Overwrite: true,
|
||||
}).promise();
|
||||
} else {
|
||||
// case: secret exists in AWS parameter store
|
||||
|
||||
if (awsParameterStoreSecretsObj[key].Value !== secrets[key]) {
|
||||
// case: secret value doesn't match one in AWS parameter store
|
||||
// -> update secret
|
||||
await ssm.putParameter({
|
||||
Name: `${integration.path}${key}`,
|
||||
Type: "SecureString",
|
||||
Value: secrets[key],
|
||||
Overwrite: true,
|
||||
}).promise();
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Identify secrets to delete
|
||||
Object.keys(awsParameterStoreSecretsObj).map(async (key) => {
|
||||
if (!(key in secrets)) {
|
||||
// case:
|
||||
// -> delete secret
|
||||
await ssm.deleteParameter({
|
||||
Name: awsParameterStoreSecretsObj[key].Name,
|
||||
}).promise();
|
||||
}
|
||||
if (!(key in secrets)) {
|
||||
// case:
|
||||
// -> delete secret
|
||||
await ssm.deleteParameter({
|
||||
Name: awsParameterStoreSecretsObj[key].Name,
|
||||
}).promise();
|
||||
}
|
||||
});
|
||||
|
||||
AWS.config.update({
|
||||
region: undefined,
|
||||
accessKeyId: undefined,
|
||||
secretAccessKey: undefined,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
@ -520,7 +538,7 @@ const syncSecretsAWSSecretManager = async ({
|
||||
accessKeyId: accessId,
|
||||
secretAccessKey: accessToken,
|
||||
});
|
||||
|
||||
|
||||
secretsManager = new SecretsManagerClient({
|
||||
region: integration.region,
|
||||
credentials: {
|
||||
@ -534,13 +552,13 @@ const syncSecretsAWSSecretManager = async ({
|
||||
SecretId: integration.app,
|
||||
})
|
||||
);
|
||||
|
||||
|
||||
let awsSecretManagerSecretObj: { [key: string]: any } = {};
|
||||
|
||||
|
||||
if (awsSecretManagerSecret?.SecretString) {
|
||||
awsSecretManagerSecretObj = JSON.parse(awsSecretManagerSecret.SecretString);
|
||||
}
|
||||
|
||||
|
||||
if (!_.isEqual(awsSecretManagerSecretObj, secrets)) {
|
||||
await secretsManager.send(new UpdateSecretCommand({
|
||||
SecretId: integration.app,
|
||||
@ -552,19 +570,19 @@ const syncSecretsAWSSecretManager = async ({
|
||||
region: undefined,
|
||||
accessKeyId: undefined,
|
||||
secretAccessKey: undefined,
|
||||
});
|
||||
});
|
||||
} catch (err) {
|
||||
if (err instanceof ResourceNotFoundException && secretsManager) {
|
||||
await secretsManager.send(new CreateSecretCommand({
|
||||
Name: integration.app,
|
||||
SecretString: JSON.stringify(secrets),
|
||||
}));
|
||||
}
|
||||
}
|
||||
AWS.config.update({
|
||||
region: undefined,
|
||||
accessKeyId: undefined,
|
||||
secretAccessKey: undefined,
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@ -647,38 +665,36 @@ const syncSecretsVercel = async ({
|
||||
decrypt: "true",
|
||||
...(integrationAuth?.teamId
|
||||
? {
|
||||
teamId: integrationAuth.teamId,
|
||||
}
|
||||
teamId: integrationAuth.teamId,
|
||||
}
|
||||
: {}),
|
||||
};
|
||||
|
||||
|
||||
const vercelSecrets: VercelSecret[] = (await standardRequest.get(
|
||||
`${INTEGRATION_VERCEL_API_URL}/v9/projects/${integration.app}/env`,
|
||||
{
|
||||
params,
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept-Encoding": "application/json",
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept-Encoding": "application/json",
|
||||
},
|
||||
}
|
||||
))
|
||||
.data
|
||||
.envs
|
||||
.filter((secret: VercelSecret) => {
|
||||
if (!secret.target.includes(integration.targetEnvironment)) {
|
||||
// case: secret does not have the same target environment
|
||||
return false;
|
||||
}
|
||||
.data
|
||||
.envs
|
||||
.filter((secret: VercelSecret) => {
|
||||
if (!secret.target.includes(integration.targetEnvironment)) {
|
||||
// case: secret does not have the same target environment
|
||||
return false;
|
||||
}
|
||||
|
||||
if (integration.targetEnvironment === "preview" && integration.path && integration.path !== secret.gitBranch) {
|
||||
// case: secret on preview environment does not have same target git branch
|
||||
return false;
|
||||
}
|
||||
if (integration.targetEnvironment === "preview" && integration.path && integration.path !== secret.gitBranch) {
|
||||
// case: secret on preview environment does not have same target git branch
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
});
|
||||
|
||||
// return secret.target.includes(integration.targetEnvironment);
|
||||
return true;
|
||||
});
|
||||
|
||||
const res: { [key: string]: VercelSecret } = {};
|
||||
|
||||
@ -686,14 +702,14 @@ const syncSecretsVercel = async ({
|
||||
if (vercelSecret.type === "encrypted") {
|
||||
// case: secret is encrypted -> need to decrypt
|
||||
const decryptedSecret = (await standardRequest.get(
|
||||
`${INTEGRATION_VERCEL_API_URL}/v9/projects/${integration.app}/env/${vercelSecret.id}`,
|
||||
{
|
||||
params,
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept-Encoding": "application/json",
|
||||
},
|
||||
}
|
||||
`${INTEGRATION_VERCEL_API_URL}/v9/projects/${integration.app}/env/${vercelSecret.id}`,
|
||||
{
|
||||
params,
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept-Encoding": "application/json",
|
||||
},
|
||||
}
|
||||
)).data;
|
||||
|
||||
res[vercelSecret.key] = decryptedSecret;
|
||||
@ -701,7 +717,7 @@ const syncSecretsVercel = async ({
|
||||
res[vercelSecret.key] = vercelSecret;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
const updateSecrets: VercelSecret[] = [];
|
||||
const deleteSecrets: VercelSecret[] = [];
|
||||
const newSecrets: VercelSecret[] = [];
|
||||
@ -732,9 +748,9 @@ const syncSecretsVercel = async ({
|
||||
key: key,
|
||||
value: secrets[key],
|
||||
type: res[key].type,
|
||||
target: res[key].target.includes(integration.targetEnvironment)
|
||||
? [...res[key].target]
|
||||
: [...res[key].target, integration.targetEnvironment],
|
||||
target: res[key].target.includes(integration.targetEnvironment)
|
||||
? [...res[key].target]
|
||||
: [...res[key].target, integration.targetEnvironment],
|
||||
...(integration.path ? {
|
||||
gitBranch: integration.path,
|
||||
} : {}),
|
||||
@ -784,7 +800,7 @@ const syncSecretsVercel = async ({
|
||||
},
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for await (const secret of deleteSecrets) {
|
||||
@ -797,7 +813,7 @@ const syncSecretsVercel = async ({
|
||||
"Accept-Encoding": "application/json",
|
||||
},
|
||||
}
|
||||
);
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
@ -1384,7 +1400,7 @@ const syncSecretsCircleCI = async ({
|
||||
integration: IIntegration;
|
||||
secrets: any;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
}) => {
|
||||
const circleciOrganizationDetail = (
|
||||
await standardRequest.get(`${INTEGRATION_CIRCLECI_API_URL}/v2/me/collaborations`, {
|
||||
headers: {
|
||||
@ -1471,13 +1487,13 @@ const syncSecretsTravisCI = async ({
|
||||
}
|
||||
)
|
||||
)
|
||||
.data
|
||||
?.env_vars
|
||||
.reduce((obj: any, secret: any) => ({
|
||||
.data
|
||||
?.env_vars
|
||||
.reduce((obj: any, secret: any) => ({
|
||||
...obj,
|
||||
[secret.name]: secret,
|
||||
}), {});
|
||||
|
||||
}), {});
|
||||
|
||||
// add secrets
|
||||
for await (const key of Object.keys(secrets)) {
|
||||
if (!(key in getSecretsRes)) {
|
||||
@ -1522,7 +1538,7 @@ const syncSecretsTravisCI = async ({
|
||||
}
|
||||
|
||||
for await (const key of Object.keys(getSecretsRes)) {
|
||||
if (!(key in secrets)){
|
||||
if (!(key in secrets)) {
|
||||
// delete secret
|
||||
await standardRequest.delete(
|
||||
`${INTEGRATION_TRAVISCI_API_URL}/settings/env_vars/${getSecretsRes[key].id}?repository_id=${getSecretsRes[key].repository_id}`,
|
||||
@ -1567,29 +1583,29 @@ const syncSecretsGitLab = async ({
|
||||
"Authorization": `Bearer ${accessToken}`,
|
||||
"Accept-Encoding": "application/json",
|
||||
};
|
||||
|
||||
|
||||
let allEnvVariables: GitLabSecret[] = [];
|
||||
let url: string | null = `${gitLabApiUrl}?per_page=100`;
|
||||
|
||||
|
||||
while (url) {
|
||||
const response: any = await standardRequest.get(url, { headers });
|
||||
allEnvVariables = [...allEnvVariables, ...response.data];
|
||||
|
||||
|
||||
const linkHeader = response.headers.link;
|
||||
const nextLink = linkHeader?.split(",").find((part: string) => part.includes('rel="next"'));
|
||||
|
||||
|
||||
if (nextLink) {
|
||||
url = nextLink.trim().split(";")[0].slice(1, -1);
|
||||
} else {
|
||||
url = null;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return allEnvVariables;
|
||||
};
|
||||
|
||||
const allEnvVariables = await getAllEnvVariables(integration?.appId, accessToken);
|
||||
const getSecretsRes: GitLabSecret[] = allEnvVariables.filter((secret: GitLabSecret) =>
|
||||
const getSecretsRes: GitLabSecret[] = allEnvVariables.filter((secret: GitLabSecret) =>
|
||||
secret.environment_scope === integration.targetEnvironment
|
||||
);
|
||||
|
||||
@ -1671,8 +1687,8 @@ const syncSecretsSupabase = async ({
|
||||
`${INTEGRATION_SUPABASE_API_URL}/v1/projects/${integration.appId}/secrets`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept-Encoding": "application/json",
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept-Encoding": "application/json",
|
||||
},
|
||||
}
|
||||
);
|
||||
@ -1681,8 +1697,8 @@ const syncSecretsSupabase = async ({
|
||||
const modifiedFormatForSecretInjection = Object.keys(secrets).map(
|
||||
(key) => {
|
||||
return {
|
||||
name: key,
|
||||
value: secrets[key],
|
||||
name: key,
|
||||
value: secrets[key],
|
||||
};
|
||||
}
|
||||
);
|
||||
@ -1692,8 +1708,8 @@ const syncSecretsSupabase = async ({
|
||||
modifiedFormatForSecretInjection,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept-Encoding": "application/json",
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept-Encoding": "application/json",
|
||||
},
|
||||
}
|
||||
);
|
||||
@ -1701,7 +1717,7 @@ const syncSecretsSupabase = async ({
|
||||
const secretsToDelete: any = [];
|
||||
getSecretsRes?.forEach((secretObj: any) => {
|
||||
if (!(secretObj.name in secrets)) {
|
||||
secretsToDelete.push(secretObj.name);
|
||||
secretsToDelete.push(secretObj.name);
|
||||
}
|
||||
});
|
||||
|
||||
@ -1748,18 +1764,18 @@ const syncSecretsCheckly = async ({
|
||||
}
|
||||
)
|
||||
)
|
||||
.data
|
||||
.reduce((obj: any, secret: any) => ({
|
||||
.data
|
||||
.reduce((obj: any, secret: any) => ({
|
||||
...obj,
|
||||
[secret.key]: secret.value,
|
||||
}), {});
|
||||
|
||||
}), {});
|
||||
|
||||
// add secrets
|
||||
for await (const key of Object.keys(secrets)) {
|
||||
if (!(key in getSecretsRes)) {
|
||||
// case: secret does not exist in checkly
|
||||
// -> add secret
|
||||
|
||||
|
||||
await standardRequest.post(
|
||||
`${INTEGRATION_CHECKLY_API_URL}/v1/variables`,
|
||||
{
|
||||
@ -1778,7 +1794,7 @@ const syncSecretsCheckly = async ({
|
||||
} else {
|
||||
// case: secret exists in checkly
|
||||
// -> update/set secret
|
||||
|
||||
|
||||
if (secrets[key] !== getSecretsRes[key]) {
|
||||
await standardRequest.put(
|
||||
`${INTEGRATION_CHECKLY_API_URL}/v1/variables/${key}`,
|
||||
@ -1799,7 +1815,7 @@ const syncSecretsCheckly = async ({
|
||||
}
|
||||
|
||||
for await (const key of Object.keys(getSecretsRes)) {
|
||||
if (!(key in secrets)){
|
||||
if (!(key in secrets)) {
|
||||
// delete secret
|
||||
await standardRequest.delete(
|
||||
`${INTEGRATION_CHECKLY_API_URL}/v1/variables/${key}`,
|
||||
@ -1836,13 +1852,13 @@ const syncSecretsHashiCorpVault = async ({
|
||||
accessToken: string;
|
||||
}) => {
|
||||
if (!accessId) return;
|
||||
|
||||
|
||||
interface LoginAppRoleRes {
|
||||
auth: {
|
||||
client_token: string;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// get Vault client token (could be optimized)
|
||||
const { data }: { data: LoginAppRoleRes } = await standardRequest.post(
|
||||
`${integrationAuth.url}/v1/auth/approle/login`,
|
||||
@ -1856,7 +1872,7 @@ const syncSecretsHashiCorpVault = async ({
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
|
||||
const clientToken = data.auth.client_token;
|
||||
|
||||
await standardRequest.post(
|
||||
@ -1884,46 +1900,46 @@ const syncSecretsHashiCorpVault = async ({
|
||||
* @param {String} obj.accessToken - API token for Cloudflare
|
||||
*/
|
||||
const syncSecretsCloudflarePages = async ({
|
||||
integration,
|
||||
secrets,
|
||||
accessId,
|
||||
accessToken,
|
||||
integration,
|
||||
secrets,
|
||||
accessId,
|
||||
accessToken,
|
||||
}: {
|
||||
integration: IIntegration;
|
||||
secrets: any;
|
||||
accessId: string | null;
|
||||
accessToken: string;
|
||||
integration: IIntegration;
|
||||
secrets: any;
|
||||
accessId: string | null;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
|
||||
// get secrets from cloudflare pages
|
||||
const getSecretsRes = (
|
||||
await standardRequest.get(
|
||||
`${INTEGRATION_CLOUDFLARE_PAGES_API_URL}/client/v4/accounts/${accessId}/pages/projects/${integration.app}`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept": "application/json",
|
||||
},
|
||||
}
|
||||
)
|
||||
await standardRequest.get(
|
||||
`${INTEGRATION_CLOUDFLARE_PAGES_API_URL}/client/v4/accounts/${accessId}/pages/projects/${integration.app}`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept": "application/json",
|
||||
},
|
||||
}
|
||||
)
|
||||
)
|
||||
.data.result["deployment_configs"][integration.targetEnvironment]["env_vars"];
|
||||
|
||||
// copy the secrets object, so we can set deleted keys to null
|
||||
const secretsObj: any = {...secrets};
|
||||
const secretsObj: any = { ...secrets };
|
||||
|
||||
for (const [key, val] of Object.entries(secretsObj)) {
|
||||
secretsObj[key] = { type: "secret_text", value: val };
|
||||
secretsObj[key] = { type: "secret_text", value: val };
|
||||
}
|
||||
|
||||
if (getSecretsRes) {
|
||||
for await (const key of Object.keys(getSecretsRes)) {
|
||||
if (!(key in secrets)) {
|
||||
// case: secret does not exist in infisical
|
||||
// -> delete secret from cloudflare pages
|
||||
secretsObj[key] = null;
|
||||
}
|
||||
for await (const key of Object.keys(getSecretsRes)) {
|
||||
if (!(key in secrets)) {
|
||||
// case: secret does not exist in infisical
|
||||
// -> delete secret from cloudflare pages
|
||||
secretsObj[key] = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const data = {
|
||||
@ -1935,16 +1951,164 @@ const syncSecretsCloudflarePages = async ({
|
||||
};
|
||||
|
||||
await standardRequest.patch(
|
||||
`${INTEGRATION_CLOUDFLARE_PAGES_API_URL}/client/v4/accounts/${accessId}/pages/projects/${integration.app}`,
|
||||
data,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept": "application/json",
|
||||
},
|
||||
}
|
||||
`${INTEGRATION_CLOUDFLARE_PAGES_API_URL}/client/v4/accounts/${accessId}/pages/projects/${integration.app}`,
|
||||
data,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept": "application/json",
|
||||
},
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Sync/push [secrets] to BitBucket repo with name [integration.app]
 * @param {Object} obj
 * @param {IIntegration} obj.integration - integration details
 * @param {IIntegrationAuth} obj.integrationAuth - integration auth details
 * @param {Object} obj.secrets - secrets to push to integration (object where keys are secret keys and values are secret values)
 * @param {String} obj.accessToken - access token for BitBucket integration
 */
const syncSecretsBitBucket = async ({
  integration,
  secrets,
  accessToken,
}: {
  integration: IIntegration;
  secrets: any;
  accessToken: string;
}) => {
  interface VariablesResponse {
    size: number;
    page: number;
    pageLen: number;
    next: string;
    previous: string;
    values: Array<BitbucketVariable>;
  }

  interface BitbucketVariable {
    type: string;
    uuid: string;
    key: string;
    value: string;
    secured: boolean;
  }

  const res: { [key: string]: BitbucketVariable } = {};

  let hasNextPage = true;
  let variablesUrl = `${INTEGRATION_BITBUCKET_API_URL}/2.0/repositories/${integration.targetEnvironmentId}/${integration.appId}/pipelines_config/variables`

  while (hasNextPage) {
    const { data }: { data: VariablesResponse } = await standardRequest.get(
      variablesUrl,
      {
        headers: {
          Authorization: `Bearer ${accessToken}`,
          "Accept": "application/json",
        },
      }
    );

    if (data?.values.length > 0) {
      data.values.forEach((variable) => {
        res[variable.key] = variable;
      });
    }

    if (data.next) {
      variablesUrl = data.next
    } else {
      hasNextPage = false
    }
  }

  for await (const key of Object.keys(secrets)) {
    if (key in res) {
      // update existing secret
      await standardRequest.put(
        `${variablesUrl}/${res[key].uuid}`,
        {
          key,
          value: secrets[key],
          secured: true
        },
        {
          headers: {
            Authorization: `Bearer ${accessToken}`,
            "Accept": "application/json",
          },
        }
      );
    } else {
      // create new secret
      await standardRequest.post(
        variablesUrl,
        {
          key,
          value: secrets[key],
          secured: true
        },
        {
          headers: {
            Authorization: `Bearer ${accessToken}`,
            "Accept": "application/json",
          },
        }
      );
    }
  }

  for await (const key of Object.keys(res)) {
    if (!(key in secrets)) {
      // delete secret
      await standardRequest.delete(
        `${variablesUrl}/${res[key].uuid}`,
        {
          headers: {
            Authorization: `Bearer ${accessToken}`,
            "Accept": "application/json",
          }
        }
      );
    }
  }
}
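To make the pagination loop above easier to follow, here is an illustrative page of results shaped like the VariablesResponse and BitbucketVariable interfaces declared inside syncSecretsBitBucket; every value below is made up for the example.

// Made-up sample page; the sync loop follows `next` until the response has no next link.
const examplePage = {
  size: 12,
  page: 1,
  pageLen: 10,
  next: "https://api.bitbucket.org/2.0/repositories/<workspace>/<repo>/pipelines_config/variables?page=2",
  previous: "",
  values: [
    { type: "pipeline_variable", uuid: "{1a2b3c4d}", key: "API_KEY", value: "", secured: true },
  ],
};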
|
||||
|
||||
/*
|
||||
* Sync/push [secrets] to Codefresh with name [integration.app]
|
||||
* @param {Object} obj
|
||||
* @param {IIntegration} obj.integration - integration details
|
||||
* @param {Object} obj.secrets - secrets to push to integration (object where keys are secret keys and values are secret values)
|
||||
* @param {String} obj.accessToken - access token for Codefresh integration
|
||||
*/
|
||||
const syncSecretsCodefresh = async ({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken,
|
||||
}: {
|
||||
integration: IIntegration;
|
||||
secrets: any;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
await standardRequest.patch(
|
||||
`${INTEGRATION_CODEFRESH_API_URL}/projects/${integration.appId}`,
|
||||
{
|
||||
variables: Object.keys(secrets).map((key) => ({
|
||||
key,
|
||||
value: secrets[key]
|
||||
}))
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept": "application/json",
|
||||
},
|
||||
}
|
||||
);
|
||||
};
|
||||
/* Sync/push [secrets] to Northflank
|
||||
* @param {Object} obj
|
||||
* @param {IIntegration} obj.integration - integration details
|
||||
|
@ -1,4 +1,5 @@
|
||||
import jwt from "jsonwebtoken";
|
||||
import { Types } from "mongoose";
|
||||
import { NextFunction, Request, Response } from "express";
|
||||
import {
|
||||
getAuthAPIKeyPayload,
|
||||
@ -51,6 +52,10 @@ const requireAuth = ({
|
||||
});
|
||||
|
||||
let authPayload: IUser | IServiceAccount | IServiceTokenData;
|
||||
let authUserPayload: {
|
||||
user: IUser;
|
||||
tokenVersionId: Types.ObjectId;
|
||||
};
|
||||
switch (authMode) {
|
||||
case AUTH_MODE_SERVICE_ACCOUNT:
|
||||
authPayload = await getAuthSAAKPayload({
|
||||
@ -71,12 +76,12 @@ const requireAuth = ({
|
||||
req.user = authPayload;
|
||||
break;
|
||||
default:
|
||||
const { user, tokenVersionId } = await getAuthUserPayload({
|
||||
authUserPayload = await getAuthUserPayload({
|
||||
authTokenValue,
|
||||
});
|
||||
authPayload = user;
|
||||
req.user = user;
|
||||
req.tokenVersionId = tokenVersionId;
|
||||
authPayload = authUserPayload.user;
|
||||
req.user = authUserPayload.user;
|
||||
req.tokenVersionId = authUserPayload.tokenVersionId;
|
||||
break;
|
||||
}
|
||||
|
||||
|
98
backend/src/models/botOrg.ts
Normal file
98
backend/src/models/botOrg.ts
Normal file
@ -0,0 +1,98 @@
|
||||
import { Schema, Types, model } from "mongoose";
|
||||
import {
|
||||
ALGORITHM_AES_256_GCM,
|
||||
ENCODING_SCHEME_BASE64,
|
||||
ENCODING_SCHEME_UTF8,
|
||||
} from "../variables";
|
||||
|
||||
export interface IBotOrg {
|
||||
_id: Types.ObjectId;
|
||||
name: string;
|
||||
organization: Types.ObjectId;
|
||||
publicKey: string;
|
||||
encryptedSymmetricKey: string;
|
||||
symmetricKeyIV: string;
|
||||
symmetricKeyTag: string;
|
||||
symmetricKeyAlgorithm: "aes-256-gcm";
|
||||
symmetricKeyKeyEncoding: "base64" | "utf8";
|
||||
encryptedPrivateKey: string;
|
||||
privateKeyIV: string;
|
||||
privateKeyTag: string;
|
||||
privateKeyAlgorithm: "aes-256-gcm";
|
||||
privateKeyKeyEncoding: "base64" | "utf8";
|
||||
}
|
||||
|
||||
const botOrgSchema = new Schema<IBotOrg>(
|
||||
{
|
||||
name: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
organization: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: "Organization",
|
||||
required: true,
|
||||
},
|
||||
publicKey: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
encryptedSymmetricKey: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
symmetricKeyIV: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
symmetricKeyTag: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
symmetricKeyAlgorithm: {
|
||||
type: String,
|
||||
enum: [ALGORITHM_AES_256_GCM],
|
||||
required: true
|
||||
},
|
||||
symmetricKeyKeyEncoding: {
|
||||
type: String,
|
||||
enum: [
|
||||
ENCODING_SCHEME_UTF8,
|
||||
ENCODING_SCHEME_BASE64,
|
||||
],
|
||||
required: true
|
||||
},
|
||||
encryptedPrivateKey: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
privateKeyIV: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
privateKeyTag: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
privateKeyAlgorithm: {
|
||||
type: String,
|
||||
enum: [ALGORITHM_AES_256_GCM],
|
||||
required: true
|
||||
},
|
||||
privateKeyKeyEncoding: {
|
||||
type: String,
|
||||
enum: [
|
||||
ENCODING_SCHEME_UTF8,
|
||||
ENCODING_SCHEME_BASE64,
|
||||
],
|
||||
required: true
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
}
|
||||
);
|
||||
|
||||
const BotOrg = model<IBotOrg>("BotOrg", botOrgSchema);
|
||||
|
||||
export default BotOrg;
|
34
backend/src/models/gitAppInstallationSession.ts
Normal file
34
backend/src/models/gitAppInstallationSession.ts
Normal file
@ -0,0 +1,34 @@
|
||||
import { Schema, Types, model } from "mongoose";
|
||||
|
||||
type GitAppInstallationSession = {
|
||||
id: string;
|
||||
sessionId: string;
|
||||
organization: Types.ObjectId;
|
||||
user: Types.ObjectId;
|
||||
}
|
||||
|
||||
const gitAppInstallationSession = new Schema<GitAppInstallationSession>({
|
||||
id: {
|
||||
required: true,
|
||||
type: String,
|
||||
},
|
||||
sessionId: {
|
||||
type: String,
|
||||
required: true,
|
||||
unique: true
|
||||
},
|
||||
organization: {
|
||||
type: Schema.Types.ObjectId,
|
||||
required: true,
|
||||
unique: true
|
||||
},
|
||||
user: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: "User"
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
const GitAppInstallationSession = model<GitAppInstallationSession>("git_app_installation_session", gitAppInstallationSession);
|
||||
|
||||
export default GitAppInstallationSession;
|
31
backend/src/models/gitAppOrganizationInstallation.ts
Normal file
31
backend/src/models/gitAppOrganizationInstallation.ts
Normal file
@ -0,0 +1,31 @@
|
||||
import { Schema, model } from "mongoose";
|
||||
|
||||
type Installation = {
|
||||
installationId: string
|
||||
organizationId: string
|
||||
user: Schema.Types.ObjectId
|
||||
};
|
||||
|
||||
|
||||
const gitAppOrganizationInstallation = new Schema<Installation>({
|
||||
installationId: {
|
||||
type: String,
|
||||
required: true,
|
||||
unique: true
|
||||
},
|
||||
organizationId: {
|
||||
type: String,
|
||||
required: true,
|
||||
unique: true
|
||||
},
|
||||
user: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: "User",
|
||||
required: true,
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
const GitAppOrganizationInstallation = model<Installation>("git_app_organization_installation", gitAppOrganizationInstallation);
|
||||
|
||||
export default GitAppOrganizationInstallation;
|
152
backend/src/models/gitRisks.ts
Normal file
152
backend/src/models/gitRisks.ts
Normal file
@ -0,0 +1,152 @@
|
||||
import { Schema, model } from "mongoose";
|
||||
|
||||
export const STATUS_RESOLVED_FALSE_POSITIVE = "RESOLVED_FALSE_POSITIVE";
|
||||
export const STATUS_RESOLVED_REVOKED = "RESOLVED_REVOKED";
|
||||
export const STATUS_RESOLVED_NOT_REVOKED = "RESOLVED_NOT_REVOKED";
|
||||
export const STATUS_UNRESOLVED = "UNRESOLVED";
|
||||
|
||||
export type GitRisks = {
|
||||
id: string;
|
||||
description: string;
|
||||
startLine: string;
|
||||
endLine: string;
|
||||
startColumn: string;
|
||||
endColumn: string;
|
||||
match: string;
|
||||
secret: string;
|
||||
file: string;
|
||||
symlinkFile: string;
|
||||
commit: string;
|
||||
entropy: string;
|
||||
author: string;
|
||||
email: string;
|
||||
date: string;
|
||||
message: string;
|
||||
tags: string[];
|
||||
ruleID: string;
|
||||
fingerprint: string;
|
||||
fingerPrintWithoutCommitId: string
|
||||
|
||||
isFalsePositive: boolean; // New field for marking risks as false positives
|
||||
isResolved: boolean; // New field for marking risks as resolved
|
||||
riskOwner: string | null; // New field for setting a risk owner (nullable string)
|
||||
installationId: string,
|
||||
repositoryId: string,
|
||||
repositoryLink: string
|
||||
repositoryFullName: string
|
||||
status: string
|
||||
pusher: {
|
||||
name: string,
|
||||
email: string
|
||||
},
|
||||
organization: Schema.Types.ObjectId,
|
||||
}
|
||||
|
||||
const gitRisks = new Schema<GitRisks>({
|
||||
id: {
|
||||
type: String,
|
||||
},
|
||||
description: {
|
||||
type: String,
|
||||
},
|
||||
startLine: {
|
||||
type: String,
|
||||
},
|
||||
endLine: {
|
||||
type: String,
|
||||
},
|
||||
startColumn: {
|
||||
type: String,
|
||||
},
|
||||
endColumn: {
|
||||
type: String,
|
||||
},
|
||||
file: {
|
||||
type: String,
|
||||
},
|
||||
symlinkFile: {
|
||||
type: String,
|
||||
},
|
||||
commit: {
|
||||
type: String,
|
||||
},
|
||||
entropy: {
|
||||
type: String,
|
||||
},
|
||||
author: {
|
||||
type: String,
|
||||
},
|
||||
email: {
|
||||
type: String,
|
||||
},
|
||||
date: {
|
||||
type: String,
|
||||
},
|
||||
message: {
|
||||
type: String,
|
||||
},
|
||||
tags: {
|
||||
type: [String],
|
||||
},
|
||||
ruleID: {
|
||||
type: String,
|
||||
},
|
||||
fingerprint: {
|
||||
type: String,
|
||||
unique: true
|
||||
},
|
||||
fingerPrintWithoutCommitId: {
|
||||
type: String,
|
||||
},
|
||||
isFalsePositive: {
|
||||
type: Boolean,
|
||||
default: false
|
||||
},
|
||||
isResolved: {
|
||||
type: Boolean,
|
||||
default: false
|
||||
},
|
||||
riskOwner: {
|
||||
type: String,
|
||||
default: null
|
||||
},
|
||||
installationId: {
|
||||
type: String,
|
||||
require: true
|
||||
},
|
||||
repositoryId: {
|
||||
type: String
|
||||
},
|
||||
repositoryLink: {
|
||||
type: String
|
||||
},
|
||||
repositoryFullName: {
|
||||
type: String
|
||||
},
|
||||
pusher: {
|
||||
name: {
|
||||
type: String
|
||||
},
|
||||
email: {
|
||||
type: String
|
||||
},
|
||||
},
|
||||
organization: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: "Organization",
|
||||
},
|
||||
status: {
|
||||
type: String,
|
||||
enum: [
|
||||
STATUS_RESOLVED_FALSE_POSITIVE,
|
||||
STATUS_RESOLVED_REVOKED,
|
||||
STATUS_RESOLVED_NOT_REVOKED,
|
||||
STATUS_UNRESOLVED
|
||||
],
|
||||
default: STATUS_UNRESOLVED
|
||||
}
|
||||
}, { timestamps: true });
|
||||
|
||||
const GitRisks = model<GitRisks>("GitRisks", gitRisks);
|
||||
|
||||
export default GitRisks;
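As an aside, a hypothetical sketch of how the exported status constants might be consumed when triaging a finding; the lookup by fingerprint and the exact fields updated are assumptions for illustration, not part of this diff.

// Hypothetical: mark a detected secret as a false positive by its unique fingerprint.
await GitRisks.findOneAndUpdate(
  { fingerprint: "repo/path/file.ts:abc123" }, // placeholder fingerprint
  { status: STATUS_RESOLVED_FALSE_POSITIVE, isFalsePositive: true, isResolved: true },
  { new: true }
);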
|
@ -1,5 +1,6 @@
|
||||
import BackupPrivateKey, { IBackupPrivateKey } from "./backupPrivateKey";
|
||||
import Bot, { IBot } from "./bot";
|
||||
import BotOrg, { IBotOrg } from "./botOrg";
|
||||
import BotKey, { IBotKey } from "./botKey";
|
||||
import IncidentContactOrg, { IIncidentContactOrg } from "./incidentContactOrg";
|
||||
import Integration, { IIntegration } from "./integration";
|
||||
@ -16,13 +17,14 @@ import ServiceAccountKey, { IServiceAccountKey } from "./serviceAccountKey"; //
|
||||
import ServiceAccountOrganizationPermission, { IServiceAccountOrganizationPermission } from "./serviceAccountOrganizationPermission"; // new
|
||||
import ServiceAccountWorkspacePermission, { IServiceAccountWorkspacePermission } from "./serviceAccountWorkspacePermission"; // new
|
||||
import TokenData, { ITokenData } from "./tokenData";
|
||||
import User,{ AuthProvider, IUser } from "./user";
|
||||
import User, { AuthProvider, IUser } from "./user";
|
||||
import UserAction, { IUserAction } from "./userAction";
|
||||
import Workspace, { IWorkspace } from "./workspace";
|
||||
import ServiceTokenData, { IServiceTokenData } from "./serviceTokenData";
|
||||
import APIKeyData, { IAPIKeyData } from "./apiKeyData";
|
||||
import LoginSRPDetail, { ILoginSRPDetail } from "./loginSRPDetail";
|
||||
import TokenVersion, { ITokenVersion } from "./tokenVersion";
|
||||
import GitRisks, { STATUS_RESOLVED_FALSE_POSITIVE } from "./gitRisks";
|
||||
|
||||
export {
|
||||
AuthProvider,
|
||||
@ -30,6 +32,8 @@ export {
|
||||
IBackupPrivateKey,
|
||||
Bot,
|
||||
IBot,
|
||||
BotOrg,
|
||||
IBotOrg,
|
||||
BotKey,
|
||||
IBotKey,
|
||||
IncidentContactOrg,
|
||||
@ -76,4 +80,6 @@ export {
|
||||
ILoginSRPDetail,
|
||||
TokenVersion,
|
||||
ITokenVersion,
|
||||
GitRisks,
|
||||
STATUS_RESOLVED_FALSE_POSITIVE
|
||||
};
|
||||
|
@ -3,9 +3,11 @@ import {
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_BITBUCKET,
|
||||
INTEGRATION_CHECKLY,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_CLOUDFLARE_PAGES,
|
||||
INTEGRATION_CODEFRESH,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_GITLAB,
|
||||
@ -57,7 +59,9 @@ export interface IIntegration {
|
||||
| "checkly"
|
||||
| "hashicorp-vault"
|
||||
| "cloudflare-pages"
|
||||
| "northflank";
|
||||
| "bitbucket"
|
||||
| "codefresh"
|
||||
| "northflank"
|
||||
integrationAuth: Types.ObjectId;
|
||||
}
|
||||
|
||||
@ -147,6 +151,8 @@ const integrationSchema = new Schema<IIntegration>(
|
||||
INTEGRATION_CHECKLY,
|
||||
INTEGRATION_HASHICORP_VAULT,
|
||||
INTEGRATION_CLOUDFLARE_PAGES,
|
||||
INTEGRATION_BITBUCKET,
|
||||
INTEGRATION_CODEFRESH,
|
||||
INTEGRATION_NORTHFLANK
|
||||
],
|
||||
required: true,
|
||||
|
@ -6,8 +6,10 @@ import {
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_BITBUCKET,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_CLOUDFLARE_PAGES,
|
||||
INTEGRATION_CODEFRESH,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_GITLAB,
|
||||
@ -26,7 +28,27 @@ import {
|
||||
export interface IIntegrationAuth extends Document {
|
||||
_id: Types.ObjectId;
|
||||
workspace: Types.ObjectId;
|
||||
integration: "heroku" | "vercel" | "netlify" | "github" | "gitlab" | "render" | "railway" | "flyio" | "azure-key-vault" | "laravel-forge" | "circleci" | "travisci" | "supabase" | "aws-parameter-store" | "aws-secret-manager" | "checkly" | "cloudflare-pages" | "northflank";
|
||||
integration:
|
||||
| "heroku"
|
||||
| "vercel"
|
||||
| "netlify"
|
||||
| "github"
|
||||
| "gitlab"
|
||||
| "render"
|
||||
| "railway"
|
||||
| "flyio"
|
||||
| "azure-key-vault"
|
||||
| "laravel-forge"
|
||||
| "circleci"
|
||||
| "travisci"
|
||||
| "supabase"
|
||||
| "aws-parameter-store"
|
||||
| "aws-secret-manager"
|
||||
| "checkly"
|
||||
| "cloudflare-pages"
|
||||
| "codefresh"
|
||||
| "bitbucket"
|
||||
| "northflank";
|
||||
teamId: string;
|
||||
accountId: string;
|
||||
url: string;
|
||||
@ -72,6 +94,8 @@ const integrationAuthSchema = new Schema<IIntegrationAuth>(
|
||||
INTEGRATION_SUPABASE,
|
||||
INTEGRATION_HASHICORP_VAULT,
|
||||
INTEGRATION_CLOUDFLARE_PAGES,
|
||||
INTEGRATION_BITBUCKET,
|
||||
INTEGRATION_CODEFRESH,
|
||||
INTEGRATION_NORTHFLANK
|
||||
],
|
||||
required: true,
|
||||
|
52
backend/src/models/secretImports.ts
Normal file
52
backend/src/models/secretImports.ts
Normal file
@ -0,0 +1,52 @@
|
||||
import { Schema, Types, model } from "mongoose";
|
||||
|
||||
export interface ISecretImports {
|
||||
_id: Types.ObjectId;
|
||||
workspace: Types.ObjectId;
|
||||
environment: string;
|
||||
folderId: string;
|
||||
imports: Array<{
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
}>;
|
||||
}
|
||||
|
||||
const secretImportSchema = new Schema<ISecretImports>(
|
||||
{
|
||||
workspace: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: "Workspace",
|
||||
required: true
|
||||
},
|
||||
environment: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
folderId: {
|
||||
type: String,
|
||||
required: true,
|
||||
default: "root"
|
||||
},
|
||||
imports: {
|
||||
type: [
|
||||
{
|
||||
environment: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
secretPath: {
|
||||
type: String,
|
||||
required: true
|
||||
}
|
||||
}
|
||||
],
|
||||
default: []
|
||||
}
|
||||
},
|
||||
{
|
||||
timestamps: true
|
||||
}
|
||||
);
|
||||
|
||||
const SecretImport = model<ISecretImports>("SecretImports", secretImportSchema);
|
||||
export default SecretImport;
|
@ -1,7 +1,9 @@
import { Document, Schema, Types, model } from "mongoose";

export enum AuthProvider {
  EMAIL = "email",
  GOOGLE = "google",
  OKTA_SAML = "okta-saml"
}

export interface IUser extends Document {
|
||||
|
85
backend/src/models/webhooks.ts
Normal file
85
backend/src/models/webhooks.ts
Normal file
@ -0,0 +1,85 @@
|
||||
import { Document, Schema, Types, model } from "mongoose";
|
||||
import { ALGORITHM_AES_256_GCM, ENCODING_SCHEME_BASE64, ENCODING_SCHEME_UTF8 } from "../variables";
|
||||
|
||||
export interface IWebhook extends Document {
|
||||
_id: Types.ObjectId;
|
||||
workspace: Types.ObjectId;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
url: string;
|
||||
lastStatus: "success" | "failed";
|
||||
lastRunErrorMessage?: string;
|
||||
isDisabled: boolean;
|
||||
encryptedSecretKey: string;
|
||||
iv: string;
|
||||
tag: string;
|
||||
algorithm: "aes-256-gcm";
|
||||
keyEncoding: "base64" | "utf8";
|
||||
}
|
||||
|
||||
const WebhookSchema = new Schema<IWebhook>(
|
||||
{
|
||||
workspace: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: "Workspace",
|
||||
required: true
|
||||
},
|
||||
environment: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
secretPath: {
|
||||
type: String,
|
||||
required: true,
|
||||
default: "/"
|
||||
},
|
||||
url: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
lastStatus: {
|
||||
type: String,
|
||||
enum: ["success", "failed"]
|
||||
},
|
||||
lastRunErrorMessage: {
|
||||
type: String
|
||||
},
|
||||
isDisabled: {
|
||||
type: Boolean,
|
||||
default: false
|
||||
},
|
||||
// used for webhook signature
|
||||
encryptedSecretKey: {
|
||||
type: String,
|
||||
select: false
|
||||
},
|
||||
iv: {
|
||||
type: String,
|
||||
select: false
|
||||
},
|
||||
tag: {
|
||||
type: String,
|
||||
select: false
|
||||
},
|
||||
algorithm: {
|
||||
// the encryption algorithm used
|
||||
type: String,
|
||||
enum: [ALGORITHM_AES_256_GCM],
|
||||
required: true,
|
||||
select: false
|
||||
},
|
||||
keyEncoding: {
|
||||
type: String,
|
||||
enum: [ENCODING_SCHEME_UTF8, ENCODING_SCHEME_BASE64],
|
||||
required: true,
|
||||
select: false
|
||||
}
|
||||
},
|
||||
{
|
||||
timestamps: true
|
||||
}
|
||||
);
|
||||
|
||||
const Webhook = model<IWebhook>("Webhook", WebhookSchema);
|
||||
|
||||
export default Webhook;
|
@ -1,5 +1,5 @@
import express, { Request, Response } from "express";
import { getSmtpConfigured } from "../../config";
import { getSecretScanningGitAppId, getSecretScanningPrivateKey, getSecretScanningWebhookSecret, getSmtpConfigured } from "../../config";

const router = express.Router();

@ -10,6 +10,7 @@ router.get(
      date: new Date(),
      message: "Ok",
      emailConfigured: await getSmtpConfigured(),
      secretScanningConfigured: await getSecretScanningGitAppId() && await getSecretScanningWebhookSecret() && await getSecretScanningPrivateKey(),
    })
  }
);
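For reference, a sketch of the payload the status handler above would produce once SMTP and all three secret-scanning settings are configured; the values are illustrative only.

// Illustrative response body (shape only, values made up):
const exampleStatus = {
  date: new Date(),
  message: "Ok",
  emailConfigured: true,
  secretScanningConfigured: true,
};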
|
||||
|
@ -1,7 +1,6 @@
|
||||
import express from "express";
|
||||
const router = express.Router();
|
||||
import { body } from "express-validator";
|
||||
import passport from "passport";
|
||||
import { requireAuth, validateRequest } from "../../middleware";
|
||||
import { authController } from "../../controllers/v1";
|
||||
import { authLimiter } from "../../helpers/rateLimiter";
|
||||
@ -44,21 +43,6 @@ router.post(
|
||||
authController.checkAuth
|
||||
);
|
||||
|
||||
router.get(
|
||||
"/redirect/google",
|
||||
authLimiter,
|
||||
passport.authenticate("google", {
|
||||
scope: ["profile", "email"],
|
||||
session: false,
|
||||
}),
|
||||
);
|
||||
|
||||
router.get(
|
||||
"/callback/google",
|
||||
passport.authenticate("google", { failureRedirect: "/login/provider/error", session: false }),
|
||||
authController.handleAuthProviderCallback,
|
||||
);
|
||||
|
||||
router.get(
|
||||
"/common-passwords",
|
||||
authLimiter,
|
||||
|
@ -15,23 +15,29 @@ import password from "./password";
|
||||
import integration from "./integration";
|
||||
import integrationAuth from "./integrationAuth";
|
||||
import secretsFolder from "./secretsFolder";
|
||||
import secretScanning from "./secretScanning";
|
||||
import webhooks from "./webhook";
|
||||
import secretImport from "./secretImport";
|
||||
|
||||
export {
|
||||
signup,
|
||||
auth,
|
||||
bot,
|
||||
user,
|
||||
userAction,
|
||||
organization,
|
||||
workspace,
|
||||
membershipOrg,
|
||||
membership,
|
||||
key,
|
||||
inviteOrg,
|
||||
secret,
|
||||
serviceToken,
|
||||
password,
|
||||
integration,
|
||||
integrationAuth,
|
||||
secretsFolder,
|
||||
signup,
|
||||
auth,
|
||||
bot,
|
||||
user,
|
||||
userAction,
|
||||
organization,
|
||||
workspace,
|
||||
membershipOrg,
|
||||
membership,
|
||||
key,
|
||||
inviteOrg,
|
||||
secret,
|
||||
serviceToken,
|
||||
password,
|
||||
integration,
|
||||
integrationAuth,
|
||||
secretsFolder,
|
||||
secretScanning,
|
||||
webhooks,
|
||||
secretImport
|
||||
};
|
||||
|
@ -81,6 +81,7 @@ router.get(
|
||||
}),
|
||||
param("integrationAuthId"),
|
||||
query("teamId"),
|
||||
query("workspaceSlug"),
|
||||
validateRequest,
|
||||
integrationAuthController.getIntegrationAuthApps
|
||||
);
|
||||
@ -141,6 +142,19 @@ router.get(
|
||||
integrationAuthController.getIntegrationAuthRailwayServices
|
||||
);
|
||||
|
||||
router.get(
|
||||
"/:integrationAuthId/bitbucket/workspaces",
|
||||
requireAuth({
|
||||
acceptedAuthModes: [AUTH_MODE_JWT],
|
||||
}),
|
||||
requireIntegrationAuthorizationAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
}),
|
||||
param("integrationAuthId").exists().isString(),
|
||||
validateRequest,
|
||||
integrationAuthController.getIntegrationAuthBitBucketWorkspaces
|
||||
);
|
||||
|
||||
router.delete(
|
||||
"/:integrationAuthId",
|
||||
requireAuth({
|
||||
|
84
backend/src/routes/v1/secretImport.ts
Normal file
84
backend/src/routes/v1/secretImport.ts
Normal file
@ -0,0 +1,84 @@
|
||||
import express from "express";
|
||||
const router = express.Router();
|
||||
import { body, param, query } from "express-validator";
|
||||
import { secretImportController } from "../../controllers/v1";
|
||||
import { requireAuth, requireWorkspaceAuth, validateRequest } from "../../middleware";
|
||||
import { ADMIN, AUTH_MODE_JWT, MEMBER } from "../../variables";
|
||||
|
||||
router.post(
|
||||
"/",
|
||||
requireAuth({
|
||||
acceptedAuthModes: [AUTH_MODE_JWT]
|
||||
}),
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
locationWorkspaceId: "body"
|
||||
}),
|
||||
body("workspaceId").exists().isString().trim().notEmpty(),
|
||||
body("environment").exists().isString().trim().notEmpty(),
|
||||
body("folderId").default("root").isString().trim(),
|
||||
body("secretImport").exists().isObject(),
|
||||
body("secretImport.environment").isString().exists().trim(),
|
||||
body("secretImport.secretPath").isString().exists().trim(),
|
||||
validateRequest,
|
||||
secretImportController.createSecretImport
|
||||
);
|
||||
|
||||
router.put(
|
||||
"/:id",
|
||||
requireAuth({
|
||||
acceptedAuthModes: [AUTH_MODE_JWT]
|
||||
}),
|
||||
param("id").exists().isString().trim(),
|
||||
body("secretImports").exists().isArray(),
|
||||
body("secretImports.*.environment").isString().exists().trim(),
|
||||
body("secretImports.*.secretPath").isString().exists().trim(),
|
||||
validateRequest,
|
||||
secretImportController.updateSecretImport
|
||||
);
|
||||
|
||||
router.delete(
|
||||
"/:id",
|
||||
requireAuth({
|
||||
acceptedAuthModes: [AUTH_MODE_JWT]
|
||||
}),
|
||||
param("id").exists().isString().trim(),
|
||||
body("secretImportPath").isString().exists().trim(),
|
||||
body("secretImportEnv").isString().exists().trim(),
|
||||
validateRequest,
|
||||
secretImportController.updateSecretImport
|
||||
);
|
||||
|
||||
router.get(
|
||||
"/",
|
||||
requireAuth({
|
||||
acceptedAuthModes: [AUTH_MODE_JWT]
|
||||
}),
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
locationWorkspaceId: "query"
|
||||
}),
|
||||
query("workspaceId").exists().isString().trim().notEmpty(),
|
||||
query("environment").exists().isString().trim().notEmpty(),
|
||||
query("folderId").default("root").isString().trim(),
|
||||
validateRequest,
|
||||
secretImportController.getSecretImports
|
||||
);
|
||||
|
||||
router.get(
|
||||
"/secrets",
|
||||
requireAuth({
|
||||
acceptedAuthModes: [AUTH_MODE_JWT]
|
||||
}),
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
locationWorkspaceId: "query"
|
||||
}),
|
||||
query("workspaceId").exists().isString().trim().notEmpty(),
|
||||
query("environment").exists().isString().trim().notEmpty(),
|
||||
query("folderId").default("root").isString().trim(),
|
||||
validateRequest,
|
||||
secretImportController.getAllSecretsFromImport
|
||||
);
|
||||
|
||||
export default router;
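A request body that would pass the validators on the create route above might look like the sketch below; the IDs and paths are placeholders, and the path this router is mounted under is not shown in this diff.

// Illustrative POST body for secretImportController.createSecretImport.
const exampleCreateSecretImportBody = {
  workspaceId: "64a0f1c2e4b0a1b2c3d4e5f6", // placeholder workspace ObjectId
  environment: "dev",
  folderId: "root",
  secretImport: {
    environment: "prod",
    secretPath: "/common",
  },
};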
|
81
backend/src/routes/v1/secretScanning.ts
Normal file
81
backend/src/routes/v1/secretScanning.ts
Normal file
@ -0,0 +1,81 @@
|
||||
import express from "express";
|
||||
const router = express.Router();
|
||||
import {
|
||||
requireAuth,
|
||||
requireOrganizationAuth,
|
||||
validateRequest,
|
||||
} from "../../middleware";
|
||||
import { body, param } from "express-validator";
|
||||
import { createInstallationSession, getCurrentOrganizationInstallationStatus, getRisksForOrganization, linkInstallationToOrganization, updateRisksStatus } from "../../controllers/v1/secretScanningController";
|
||||
import { ACCEPTED, ADMIN, MEMBER, OWNER } from "../../variables";
|
||||
|
||||
router.post(
|
||||
"/create-installation-session/organization/:organizationId",
|
||||
requireAuth({
|
||||
acceptedAuthModes: ["jwt"],
|
||||
}),
|
||||
param("organizationId").exists().trim(),
|
||||
requireOrganizationAuth({
|
||||
acceptedRoles: [OWNER, ADMIN, MEMBER],
|
||||
acceptedStatuses: [ACCEPTED],
|
||||
}),
|
||||
validateRequest,
|
||||
createInstallationSession
|
||||
);
|
||||
|
||||
router.post(
|
||||
"/link-installation",
|
||||
requireAuth({
|
||||
acceptedAuthModes: ["jwt"],
|
||||
}),
|
||||
body("installationId").exists().trim(),
|
||||
body("sessionId").exists().trim(),
|
||||
validateRequest,
|
||||
linkInstallationToOrganization
|
||||
);
|
||||
|
||||
router.get(
|
||||
"/installation-status/organization/:organizationId",
|
||||
requireAuth({
|
||||
acceptedAuthModes: ["jwt"],
|
||||
}),
|
||||
param("organizationId").exists().trim(),
|
||||
requireOrganizationAuth({
|
||||
acceptedRoles: [OWNER, ADMIN, MEMBER],
|
||||
acceptedStatuses: [ACCEPTED],
|
||||
}),
|
||||
validateRequest,
|
||||
getCurrentOrganizationInstallationStatus
|
||||
);
|
||||
|
||||
router.get(
|
||||
"/organization/:organizationId/risks",
|
||||
requireAuth({
|
||||
acceptedAuthModes: ["jwt"],
|
||||
}),
|
||||
param("organizationId").exists().trim(),
|
||||
requireOrganizationAuth({
|
||||
acceptedRoles: [OWNER, ADMIN, MEMBER],
|
||||
acceptedStatuses: [ACCEPTED],
|
||||
}),
|
||||
validateRequest,
|
||||
getRisksForOrganization
|
||||
);
|
||||
|
||||
router.post(
|
||||
"/organization/:organizationId/risks/:riskId/status",
|
||||
requireAuth({
|
||||
acceptedAuthModes: ["jwt"],
|
||||
}),
|
||||
param("organizationId").exists().trim(),
|
||||
param("riskId").exists().trim(),
|
||||
body("status").exists(),
|
||||
requireOrganizationAuth({
|
||||
acceptedRoles: [OWNER, ADMIN, MEMBER],
|
||||
acceptedStatuses: [ACCEPTED],
|
||||
}),
|
||||
validateRequest,
|
||||
updateRisksStatus
|
||||
);
|
||||
|
||||
export default router;
|
75
backend/src/routes/v1/webhook.ts
Normal file
75
backend/src/routes/v1/webhook.ts
Normal file
@ -0,0 +1,75 @@
import express from "express";
const router = express.Router();
import { requireAuth, requireWorkspaceAuth, validateRequest } from "../../middleware";
import { body, param, query } from "express-validator";
import { ADMIN, AUTH_MODE_JWT, MEMBER } from "../../variables";
import { webhookController } from "../../controllers/v1";

router.post(
  "/",
  requireAuth({
    acceptedAuthModes: [AUTH_MODE_JWT]
  }),
  requireWorkspaceAuth({
    acceptedRoles: [ADMIN, MEMBER],
    locationWorkspaceId: "body",
    locationEnvironment: "body"
  }),
  body("workspaceId").exists().isString().trim(),
  body("environment").exists().isString().trim(),
  body("webhookUrl").exists().isString().isURL().trim(),
  body("webhookSecretKey").isString().trim(),
  body("secretPath").default("/").isString().trim(),
  validateRequest,
  webhookController.createWebhook
);

router.patch(
  "/:webhookId",
  requireAuth({
    acceptedAuthModes: [AUTH_MODE_JWT]
  }),
  param("webhookId").exists().isString().trim(),
  body("isDisabled").default(false).isBoolean(),
  validateRequest,
  webhookController.updateWebhook
);

router.post(
  "/:webhookId/test",
  requireAuth({
    acceptedAuthModes: [AUTH_MODE_JWT]
  }),
  param("webhookId").exists().isString().trim(),
  validateRequest,
  webhookController.testWebhook
);

router.delete(
  "/:webhookId",
  requireAuth({
    acceptedAuthModes: [AUTH_MODE_JWT]
  }),
  param("webhookId").exists().isString().trim(),
  validateRequest,
  webhookController.deleteWebhook
);

router.get(
  "/",
  requireAuth({
    acceptedAuthModes: [AUTH_MODE_JWT]
  }),
  requireWorkspaceAuth({
    acceptedRoles: [ADMIN, MEMBER],
    locationWorkspaceId: "query",
    locationEnvironment: "query"
  }),
  query("workspaceId").exists().isString().trim(),
  query("environment").optional().isString().trim(),
  query("secretPath").optional().isString().trim(),
  validateRequest,
  webhookController.listWebhooks
);

export default router;
@ -5,7 +5,7 @@ import {
|
||||
requireAuth,
|
||||
requireSecretsAuth,
|
||||
requireWorkspaceAuth,
|
||||
validateRequest,
|
||||
validateRequest
|
||||
} from "../../middleware";
|
||||
import { validateClientForSecrets } from "../../validation";
|
||||
import { body, query } from "express-validator";
|
||||
@ -20,22 +20,18 @@ import {
|
||||
PERMISSION_READ_SECRETS,
|
||||
PERMISSION_WRITE_SECRETS,
|
||||
SECRET_PERSONAL,
|
||||
SECRET_SHARED,
|
||||
SECRET_SHARED
|
||||
} from "../../variables";
|
||||
import { BatchSecretRequest } from "../../types/secret";
|
||||
|
||||
router.post(
|
||||
"/batch",
|
||||
requireAuth({
|
||||
acceptedAuthModes: [
|
||||
AUTH_MODE_JWT,
|
||||
AUTH_MODE_API_KEY,
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
],
|
||||
acceptedAuthModes: [AUTH_MODE_JWT, AUTH_MODE_API_KEY, AUTH_MODE_SERVICE_TOKEN]
|
||||
}),
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
locationWorkspaceId: "body",
|
||||
locationWorkspaceId: "body"
|
||||
}),
|
||||
body("workspaceId").exists().isString().trim(),
|
||||
body("folderId").default("root").isString().trim(),
|
||||
@ -52,10 +48,8 @@ router.post(
|
||||
if (secretIds.length > 0) {
|
||||
req.secrets = await validateClientForSecrets({
|
||||
authData: req.authData,
|
||||
secretIds: secretIds.map(
|
||||
(secretId: string) => new Types.ObjectId(secretId)
|
||||
),
|
||||
requiredPermissions: [],
|
||||
secretIds: secretIds.map((secretId: string) => new Types.ObjectId(secretId)),
|
||||
requiredPermissions: []
|
||||
});
|
||||
}
|
||||
}
|
||||
@ -76,14 +70,11 @@ router.post(
|
||||
.custom((value) => {
|
||||
if (Array.isArray(value)) {
|
||||
// case: create multiple secrets
|
||||
if (value.length === 0)
|
||||
throw new Error("secrets cannot be an empty array");
|
||||
if (value.length === 0) throw new Error("secrets cannot be an empty array");
|
||||
for (const secret of value) {
|
||||
if (
|
||||
!secret.type ||
|
||||
!(
|
||||
secret.type === SECRET_PERSONAL || secret.type === SECRET_SHARED
|
||||
) ||
|
||||
!(secret.type === SECRET_PERSONAL || secret.type === SECRET_SHARED) ||
|
||||
!secret.secretKeyCiphertext ||
|
||||
!secret.secretKeyIV ||
|
||||
!secret.secretKeyTag ||
|
||||
@ -108,9 +99,7 @@ router.post(
|
||||
!value.secretValueIV ||
|
||||
!value.secretValueTag
|
||||
) {
|
||||
throw new Error(
|
||||
"secrets object is missing required secret properties"
|
||||
);
|
||||
throw new Error("secrets object is missing required secret properties");
|
||||
}
|
||||
} else {
|
||||
throw new Error("secrets must be an object or an array of objects");
|
||||
@ -120,17 +109,13 @@ router.post(
|
||||
}),
|
||||
validateRequest,
|
||||
requireAuth({
|
||||
acceptedAuthModes: [
|
||||
AUTH_MODE_JWT,
|
||||
AUTH_MODE_API_KEY,
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
],
|
||||
acceptedAuthModes: [AUTH_MODE_JWT, AUTH_MODE_API_KEY, AUTH_MODE_SERVICE_TOKEN]
|
||||
}),
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
locationWorkspaceId: "body",
|
||||
locationEnvironment: "body",
|
||||
requiredPermissions: [PERMISSION_WRITE_SECRETS],
|
||||
requiredPermissions: [PERMISSION_WRITE_SECRETS]
|
||||
}),
|
||||
secretsController.createSecrets
|
||||
);
|
||||
@ -142,20 +127,21 @@ router.get(
|
||||
query("tagSlugs"),
|
||||
query("folderId").default("root").isString().trim(),
|
||||
query("secretPath").optional().isString().trim(),
|
||||
query("include_imports").optional().default(false).isBoolean(),
|
||||
validateRequest,
|
||||
requireAuth({
|
||||
acceptedAuthModes: [
|
||||
AUTH_MODE_JWT,
|
||||
AUTH_MODE_API_KEY,
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
AUTH_MODE_SERVICE_ACCOUNT,
|
||||
],
|
||||
AUTH_MODE_SERVICE_ACCOUNT
|
||||
]
|
||||
}),
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
locationWorkspaceId: "query",
|
||||
locationEnvironment: "query",
|
||||
requiredPermissions: [PERMISSION_READ_SECRETS],
|
||||
requiredPermissions: [PERMISSION_READ_SECRETS]
|
||||
}),
|
||||
secretsController.getSecrets
|
||||
);
|
||||
@ -167,8 +153,7 @@ router.patch(
|
||||
.custom((value) => {
|
||||
if (Array.isArray(value)) {
|
||||
// case: update multiple secrets
|
||||
if (value.length === 0)
|
||||
throw new Error("secrets cannot be an empty array");
|
||||
if (value.length === 0) throw new Error("secrets cannot be an empty array");
|
||||
for (const secret of value) {
|
||||
if (!secret.id) {
|
||||
throw new Error("Each secret must contain a ID property");
|
||||
@ -187,15 +172,11 @@ router.patch(
|
||||
}),
|
||||
validateRequest,
|
||||
requireAuth({
|
||||
acceptedAuthModes: [
|
||||
AUTH_MODE_JWT,
|
||||
AUTH_MODE_API_KEY,
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
],
|
||||
acceptedAuthModes: [AUTH_MODE_JWT, AUTH_MODE_API_KEY, AUTH_MODE_SERVICE_TOKEN]
|
||||
}),
|
||||
requireSecretsAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
requiredPermissions: [PERMISSION_WRITE_SECRETS],
|
||||
requiredPermissions: [PERMISSION_WRITE_SECRETS]
|
||||
}),
|
||||
secretsController.updateSecrets
|
||||
);
|
||||
@ -210,8 +191,7 @@ router.delete(
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
// case: delete multiple secrets
|
||||
if (value.length === 0)
|
||||
throw new Error("secrets cannot be an empty array");
|
||||
if (value.length === 0) throw new Error("secrets cannot be an empty array");
|
||||
return value.every((id: string) => typeof id === "string");
|
||||
}
|
||||
|
||||
@ -221,15 +201,11 @@ router.delete(
|
||||
.isEmpty(),
|
||||
validateRequest,
|
||||
requireAuth({
|
||||
acceptedAuthModes: [
|
||||
AUTH_MODE_JWT,
|
||||
AUTH_MODE_API_KEY,
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
],
|
||||
acceptedAuthModes: [AUTH_MODE_JWT, AUTH_MODE_API_KEY, AUTH_MODE_SERVICE_TOKEN]
|
||||
}),
|
||||
requireSecretsAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
requiredPermissions: [PERMISSION_WRITE_SECRETS],
|
||||
requiredPermissions: [PERMISSION_WRITE_SECRETS]
|
||||
}),
|
||||
secretsController.deleteSecrets
|
||||
);
|
||||
|
@ -10,6 +10,9 @@ import {
|
||||
AUTH_MODE_API_KEY,
|
||||
AUTH_MODE_JWT,
|
||||
} from "../../variables";
|
||||
import {
|
||||
AuthProvider
|
||||
} from "../../models";
|
||||
|
||||
router.get(
|
||||
"/me",
|
||||
@ -29,6 +32,30 @@ router.patch(
|
||||
usersController.updateMyMfaEnabled
|
||||
);
|
||||
|
||||
router.patch(
|
||||
"/me/name",
|
||||
requireAuth({
|
||||
acceptedAuthModes: [AUTH_MODE_JWT, AUTH_MODE_API_KEY],
|
||||
}),
|
||||
body("firstName").exists().isString(),
|
||||
body("lastName").isString(),
|
||||
validateRequest,
|
||||
usersController.updateName
|
||||
);
|
||||
|
||||
router.patch(
|
||||
"/me/auth-provider",
|
||||
requireAuth({
|
||||
acceptedAuthModes: [AUTH_MODE_JWT, AUTH_MODE_API_KEY],
|
||||
}),
|
||||
body("authProvider").exists().isString().isIn([
|
||||
AuthProvider.EMAIL,
|
||||
AuthProvider.GOOGLE
|
||||
]),
|
||||
validateRequest,
|
||||
usersController.updateAuthProvider
|
||||
);
|
||||
|
||||
router.get(
|
||||
"/me/organizations",
|
||||
requireAuth({
|
||||
|
@ -1,10 +1,6 @@
|
||||
import express from "express";
|
||||
const router = express.Router();
|
||||
import {
|
||||
requireAuth,
|
||||
requireWorkspaceAuth,
|
||||
validateRequest,
|
||||
} from "../../middleware";
|
||||
import { requireAuth, requireWorkspaceAuth, validateRequest } from "../../middleware";
|
||||
import { body, param, query } from "express-validator";
|
||||
import { secretsController } from "../../controllers/v3";
|
||||
import {
|
||||
@ -17,7 +13,7 @@ import {
|
||||
PERMISSION_READ_SECRETS,
|
||||
PERMISSION_WRITE_SECRETS,
|
||||
SECRET_PERSONAL,
|
||||
SECRET_SHARED,
|
||||
SECRET_SHARED
|
||||
} from "../../variables";
|
||||
|
||||
router.get(
|
||||
@ -25,14 +21,15 @@ router.get(
|
||||
query("workspaceId").exists().isString().trim(),
|
||||
query("environment").exists().isString().trim(),
|
||||
query("secretPath").default("/").isString().trim(),
|
||||
query("include_imports").optional().isBoolean().default(false),
|
||||
validateRequest,
|
||||
requireAuth({
|
||||
acceptedAuthModes: [
|
||||
AUTH_MODE_JWT,
|
||||
AUTH_MODE_API_KEY,
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
AUTH_MODE_SERVICE_ACCOUNT,
|
||||
],
|
||||
AUTH_MODE_SERVICE_ACCOUNT
|
||||
]
|
||||
}),
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
@ -40,7 +37,7 @@ router.get(
|
||||
locationEnvironment: "query",
|
||||
requiredPermissions: [PERMISSION_READ_SECRETS],
|
||||
requireBlindIndicesEnabled: true,
|
||||
requireE2EEOff: true,
|
||||
requireE2EEOff: true
|
||||
}),
|
||||
secretsController.getSecretsRaw
|
||||
);
|
||||
@ -58,8 +55,8 @@ router.get(
|
||||
AUTH_MODE_JWT,
|
||||
AUTH_MODE_API_KEY,
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
AUTH_MODE_SERVICE_ACCOUNT,
|
||||
],
|
||||
AUTH_MODE_SERVICE_ACCOUNT
|
||||
]
|
||||
}),
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
@ -67,7 +64,7 @@ router.get(
|
||||
locationEnvironment: "query",
|
||||
requiredPermissions: [PERMISSION_READ_SECRETS],
|
||||
requireBlindIndicesEnabled: true,
|
||||
requireE2EEOff: true,
|
||||
requireE2EEOff: true
|
||||
}),
|
||||
secretsController.getSecretByNameRaw
|
||||
);
|
||||
@ -86,8 +83,8 @@ router.post(
|
||||
AUTH_MODE_JWT,
|
||||
AUTH_MODE_API_KEY,
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
AUTH_MODE_SERVICE_ACCOUNT,
|
||||
],
|
||||
AUTH_MODE_SERVICE_ACCOUNT
|
||||
]
|
||||
}),
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
@ -95,7 +92,7 @@ router.post(
|
||||
locationEnvironment: "body",
|
||||
requiredPermissions: [PERMISSION_WRITE_SECRETS],
|
||||
requireBlindIndicesEnabled: true,
|
||||
requireE2EEOff: true,
|
||||
requireE2EEOff: true
|
||||
}),
|
||||
secretsController.createSecretRaw
|
||||
);
|
||||
@ -114,8 +111,8 @@ router.patch(
|
||||
AUTH_MODE_JWT,
|
||||
AUTH_MODE_API_KEY,
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
AUTH_MODE_SERVICE_ACCOUNT,
|
||||
],
|
||||
AUTH_MODE_SERVICE_ACCOUNT
|
||||
]
|
||||
}),
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
@ -123,7 +120,7 @@ router.patch(
|
||||
locationEnvironment: "body",
|
||||
requiredPermissions: [PERMISSION_WRITE_SECRETS],
|
||||
requireBlindIndicesEnabled: true,
|
||||
requireE2EEOff: true,
|
||||
requireE2EEOff: true
|
||||
}),
|
||||
secretsController.updateSecretByNameRaw
|
||||
);
|
||||
@ -141,8 +138,8 @@ router.delete(
|
||||
AUTH_MODE_JWT,
|
||||
AUTH_MODE_API_KEY,
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
AUTH_MODE_SERVICE_ACCOUNT,
|
||||
],
|
||||
AUTH_MODE_SERVICE_ACCOUNT
|
||||
]
|
||||
}),
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
@ -150,7 +147,7 @@ router.delete(
|
||||
locationEnvironment: "body",
|
||||
requiredPermissions: [PERMISSION_WRITE_SECRETS],
|
||||
requireBlindIndicesEnabled: true,
|
||||
requireE2EEOff: true,
|
||||
requireE2EEOff: true
|
||||
}),
|
||||
secretsController.deleteSecretByNameRaw
|
||||
);
|
||||
@ -166,8 +163,8 @@ router.get(
|
||||
AUTH_MODE_JWT,
|
||||
AUTH_MODE_API_KEY,
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
AUTH_MODE_SERVICE_ACCOUNT,
|
||||
],
|
||||
AUTH_MODE_SERVICE_ACCOUNT
|
||||
]
|
||||
}),
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
@ -175,7 +172,7 @@ router.get(
|
||||
locationEnvironment: "query",
|
||||
requiredPermissions: [PERMISSION_READ_SECRETS],
|
||||
requireBlindIndicesEnabled: true,
|
||||
requireE2EEOff: false,
|
||||
requireE2EEOff: false
|
||||
}),
|
||||
secretsController.getSecrets
|
||||
);
|
||||
@ -201,8 +198,8 @@ router.post(
|
||||
AUTH_MODE_JWT,
|
||||
AUTH_MODE_API_KEY,
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
AUTH_MODE_SERVICE_ACCOUNT,
|
||||
],
|
||||
AUTH_MODE_SERVICE_ACCOUNT
|
||||
]
|
||||
}),
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
@ -210,7 +207,7 @@ router.post(
|
||||
locationEnvironment: "body",
|
||||
requiredPermissions: [PERMISSION_WRITE_SECRETS],
|
||||
requireBlindIndicesEnabled: true,
|
||||
requireE2EEOff: false,
|
||||
requireE2EEOff: false
|
||||
}),
|
||||
secretsController.createSecret
|
||||
);
|
||||
@ -228,15 +225,15 @@ router.get(
|
||||
AUTH_MODE_JWT,
|
||||
AUTH_MODE_API_KEY,
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
AUTH_MODE_SERVICE_ACCOUNT,
|
||||
],
|
||||
AUTH_MODE_SERVICE_ACCOUNT
|
||||
]
|
||||
}),
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
locationWorkspaceId: "query",
|
||||
locationEnvironment: "query",
|
||||
requiredPermissions: [PERMISSION_READ_SECRETS],
|
||||
requireBlindIndicesEnabled: true,
|
||||
requireBlindIndicesEnabled: true
|
||||
}),
|
||||
secretsController.getSecretByName
|
||||
);
|
||||
@ -257,8 +254,8 @@ router.patch(
|
||||
AUTH_MODE_JWT,
|
||||
AUTH_MODE_API_KEY,
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
AUTH_MODE_SERVICE_ACCOUNT,
|
||||
],
|
||||
AUTH_MODE_SERVICE_ACCOUNT
|
||||
]
|
||||
}),
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
@ -266,7 +263,7 @@ router.patch(
|
||||
locationEnvironment: "body",
|
||||
requiredPermissions: [PERMISSION_WRITE_SECRETS],
|
||||
requireBlindIndicesEnabled: true,
|
||||
requireE2EEOff: false,
|
||||
requireE2EEOff: false
|
||||
}),
|
||||
secretsController.updateSecretByName
|
||||
);
|
||||
@ -284,8 +281,8 @@ router.delete(
|
||||
AUTH_MODE_JWT,
|
||||
AUTH_MODE_API_KEY,
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
AUTH_MODE_SERVICE_ACCOUNT,
|
||||
],
|
||||
AUTH_MODE_SERVICE_ACCOUNT
|
||||
]
|
||||
}),
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
@ -293,7 +290,7 @@ router.delete(
|
||||
locationEnvironment: "body",
|
||||
requiredPermissions: [PERMISSION_WRITE_SECRETS],
|
||||
requireBlindIndicesEnabled: true,
|
||||
requireE2EEOff: false,
|
||||
requireE2EEOff: false
|
||||
}),
|
||||
secretsController.deleteSecretByName
|
||||
);
|
||||
|
@ -21,7 +21,8 @@ router.post(
|
||||
body("salt").exists().isString().trim().notEmpty(),
|
||||
body("verifier").exists().isString().trim().notEmpty(),
|
||||
body("organizationName").exists().isString().trim().notEmpty(),
|
||||
body("providerAuthToken").isString().trim().optional({nullable: true}),
|
||||
body("providerAuthToken").isString().trim().optional({ nullable: true }),
|
||||
body("attributionSource").optional().isString().trim(),
|
||||
validateRequest,
|
||||
signupController.completeAccountSignup,
|
||||
);
|
||||
|
12
backend/src/services/BotOrgService.ts
Normal file
@ -0,0 +1,12 @@
import { Types } from "mongoose";
import { getSymmetricKeyHelper } from "../helpers/botOrg";

// TODO: DOCstrings

class BotOrgService {
  static async getSymmetricKey(organizationId: Types.ObjectId) {
    return await getSymmetricKeyHelper(organizationId);
  }
}

export default BotOrgService;
250
backend/src/services/GithubSecretScanningService.ts
Normal file
@ -0,0 +1,250 @@
|
||||
import { Probot } from "probot";
|
||||
import { exec } from "child_process";
|
||||
import { mkdir, readFile, rm, writeFile } from "fs";
|
||||
import { tmpdir } from "os";
|
||||
import { join } from "path"
|
||||
import GitRisks from "../models/gitRisks";
|
||||
import GitAppOrganizationInstallation from "../models/gitAppOrganizationInstallation";
|
||||
import MembershipOrg from "../models/membershipOrg";
|
||||
import { ADMIN, OWNER } from "../variables";
|
||||
import User from "../models/user";
|
||||
import { sendMail } from "../helpers";
|
||||
import TelemetryService from "./TelemetryService";
|
||||
|
||||
type SecretMatch = {
|
||||
Description: string;
|
||||
StartLine: number;
|
||||
EndLine: number;
|
||||
StartColumn: number;
|
||||
EndColumn: number;
|
||||
Match: string;
|
||||
Secret: string;
|
||||
File: string;
|
||||
SymlinkFile: string;
|
||||
Commit: string;
|
||||
Entropy: number;
|
||||
Author: string;
|
||||
Email: string;
|
||||
Date: string;
|
||||
Message: string;
|
||||
Tags: string[];
|
||||
RuleID: string;
|
||||
Fingerprint: string;
|
||||
FingerPrintWithoutCommitId: string
|
||||
};
|
||||
|
||||
export default async (app: Probot) => {
|
||||
app.on("installation.deleted", async (context) => {
|
||||
const { payload } = context;
|
||||
const { installation, repositories } = payload;
|
||||
if (installation.repository_selection == "all") {
|
||||
await GitRisks.deleteMany({ installationId: installation.id })
|
||||
await GitAppOrganizationInstallation.deleteOne({ installationId: installation.id })
|
||||
} else {
|
||||
if (repositories) {
|
||||
for (const repository of repositories) {
|
||||
await GitRisks.deleteMany({ repositoryId: repository.id })
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
app.on("push", async (context) => {
|
||||
const { payload } = context;
|
||||
const { commits, repository, installation, pusher } = payload;
|
||||
const [owner, repo] = repository.full_name.split("/");
|
||||
|
||||
if (!commits || !repository || !installation || !pusher) {
|
||||
return
|
||||
}
|
||||
|
||||
const installationLinkToOrgExists = await GitAppOrganizationInstallation.findOne({ installationId: installation?.id }).lean()
|
||||
if (!installationLinkToOrgExists) {
|
||||
return
|
||||
}
|
||||
|
||||
const allFindingsByFingerprint: { [key: string]: SecretMatch; } = {}
|
||||
|
||||
for (const commit of commits) {
|
||||
for (const filepath of [...commit.added, ...commit.modified]) {
|
||||
try {
|
||||
const fileContentsResponse = await context.octokit.repos.getContent({
|
||||
owner,
|
||||
repo,
|
||||
path: filepath,
|
||||
});
|
||||
|
||||
const data: any = fileContentsResponse.data;
|
||||
const fileContent = Buffer.from(data.content, "base64").toString();
|
||||
|
||||
const findings = await scanContentAndGetFindings(`\n${fileContent}`) // extra line to count lines correctly
|
||||
|
||||
for (const finding of findings) {
|
||||
const fingerPrintWithCommitId = `${commit.id}:${filepath}:${finding.RuleID}:${finding.StartLine}`
|
||||
const fingerPrintWithoutCommitId = `${filepath}:${finding.RuleID}:${finding.StartLine}`
|
||||
finding.Fingerprint = fingerPrintWithCommitId
|
||||
finding.FingerPrintWithoutCommitId = fingerPrintWithoutCommitId
|
||||
finding.Commit = commit.id
|
||||
finding.File = filepath
|
||||
finding.Author = commit.author.name
|
||||
finding.Email = commit?.author?.email ? commit?.author?.email : ""
|
||||
|
||||
allFindingsByFingerprint[fingerPrintWithCommitId] = finding
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
console.error(`Error fetching content for ${filepath}`, error); // eslint-disable-line
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// change to update
|
||||
for (const key in allFindingsByFingerprint) {
|
||||
const risk = await GitRisks.findOneAndUpdate({ fingerprint: allFindingsByFingerprint[key].Fingerprint },
|
||||
{
|
||||
...convertKeysToLowercase(allFindingsByFingerprint[key]),
|
||||
installationId: installation.id,
|
||||
organization: installationLinkToOrgExists.organizationId,
|
||||
repositoryFullName: repository.full_name,
|
||||
repositoryId: repository.id
|
||||
}, {
|
||||
upsert: true
|
||||
}).lean()
|
||||
}
|
||||
// get emails of admins
|
||||
const adminsOfWork = await MembershipOrg.find({
|
||||
organization: installationLinkToOrgExists.organizationId,
|
||||
$or: [
|
||||
{ role: OWNER },
|
||||
{ role: ADMIN }
|
||||
]
|
||||
}).lean()
|
||||
|
||||
const userEmails = await User.find({
|
||||
_id: {
|
||||
$in: [adminsOfWork.map(orgMembership => orgMembership.user)]
|
||||
}
|
||||
}).select("email").lean()
|
||||
|
||||
const adminOrOwnerEmails = userEmails.map(userObject => userObject.email)
|
||||
|
||||
const usersToNotify = pusher?.email ? [pusher.email, ...adminOrOwnerEmails] : [...adminOrOwnerEmails]
|
||||
if (Object.keys(allFindingsByFingerprint).length) {
|
||||
await sendMail({
|
||||
template: "secretLeakIncident.handlebars",
|
||||
subjectLine: `Incident alert: leaked secrets found in Github repository ${repository.full_name}`,
|
||||
recipients: usersToNotify,
|
||||
substitutions: {
|
||||
numberOfSecrets: Object.keys(allFindingsByFingerprint).length,
|
||||
pusher_email: pusher.email,
|
||||
pusher_name: pusher.name
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
const postHogClient = await TelemetryService.getPostHogClient();
|
||||
if (postHogClient) {
|
||||
postHogClient.capture({
|
||||
event: "cloud secret scan",
|
||||
distinctId: pusher.email,
|
||||
properties: {
|
||||
numberOfCommitsScanned: commits.length,
|
||||
numberOfRisksFound: Object.keys(allFindingsByFingerprint).length,
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
async function scanContentAndGetFindings(textContent: string): Promise<SecretMatch[]> {
|
||||
const tempFolder = await createTempFolder();
|
||||
const filePath = join(tempFolder, "content.txt");
|
||||
const findingsPath = join(tempFolder, "findings.json");
|
||||
|
||||
try {
|
||||
await writeTextToFile(filePath, textContent);
|
||||
await runInfisicalScan(filePath, findingsPath);
|
||||
const findingsData = await readFindingsFile(findingsPath);
|
||||
return JSON.parse(findingsData);
|
||||
} finally {
|
||||
await deleteTempFolder(tempFolder);
|
||||
}
|
||||
}
|
||||
|
||||
function createTempFolder(): Promise<string> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const tempDir = tmpdir()
|
||||
const tempFolderName = Math.random().toString(36).substring(2);
|
||||
const tempFolderPath = join(tempDir, tempFolderName);
|
||||
|
||||
mkdir(tempFolderPath, (err: any) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
resolve(tempFolderPath);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function writeTextToFile(filePath: string, content: string): Promise<void> {
|
||||
return new Promise((resolve, reject) => {
|
||||
writeFile(filePath, content, (err) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function runInfisicalScan(inputPath: string, outputPath: string): Promise<void> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const command = `cat "${inputPath}" | infisical scan --exit-code=77 --pipe -r "${outputPath}"`;
|
||||
exec(command, (error) => {
|
||||
if (error && error.code != 77) {
|
||||
reject(error);
|
||||
} else {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function readFindingsFile(filePath: string): Promise<string> {
|
||||
return new Promise((resolve, reject) => {
|
||||
readFile(filePath, "utf8", (err, data) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
resolve(data);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function deleteTempFolder(folderPath: string): Promise<void> {
|
||||
return new Promise((resolve, reject) => {
|
||||
rm(folderPath, { recursive: true }, (err) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function convertKeysToLowercase<T>(obj: T): T {
|
||||
const convertedObj = {} as T;
|
||||
|
||||
for (const key in obj) {
|
||||
if (Object.prototype.hasOwnProperty.call(obj, key)) {
|
||||
const lowercaseKey = key.charAt(0).toLowerCase() + key.slice(1);
|
||||
convertedObj[lowercaseKey as keyof T] = obj[key];
|
||||
}
|
||||
}
|
||||
|
||||
return convertedObj;
|
||||
}
|
87
backend/src/services/SecretImportService.ts
Normal file
@ -0,0 +1,87 @@
|
||||
import { Types } from "mongoose";
|
||||
import Folder from "../models/folder";
|
||||
import Secret, { ISecret } from "../models/secret";
|
||||
import SecretImport from "../models/secretImports";
|
||||
import { getFolderByPath } from "./FolderService";
|
||||
|
||||
type TSecretImportFid = { environment: string; folderId: string; secretPath: string };
|
||||
|
||||
export const getAllImportedSecrets = async (
|
||||
workspaceId: string,
|
||||
environment: string,
|
||||
folderId = "root"
|
||||
) => {
|
||||
const secImports = await SecretImport.findOne({
|
||||
workspace: workspaceId,
|
||||
environment,
|
||||
folderId
|
||||
});
|
||||
if (!secImports) return [];
|
||||
if (secImports.imports.length === 0) return [];
|
||||
|
||||
const importedEnv: Record<string, boolean> = {}; // to get folders from all environment
|
||||
secImports.imports.forEach((el) => (importedEnv[el.environment] = true));
|
||||
|
||||
const folders = await Folder.find({
|
||||
workspace: workspaceId,
|
||||
environment: { $in: Object.keys(importedEnv) }
|
||||
});
|
||||
|
||||
const importedSecByFid: TSecretImportFid[] = [];
|
||||
secImports.imports.forEach((el) => {
|
||||
const folder = folders.find((fl) => fl.environment === el.environment);
|
||||
if (folder) {
|
||||
const secPathFolder = getFolderByPath(folder.nodes, el.secretPath);
|
||||
if (secPathFolder)
|
||||
importedSecByFid.push({
|
||||
environment: el.environment,
|
||||
folderId: secPathFolder.id,
|
||||
secretPath: el.secretPath
|
||||
});
|
||||
} else {
|
||||
if (el.secretPath === "/") {
|
||||
// this happens when importing with a fresh env without any folders
|
||||
importedSecByFid.push({ environment: el.environment, folderId: "root", secretPath: "/" });
|
||||
}
|
||||
}
|
||||
});
|
||||
if (importedSecByFid.length === 0) return [];
|
||||
|
||||
const secsGroupedByRef = await Secret.aggregate([
|
||||
{
|
||||
$match: {
|
||||
workspace: new Types.ObjectId(workspaceId),
|
||||
type: "shared"
|
||||
}
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: {
|
||||
environment: "$environment",
|
||||
folderId: "$folder"
|
||||
},
|
||||
secrets: { $push: "$$ROOT" }
|
||||
}
|
||||
},
|
||||
{
|
||||
$match: {
|
||||
$or: importedSecByFid.map(({ environment, folderId: fid }) => ({
|
||||
"_id.environment": environment,
|
||||
"_id.folderId": fid
|
||||
}))
|
||||
}
|
||||
}
|
||||
]);
|
||||
|
||||
// now let stitch together secrets.
|
||||
const importedSecrets: Array<TSecretImportFid & { secrets: ISecret[] }> = [];
|
||||
importedSecByFid.forEach(({ environment, folderId, secretPath }) => {
|
||||
const secretsGrouped = secsGroupedByRef.find(
|
||||
(el) => el._id.environment === environment && el._id.folderId === folderId
|
||||
);
|
||||
if (secretsGrouped) {
|
||||
importedSecrets.push({ secretPath, folderId, environment, secrets: secretsGrouped.secrets });
|
||||
}
|
||||
});
|
||||
return importedSecrets;
|
||||
};
|
93
backend/src/services/WebhookService.ts
Normal file
@ -0,0 +1,93 @@
|
||||
import axios from "axios";
|
||||
import crypto from "crypto";
|
||||
import { Types } from "mongoose";
|
||||
import picomatch from "picomatch";
|
||||
import { client, getRootEncryptionKey } from "../config";
|
||||
import Webhook, { IWebhook } from "../models/webhooks";
|
||||
|
||||
export const triggerWebhookRequest = async (
|
||||
{ url, encryptedSecretKey, iv, tag }: IWebhook,
|
||||
payload: Record<string, unknown>
|
||||
) => {
|
||||
const headers: Record<string, string> = {};
|
||||
payload["timestamp"] = Date.now();
|
||||
|
||||
if (encryptedSecretKey) {
|
||||
const rootEncryptionKey = await getRootEncryptionKey();
|
||||
const secretKey = client.decryptSymmetric(encryptedSecretKey, rootEncryptionKey, iv, tag);
|
||||
const webhookSign = crypto
|
||||
.createHmac("sha256", secretKey)
|
||||
.update(JSON.stringify(payload))
|
||||
.digest("hex");
|
||||
headers["x-infisical-signature"] = `t=${payload["timestamp"]};${webhookSign}`;
|
||||
}
|
||||
|
||||
const req = await axios.post(url, payload, { headers });
|
||||
return req;
|
||||
};
|
||||
|
||||
export const getWebhookPayload = (
|
||||
eventName: string,
|
||||
workspaceId: string,
|
||||
environment: string,
|
||||
secretPath?: string
|
||||
) => ({
|
||||
event: eventName,
|
||||
project: {
|
||||
workspaceId,
|
||||
environment,
|
||||
secretPath
|
||||
}
|
||||
});
|
||||
|
||||
export const triggerWebhook = async (
|
||||
workspaceId: string,
|
||||
environment: string,
|
||||
secretPath: string
|
||||
) => {
|
||||
const webhooks = await Webhook.find({ workspace: workspaceId, environment, isDisabled: false });
|
||||
// TODO(akhilmhdh): implement retry policy later, for that a cron job based approach is needed
|
||||
// for exponential backoff
|
||||
const toBeTriggeredHooks = webhooks.filter(({ secretPath: hookSecretPath }) =>
|
||||
picomatch.isMatch(secretPath, hookSecretPath, { strictSlashes: false })
|
||||
);
|
||||
const webhooksTriggered = await Promise.allSettled(
|
||||
toBeTriggeredHooks.map((hook) =>
|
||||
triggerWebhookRequest(
|
||||
hook,
|
||||
getWebhookPayload("secrets.modified", workspaceId, environment, secretPath)
|
||||
)
|
||||
)
|
||||
);
|
||||
const successWebhooks: Types.ObjectId[] = [];
|
||||
const failedWebhooks: Array<{ id: Types.ObjectId; error: string }> = [];
|
||||
webhooksTriggered.forEach((data, index) => {
|
||||
if (data.status === "rejected") {
|
||||
failedWebhooks.push({ id: toBeTriggeredHooks[index]._id, error: data.reason.message });
|
||||
return;
|
||||
}
|
||||
successWebhooks.push(toBeTriggeredHooks[index]._id);
|
||||
});
|
||||
// dont remove the workspaceid and environment filter. its used to reduce the dataset before $in check
|
||||
await Webhook.bulkWrite([
|
||||
{
|
||||
updateMany: {
|
||||
filter: { workspace: workspaceId, environment, _id: { $in: successWebhooks } },
|
||||
update: { lastStatus: "success", lastRunErrorMessage: null }
|
||||
}
|
||||
},
|
||||
...failedWebhooks.map(({ id, error }) => ({
|
||||
updateOne: {
|
||||
filter: {
|
||||
workspace: workspaceId,
|
||||
environment,
|
||||
_id: id
|
||||
},
|
||||
update: {
|
||||
lastStatus: "failed",
|
||||
lastRunErrorMessage: error
|
||||
}
|
||||
}
|
||||
}))
|
||||
]);
|
||||
};
|
@ -2,17 +2,21 @@ import DatabaseService from "./DatabaseService";
// import { logTelemetryMessage, getPostHogClient } from './TelemetryService';
import TelemetryService from "./TelemetryService";
import BotService from "./BotService";
import BotOrgService from "./BotOrgService";
import EventService from "./EventService";
import IntegrationService from "./IntegrationService";
import TokenService from "./TokenService";
import SecretService from "./SecretService";
import GithubSecretScanningService from "./GithubSecretScanningService"

export {
  TelemetryService,
  DatabaseService,
  BotService,
  EventService,
  IntegrationService,
  TokenService,
  SecretService,
}
  TelemetryService,
  DatabaseService,
  BotService,
  BotOrgService,
  EventService,
  IntegrationService,
  TokenService,
  SecretService,
  GithubSecretScanningService
}
@ -2,9 +2,10 @@ import nodemailer from "nodemailer";
|
||||
import {
|
||||
SMTP_HOST_GMAIL,
|
||||
SMTP_HOST_MAILGUN,
|
||||
SMTP_HOST_OFFICE365,
|
||||
SMTP_HOST_SENDGRID,
|
||||
SMTP_HOST_SOCKETLABS,
|
||||
SMTP_HOST_ZOHOMAIL,
|
||||
SMTP_HOST_ZOHOMAIL
|
||||
} from "../variables";
|
||||
import SMTPConnection from "nodemailer/lib/smtp-connection";
|
||||
import * as Sentry from "@sentry/node";
|
||||
@ -15,6 +16,7 @@ import {
|
||||
getSmtpSecure,
|
||||
getSmtpUsername,
|
||||
} from "../config";
|
||||
import { getLogger } from "../utils/logger";
|
||||
|
||||
export const initSmtp = async () => {
|
||||
const mailOpts: SMTPConnection.Options = {
|
||||
@ -58,6 +60,12 @@ export const initSmtp = async () => {
|
||||
ciphers: "TLSv1.2",
|
||||
}
|
||||
break;
|
||||
case SMTP_HOST_OFFICE365:
|
||||
mailOpts.requireTLS = true;
|
||||
mailOpts.tls = {
|
||||
ciphers: "TLSv1.2"
|
||||
}
|
||||
break;
|
||||
default:
|
||||
if ((await getSmtpHost()).includes("amazonaws.com")) {
|
||||
mailOpts.tls = {
|
||||
@ -73,10 +81,12 @@ export const initSmtp = async () => {
|
||||
const transporter = nodemailer.createTransport(mailOpts);
|
||||
transporter
|
||||
.verify()
|
||||
.then((err) => {
|
||||
.then(async () => {
|
||||
Sentry.setUser(null);
|
||||
Sentry.captureMessage("SMTP - Successfully connected");
|
||||
console.log("SMTP - Successfully connected")
|
||||
(await getLogger("backend-main")).info(
|
||||
"SMTP - Successfully connected"
|
||||
);
|
||||
})
|
||||
.catch(async (err) => {
|
||||
Sentry.setUser(null);
|
||||
|
25
backend/src/templates/secretLeakIncident.handlebars
Normal file
@ -0,0 +1,25 @@
<!DOCTYPE html>
<html>

<head>
  <meta charset="utf-8">
  <meta http-equiv="x-ua-compatible" content="ie=edge">
  <title>Incident alert: secret leaked</title>
</head>

<body>
  <h3>Infisical has uncovered {{numberOfSecrets}} secret(s) from your recent push</h3>
  <p><a href="https://app.infisical.com/secret-scanning"><strong>View leaked secrets</strong></a></p>
  <p>You are receiving this notification because one or more secret leaks have been detected in a recent commit pushed
    by {{pusher_name}} ({{pusher_email}}). If these are test secrets, please add `infisical-scan:ignore` at the end of
    the line containing the secret as a comment in the given programming language. This will prevent future
    notifications from being sent out for those secret(s).</p>

  <p>If these are production secrets, please rotate them immediately.</p>

  <p>Once you have taken action, be sure to update the status of the risk in your <a
      href="https://app.infisical.com/">Infisical dashboard</a>.</p>
</body>

</html>
1
backend/src/types/express/index.d.ts
vendored
@ -20,6 +20,7 @@ declare global {
      workspace: any;
      membership: any;
      targetMembership: any;
      isUserCompleted: boolean;
      providerAuthToken: any;
      organization: any;
      membershipOrg: any;
@ -4,8 +4,6 @@ const ALGORITHM = "aes-256-gcm";
const BLOCK_SIZE_BYTES = 16;

export default class AesGCM {
  constructor() {}

  static encrypt(
    text: string,
    secret: string
@ -1,11 +1,14 @@
|
||||
import express from "express";
|
||||
import passport from "passport";
|
||||
import { Types } from "mongoose";
|
||||
import { AuthData } from "../interfaces/middleware";
|
||||
import {
|
||||
AuthProvider,
|
||||
MembershipOrg,
|
||||
Organization,
|
||||
ServiceAccount,
|
||||
ServiceTokenData,
|
||||
User,
|
||||
User
|
||||
} from "../models";
|
||||
import { createToken } from "../helpers/auth";
|
||||
import {
|
||||
@ -14,11 +17,15 @@ import {
|
||||
getJwtProviderAuthLifetime,
|
||||
getJwtProviderAuthSecret,
|
||||
} from "../config";
|
||||
import { getSSOConfigHelper } from "../ee/helpers/organizations";
|
||||
import { InternalServerError, OrganizationNotFoundError } from "./errors";
|
||||
import { INVITED, MEMBER } from "../variables";
|
||||
import { getSiteURL } from "../config";
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const GoogleStrategy = require("passport-google-oauth20").Strategy;
|
||||
|
||||
// TODO: find a more optimal folder structure to store these types of functions
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const { MultiSamlStrategy } = require("@node-saml/passport-saml");
|
||||
|
||||
/**
|
||||
* Returns an object containing the id of the authentication data payload
|
||||
@ -39,7 +46,6 @@ const getAuthDataPayloadIdObj = (authData: AuthData) => {
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Returns an object containing the user associated with the authentication data payload
|
||||
* @param {AuthData} authData - authentication data object
|
||||
@ -56,7 +62,7 @@ const getAuthDataPayloadUserObj = (authData: AuthData) => {
|
||||
}
|
||||
|
||||
if (authData.authPayload instanceof ServiceTokenData) {
|
||||
return { user: authData.authPayload.user };
|
||||
return { user: authData.authPayload.user };0
|
||||
}
|
||||
}
|
||||
|
||||
@ -68,47 +74,143 @@ const initializePassport = async () => {
|
||||
passReqToCallback: true,
|
||||
clientID: googleClientId,
|
||||
clientSecret: googleClientSecret,
|
||||
callbackURL: "/api/v1/auth/callback/google",
|
||||
callbackURL: "/api/v1/sso/google",
|
||||
scope: ["profile", " email"],
|
||||
}, async (
|
||||
req: express.Request,
|
||||
accessToken: string,
|
||||
refreshToken: string,
|
||||
profile: any,
|
||||
cb: any
|
||||
done: any
|
||||
) => {
|
||||
try {
|
||||
const email = profile.emails[0].value;
|
||||
const firstName = profile.name.givenName;
|
||||
const lastName = profile.name.familyName;
|
||||
|
||||
let user = await User.findOne({
|
||||
authProvider: AuthProvider.GOOGLE,
|
||||
authId: profile.id,
|
||||
}).select("+publicKey")
|
||||
email
|
||||
}).select("+publicKey");
|
||||
|
||||
if (user && user.authProvider !== AuthProvider.GOOGLE) {
|
||||
done(InternalServerError());
|
||||
}
|
||||
|
||||
if (!user) {
|
||||
user = await new User({
|
||||
email,
|
||||
authProvider: AuthProvider.GOOGLE,
|
||||
authId: profile.id,
|
||||
firstName,
|
||||
lastName
|
||||
}).save();
|
||||
}
|
||||
|
||||
const isUserCompleted = !!user.publicKey;
|
||||
const providerAuthToken = createToken({
|
||||
payload: {
|
||||
userId: user._id.toString(),
|
||||
email: user.email,
|
||||
firstName,
|
||||
lastName,
|
||||
authProvider: user.authProvider,
|
||||
isUserCompleted: !!user.publicKey,
|
||||
isUserCompleted
|
||||
},
|
||||
expiresIn: await getJwtProviderAuthLifetime(),
|
||||
secret: await getJwtProviderAuthSecret(),
|
||||
});
|
||||
|
||||
req.isUserCompleted = isUserCompleted;
|
||||
req.providerAuthToken = providerAuthToken;
|
||||
cb(null, profile);
|
||||
done(null, profile);
|
||||
} catch (err) {
|
||||
cb(null, false);
|
||||
done(null, false);
|
||||
}
|
||||
}));
|
||||
|
||||
passport.use("saml", new MultiSamlStrategy(
|
||||
{
|
||||
passReqToCallback: true,
|
||||
getSamlOptions: async (req: any, done: any) => {
|
||||
const { ssoIdentifier } = req.params;
|
||||
|
||||
const ssoConfig = await getSSOConfigHelper({
|
||||
ssoConfigId: new Types.ObjectId(ssoIdentifier)
|
||||
});
|
||||
|
||||
const samlConfig = ({
|
||||
path: "/api/v1/auth/callback/saml",
|
||||
callbackURL: `${await getSiteURL()}/api/v1/auth/callback/saml`,
|
||||
entryPoint: ssoConfig.entryPoint,
|
||||
issuer: ssoConfig.issuer,
|
||||
cert: ssoConfig.cert,
|
||||
audience: ssoConfig.audience
|
||||
});
|
||||
|
||||
req.ssoConfig = ssoConfig;
|
||||
|
||||
done(null, samlConfig);
|
||||
},
|
||||
},
|
||||
async (req: any, profile: any, done: any) => {
|
||||
|
||||
if (!req.ssoConfig.isActive) return done(InternalServerError());
|
||||
|
||||
const organization = await Organization.findById(req.ssoConfig.organization);
|
||||
|
||||
if (!organization) return done(OrganizationNotFoundError());
|
||||
|
||||
const email = profile.email;
|
||||
const firstName = profile.firstName;
|
||||
const lastName = profile.lastName;
|
||||
|
||||
let user = await User.findOne({
|
||||
email
|
||||
}).select("+publicKey");
|
||||
|
||||
if (user && user.authProvider !== AuthProvider.OKTA_SAML) {
|
||||
done(InternalServerError());
|
||||
}
|
||||
|
||||
if (!user) {
|
||||
user = await new User({
|
||||
email,
|
||||
authProvider: AuthProvider.OKTA_SAML,
|
||||
authId: profile.id,
|
||||
firstName,
|
||||
lastName
|
||||
}).save();
|
||||
|
||||
await new MembershipOrg({
|
||||
inviteEmail: email,
|
||||
user: user._id,
|
||||
organization: organization?._id,
|
||||
role: MEMBER,
|
||||
status: INVITED
|
||||
}).save();
|
||||
}
|
||||
|
||||
const isUserCompleted = !!user.publicKey;
|
||||
const providerAuthToken = createToken({
|
||||
payload: {
|
||||
userId: user._id.toString(),
|
||||
email: user.email,
|
||||
firstName,
|
||||
lastName,
|
||||
organizationName: organization?.name,
|
||||
authProvider: user.authProvider,
|
||||
isUserCompleted
|
||||
},
|
||||
expiresIn: await getJwtProviderAuthLifetime(),
|
||||
secret: await getJwtProviderAuthSecret(),
|
||||
});
|
||||
|
||||
req.isUserCompleted = isUserCompleted;
|
||||
req.providerAuthToken = providerAuthToken;
|
||||
|
||||
done(null, profile);
|
||||
}
|
||||
));
|
||||
}
|
||||
|
||||
export {
|
||||
|
@ -27,7 +27,7 @@ export const UnauthorizedRequestError = (error?: Partial<RequestErrorContext>) =
|
||||
context: error?.context,
|
||||
stack: error?.stack,
|
||||
});
|
||||
|
||||
|
||||
export const ForbiddenRequestError = (error?: Partial<RequestErrorContext>) => new RequestError({
|
||||
logLevel: error?.logLevel ?? LogLevel.INFO,
|
||||
statusCode: error?.statusCode ?? 403,
|
||||
@ -46,6 +46,15 @@ export const BadRequestError = (error?: Partial<RequestErrorContext>) => new Req
|
||||
stack: error?.stack,
|
||||
});
|
||||
|
||||
export const ResourceNotFoundError = (error?: Partial<RequestErrorContext>) => new RequestError({
|
||||
logLevel: error?.logLevel ?? LogLevel.INFO,
|
||||
statusCode: error?.statusCode ?? 404,
|
||||
type: error?.type ?? "resource_not_found",
|
||||
message: error?.message ?? "The requested resource is not found",
|
||||
context: error?.context,
|
||||
stack: error?.stack,
|
||||
});
|
||||
|
||||
export const InternalServerError = (error?: Partial<RequestErrorContext>) => new RequestError({
|
||||
logLevel: error?.logLevel ?? LogLevel.ERROR,
|
||||
statusCode: error?.statusCode ?? 500,
|
||||
@ -229,6 +238,6 @@ export const BotNotFoundError = (error?: Partial<RequestErrorContext>) => new Re
|
||||
message: error?.message ?? "The requested bot was not found",
|
||||
context: error?.context,
|
||||
stack: error?.stack,
|
||||
})
|
||||
})
|
||||
|
||||
//* ----->[MISC ERRORS]<-----
|
||||
|
@ -7,9 +7,11 @@ import { ISecretVersion, SecretSnapshot, SecretVersion } from "../../ee/models";
|
||||
import {
|
||||
BackupPrivateKey,
|
||||
Bot,
|
||||
BotOrg,
|
||||
ISecret,
|
||||
Integration,
|
||||
IntegrationAuth,
|
||||
Organization,
|
||||
Secret,
|
||||
SecretBlindIndexData,
|
||||
ServiceTokenData,
|
||||
@ -137,6 +139,103 @@ export const backfillBots = async () => {
|
||||
await Bot.insertMany(botsToInsert);
|
||||
};
|
||||
|
||||
/**
|
||||
* Backfill organization bots to ensure that every organization has a bot
|
||||
*/
|
||||
export const backfillBotOrgs = async () => {
|
||||
const encryptionKey = await getEncryptionKey();
|
||||
const rootEncryptionKey = await getRootEncryptionKey();
|
||||
|
||||
const organizationIdsWithBot = await BotOrg.distinct("organization");
|
||||
const organizationIdsToAddBot = await Organization.distinct("_id", {
|
||||
_id: {
|
||||
$nin: organizationIdsWithBot
|
||||
}
|
||||
});
|
||||
|
||||
if (organizationIdsToAddBot.length === 0) return;
|
||||
|
||||
const botsToInsert = await Promise.all(
|
||||
organizationIdsToAddBot.map(async (organizationToAddBot) => {
|
||||
const { publicKey, privateKey } = generateKeyPair();
|
||||
|
||||
const key = client.createSymmetricKey();
|
||||
|
||||
if (rootEncryptionKey) {
|
||||
const {
|
||||
ciphertext: encryptedPrivateKey,
|
||||
iv: privateKeyIV,
|
||||
tag: privateKeyTag
|
||||
} = client.encryptSymmetric(privateKey, rootEncryptionKey);
|
||||
|
||||
const {
|
||||
ciphertext: encryptedSymmetricKey,
|
||||
iv: symmetricKeyIV,
|
||||
tag: symmetricKeyTag
|
||||
} = client.encryptSymmetric(key, rootEncryptionKey);
|
||||
|
||||
return new BotOrg({
|
||||
name: "Infisical Bot",
|
||||
organization: organizationToAddBot,
|
||||
isActive: false,
|
||||
publicKey,
|
||||
encryptedSymmetricKey,
|
||||
symmetricKeyIV,
|
||||
symmetricKeyTag,
|
||||
symmetricKeyAlgorithm: ALGORITHM_AES_256_GCM,
|
||||
symmetricKeyKeyEncoding: ENCODING_SCHEME_BASE64,
|
||||
encryptedPrivateKey,
|
||||
privateKeyIV,
|
||||
privateKeyTag,
|
||||
privateKeyAlgorithm: ALGORITHM_AES_256_GCM,
|
||||
privateKeyKeyEncoding: ENCODING_SCHEME_BASE64
|
||||
});
|
||||
} else if (encryptionKey) {
|
||||
const {
|
||||
ciphertext: encryptedPrivateKey,
|
||||
iv: privateKeyIV,
|
||||
tag: privateKeyTag
|
||||
} = encryptSymmetric128BitHexKeyUTF8({
|
||||
plaintext: privateKey,
|
||||
key: encryptionKey
|
||||
});
|
||||
|
||||
const {
|
||||
ciphertext: encryptedSymmetricKey,
|
||||
iv: symmetricKeyIV,
|
||||
tag: symmetricKeyTag
|
||||
} = encryptSymmetric128BitHexKeyUTF8({
|
||||
plaintext: key,
|
||||
key: encryptionKey
|
||||
});
|
||||
|
||||
return new BotOrg({
|
||||
name: "Infisical Bot",
|
||||
organization: organizationToAddBot,
|
||||
isActive: false,
|
||||
publicKey,
|
||||
encryptedSymmetricKey,
|
||||
symmetricKeyIV,
|
||||
symmetricKeyTag,
|
||||
symmetricKeyAlgorithm: ALGORITHM_AES_256_GCM,
|
||||
symmetricKeyKeyEncoding: ENCODING_SCHEME_UTF8,
|
||||
encryptedPrivateKey,
|
||||
privateKeyIV,
|
||||
privateKeyTag,
|
||||
privateKeyAlgorithm: ALGORITHM_AES_256_GCM,
|
||||
privateKeyKeyEncoding: ENCODING_SCHEME_UTF8
|
||||
});
|
||||
}
|
||||
|
||||
throw InternalServerError({
|
||||
message: "Failed to backfill organization bots due to missing encryption key"
|
||||
});
|
||||
})
|
||||
);
|
||||
|
||||
await BotOrg.insertMany(botsToInsert);
|
||||
};
|
||||
|
||||
/**
|
||||
* Backfill secret blind index data to ensure that every workspace
|
||||
* has a secret blind index data
|
||||
|
@ -7,6 +7,7 @@ import { createTestUserForDevelopment } from "../addDevelopmentUser";
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
import { validateEncryptionKeysConfig } from "./validateConfig";
|
||||
import {
|
||||
backfillBotOrgs,
|
||||
backfillBots,
|
||||
backfillEncryptionMetadata,
|
||||
backfillIntegration,
|
||||
@ -16,7 +17,11 @@ import {
|
||||
backfillServiceToken,
|
||||
backfillServiceTokenMultiScope
|
||||
} from "./backfillData";
|
||||
import { reencryptBotPrivateKeys, reencryptSecretBlindIndexDataSalts } from "./reencryptData";
|
||||
import {
|
||||
reencryptBotOrgKeys,
|
||||
reencryptBotPrivateKeys,
|
||||
reencryptSecretBlindIndexDataSalts
|
||||
} from "./reencryptData";
|
||||
import {
|
||||
getClientIdGoogle,
|
||||
getClientSecretGoogle,
|
||||
@ -72,6 +77,7 @@ export const setup = async () => {
|
||||
// backfilling data to catch up with new collections and updated fields
|
||||
await backfillSecretVersions();
|
||||
await backfillBots();
|
||||
await backfillBotOrgs();
|
||||
await backfillSecretBlindIndexData();
|
||||
await backfillEncryptionMetadata();
|
||||
await backfillSecretFolders();
|
||||
@ -82,6 +88,7 @@ export const setup = async () => {
|
||||
// re-encrypt any data previously encrypted under server hex 128-bit ENCRYPTION_KEY
|
||||
// to base64 256-bit ROOT_ENCRYPTION_KEY
|
||||
await reencryptBotPrivateKeys();
|
||||
await reencryptBotOrgKeys();
|
||||
await reencryptSecretBlindIndexDataSalts();
|
||||
|
||||
// initializing Sentry
|
||||
|
@ -1,6 +1,8 @@
|
||||
import {
|
||||
Bot,
|
||||
BotOrg,
|
||||
IBot,
|
||||
IBotOrg,
|
||||
ISecretBlindIndexData,
|
||||
SecretBlindIndexData,
|
||||
} from "../../models";
|
||||
@ -17,7 +19,7 @@ import {
|
||||
} from "../../variables";
|
||||
|
||||
/**
|
||||
* Re-encrypt bot private keys from hex 128-bit ENCRYPTION_KEY
|
||||
* Re-encrypt bot private keys from under hex 128-bit ENCRYPTION_KEY
|
||||
* to base64 256-bit ROOT_ENCRYPTION_KEY
|
||||
*/
|
||||
export const reencryptBotPrivateKeys = async () => {
|
||||
@ -70,6 +72,79 @@ export const reencryptBotPrivateKeys = async () => {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Re-encrypt organization bot keys (symmetric and private) from under hex 128-bit ENCRYPTION_KEY
|
||||
* to base64 256-bit ROOT_ENCRYPTION_KEY
|
||||
*/
|
||||
export const reencryptBotOrgKeys = async () => {
|
||||
const encryptionKey = await getEncryptionKey();
|
||||
const rootEncryptionKey = await getRootEncryptionKey();
|
||||
|
||||
if (encryptionKey && rootEncryptionKey) {
|
||||
// 1: re-encrypt organization bot keys under ROOT_ENCRYPTION_KEY
|
||||
const botOrgs = await BotOrg.find({
|
||||
symmetricKeyAlgorithm: ALGORITHM_AES_256_GCM,
|
||||
symmetricKeyKeyEncoding: ENCODING_SCHEME_UTF8,
|
||||
privateKeyAlgorithm: ALGORITHM_AES_256_GCM,
|
||||
privateKeyKeyEncoding: ENCODING_SCHEME_UTF8
|
||||
}).select("+encryptedPrivateKey iv tag algorithm keyEncoding");
|
||||
|
||||
if (botOrgs.length === 0) return;
|
||||
|
||||
const operationsBotOrg = await Promise.all(
|
||||
botOrgs.map(async (botOrg: IBotOrg) => {
|
||||
const privateKey = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: botOrg.encryptedPrivateKey,
|
||||
iv: botOrg.privateKeyIV,
|
||||
tag: botOrg.privateKeyTag,
|
||||
key: encryptionKey
|
||||
});
|
||||
|
||||
const {
|
||||
ciphertext: encryptedPrivateKey,
|
||||
iv: privateKeyIV,
|
||||
tag: privateKeyTag,
|
||||
} = client.encryptSymmetric(privateKey, rootEncryptionKey);
|
||||
|
||||
const symmetricKey = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: botOrg.encryptedSymmetricKey,
|
||||
iv: botOrg.symmetricKeyIV,
|
||||
tag: botOrg.symmetricKeyTag,
|
||||
key: encryptionKey
|
||||
});
|
||||
|
||||
const {
|
||||
ciphertext: encryptedSymmetricKey,
|
||||
iv: symmetricKeyIV,
|
||||
tag: symmetricKeyTag,
|
||||
} = client.encryptSymmetric(symmetricKey, rootEncryptionKey);
|
||||
|
||||
return ({
|
||||
updateOne: {
|
||||
filter: {
|
||||
_id: botOrg._id,
|
||||
},
|
||||
update: {
|
||||
encryptedSymmetricKey,
|
||||
symmetricKeyIV,
|
||||
symmetricKeyTag,
|
||||
symmetricKeyAlgorithm: ALGORITHM_AES_256_GCM,
|
||||
symmetricKeyKeyEncoding: ENCODING_SCHEME_BASE64,
|
||||
encryptedPrivateKey,
|
||||
privateKeyIV,
|
||||
privateKeyTag,
|
||||
privateKeyAlgorithm: ALGORITHM_AES_256_GCM,
|
||||
privateKeyKeyEncoding: ENCODING_SCHEME_BASE64,
|
||||
},
|
||||
},
|
||||
})
|
||||
})
|
||||
);
|
||||
|
||||
await BotOrg.bulkWrite(operationsBotOrg);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Re-encrypt secret blind index data salts from hex 128-bit ENCRYPTION_KEY
|
||||
* to base64 256-bit ROOT_ENCRYPTION_KEY
|
||||
|
@ -1,2 +1,3 @@
export const EVENT_PUSH_SECRETS = "pushSecrets";
export const EVENT_PULL_SECRETS = "pullSecrets";
export const EVENT_PULL_SECRETS = "pullSecrets";
export const EVENT_START_INTEGRATION = "startIntegration";
@ -1,5 +1,6 @@
|
||||
import {
|
||||
getClientIdAzure,
|
||||
getClientIdBitBucket,
|
||||
getClientIdGitHub,
|
||||
getClientIdGitLab,
|
||||
getClientIdHeroku,
|
||||
@ -26,6 +27,8 @@ export const INTEGRATION_SUPABASE = "supabase";
|
||||
export const INTEGRATION_CHECKLY = "checkly";
|
||||
export const INTEGRATION_HASHICORP_VAULT = "hashicorp-vault";
|
||||
export const INTEGRATION_CLOUDFLARE_PAGES = "cloudflare-pages";
|
||||
export const INTEGRATION_BITBUCKET = "bitbucket";
|
||||
export const INTEGRATION_CODEFRESH = "codefresh";
|
||||
export const INTEGRATION_NORTHFLANK = "northflank";
|
||||
export const INTEGRATION_SET = new Set([
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
@ -43,6 +46,8 @@ export const INTEGRATION_SET = new Set([
|
||||
INTEGRATION_CHECKLY,
|
||||
INTEGRATION_HASHICORP_VAULT,
|
||||
INTEGRATION_CLOUDFLARE_PAGES,
|
||||
INTEGRATION_BITBUCKET,
|
||||
INTEGRATION_CODEFRESH,
|
||||
INTEGRATION_NORTHFLANK
|
||||
]);
|
||||
|
||||
@ -58,6 +63,7 @@ export const INTEGRATION_NETLIFY_TOKEN_URL = "https://api.netlify.com/oauth/toke
|
||||
export const INTEGRATION_GITHUB_TOKEN_URL =
|
||||
"https://github.com/login/oauth/access_token";
|
||||
export const INTEGRATION_GITLAB_TOKEN_URL = "https://gitlab.com/oauth/token";
|
||||
export const INTEGRATION_BITBUCKET_TOKEN_URL = "https://bitbucket.org/site/oauth2/access_token"
|
||||
|
||||
// integration apps endpoints
|
||||
export const INTEGRATION_HEROKU_API_URL = "https://api.heroku.com";
|
||||
@ -73,6 +79,8 @@ export const INTEGRATION_SUPABASE_API_URL = "https://api.supabase.com";
|
||||
export const INTEGRATION_LARAVELFORGE_API_URL = "https://forge.laravel.com";
|
||||
export const INTEGRATION_CHECKLY_API_URL = "https://api.checklyhq.com";
|
||||
export const INTEGRATION_CLOUDFLARE_PAGES_API_URL = "https://api.cloudflare.com";
|
||||
export const INTEGRATION_BITBUCKET_API_URL = "https://api.bitbucket.org";
|
||||
export const INTEGRATION_CODEFRESH_API_URL = "https://g.codefresh.io/api";
|
||||
export const INTEGRATION_NORTHFLANK_API_URL = "https://api.northflank.com";
|
||||
|
||||
export const getIntegrationOptions = async () => {
|
||||
@ -249,6 +257,24 @@ export const getIntegrationOptions = async () => {
|
||||
clientId: "",
|
||||
docsLink: ""
|
||||
},
|
||||
{
|
||||
name: "BitBucket",
|
||||
slug: "bitbucket",
|
||||
image: "BitBucket.png",
|
||||
isAvailable: true,
|
||||
type: "oauth",
|
||||
clientId: await getClientIdBitBucket(),
|
||||
docsLink: ""
|
||||
},
|
||||
{
|
||||
name: "Codefresh",
|
||||
slug: "codefresh",
|
||||
image: "Codefresh.png",
|
||||
isAvailable: true,
|
||||
type: "pat",
|
||||
clientId: "",
|
||||
docsLink: "",
|
||||
},
|
||||
{
|
||||
name: "Northflank",
|
||||
slug: "northflank",
|
||||
|
@ -3,3 +3,4 @@ export const SMTP_HOST_MAILGUN = "smtp.mailgun.org";
export const SMTP_HOST_SOCKETLABS = "smtp.socketlabs.com";
export const SMTP_HOST_ZOHOMAIL = "smtp.zoho.com";
export const SMTP_HOST_GMAIL = "smtp.gmail.com";
export const SMTP_HOST_OFFICE365 = "smtp.office365.com";
@ -235,6 +235,10 @@ func CallGetSecretsV3(httpClient *resty.Client, request GetEncryptedSecretsV3Req
		SetQueryParam("environment", request.Environment).
		SetQueryParam("workspaceId", request.WorkspaceId)

	if request.IncludeImport {
		httpRequest.SetQueryParam("include_imports", "true")
	}

	if request.SecretPath != "" {
		httpRequest.SetQueryParam("secretPath", request.SecretPath)
	}
@ -246,7 +250,11 @@
	}

	if response.IsError() {
		return GetEncryptedSecretsV3Response{}, fmt.Errorf("CallGetSecretsV3: Unsuccessful response. Please make sure your secret path, workspace and environment name are all correct [response=%s]", response)
		if response.StatusCode() == 401 {
			return GetEncryptedSecretsV3Response{}, fmt.Errorf("CallGetSecretsV3: Request to access secrets with [environment=%v] [path=%v] [workspaceId=%v] is denied. Please check if your authentication method has access to requested scope", request.Environment, request.SecretPath, request.WorkspaceId)
		} else {
			return GetEncryptedSecretsV3Response{}, fmt.Errorf("CallGetSecretsV3: Unsuccessful response. Please make sure your secret path, workspace and environment name are all correct [response=%v]", response.RawResponse)
		}
	}

	return secretsResponse, nil
@ -272,40 +272,51 @@ type GetNewAccessTokenWithRefreshTokenResponse struct {
}

type GetEncryptedSecretsV3Request struct {
    Environment string `json:"environment"`
    WorkspaceId string `json:"workspaceId"`
    SecretPath  string `json:"secretPath"`
    Environment   string `json:"environment"`
    WorkspaceId   string `json:"workspaceId"`
    SecretPath    string `json:"secretPath"`
    IncludeImport bool   `json:"include_imports"`
}

type EncryptedSecretV3 struct {
    ID        string `json:"_id"`
    Version   int    `json:"version"`
    Workspace string `json:"workspace"`
    Type      string `json:"type"`
    Tags      []struct {
        ID        string `json:"_id"`
        Name      string `json:"name"`
        Slug      string `json:"slug"`
        Workspace string `json:"workspace"`
    } `json:"tags"`
    Environment             string    `json:"environment"`
    SecretKeyCiphertext     string    `json:"secretKeyCiphertext"`
    SecretKeyIV             string    `json:"secretKeyIV"`
    SecretKeyTag            string    `json:"secretKeyTag"`
    SecretValueCiphertext   string    `json:"secretValueCiphertext"`
    SecretValueIV           string    `json:"secretValueIV"`
    SecretValueTag          string    `json:"secretValueTag"`
    SecretCommentCiphertext string    `json:"secretCommentCiphertext"`
    SecretCommentIV         string    `json:"secretCommentIV"`
    SecretCommentTag        string    `json:"secretCommentTag"`
    Algorithm               string    `json:"algorithm"`
    KeyEncoding             string    `json:"keyEncoding"`
    Folder                  string    `json:"folder"`
    V                       int       `json:"__v"`
    CreatedAt               time.Time `json:"createdAt"`
    UpdatedAt               time.Time `json:"updatedAt"`
}

type ImportedSecretV3 struct {
    Environment string              `json:"environment"`
    FolderId    string              `json:"folderId"`
    SecretPath  string              `json:"secretPath"`
    Secrets     []EncryptedSecretV3 `json:"secrets"`
}

type GetEncryptedSecretsV3Response struct {
    Secrets []struct {
        ID        string `json:"_id"`
        Version   int    `json:"version"`
        Workspace string `json:"workspace"`
        Type      string `json:"type"`
        Tags      []struct {
            ID        string `json:"_id"`
            Name      string `json:"name"`
            Slug      string `json:"slug"`
            Workspace string `json:"workspace"`
        } `json:"tags"`
        Environment             string    `json:"environment"`
        SecretKeyCiphertext     string    `json:"secretKeyCiphertext"`
        SecretKeyIV             string    `json:"secretKeyIV"`
        SecretKeyTag            string    `json:"secretKeyTag"`
        SecretValueCiphertext   string    `json:"secretValueCiphertext"`
        SecretValueIV           string    `json:"secretValueIV"`
        SecretValueTag          string    `json:"secretValueTag"`
        SecretCommentCiphertext string    `json:"secretCommentCiphertext"`
        SecretCommentIV         string    `json:"secretCommentIV"`
        SecretCommentTag        string    `json:"secretCommentTag"`
        Algorithm               string    `json:"algorithm"`
        KeyEncoding             string    `json:"keyEncoding"`
        Folder                  string    `json:"folder"`
        V                       int       `json:"__v"`
        CreatedAt               time.Time `json:"createdAt"`
        UpdatedAt               time.Time `json:"updatedAt"`
    } `json:"secrets"`
    Secrets         []EncryptedSecretV3 `json:"secrets"`
    ImportedSecrets []ImportedSecretV3  `json:"imports,omitempty"`
}

type CreateSecretV3Request struct {
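Note: the `imports` field added above changes the shape of the payload the CLI decodes. As a rough illustration only, using trimmed-down hypothetical stand-ins for the real `api` structs rather than the types themselves, the response now carries the environment's own encrypted secrets plus one entry per import, each with its own nested `secrets` array:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Simplified, hypothetical stand-ins for the CLI's api structs above; only the
// fields needed to show the wire shape are kept.
type encryptedSecret struct {
	ID                    string `json:"_id"`
	SecretKeyCiphertext   string `json:"secretKeyCiphertext"`
	SecretValueCiphertext string `json:"secretValueCiphertext"`
}

type importedSecret struct {
	Environment string            `json:"environment"`
	FolderId    string            `json:"folderId"`
	SecretPath  string            `json:"secretPath"`
	Secrets     []encryptedSecret `json:"secrets"`
}

type getSecretsResponse struct {
	Secrets         []encryptedSecret `json:"secrets"`
	ImportedSecrets []importedSecret  `json:"imports,omitempty"`
}

func main() {
	// Build a sample response and print it to show the JSON layout.
	resp := getSecretsResponse{
		Secrets: []encryptedSecret{{ID: "abc", SecretKeyCiphertext: "...", SecretValueCiphertext: "..."}},
		ImportedSecrets: []importedSecret{{
			Environment: "dev",
			FolderId:    "root",
			SecretPath:  "/common",
			Secrets:     []encryptedSecret{{ID: "def", SecretKeyCiphertext: "...", SecretValueCiphertext: "..."}},
		}},
	}
	out, _ := json.MarshalIndent(resp, "", "  ")
	fmt.Println(string(out))
}
```

Running this prints a JSON document with a top-level `secrets` array and an `imports` array, which is the structure the updated `GetEncryptedSecretsV3Response` is set up to decode.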
@ -73,7 +73,6 @@ var loginCmd = &cobra.Command{
            return
        }
    }

    //override domain
    domainQuery := true
    if config.INFISICAL_URL_MANUAL_OVERRIDE != "" && config.INFISICAL_URL_MANUAL_OVERRIDE != util.INFISICAL_DEFAULT_API_URL {
@ -322,6 +321,8 @@ func DomainOverridePrompt() (bool, error) {
    )

    options := []string{PRESET, OVERRIDE}
    //trim the '/' from the end of the domain url
    config.INFISICAL_URL_MANUAL_OVERRIDE = strings.TrimRight(config.INFISICAL_URL_MANUAL_OVERRIDE, "/")
    optionsPrompt := promptui.Select{
        Label: fmt.Sprintf("Current INFISICAL_API_URL Domain Override: %s", config.INFISICAL_URL_MANUAL_OVERRIDE),
        Items: options,
@ -380,7 +381,8 @@ func askForDomain() error {
    if err != nil {
        return err
    }

    //trimmed the '/' from the end of the self hosting url
    domain = strings.TrimRight(domain, "/")
    //set api and login url
    config.INFISICAL_URL = fmt.Sprintf("%s/api", domain)
    config.INFISICAL_LOGIN_URL = fmt.Sprintf("%s/login", domain)
@ -87,7 +87,12 @@ var runCmd = &cobra.Command{
            util.HandleError(err, "Unable to parse flag")
        }

        secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs, SecretsPath: secretsPath})
        includeImports, err := cmd.Flags().GetBool("include-imports")
        if err != nil {
            util.HandleError(err, "Unable to parse flag")
        }

        secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs, SecretsPath: secretsPath, IncludeImport: includeImports})

        if err != nil {
            util.HandleError(err, "Could not fetch secrets", "If you are using a service token to fetch secrets, please ensure it is valid")
@ -186,6 +191,7 @@ func init() {
    runCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
    runCmd.Flags().StringP("env", "e", "dev", "Set the environment (dev, prod, etc.) from which your secrets should be pulled from")
    runCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
    runCmd.Flags().Bool("include-imports", true, "Import linked secrets ")
    runCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
    runCmd.Flags().StringP("command", "c", "", "chained commands to execute (e.g. \"npm install && npm run dev; echo ...\")")
    runCmd.Flags().StringP("tags", "t", "", "filter secrets by tag slugs ")
@ -54,12 +54,17 @@ var secretsCmd = &cobra.Command{
            util.HandleError(err)
        }

        includeImports, err := cmd.Flags().GetBool("include-imports")
        if err != nil {
            util.HandleError(err)
        }

        tagSlugs, err := cmd.Flags().GetString("tags")
        if err != nil {
            util.HandleError(err, "Unable to parse flag")
        }

        secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs, SecretsPath: secretsPath})
        secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs, SecretsPath: secretsPath, IncludeImport: includeImports})
        if err != nil {
            util.HandleError(err)
        }
@ -647,6 +652,7 @@ func init() {
    secretsCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
    secretsCmd.PersistentFlags().String("env", "dev", "Used to select the environment name on which actions should be taken on")
    secretsCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
    secretsCmd.Flags().Bool("include-imports", true, "Imported linked secrets ")
    secretsCmd.PersistentFlags().StringP("tags", "t", "", "filter secrets by tag slugs")
    secretsCmd.Flags().String("path", "/", "get secrets within a folder path")
    rootCmd.AddCommand(secretsCmd)
@ -48,7 +48,7 @@ var vaultSetCmd = &cobra.Command{
            return
        }

        fmt.Printf("\nSuccessfully, switched vault backend from [%s] to [%s]. Please login in again to store your login details in the new vault with [infisical login]", currentVaultBackend, wantedVaultTypeName)
        fmt.Printf("\nSuccessfully, switched vault backend from [%s] to [%s]. Please login in again to store your login details in the new vault with [infisical login]\n", currentVaultBackend, wantedVaultTypeName)

        Telemetry.CaptureEvent("cli-command:vault set", posthog.NewProperties().Set("currentVault", currentVaultBackend).Set("wantedVault", wantedVaultTypeName).Set("version", util.CLI_VERSION))
    } else {
@ -81,7 +81,7 @@ func printAvailableVaultBackends() {

    Telemetry.CaptureEvent("cli-command:vault", posthog.NewProperties().Set("currentVault", currentVaultBackend).Set("version", util.CLI_VERSION))

    fmt.Printf("\n\nYou are currently using [%s] vault to store your login credentials", string(currentVaultBackend))
    fmt.Printf("\n\nYou are currently using [%s] vault to store your login credentials\n", string(currentVaultBackend))
}

// Checks if the vault that the user wants to switch to is a valid available vault
@ -65,4 +65,5 @@ type GetAllSecretsParameters struct {
    TagSlugs      string
    WorkspaceId   string
    SecretsPath   string
    IncludeImport bool
}
@ -17,7 +17,7 @@ import (
    "github.com/rs/zerolog/log"
)

func GetPlainTextSecretsViaServiceToken(fullServiceToken string, environment string, secretPath string) ([]models.SingleEnvironmentVariable, api.GetServiceTokenDetailsResponse, error) {
func GetPlainTextSecretsViaServiceToken(fullServiceToken string, environment string, secretPath string, includeImports bool) ([]models.SingleEnvironmentVariable, api.GetServiceTokenDetailsResponse, error) {
    serviceTokenParts := strings.SplitN(fullServiceToken, ".", 4)
    if len(serviceTokenParts) < 4 {
        return nil, api.GetServiceTokenDetailsResponse{}, fmt.Errorf("invalid service token entered. Please double check your service token and try again")
@ -45,9 +45,10 @@ func GetPlainTextSecretsViaServiceToken(fullServiceToken string, environment str
    }

    encryptedSecrets, err := api.CallGetSecretsV3(httpClient, api.GetEncryptedSecretsV3Request{
        WorkspaceId: serviceTokenDetails.Workspace,
        Environment: environment,
        SecretPath:  secretPath,
        WorkspaceId:   serviceTokenDetails.Workspace,
        Environment:   environment,
        SecretPath:    secretPath,
        IncludeImport: includeImports,
    })

    if err != nil {
@ -64,15 +65,22 @@ func GetPlainTextSecretsViaServiceToken(fullServiceToken string, environment str
        return nil, api.GetServiceTokenDetailsResponse{}, fmt.Errorf("unable to decrypt the required workspace key")
    }

    plainTextSecrets, err := GetPlainTextSecrets(plainTextWorkspaceKey, encryptedSecrets)
    plainTextSecrets, err := GetPlainTextSecrets(plainTextWorkspaceKey, encryptedSecrets.Secrets)
    if err != nil {
        return nil, api.GetServiceTokenDetailsResponse{}, fmt.Errorf("unable to decrypt your secrets [err=%v]", err)
    }

    if includeImports {
        plainTextSecrets, err = InjectImportedSecret(plainTextWorkspaceKey, plainTextSecrets, encryptedSecrets.ImportedSecrets)
        if err != nil {
            return nil, api.GetServiceTokenDetailsResponse{}, err
        }
    }

    return plainTextSecrets, serviceTokenDetails, nil
}

func GetPlainTextSecretsViaJTW(JTWToken string, receiversPrivateKey string, workspaceId string, environmentName string, tagSlugs string, secretsPath string) ([]models.SingleEnvironmentVariable, error) {
func GetPlainTextSecretsViaJTW(JTWToken string, receiversPrivateKey string, workspaceId string, environmentName string, tagSlugs string, secretsPath string, includeImports bool) ([]models.SingleEnvironmentVariable, error) {
    httpClient := resty.New()
    httpClient.SetAuthToken(JTWToken).
        SetHeader("Accept", "application/json")
@ -114,8 +122,9 @@ func GetPlainTextSecretsViaJTW(JTWToken string, receiversPrivateKey string, work
    plainTextWorkspaceKey := crypto.DecryptAsymmetric(encryptedWorkspaceKey, encryptedWorkspaceKeyNonce, encryptedWorkspaceKeySenderPublicKey, currentUsersPrivateKey)

    getSecretsRequest := api.GetEncryptedSecretsV3Request{
        WorkspaceId: workspaceId,
        Environment: environmentName,
        WorkspaceId:   workspaceId,
        Environment:   environmentName,
        IncludeImport: includeImports,
        // TagSlugs: tagSlugs,
    }

@ -124,19 +133,53 @@ func GetPlainTextSecretsViaJTW(JTWToken string, receiversPrivateKey string, work
    }

    encryptedSecrets, err := api.CallGetSecretsV3(httpClient, getSecretsRequest)

    if err != nil {
        return nil, err
    }

    plainTextSecrets, err := GetPlainTextSecrets(plainTextWorkspaceKey, encryptedSecrets)
    plainTextSecrets, err := GetPlainTextSecrets(plainTextWorkspaceKey, encryptedSecrets.Secrets)
    if err != nil {
        return nil, fmt.Errorf("unable to decrypt your secrets [err=%v]", err)
    }

    if includeImports {
        plainTextSecrets, err = InjectImportedSecret(plainTextWorkspaceKey, plainTextSecrets, encryptedSecrets.ImportedSecrets)
        if err != nil {
            return nil, err
        }
    }

    return plainTextSecrets, nil
}
func InjectImportedSecret(plainTextWorkspaceKey []byte, secrets []models.SingleEnvironmentVariable, importedSecrets []api.ImportedSecretV3) ([]models.SingleEnvironmentVariable, error) {
    if importedSecrets == nil {
        return secrets, nil
    }

    hasOverriden := make(map[string]bool)
    for _, sec := range secrets {
        hasOverriden[sec.Key] = true
    }

    for i := len(importedSecrets) - 1; i >= 0; i-- {
        importSec := importedSecrets[i]
        plainTextImportedSecrets, err := GetPlainTextSecrets(plainTextWorkspaceKey, importSec.Secrets)

        if err != nil {
            return nil, fmt.Errorf("unable to decrypt your imported secrets [err=%v]", err)
        }

        for _, sec := range plainTextImportedSecrets {
            if _, ok := hasOverriden[sec.Key]; !ok {
                secrets = append(secrets, sec)
                hasOverriden[sec.Key] = true
            }
        }
    }
    return secrets, nil
}
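Note: the net effect of `InjectImportedSecret` above is a precedence rule: keys defined directly in the requested environment always win, and because the imports are walked from last to first, a later import shadows an earlier one for duplicate keys. A minimal, self-contained sketch of that rule, with simplified hypothetical types rather than the CLI's real `models`:

```go
package main

import "fmt"

// secret is a simplified stand-in for the CLI's secret model; it exists only
// to illustrate the precedence behaviour of the change above.
type secret struct {
	Key   string
	Value string
}

// injectImported mirrors the diff's logic: keys already present are kept, and
// imports are walked from last to first, so a later import shadows an earlier
// one for duplicate keys.
func injectImported(own []secret, imports [][]secret) []secret {
	seen := make(map[string]bool)
	for _, s := range own {
		seen[s.Key] = true
	}
	out := own
	for i := len(imports) - 1; i >= 0; i-- {
		for _, s := range imports[i] {
			if !seen[s.Key] {
				out = append(out, s)
				seen[s.Key] = true
			}
		}
	}
	return out
}

func main() {
	own := []secret{{"DB_HOST", "prod-db"}}
	imports := [][]secret{
		{{"DB_HOST", "shared-db"}, {"REDIS_URL", "redis://a"}}, // first import
		{{"REDIS_URL", "redis://b"}},                           // second import
	}
	fmt.Println(injectImported(own, imports))
}
```

The sketch prints `[{DB_HOST prod-db} {REDIS_URL redis://b}]`: the environment's own `DB_HOST` is kept, and `REDIS_URL` comes from the later of the two imports.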
func GetAllEnvironmentVariables(params models.GetAllSecretsParameters) ([]models.SingleEnvironmentVariable, error) {
    var infisicalToken string
    if params.InfisicalToken == "" {
@ -179,7 +222,8 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters) ([]models
            return nil, fmt.Errorf("unable to validate environment name because [err=%s]", err)
        }

        secretsToReturn, errorToReturn = GetPlainTextSecretsViaJTW(loggedInUserDetails.UserCredentials.JTWToken, loggedInUserDetails.UserCredentials.PrivateKey, workspaceFile.WorkspaceId, params.Environment, params.TagSlugs, params.SecretsPath)
        secretsToReturn, errorToReturn = GetPlainTextSecretsViaJTW(loggedInUserDetails.UserCredentials.JTWToken, loggedInUserDetails.UserCredentials.PrivateKey, workspaceFile.WorkspaceId,
            params.Environment, params.TagSlugs, params.SecretsPath, params.IncludeImport)
        log.Debug().Msgf("GetAllEnvironmentVariables: Trying to fetch secrets JTW token [err=%s]", errorToReturn)

        backupSecretsEncryptionKey := []byte(loggedInUserDetails.UserCredentials.PrivateKey)[0:32]
@ -199,7 +243,7 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters) ([]models

    } else {
        log.Debug().Msg("Trying to fetch secrets using service token")
        secretsToReturn, _, errorToReturn = GetPlainTextSecretsViaServiceToken(infisicalToken, params.Environment, params.SecretsPath)
        secretsToReturn, _, errorToReturn = GetPlainTextSecretsViaServiceToken(infisicalToken, params.Environment, params.SecretsPath, params.IncludeImport)
    }

    return secretsToReturn, errorToReturn
@ -427,9 +471,9 @@ func OverrideSecrets(secrets []models.SingleEnvironmentVariable, secretType stri
    return secretsToReturn
}

func GetPlainTextSecrets(key []byte, encryptedSecrets api.GetEncryptedSecretsV3Response) ([]models.SingleEnvironmentVariable, error) {
func GetPlainTextSecrets(key []byte, encryptedSecrets []api.EncryptedSecretV3) ([]models.SingleEnvironmentVariable, error) {
    plainTextSecrets := []models.SingleEnvironmentVariable{}
    for _, secret := range encryptedSecrets.Secrets {
    for _, secret := range encryptedSecrets {
        // Decrypt key
        key_iv, err := base64.StdEncoding.DecodeString(secret.SecretKeyIV)
        if err != nil {
@ -41,6 +41,20 @@ services:
    networks:
      - infisical

  # secret-scanning-git-app:
  #   container_name: infisical-secret-scanning-git-app
  #   restart: unless-stopped
  #   depends_on:
  #     - backend
  #     - frontend
  #     - mongo
  #   ports:
  #     - "3000:3001"
  #   image: infisical/staging_deployment_secret-scanning-git-app
  #   env_file: .env
  #   networks:
  #     - infisical

  mongo:
    container_name: infisical-mongo
    image: mongo
@ -4,6 +4,21 @@ title: "Changelog"

The changelog below reflects new product developments and updates on a monthly basis; it will be updated later this quarter to include issues-addressed on a weekly basis.

## July 2023

- Released [secret referencing and importing](https://infisical.com/docs/documentation/platform/secret-reference) across folders and environments.
- Added the [integration with Laravel Forge](https://infisical.com/docs/integrations/cloud/laravel-forge).
- Redesigned the project/organization experience.

## June 2023

- Released the [Terraform Provider](https://infisical.com/docs/integrations/frameworks/terraform#5-run-terraform).
- Updated the usage and billing page. Added the free trial for the professional tier.
- Added the integration with [Checkly](https://infisical.com/docs/integrations/cloud/checkly), [Hashicorp Vault](https://infisical.com/docs/integrations/cloud/hashicorp-vault), and [Cloudflare Pages](https://infisical.com/docs/integrations/cloud/cloudflare-pages).
- Completed a penetration test with a `very good` result.
- Added support for multi-line secrets.

## May 2023

- Released secret scanning capability for the CLI.
@ -11,8 +26,7 @@ The changelog below reflects new product developments and updates on a monthly b
- Completed penetration test.
- Released new landing page.
- Started SOC 2 (Type II) compliance certification preparation.

More coming soon.
- Released new deployment options for Fly.io, Digital Ocean and Render.

## April 2023

@ -107,4 +121,4 @@ More coming soon.
- Added search bar to dashboard to query for keys on client-side.
- Added capability to rename a project.
- Added user roles for projects.
- Added incident contacts.
- Added incident contacts.
@ -99,13 +99,12 @@ $ infisical secrets set STRIPE_API_KEY=sjdgwkeudyjwe DOMAIN=example.com HASH=jeb
  Default value: `dev`
  </Accordion>
  <Accordion title="--path">
    The `--path` flag indicates which project folder secrets will be injected from.
    Used to select the project folder in which the secrets will be set. This is useful when creating new secrets under a particular path.

    ```bash
    # Example
    infisical secrets set <key1=value1> <key2=value2>... --path="/"
    infisical secrets set DOMAIN=example.com --path="common/backend"
    ```

  </Accordion>
</Accordion>