Fix lint errors, merge conflicts
@@ -47,11 +47,13 @@ CLIENT_ID_VERCEL=
CLIENT_ID_NETLIFY=
CLIENT_ID_GITHUB=
CLIENT_ID_GITLAB=
CLIENT_ID_BITBUCKET=
CLIENT_SECRET_HEROKU=
CLIENT_SECRET_VERCEL=
CLIENT_SECRET_NETLIFY=
CLIENT_SECRET_GITHUB=
CLIENT_SECRET_GITLAB=
CLIENT_SECRET_BITBUCKET=
CLIENT_SLUG_VERCEL=

# Sentry (optional) for monitoring errors
@@ -108,6 +108,22 @@ brews:
      zsh_completion.install "completions/infisical.zsh" => "_infisical"
      fish_completion.install "completions/infisical.fish"
      man1.install "manpages/infisical.1.gz"
  - name: 'infisical@{{.Version}}'
    tap:
      owner: Infisical
      name: homebrew-get-cli
    commit_author:
      name: "Infisical"
      email: ai@infisical.com
    folder: Formula
    homepage: "https://infisical.com"
    description: "The official Infisical CLI"
    install: |-
      bin.install "infisical"
      bash_completion.install "completions/infisical.bash" => "infisical"
      zsh_completion.install "completions/infisical.zsh" => "_infisical"
      fish_completion.install "completions/infisical.fish"
      man1.install "manpages/infisical.1.gz"

nfpms:
  - id: infisical
@@ -1,4 +1,3 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
@@ -10,6 +10,8 @@
    "plugin:@typescript-eslint/recommended"
  ],
  "rules": {
    "no-empty-function": "off",
    "@typescript-eslint/no-empty-function": "off",
    "no-console": 2,
    "quotes": [
      "error",
@@ -24,7 +26,6 @@
    ],
    "@typescript-eslint/no-unused-vars": "off",
    "unused-imports/no-unused-imports": "error",
    "@typescript-eslint/no-empty-function": "off",
    "unused-imports/no-unused-vars": [
      "warn",
      {
backend/package-lock.json (generated)
@@ -2,6 +2,7 @@
    "dependencies": {
      "@aws-sdk/client-secrets-manager": "^3.319.0",
      "@godaddy/terminus": "^4.12.0",
      "@node-saml/passport-saml": "^4.0.4",
      "@octokit/rest": "^19.0.5",
      "@sentry/node": "^7.49.0",
      "@sentry/tracing": "^7.48.0",
@@ -37,6 +37,7 @@ export const getClientIdNetlify = async () => (await client.getSecret("CLIENT_ID
export const getClientIdGitHub = async () => (await client.getSecret("CLIENT_ID_GITHUB")).secretValue;
export const getClientIdGitLab = async () => (await client.getSecret("CLIENT_ID_GITLAB")).secretValue;
export const getClientIdGoogle = async () => (await client.getSecret("CLIENT_ID_GOOGLE")).secretValue;
export const getClientIdBitBucket = async () => (await client.getSecret("CLIENT_ID_BITBUCKET")).secretValue;
export const getClientSecretAzure = async () => (await client.getSecret("CLIENT_SECRET_AZURE")).secretValue;
export const getClientSecretHeroku = async () => (await client.getSecret("CLIENT_SECRET_HEROKU")).secretValue;
export const getClientSecretVercel = async () => (await client.getSecret("CLIENT_SECRET_VERCEL")).secretValue;
@@ -44,6 +45,7 @@ export const getClientSecretNetlify = async () => (await client.getSecret("CLIEN
export const getClientSecretGitHub = async () => (await client.getSecret("CLIENT_SECRET_GITHUB")).secretValue;
export const getClientSecretGitLab = async () => (await client.getSecret("CLIENT_SECRET_GITLAB")).secretValue;
export const getClientSecretGoogle = async () => (await client.getSecret("CLIENT_SECRET_GOOGLE")).secretValue;
export const getClientSecretBitBucket = async () => (await client.getSecret("CLIENT_SECRET_BITBUCKET")).secretValue;
export const getClientSlugVercel = async () => (await client.getSecret("CLIENT_SLUG_VERCEL")).secretValue;
export const getPostHogHost = async () => (await client.getSecret("POSTHOG_HOST")).secretValue || "https://app.posthog.com";
export const getPostHogProjectApiKey = async () => (await client.getSecret("POSTHOG_PROJECT_API_KEY")).secretValue || "phc_nSin8j5q2zdhpFDI1ETmFNUIuTG4DwKVyIigrY10XiE";
@@ -7,6 +7,8 @@ import { IntegrationService } from "../../services";
import {
  ALGORITHM_AES_256_GCM,
  ENCODING_SCHEME_UTF8,
  INTEGRATION_BITBUCKET_API_URL,
  INTEGRATION_NORTHFLANK_API_URL,
  INTEGRATION_RAILWAY_API_URL,
  INTEGRATION_SET,
  INTEGRATION_VERCEL_API_URL,
@@ -141,12 +143,14 @@ export const saveIntegrationAccessToken = async (req: Request, res: Response) =>
 */
export const getIntegrationAuthApps = async (req: Request, res: Response) => {
  const teamId = req.query.teamId as string;
  const workspaceSlug = req.query.workspaceSlug as string;

  const apps = await getApps({
    integrationAuth: req.integrationAuth,
    accessToken: req.accessToken,
    accessId: req.accessId,
    ...(teamId && { teamId })
    ...(teamId && { teamId }),
    ...(workspaceSlug && { workspaceSlug })
  });

  return res.status(200).send({
@@ -382,6 +386,139 @@ export const getIntegrationAuthRailwayServices = async (req: Request, res: Respo
  });
};

/**
 * Return list of workspaces allowed for Bitbucket integration
 * @param req
 * @param res
 * @returns
 */
export const getIntegrationAuthBitBucketWorkspaces = async (req: Request, res: Response) => {

  interface WorkspaceResponse {
    size: number;
    page: number;
    pageLen: number;
    next: string;
    previous: string;
    values: Array<Workspace>;
  }

  interface Workspace {
    type: string;
    uuid: string;
    name: string;
    slug: string;
    is_private: boolean;
    created_on: string;
    updated_on: string;
  }

  const workspaces: Workspace[] = [];
  let hasNextPage = true;
  let workspaceUrl = `${INTEGRATION_BITBUCKET_API_URL}/2.0/workspaces`

  while (hasNextPage) {
    const { data }: { data: WorkspaceResponse } = await standardRequest.get(
      workspaceUrl,
      {
        headers: {
          Authorization: `Bearer ${req.accessToken}`,
          "Accept-Encoding": "application/json"
        }
      }
    );

    if (data?.values.length > 0) {
      data.values.forEach((workspace) => {
        workspaces.push(workspace)
      })
    }

    if (data.next) {
      workspaceUrl = data.next
    } else {
      hasNextPage = false
    }
  }

  return res.status(200).send({
    workspaces
  });
};

/**
 * Return list of secret groups for Northflank project with id [appId]
 * @param req
 * @param res
 * @returns
 */
export const getIntegrationAuthNorthflankSecretGroups = async (req: Request, res: Response) => {
  const appId = req.query.appId as string;

  interface NorthflankSecretGroup {
    id: string;
    name: string;
    description: string;
    priority: number;
    projectId: string;
  }

  interface SecretGroup {
    name: string;
    groupId: string;
  }

  const secretGroups: SecretGroup[] = [];

  if (appId && appId !== "") {
    let page = 1;
    const perPage = 10;
    let hasMorePages = true;

    while(hasMorePages) {
      const params = new URLSearchParams({
        page: String(page),
        per_page: String(perPage),
        filter: "all",
      });

      const {
        data: {
          data: {
            secrets
          }
        }
      } = await standardRequest.get<{ data: { secrets: NorthflankSecretGroup[] }}>(
        `${INTEGRATION_NORTHFLANK_API_URL}/v1/projects/${appId}/secrets`,
        {
          params,
          headers: {
            Authorization: `Bearer ${req.accessToken}`,
            "Accept-Encoding": "application/json",
          },
        }
      );

      secrets.forEach((a: any) => {
        secretGroups.push({
          name: a.name,
          groupId: a.id
        });
      });

      if (secrets.length < perPage) {
        hasMorePages = false;
      }

      page++;
    }
  }

  return res.status(200).send({
    secretGroups
  });
}

/**
 * Delete integration authorization with id [integrationAuthId]
 * @param req
@@ -398,3 +535,4 @@ export const deleteIntegrationAuth = async (req: Request, res: Response) => {
    integrationAuth
  });
};
@@ -2,7 +2,7 @@ import { Request, Response } from "express";
import { Types } from "mongoose";
import { Integration } from "../../models";
import { EventService } from "../../services";
import { eventPushSecrets, eventStartIntegration } from "../../events";
import { eventStartIntegration } from "../../events";
import Folder from "../../models/folder";
import { getFolderByPath } from "../../services/FolderService";
import { BadRequestError } from "../../utils/errors";
@@ -1,6 +1,7 @@
import { Types } from "mongoose";
import { Request, Response } from "express";
import { MembershipOrg, Organization, User } from "../../models";
import { SSOConfig } from "../../ee/models";
import { deleteMembershipOrg as deleteMemberFromOrg } from "../../helpers/membershipOrg";
import { createToken } from "../../helpers/auth";
import { updateSubscriptionOrgQuantity } from "../../helpers/organization";
@@ -110,6 +111,18 @@ export const inviteUserToOrganization = async (req: Request, res: Response) => {
  }

  const plan = await EELicenseService.getPlan(organizationId);

  const ssoConfig = await SSOConfig.findOne({
    organization: new Types.ObjectId(organizationId)
  });

  if (ssoConfig && ssoConfig.isActive) {
    // case: SAML SSO is enabled for the organization
    return res.status(400).send({
      message:
        "Failed to invite member due to SAML SSO configured for organization"
    });
  }

  if (plan.memberLimit !== null) {
    // case: limit imposed on number of members allowed
@@ -9,7 +9,7 @@ import {
import { createOrganization as create } from "../../helpers/organization";
import { addMembershipsOrg } from "../../helpers/membershipOrg";
import { ACCEPTED, OWNER } from "../../variables";
import { getSiteURL, getLicenseServerUrl } from "../../config";
import { getLicenseServerUrl, getSiteURL } from "../../config";
import { licenseServerKeyRequest } from "../../config/request";

export const getOrganizations = async (req: Request, res: Response) => {
@@ -30,7 +30,6 @@ export const createSecretImport = async (req: Request, res: Response) => {
  if (doesImportExist) {
    throw BadRequestError({ message: "Secret import already exist" });
  }

  importSecDoc.imports.push({
    environment: secretImport.environment,
    secretPath: secretImport.secretPath
@@ -27,16 +27,16 @@ export const createWorkspaceEnvironment = async (
  const { workspaceId } = req.params;
  const { environmentName, environmentSlug } = req.body;
  const workspace = await Workspace.findById(workspaceId).exec();

  if (!workspace) throw WorkspaceNotFoundError();

  const plan = await EELicenseService.getPlan(workspace.organization.toString());

  if (plan.environmentLimit !== null) {
    // case: limit imposed on number of environments allowed
    if (workspace.environments.length >= plan.environmentLimit) {
      // case: number of environments used exceeds the number of environments allowed

      return res.status(400).send({
        message: "Failed to create environment due to environment limit reached. Upgrade plan to create more environments.",
      });
@@ -191,14 +191,21 @@ export const deleteWorkspaceEnvironment = async (
    workspace: workspaceId,
    environment: environmentSlug,
  });
  await ServiceToken.deleteMany({
    workspace: workspaceId,
    environment: environmentSlug,
  });
  await ServiceTokenData.deleteMany({
    workspace: workspaceId,
    environment: environmentSlug,
  });

  // await ServiceToken.deleteMany({
  //   workspace: workspaceId,
  //   environment: environmentSlug,
  // });

  const result = await ServiceTokenData.updateMany(
    { workspace: workspaceId },
    { $pull: { scopes: { environment: environmentSlug } } }
  );

  if (result.modifiedCount > 0) {
    await ServiceTokenData.deleteMany({ workspace: workspaceId, scopes: { $size: 0 } });
  }

  await Integration.deleteMany({
    workspace: workspaceId,
    environment: environmentSlug,
@@ -9,8 +9,6 @@ import {
} from "../../types/secret";
const { ValidationError } = mongoose.Error;
import {
  BadRequestError,
  InternalServerError,
  ValidationError as RouteValidationError,
  UnauthorizedRequestError
} from "../../utils/errors";
@@ -830,7 +830,7 @@ export const getSecrets = async (req: Request, res: Response) => {

  // TODO(akhilmhdh) - secret-imp change this to org type
  let importedSecrets: any[] = [];
  if (include_imports) {
  if (include_imports === "true") {
    importedSecrets = await getAllImportedSecrets(workspaceId, environment, folderId as string);
  }
@@ -5,8 +5,6 @@ import { ServiceAccount, ServiceTokenData, User } from "../../models";
import { AUTH_MODE_JWT, AUTH_MODE_SERVICE_ACCOUNT } from "../../variables";
import { getSaltRounds } from "../../config";
import { BadRequestError } from "../../utils/errors";
import Folder from "../../models/folder";
import { getFolderByPath } from "../../services/FolderService";

/**
 * Return service token data associated with service token on request
@@ -18,7 +18,7 @@ import { updateSubscriptionOrgQuantity } from "../../helpers/organization";
 * @returns
 */
export const completeAccountSignup = async (req: Request, res: Response) => {
  let user, token, refreshToken;
  let user;
  const {
    email,
    firstName,
@@ -119,7 +119,7 @@ export const completeAccountSignup = async (req: Request, res: Response) => {
    userAgent: req.headers["user-agent"] ?? "",
  });

  token = tokens.token;
  const token = tokens.token;

  // sending a welcome email to new users
  if (await getLoopsApiKey()) {
@@ -159,7 +159,7 @@ export const completeAccountSignup = async (req: Request, res: Response) => {
 * @returns
 */
export const completeAccountInvite = async (req: Request, res: Response) => {
  let user, token, refreshToken;
  let user;
  const {
    email,
    firstName,
@@ -244,7 +244,7 @@ export const completeAccountInvite = async (req: Request, res: Response) => {
    userAgent: req.headers["user-agent"] ?? "",
  });

  token = tokens.token;
  const token = tokens.token;

  // store (refresh) token in httpOnly cookie
  res.cookie("jid", tokens.refreshToken, {
@@ -4,6 +4,7 @@ import crypto from "crypto";
import bcrypt from "bcrypt";
import {
  APIKeyData,
  AuthProvider,
  MembershipOrg,
  TokenVersion,
  User
@@ -81,25 +82,66 @@ export const updateMyMfaEnabled = async (req: Request, res: Response) => {
}

/**
 * Update the current user's name [firstName, lastName].
 * Update name of the current user to [firstName, lastName].
 * @param req
 * @param res
 * @returns
 */
export const updateName = async (req: Request, res: Response) => {
  const { firstName, lastName }: { firstName: string; lastName: string; } = req.body;
  req.user.firstName = firstName;
  req.user.lastName = lastName || "";
  const {
    firstName,
    lastName
  }: {
    firstName: string;
    lastName: string;
  } = req.body;

  await req.user.save();

  const user = req.user;
  const user = await User.findByIdAndUpdate(
    req.user._id.toString(),
    {
      firstName,
      lastName: lastName ?? ""
    },
    {
      new: true
    }
  );

  return res.status(200).send({
    user,
  });
}

/**
 * Update auth provider of the current user to [authProvider]
 * @param req
 * @param res
 * @returns
 */
export const updateAuthProvider = async (req: Request, res: Response) => {
  const {
    authProvider
  } = req.body;

  if (req.user?.authProvider === AuthProvider.OKTA_SAML) return res.status(400).send({
    message: "Failed to update user authentication method because SAML SSO is enforced"
  });

  const user = await User.findByIdAndUpdate(
    req.user._id.toString(),
    {
      authProvider
    },
    {
      new: true
    }
  );

  return res.status(200).send({
    user
  });
}

/**
 * Return organizations that the current user is part of.
 * @param req
@@ -179,10 +179,9 @@ export const getWorkspaceKey = async (req: Request, res: Response) => {
    }
  }
  */
  let key;
  const { workspaceId } = req.params;

  key = await Key.findOne({
  const key = await Key.findOne({
    workspace: workspaceId,
    receiver: req.user._id
  }).populate("sender", "+publicKey");
|
||||
|
||||
if (!user) throw new Error("Failed to find user");
|
||||
|
||||
if (user.authProvider) {
|
||||
if (user.authProvider && user.authProvider !== AuthProvider.EMAIL) {
|
||||
await validateProviderAuthToken({
|
||||
email,
|
||||
user,
|
||||
@ -117,7 +117,7 @@ export const login2 = async (req: Request, res: Response) => {
|
||||
|
||||
if (!user) throw new Error("Failed to find user");
|
||||
|
||||
if (user.authProvider) {
|
||||
if (user.authProvider && user.authProvider !== AuthProvider.EMAIL) {
|
||||
await validateProviderAuthToken({
|
||||
email,
|
||||
user,
|
||||
|
@@ -3,12 +3,15 @@ import { Types } from "mongoose";
import { EventService, SecretService } from "../../services";
import { eventPushSecrets } from "../../events";
import { BotService } from "../../services";
import { repackageSecretToRaw } from "../../helpers/secrets";
import { containsGlobPatterns, repackageSecretToRaw } from "../../helpers/secrets";
import { encryptSymmetric128BitHexKeyUTF8 } from "../../utils/crypto";
import { getAllImportedSecrets } from "../../services/SecretImportService";
import Folder from "../../models/folder";
import { getFolderByPath } from "../../services/FolderService";
import { BadRequestError } from "../../utils/errors";
import { IServiceTokenData } from "../../models";
import { requireWorkspaceAuth } from "../../middleware";
import { ADMIN, MEMBER, PERMISSION_READ_SECRETS } from "../../variables";

/**
 * Return secrets for workspace with id [workspaceId] and environment
@@ -17,11 +20,30 @@ import { BadRequestError } from "../../utils/errors";
 * @param res
 */
export const getSecretsRaw = async (req: Request, res: Response) => {
  const workspaceId = req.query.workspaceId as string;
  const environment = req.query.environment as string;
  const secretPath = req.query.secretPath as string;
  let workspaceId = req.query.workspaceId as string;
  let environment = req.query.environment as string;
  let secretPath = req.query.secretPath as string;
  const includeImports = req.query.include_imports as string;

  // if the service token has single scope, it will get all secrets for that scope by default
  const serviceTokenDetails: IServiceTokenData = req?.serviceTokenData;
  if (serviceTokenDetails && serviceTokenDetails.scopes.length == 1 && !containsGlobPatterns(serviceTokenDetails.scopes[0].secretPath)) {
    const scope = serviceTokenDetails.scopes[0];
    secretPath = scope.secretPath;
    environment = scope.environment;
    workspaceId = serviceTokenDetails.workspace.toString();
  } else {
    requireWorkspaceAuth({
      acceptedRoles: [ADMIN, MEMBER],
      locationWorkspaceId: "query",
      locationEnvironment: "query",
      requiredPermissions: [PERMISSION_READ_SECRETS],
      requireBlindIndicesEnabled: true,
      requireE2EEOff: true
    });
  }

  const secrets = await SecretService.getSecrets({
    workspaceId: new Types.ObjectId(workspaceId),
    environment,
@@ -33,7 +55,7 @@ export const getSecretsRaw = async (req: Request, res: Response) => {
    workspaceId: new Types.ObjectId(workspaceId)
  });

  if (includeImports) {
  if (includeImports === "true") {
    const folders = await Folder.findOne({ workspace: workspaceId, environment });
    let folderId = "root";
    // if folder exist get it and replace folderid with new one
@@ -271,7 +293,7 @@ export const getSecrets = async (req: Request, res: Response) => {
    authData: req.authData
  });

  if (includeImports) {
  if (includeImports === "true") {
    const folders = await Folder.findOne({ workspace: workspaceId, environment });
    let folderId = "root";
    // if folder exist get it and replace folderid with new one
@@ -12,6 +12,7 @@ import { standardRequest } from "../../config/request";
import { getHttpsEnabled, getJwtSignupSecret, getLoopsApiKey } from "../../config";
import { BadRequestError } from "../../utils/errors";
import { TelemetryService } from "../../services";
import { AuthProvider } from "../../models";

/**
 * Complete setting up user by adding their personal and auth information as part of the
@@ -116,11 +117,13 @@ export const completeAccountSignup = async (req: Request, res: Response) => {
  if (!user)
    throw new Error("Failed to complete account for non-existent user"); // ensure user is non-null

  // initialize default organization and workspace
  await initializeDefaultOrg({
    organizationName,
    user,
  });
  if (user.authProvider !== AuthProvider.OKTA_SAML) {
    // initialize default organization and workspace
    await initializeDefaultOrg({
      organizationName,
      user,
    });
  }

  // update organization membership statuses that are
  // invited to completed with user attached
@@ -174,7 +177,7 @@ export const completeAccountSignup = async (req: Request, res: Response) => {
      distinctId: email,
      properties: {
        email,
        attributionSource,
        ...(attributionSource ? { attributionSource } : {})
      },
    });
  }
@@ -1,6 +1,8 @@
import * as secretController from "./secretController";
import * as secretSnapshotController from "./secretSnapshotController";
import * as organizationsController from "./organizationsController";
import * as ssoController from "./ssoController";
import * as usersController from "./usersController";
import * as workspaceController from "./workspaceController";
import * as actionController from "./actionController";
import * as membershipController from "./membershipController";
@@ -10,6 +12,8 @@ export {
  secretController,
  secretSnapshotController,
  organizationsController,
  ssoController,
  usersController,
  workspaceController,
  actionController,
  membershipController,
@@ -178,6 +178,12 @@ export const addOrganizationTaxId = async (req: Request, res: Response) => {
  return res.status(200).send(data);
}

/**
 * Delete tax id with id [taxId] from organization tax ids on file
 * @param req
 * @param res
 * @returns
 */
export const deleteOrganizationTaxId = async (req: Request, res: Response) => {
  const { taxId } = req.params;

@@ -188,6 +194,12 @@ export const deleteOrganizationTaxId = async (req: Request, res: Response) => {
  return res.status(200).send(data);
}

/**
 * Return organization's invoices on file
 * @param req
 * @param res
 * @returns
 */
export const getOrganizationInvoices = async (req: Request, res: Response) => {
  const { data: { invoices } } = await licenseServerKeyRequest.get(
    `${await getLicenseServerUrl()}/api/license-server/v1/customers/${req.organization.customerId}/invoices`
backend/src/ee/controllers/v1/ssoController.ts (new file)
@@ -0,0 +1,267 @@
import { Request, Response } from "express";
import { Types } from "mongoose";
import { BotOrgService } from "../../../services";
import { SSOConfig } from "../../models";
import {
  MembershipOrg,
  User
} from "../../../models";
import { getSSOConfigHelper } from "../../helpers/organizations";
import { client } from "../../../config";
import { ResourceNotFoundError } from "../../../utils/errors";
import { getSiteURL } from "../../../config";
import { EELicenseService } from "../../services";

/**
 * Redirect user to appropriate SSO endpoint after successful authentication
 * to finish inputting their master key for logging in or signing up
 * @param req
 * @param res
 * @returns
 */
export const redirectSSO = async (req: Request, res: Response) => {
  if (req.isUserCompleted) {
    return res.redirect(`${await getSiteURL()}/login/sso?token=${encodeURIComponent(req.providerAuthToken)}`);
  }

  return res.redirect(`${await getSiteURL()}/signup/sso?token=${encodeURIComponent(req.providerAuthToken)}`);
}

/**
 * Return organization SAML SSO configuration
 * @param req
 * @param res
 * @returns
 */
export const getSSOConfig = async (req: Request, res: Response) => {
  const organizationId = req.query.organizationId as string;

  const data = await getSSOConfigHelper({
    organizationId: new Types.ObjectId(organizationId)
  });

  return res.status(200).send(data);
}

/**
 * Update organization SAML SSO configuration
 * @param req
 * @param res
 * @returns
 */
export const updateSSOConfig = async (req: Request, res: Response) => {
  const {
    organizationId,
    authProvider,
    isActive,
    entryPoint,
    issuer,
    cert,
    audience
  } = req.body;

  const plan = await EELicenseService.getPlan(organizationId);

  if (!plan.samlSSO) return res.status(400).send({
    message: "Failed to update SAML SSO configuration due to plan restriction. Upgrade plan to update SSO configuration."
  });

  interface PatchUpdate {
    authProvider?: string;
    isActive?: boolean;
    encryptedEntryPoint?: string;
    entryPointIV?: string;
    entryPointTag?: string;
    encryptedIssuer?: string;
    issuerIV?: string;
    issuerTag?: string;
    encryptedCert?: string;
    certIV?: string;
    certTag?: string;
    encryptedAudience?: string;
    audienceIV?: string;
    audienceTag?: string;
  }

  const update: PatchUpdate = {};

  if (authProvider) {
    update.authProvider = authProvider;
  }

  if (isActive !== undefined) {
    update.isActive = isActive;
  }

  const key = await BotOrgService.getSymmetricKey(
    new Types.ObjectId(organizationId)
  );

  if (entryPoint) {
    const {
      ciphertext: encryptedEntryPoint,
      iv: entryPointIV,
      tag: entryPointTag
    } = client.encryptSymmetric(entryPoint, key);

    update.encryptedEntryPoint = encryptedEntryPoint;
    update.entryPointIV = entryPointIV;
    update.entryPointTag = entryPointTag;
  }

  if (issuer) {
    const {
      ciphertext: encryptedIssuer,
      iv: issuerIV,
      tag: issuerTag
    } = client.encryptSymmetric(issuer, key);

    update.encryptedIssuer = encryptedIssuer;
    update.issuerIV = issuerIV;
    update.issuerTag = issuerTag;
  }

  if (cert) {
    const {
      ciphertext: encryptedCert,
      iv: certIV,
      tag: certTag
    } = client.encryptSymmetric(cert, key);

    update.encryptedCert = encryptedCert;
    update.certIV = certIV;
    update.certTag = certTag;
  }

  if (audience) {
    const {
      ciphertext: encryptedAudience,
      iv: audienceIV,
      tag: audienceTag
    } = client.encryptSymmetric(audience, key);

    update.encryptedAudience = encryptedAudience;
    update.audienceIV = audienceIV;
    update.audienceTag = audienceTag;
  }

  const ssoConfig = await SSOConfig.findOneAndUpdate(
    {
      organization: new Types.ObjectId(organizationId)
    },
    update,
    {
      new: true
    }
  );

  if (!ssoConfig) throw ResourceNotFoundError({
    message: "Failed to find SSO config to update"
  });

  if (update.isActive !== undefined) {
    const membershipOrgs = await MembershipOrg.find({
      organization: new Types.ObjectId(organizationId)
    }).select("user");

    if (update.isActive) {
      await User.updateMany(
        {
          _id: {
            $in: membershipOrgs.map((membershipOrg) => membershipOrg.user)
          }
        },
        {
          authProvider: ssoConfig.authProvider
        }
      );
    } else {
      await User.updateMany(
        {
          _id: {
            $in: membershipOrgs.map((membershipOrg) => membershipOrg.user)
          }
        },
        {
          $unset: {
            authProvider: 1
          }
        }
      );
    }
  }

  return res.status(200).send(ssoConfig);
}

/**
 * Create organization SAML SSO configuration
 * @param req
 * @param res
 * @returns
 */
export const createSSOConfig = async (req: Request, res: Response) => {
  const {
    organizationId,
    authProvider,
    isActive,
    entryPoint,
    issuer,
    cert,
    audience
  } = req.body;

  const plan = await EELicenseService.getPlan(organizationId);

  if (!plan.samlSSO) return res.status(400).send({
    message: "Failed to create SAML SSO configuration due to plan restriction. Upgrade plan to add SSO configuration."
  });

  const key = await BotOrgService.getSymmetricKey(
    new Types.ObjectId(organizationId)
  );

  const {
    ciphertext: encryptedEntryPoint,
    iv: entryPointIV,
    tag: entryPointTag
  } = client.encryptSymmetric(entryPoint, key);

  const {
    ciphertext: encryptedIssuer,
    iv: issuerIV,
    tag: issuerTag
  } = client.encryptSymmetric(issuer, key);

  const {
    ciphertext: encryptedCert,
    iv: certIV,
    tag: certTag
  } = client.encryptSymmetric(cert, key);

  const {
    ciphertext: encryptedAudience,
    iv: audienceIV,
    tag: audienceTag
  } = client.encryptSymmetric(audience, key);

  const ssoConfig = await new SSOConfig({
    organization: new Types.ObjectId(organizationId),
    authProvider,
    isActive,
    encryptedEntryPoint,
    entryPointIV,
    entryPointTag,
    encryptedIssuer,
    issuerIV,
    issuerTag,
    encryptedCert,
    certIV,
    certTag,
    encryptedAudience,
    audienceIV,
    audienceTag
  }).save();

  return res.status(200).send(ssoConfig);
}
backend/src/ee/controllers/v1/usersController.ts (new file)
@@ -0,0 +1,13 @@
import { Request, Response } from "express";

/**
 * Return the ip address of the current user
 * @param req
 * @param res
 * @returns
 */
export const getMyIp = (req: Request, res: Response) => {
  return res.status(200).send({
    ip: req.authData.authIP
  });
}
@@ -3,16 +3,20 @@ import { PipelineStage, Types } from "mongoose";
import { Secret } from "../../../models";
import {
  FolderVersion,
  IPType,
  ISecretVersion,
  Log,
  SecretSnapshot,
  SecretVersion,
  TFolderRootVersionSchema,
  TrustedIP
} from "../../models";
import { EESecretService } from "../../services";
import { getLatestSecretVersionIds } from "../../helpers/secretVersion";
import Folder, { TFolderSchema } from "../../../models/folder";
import { searchByFolderId } from "../../../services/FolderService";
import { EELicenseService } from "../../services";
import { extractIPDetails, isValidIpOrCidr } from "../../../utils/ip";

/**
 * Return secret snapshots for workspace with id [workspaceId]
@@ -588,3 +592,147 @@ export const getWorkspaceLogs = async (req: Request, res: Response) => {
    logs,
  });
};

/**
 * Return trusted ips for workspace with id [workspaceId]
 * @param req
 * @param res
 */
export const getWorkspaceTrustedIps = async (req: Request, res: Response) => {
  const { workspaceId } = req.params;

  const trustedIps = await TrustedIP.find({
    workspace: new Types.ObjectId(workspaceId)
  });

  return res.status(200).send({
    trustedIps
  });
}

/**
 * Add a trusted ip to workspace with id [workspaceId]
 * @param req
 * @param res
 */
export const addWorkspaceTrustedIp = async (req: Request, res: Response) => {
  const { workspaceId } = req.params;
  const {
    ipAddress: ip,
    comment,
    isActive
  } = req.body;

  const plan = await EELicenseService.getPlan(req.workspace.organization.toString());

  if (!plan.ipAllowlisting) return res.status(400).send({
    message: "Failed to add IP access range due to plan restriction. Upgrade plan to add IP access range."
  });

  const isValidIPOrCidr = isValidIpOrCidr(ip);

  if (!isValidIPOrCidr) return res.status(400).send({
    message: "The IP is not a valid IPv4, IPv6, or CIDR block"
  });

  const { ipAddress, type, prefix } = extractIPDetails(ip);

  const trustedIp = await new TrustedIP({
    workspace: new Types.ObjectId(workspaceId),
    ipAddress,
    type,
    prefix,
    isActive,
    comment,
  }).save();

  return res.status(200).send({
    trustedIp
  });
}

/**
 * Update trusted ip with id [trustedIpId] workspace with id [workspaceId]
 * @param req
 * @param res
 */
export const updateWorkspaceTrustedIp = async (req: Request, res: Response) => {
  const { workspaceId, trustedIpId } = req.params;
  const {
    ipAddress: ip,
    comment
  } = req.body;

  const plan = await EELicenseService.getPlan(req.workspace.organization.toString());

  if (!plan.ipAllowlisting) return res.status(400).send({
    message: "Failed to update IP access range due to plan restriction. Upgrade plan to update IP access range."
  });

  const isValidIPOrCidr = isValidIpOrCidr(ip);

  if (!isValidIPOrCidr) return res.status(400).send({
    message: "The IP is not a valid IPv4, IPv6, or CIDR block"
  });

  const { ipAddress, type, prefix } = extractIPDetails(ip);

  const updateObject: {
    ipAddress: string;
    type: IPType;
    comment: string;
    prefix?: number;
    $unset?: {
      prefix: number;
    }
  } = {
    ipAddress,
    type,
    comment
  };

  if (prefix !== undefined) {
    updateObject.prefix = prefix;
  } else {
    updateObject.$unset = { prefix: 1 };
  }

  const trustedIp = await TrustedIP.findOneAndUpdate(
    {
      _id: new Types.ObjectId(trustedIpId),
      workspace: new Types.ObjectId(workspaceId),
    },
    updateObject,
    {
      new: true
    }
  );

  return res.status(200).send({
    trustedIp
  });
}

/**
 * Delete IP access range from workspace with id [workspaceId]
 * @param req
 * @param res
 */
export const deleteWorkspaceTrustedIp = async (req: Request, res: Response) => {
  const { workspaceId, trustedIpId } = req.params;

  const plan = await EELicenseService.getPlan(req.workspace.organization.toString());

  if (!plan.ipAllowlisting) return res.status(400).send({
    message: "Failed to delete IP access range due to plan restriction. Upgrade plan to delete IP access range."
  });

  const trustedIp = await TrustedIP.findOneAndDelete({
    _id: new Types.ObjectId(trustedIpId),
    workspace: new Types.ObjectId(workspaceId)
  });

  return res.status(200).send({
    trustedIp
  });
}
backend/src/ee/helpers/organizations.ts (new file)
@@ -0,0 +1,72 @@
import { Types } from "mongoose";
import {
  SSOConfig
} from "../models";
import {
  BotOrgService
} from "../../services";
import { client } from "../../config";
import { ValidationError } from "../../utils/errors";

export const getSSOConfigHelper = async ({
  organizationId,
  ssoConfigId
}: {
  organizationId?: Types.ObjectId;
  ssoConfigId?: Types.ObjectId;
}) => {

  if (!organizationId && !ssoConfigId) throw ValidationError({
    message: "Getting SSO data requires either id of organization or SSO data"
  });

  const ssoConfig = await SSOConfig.findOne({
    ...(organizationId ? { organization: organizationId } : {}),
    ...(ssoConfigId ? { _id: ssoConfigId } : {})
  });

  if (!ssoConfig) throw new Error("Failed to find organization SSO data");

  const key = await BotOrgService.getSymmetricKey(
    ssoConfig.organization
  );

  const entryPoint = client.decryptSymmetric(
    ssoConfig.encryptedEntryPoint,
    key,
    ssoConfig.entryPointIV,
    ssoConfig.entryPointTag
  );

  const issuer = client.decryptSymmetric(
    ssoConfig.encryptedIssuer,
    key,
    ssoConfig.issuerIV,
    ssoConfig.issuerTag
  );

  const cert = client.decryptSymmetric(
    ssoConfig.encryptedCert,
    key,
    ssoConfig.certIV,
    ssoConfig.certTag
  );

  const audience = client.decryptSymmetric(
    ssoConfig.encryptedAudience,
    key,
    ssoConfig.audienceIV,
    ssoConfig.audienceTag
  );

  return ({
    _id: ssoConfig._id,
    organization: ssoConfig.organization,
    authProvider: ssoConfig.authProvider,
    isActive: ssoConfig.isActive,
    entryPoint,
    issuer,
    cert,
    audience
  });
}
@@ -1,7 +1,5 @@
import requireLicenseAuth from "./requireLicenseAuth";
import requireSecretSnapshotAuth from "./requireSecretSnapshotAuth";

export {
  requireLicenseAuth,
  requireSecretSnapshotAuth,
}
@@ -1,23 +0,0 @@
import { NextFunction, Request, Response } from "express";

/**
 * Validate if organization hosting meets license requirements to
 * access a license-specific route.
 * @param {Object} obj
 * @param {String[]} obj.acceptedTiers
 */
const requireLicenseAuth = ({
  acceptedTiers,
}: {
  acceptedTiers: string[];
}) => {
  return async (req: Request, res: Response, next: NextFunction) => {
    try {

    } catch (err) {

    }
  }
}

export default requireLicenseAuth;
@@ -66,6 +66,4 @@ const actionSchema = new Schema<IAction>(
  }
);

const Action = model<IAction>("Action", actionSchema);

export default Action;
export const Action = model<IAction>("Action", actionSchema);
@@ -52,9 +52,7 @@ const folderRootVersionSchema = new Schema<TFolderRootVersionSchema>(
  }
);

const FolderVersion = model<TFolderRootVersionSchema>(
export const FolderVersion = model<TFolderRootVersionSchema>(
  "FolderVersion",
  folderRootVersionSchema
);

export default FolderVersion;
);
@@ -1,18 +1,7 @@
import SecretSnapshot, { ISecretSnapshot } from "./secretSnapshot";
import SecretVersion, { ISecretVersion } from "./secretVersion";
import FolderVersion, { TFolderRootVersionSchema } from "./folderVersion";
import Log, { ILog } from "./log";
import Action, { IAction } from "./action";

export {
  SecretSnapshot,
  ISecretSnapshot,
  SecretVersion,
  ISecretVersion,
  FolderVersion,
  TFolderRootVersionSchema,
  Log,
  ILog,
  Action,
  IAction,
};
export * from "./secretSnapshot";
export * from "./secretVersion";
export * from "./folderVersion";
export * from "./log";
export * from "./action";
export * from "./ssoConfig";
export * from "./trustedIp";
@@ -63,11 +63,10 @@ const logSchema = new Schema<ILog>(
    ipAddress: {
      type: String,
    },
  }, {
    timestamps: true,
  }
  },
  {
    timestamps: true,
  }
);

const Log = model<ILog>("Log", logSchema);

export default Log;
export const Log = model<ILog>("Log", logSchema);
@@ -46,9 +46,7 @@ const secretSnapshotSchema = new Schema<ISecretSnapshot>(
  }
);

const SecretSnapshot = model<ISecretSnapshot>(
export const SecretSnapshot = model<ISecretSnapshot>(
  "SecretSnapshot",
  secretSnapshotSchema
);

export default SecretSnapshot;
);
@@ -124,9 +124,7 @@ const secretVersionSchema = new Schema<ISecretVersion>(
  }
);

const SecretVersion = model<ISecretVersion>(
export const SecretVersion = model<ISecretVersion>(
  "SecretVersion",
  secretVersionSchema
);

export default SecretVersion;
);
backend/src/ee/models/ssoConfig.ts (new file)
@@ -0,0 +1,80 @@
import { Schema, Types, model } from "mongoose";

export interface ISSOConfig {
  organization: Types.ObjectId;
  authProvider: "okta-saml"
  isActive: boolean;
  encryptedEntryPoint: string;
  entryPointIV: string;
  entryPointTag: string;
  encryptedIssuer: string;
  issuerIV: string;
  issuerTag: string;
  encryptedCert: string;
  certIV: string;
  certTag: string;
  encryptedAudience: string;
  audienceIV: string;
  audienceTag: string;
}

const ssoConfigSchema = new Schema<ISSOConfig>(
  {
    organization: {
      type: Schema.Types.ObjectId,
      ref: "Organization"
    },
    authProvider: {
      type: String,
      enum: [
        "okta-saml"
      ],
      required: true
    },
    isActive: {
      type: Boolean,
      required: true
    },
    encryptedEntryPoint: {
      type: String
    },
    entryPointIV: {
      type: String
    },
    entryPointTag: {
      type: String
    },
    encryptedIssuer: {
      type: String
    },
    issuerIV: {
      type: String
    },
    issuerTag: {
      type: String
    },
    encryptedCert: {
      type: String
    },
    certIV: {
      type: String
    },
    certTag: {
      type: String
    },
    encryptedAudience: {
      type: String
    },
    audienceIV: {
      type: String
    },
    audienceTag: {
      type: String
    }
  },
  {
    timestamps: true
  }
);

export const SSOConfig = model<ISSOConfig>("SSOConfig", ssoConfigSchema);
backend/src/ee/models/trustedIp.ts (new file)
@@ -0,0 +1,54 @@
import { Schema, Types, model } from "mongoose";

export enum IPType {
  IPV4 = "ipv4",
  IPV6 = "ipv6"
}

export interface ITrustedIP {
  _id: Types.ObjectId;
  workspace: Types.ObjectId;
  ipAddress: string;
  type: "ipv4" | "ipv6", // either IPv4/IPv6 address or network IPv4/IPv6 address
  isActive: boolean;
  comment: string;
  prefix?: number; // CIDR
}

const trustedIpSchema = new Schema<ITrustedIP>(
  {
    workspace: {
      type: Schema.Types.ObjectId,
      ref: "Workspace",
      required: true
    },
    ipAddress: {
      type: String,
      required: true
    },
    type: {
      type: String,
      enum: [
        IPType.IPV4,
        IPType.IPV6
      ],
      required: true
    },
    prefix: {
      type: Number,
      required: false
    },
    isActive: {
      type: Boolean,
      required: true
    },
    comment: {
      type: String
    }
  },
  {
    timestamps: true
  }
);

export const TrustedIP = model<ITrustedIP>("TrustedIP", trustedIpSchema);
@@ -1,6 +1,8 @@
import secret from "./secret";
import secretSnapshot from "./secretSnapshot";
import organizations from "./organizations";
import sso from "./sso";
import users from "./users";
import workspace from "./workspace";
import action from "./action";
import cloudProducts from "./cloudProducts";
@@ -9,6 +11,8 @@ export {
  secret,
  secretSnapshot,
  organizations,
  sso,
  users,
  workspace,
  action,
  cloudProducts,
backend/src/ee/routes/v1/sso.ts (new file)
@@ -0,0 +1,121 @@
import express from "express";
const router = express.Router();
import passport from "passport";
import {
  requireAuth,
  requireOrganizationAuth,
  validateRequest,
} from "../../../middleware";
import { body, query } from "express-validator";
import { ssoController } from "../../controllers/v1";
import { authLimiter } from "../../../helpers/rateLimiter";
import {
  ACCEPTED,
  ADMIN,
  OWNER
} from "../../../variables";

router.get(
  "/redirect/google",
  authLimiter,
  (req, res, next) => {
    passport.authenticate("google", {
      scope: ["profile", "email"],
      session: false,
      ...(req.query.callback_port ? {
        state: req.query.callback_port as string
      } : {})
    })(req, res, next);
  }
);

router.get(
  "/google",
  passport.authenticate("google", {
    failureRedirect: "/login/provider/error",
    session: false
  }),
  ssoController.redirectSSO
);

router.get(
  "/redirect/saml2/:ssoIdentifier",
  authLimiter,
  (req, res, next) => {
    const options = {
      failureRedirect: "/",
      additionalParams: {
        RelayState: req.query.callback_port ?? ""
      },
    };
    passport.authenticate("saml", options)(req, res, next);
  }
);

router.post("/saml2/:ssoIdentifier",
  passport.authenticate("saml", {
    failureRedirect: "/login/provider/error",
    failureFlash: true,
    session: false
  }),
  ssoController.redirectSSO
);

router.get(
  "/config",
  requireAuth({
    acceptedAuthModes: ["jwt"],
  }),
  requireOrganizationAuth({
    acceptedRoles: [OWNER, ADMIN],
    acceptedStatuses: [ACCEPTED],
    locationOrganizationId: "query"
  }),
  query("organizationId").exists().trim(),
  validateRequest,
  ssoController.getSSOConfig
);

router.post(
  "/config",
  requireAuth({
    acceptedAuthModes: ["jwt"],
  }),
  requireOrganizationAuth({
    acceptedRoles: [OWNER, ADMIN],
    acceptedStatuses: [ACCEPTED],
    locationOrganizationId: "body"
  }),
  body("organizationId").exists().trim(),
  body("authProvider").exists().isString(),
  body("isActive").exists().isBoolean(),
  body("entryPoint").exists().isString(),
  body("issuer").exists().isString(),
  body("cert").exists().isString(),
  body("audience").exists().isString(),
  validateRequest,
  ssoController.createSSOConfig
);

router.patch(
  "/config",
  requireAuth({
    acceptedAuthModes: ["jwt"],
  }),
  requireOrganizationAuth({
    acceptedRoles: [OWNER, ADMIN],
    acceptedStatuses: [ACCEPTED],
    locationOrganizationId: "body"
  }),
  body("organizationId").exists().trim(),
  body("authProvider").optional().isString(),
  body("isActive").optional().isBoolean(),
  body("entryPoint").optional().isString(),
  body("issuer").optional().isString(),
  body("cert").optional().isString(),
  body("audience").optional().isString(),
  validateRequest,
  ssoController.updateSSOConfig
);

export default router;
backend/src/ee/routes/v1/users.ts (new file)
@@ -0,0 +1,17 @@
import express from "express";
const router = express.Router();
import {
  requireAuth
} from "../../../middleware";
import { AUTH_MODE_API_KEY, AUTH_MODE_JWT } from "../../../variables";
import { usersController } from "../../controllers/v1";

router.get(
  "/me/ip",
  requireAuth({
    acceptedAuthModes: [AUTH_MODE_JWT, AUTH_MODE_API_KEY],
  }),
  usersController.getMyIp
);

export default router;
@@ -6,13 +6,18 @@ import {
  validateRequest,
} from "../../../middleware";
import { body, param, query } from "express-validator";
import { ADMIN, MEMBER } from "../../../variables";
import {
  ADMIN,
  AUTH_MODE_API_KEY,
  AUTH_MODE_JWT,
  MEMBER
} from "../../../variables";
import { workspaceController } from "../../controllers/v1";

router.get(
  "/:workspaceId/secret-snapshots",
  requireAuth({
    acceptedAuthModes: ["jwt", "apiKey"],
    acceptedAuthModes: [AUTH_MODE_JWT, AUTH_MODE_API_KEY],
  }),
  requireWorkspaceAuth({
    acceptedRoles: [ADMIN, MEMBER],
@@ -30,7 +35,7 @@ router.get(
router.get(
  "/:workspaceId/secret-snapshots/count",
  requireAuth({
    acceptedAuthModes: ["jwt"],
    acceptedAuthModes: [AUTH_MODE_JWT],
  }),
  requireWorkspaceAuth({
    acceptedRoles: [ADMIN, MEMBER],
@@ -46,7 +51,7 @@ router.get(
router.post(
  "/:workspaceId/secret-snapshots/rollback",
  requireAuth({
    acceptedAuthModes: ["jwt", "apiKey"],
    acceptedAuthModes: [AUTH_MODE_JWT, AUTH_MODE_API_KEY],
  }),
  requireWorkspaceAuth({
    acceptedRoles: [ADMIN, MEMBER],
@@ -63,7 +68,7 @@ router.post(
router.get(
  "/:workspaceId/logs",
  requireAuth({
    acceptedAuthModes: ["jwt", "apiKey"],
    acceptedAuthModes: [AUTH_MODE_JWT, AUTH_MODE_API_KEY],
  }),
  requireWorkspaceAuth({
    acceptedRoles: [ADMIN, MEMBER],
@@ -79,4 +84,66 @@ router.get(
  workspaceController.getWorkspaceLogs
);

router.get(
  "/:workspaceId/trusted-ips",
  param("workspaceId").exists().isString().trim(),
  requireAuth({
    acceptedAuthModes: [AUTH_MODE_JWT],
  }),
  requireWorkspaceAuth({
    acceptedRoles: [ADMIN, MEMBER],
    locationWorkspaceId: "params",
  }),
  workspaceController.getWorkspaceTrustedIps
);

router.post(
  "/:workspaceId/trusted-ips",
  param("workspaceId").exists().isString().trim(),
  body("ipAddress").exists().isString().trim(),
  body("comment").default("").isString().trim(),
  body("isActive").exists().isBoolean(),
  validateRequest,
  requireAuth({
    acceptedAuthModes: [AUTH_MODE_JWT],
  }),
  requireWorkspaceAuth({
    acceptedRoles: [ADMIN],
    locationWorkspaceId: "params",
  }),
  workspaceController.addWorkspaceTrustedIp
);

router.patch(
  "/:workspaceId/trusted-ips/:trustedIpId",
  param("workspaceId").exists().isString().trim(),
  param("trustedIpId").exists().isString().trim(),
  body("ipAddress").isString().trim().default(""),
  body("comment").default("").isString().trim(),
  validateRequest,
  requireAuth({
    acceptedAuthModes: [AUTH_MODE_JWT],
  }),
  requireWorkspaceAuth({
    acceptedRoles: [ADMIN],
    locationWorkspaceId: "params",
  }),
  workspaceController.updateWorkspaceTrustedIp
);

router.delete(
  "/:workspaceId/trusted-ips/:trustedIpId",
  param("workspaceId").exists().isString().trim(),
  param("trustedIpId").exists().isString().trim(),
  validateRequest,
  requireAuth({
    acceptedAuthModes: [AUTH_MODE_JWT],
  }),
  requireWorkspaceAuth({
    acceptedRoles: [ADMIN],
    locationWorkspaceId: "params",
  }),
  workspaceController.deleteWorkspaceTrustedIp
);

export default router;
@@ -26,11 +26,13 @@ interface FeatureSet {
  environmentsUsed: number;
  secretVersioning: boolean;
  pitRecovery: boolean;
  ipAllowlisting: boolean;
  rbac: boolean;
  customRateLimits: boolean;
  customAlerts: boolean;
  auditLogs: boolean;
  status: 'incomplete' | 'incomplete_expired' | 'trialing' | 'active' | 'past_due' | 'canceled' | 'unpaid' | null;
  samlSSO: boolean;
  status: "incomplete" | "incomplete_expired" | "trialing" | "active" | "past_due" | "canceled" | "unpaid" | null;
  trial_end: number | null;
  has_used_trial: boolean;
}
@@ -59,10 +61,12 @@ class EELicenseService {
    environmentsUsed: 0,
    secretVersioning: true,
    pitRecovery: false,
    ipAllowlisting: false,
    rbac: true,
    customRateLimits: true,
    customAlerts: true,
    auditLogs: false,
    samlSSO: false,
    status: null,
    trial_end: null,
    has_used_trial: true
@@ -16,7 +16,6 @@ import { client, getEncryptionKey, getRootEncryptionKey } from "../config";
import { InternalServerError } from "../utils/errors";
import Folder from "../models/folder";
import { getFolderByPath } from "../services/FolderService";
import { environment } from "../routes/v2";

/**
 * Create an inactive bot with name [name] for workspace with id [workspaceId]
backend/src/helpers/botOrg.ts (new file)
@@ -0,0 +1,134 @@
import { Types } from "mongoose";
import { client, getEncryptionKey, getRootEncryptionKey } from "../config";
import { BotOrg } from "../models";
import { decryptSymmetric128BitHexKeyUTF8 } from "../utils/crypto";
import {
  ALGORITHM_AES_256_GCM,
  ENCODING_SCHEME_BASE64,
  ENCODING_SCHEME_UTF8
} from "../variables";
import { InternalServerError } from "../utils/errors";
import { encryptSymmetric128BitHexKeyUTF8, generateKeyPair } from "../utils/crypto";

/**
 * Create a bot with name [name] for organization with id [organizationId]
 * @param {Object} obj
 * @param {String} obj.name - name of bot
 * @param {String} obj.organizationId - id of organization that bot belongs to
 */
export const createBotOrg = async ({
  name,
  organizationId,
}: {
  name: string;
  organizationId: Types.ObjectId;
}) => {
  const encryptionKey = await getEncryptionKey();
  const rootEncryptionKey = await getRootEncryptionKey();

  const { publicKey, privateKey } = generateKeyPair();
  const key = client.createSymmetricKey();

  if (rootEncryptionKey) {
    const {
      ciphertext: encryptedPrivateKey,
      iv: privateKeyIV,
      tag: privateKeyTag
    } = client.encryptSymmetric(privateKey, rootEncryptionKey);

    const {
      ciphertext: encryptedSymmetricKey,
      iv: symmetricKeyIV,
      tag: symmetricKeyTag
    } = client.encryptSymmetric(key, rootEncryptionKey);

    return await new BotOrg({
      name,
      organization: organizationId,
      publicKey,
      encryptedSymmetricKey,
      symmetricKeyIV,
      symmetricKeyTag,
      symmetricKeyAlgorithm: ALGORITHM_AES_256_GCM,
      symmetricKeyKeyEncoding: ENCODING_SCHEME_BASE64,
      encryptedPrivateKey,
      privateKeyIV,
      privateKeyTag,
      privateKeyAlgorithm: ALGORITHM_AES_256_GCM,
      privateKeyKeyEncoding: ENCODING_SCHEME_BASE64
    }).save();
  } else if (encryptionKey) {
    const {
      ciphertext: encryptedPrivateKey,
      iv: privateKeyIV,
      tag: privateKeyTag
    } = encryptSymmetric128BitHexKeyUTF8({
      plaintext: privateKey,
      key: encryptionKey
    });

    const {
      ciphertext: encryptedSymmetricKey,
      iv: symmetricKeyIV,
      tag: symmetricKeyTag
    } = encryptSymmetric128BitHexKeyUTF8({
      plaintext: key,
      key: encryptionKey
    });

    return await new BotOrg({
      name,
      organization: organizationId,
      publicKey,
      encryptedSymmetricKey,
      symmetricKeyIV,
      symmetricKeyTag,
      symmetricKeyAlgorithm: ALGORITHM_AES_256_GCM,
      symmetricKeyKeyEncoding: ENCODING_SCHEME_UTF8,
      encryptedPrivateKey,
      privateKeyIV,
      privateKeyTag,
      privateKeyAlgorithm: ALGORITHM_AES_256_GCM,
      privateKeyKeyEncoding: ENCODING_SCHEME_UTF8
    }).save();
  }

  throw InternalServerError({
    message: "Failed to create new organization bot due to missing encryption key",
  });
};

export const getSymmetricKeyHelper = async (organizationId: Types.ObjectId) => {
  const rootEncryptionKey = await getRootEncryptionKey();
  const encryptionKey = await getEncryptionKey();

  const botOrg = await BotOrg.findOne({
    organization: organizationId
  });

  if (!botOrg) throw new Error("Failed to find organization bot");

  if (rootEncryptionKey && botOrg.symmetricKeyKeyEncoding == ENCODING_SCHEME_BASE64) {
    const key = client.decryptSymmetric(
      botOrg.encryptedSymmetricKey,
      rootEncryptionKey,
      botOrg.symmetricKeyIV,
      botOrg.symmetricKeyTag
    );

    return key;
  } else if (encryptionKey && botOrg.symmetricKeyKeyEncoding === ENCODING_SCHEME_UTF8) {
|
||||
const key = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: botOrg.encryptedSymmetricKey,
|
||||
iv: botOrg.symmetricKeyIV,
|
||||
tag: botOrg.symmetricKeyTag,
|
||||
key: encryptionKey
|
||||
});
|
||||
|
||||
return key;
|
||||
}
|
||||
|
||||
throw InternalServerError({
|
||||
message: "Failed to match encryption key with organization bot symmetric key encoding"
|
||||
});
|
||||
}
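A short usage sketch for the two helpers in this new file, assuming they are imported from helpers/botOrg and that the organization already exists; the argument shapes follow the signatures above:

// Hypothetical consumer of the organization-bot helpers defined above.
import { Types } from "mongoose";
import { createBotOrg, getSymmetricKeyHelper } from "../helpers/botOrg";

const bootstrapOrgBot = async (organizationId: Types.ObjectId) => {
  // create the org bot with an encrypted key pair and symmetric key
  await createBotOrg({ name: "Infisical Bot", organizationId });

  // later, decrypt and return the organization's symmetric key
  return getSymmetricKeyHelper(organizationId);
};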
|
@ -9,6 +9,7 @@ import {
|
||||
INTEGRATION_VERCEL,
|
||||
} from "../variables";
|
||||
import { UnauthorizedRequestError } from "../utils/errors";
|
||||
import * as Sentry from "@sentry/node";
|
||||
|
||||
interface Update {
|
||||
workspace: string;
|
||||
@ -115,53 +116,60 @@ export const syncIntegrationsHelper = async ({
|
||||
workspaceId: Types.ObjectId;
|
||||
environment?: string;
|
||||
}) => {
|
||||
const integrations = await Integration.find({
|
||||
workspace: workspaceId,
|
||||
...(environment
|
||||
? {
|
||||
try {
|
||||
const integrations = await Integration.find({
|
||||
workspace: workspaceId,
|
||||
...(environment
|
||||
? {
|
||||
environment,
|
||||
}
|
||||
: {}),
|
||||
isActive: true,
|
||||
app: { $ne: null },
|
||||
});
|
||||
|
||||
// for each workspace integration, sync/push secrets
|
||||
// to that integration
|
||||
for await (const integration of integrations) {
|
||||
// get workspace, environment (shared) secrets
|
||||
const secrets = await BotService.getSecrets({
|
||||
workspaceId: integration.workspace,
|
||||
environment: integration.environment,
|
||||
secretPath: integration.secretPath,
|
||||
isActive: true,
|
||||
app: { $ne: null },
|
||||
});
|
||||
|
||||
// get workspace, environment (shared) secrets comments
|
||||
const secretComments = await BotService.getSecretComments({
|
||||
workspaceId: integration.workspace,
|
||||
environment: integration.environment,
|
||||
secretPath: integration.secretPath,
|
||||
})
|
||||
// for each workspace integration, sync/push secrets
|
||||
// to that integration
|
||||
for await (const integration of integrations) {
|
||||
// get workspace, environment (shared) secrets
|
||||
const secrets = await BotService.getSecrets({
|
||||
workspaceId: integration.workspace,
|
||||
environment: integration.environment,
|
||||
secretPath: integration.secretPath,
|
||||
});
|
||||
|
||||
const integrationAuth = await IntegrationAuth.findById(
|
||||
integration.integrationAuth
|
||||
);
|
||||
if (!integrationAuth) throw new Error("Failed to find integration auth");
|
||||
// get workspace, environment (shared) secrets comments
|
||||
const secretComments = await BotService.getSecretComments({
|
||||
workspaceId: integration.workspace,
|
||||
environment: integration.environment,
|
||||
secretPath: integration.secretPath,
|
||||
})
|
||||
|
||||
// get integration auth access token
|
||||
const access = await getIntegrationAuthAccessHelper({
|
||||
integrationAuthId: integration.integrationAuth,
|
||||
});
|
||||
const integrationAuth = await IntegrationAuth.findById(
|
||||
integration.integrationAuth
|
||||
);
|
||||
|
||||
// sync secrets to integration
|
||||
await syncSecrets({
|
||||
integration,
|
||||
integrationAuth,
|
||||
secrets,
|
||||
accessId: access.accessId === undefined ? null : access.accessId,
|
||||
accessToken: access.accessToken,
|
||||
secretComments
|
||||
});
|
||||
if (!integrationAuth) throw new Error("Failed to find integration auth");
|
||||
|
||||
// get integration auth access token
|
||||
const access = await getIntegrationAuthAccessHelper({
|
||||
integrationAuthId: integration.integrationAuth,
|
||||
});
|
||||
|
||||
// sync secrets to integration
|
||||
await syncSecrets({
|
||||
integration,
|
||||
integrationAuth,
|
||||
secrets,
|
||||
accessId: access.accessId === undefined ? null : access.accessId,
|
||||
accessToken: access.accessToken,
|
||||
secretComments
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
Sentry.captureException(err);
|
||||
console.log(`syncIntegrationsHelper: failed with [workspaceId=${workspaceId}] [environment=${environment}]`, err) // eslint-disable-line no-use-before-define
|
||||
throw err
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -14,6 +14,9 @@ import {
|
||||
licenseKeyRequest,
|
||||
licenseServerKeyRequest,
|
||||
} from "../config/request";
|
||||
import {
|
||||
createBotOrg
|
||||
} from "./botOrg";
|
||||
|
||||
/**
|
||||
* Create an organization with name [name]
|
||||
@ -29,6 +32,7 @@ export const createOrganization = async ({
|
||||
name: string;
|
||||
email: string;
|
||||
}) => {
|
||||
|
||||
const licenseServerKey = await getLicenseServerKey();
|
||||
let organization;
|
||||
|
||||
@ -52,6 +56,12 @@ export const createOrganization = async ({
|
||||
}).save();
|
||||
}
|
||||
|
||||
// initialize bot for organization
|
||||
await createBotOrg({
|
||||
name,
|
||||
organizationId: organization._id
|
||||
});
|
||||
|
||||
return organization;
|
||||
};
|
||||
|
||||
|
@ -44,6 +44,7 @@ import { EELogService, EESecretService } from "../ee/services";
|
||||
import { getAuthDataPayloadIdObj, getAuthDataPayloadUserObj } from "../utils/auth";
|
||||
import { getFolderIdFromServiceToken } from "../services/FolderService";
|
||||
import picomatch from "picomatch";
|
||||
import path from "path";
|
||||
|
||||
export const isValidScope = (
|
||||
authPayload: IServiceTokenData,
|
||||
@ -60,6 +61,13 @@ export const isValidScope = (
|
||||
return Boolean(validScope);
|
||||
};
|
||||
|
||||
export function containsGlobPatterns(secretPath: string) {
|
||||
const globChars = ["*", "?", "[", "]", "{", "}", "**"];
|
||||
const normalizedPath = path.normalize(secretPath);
|
||||
return globChars.some(char => normalizedPath.includes(char));
|
||||
}
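A quick illustration of the helper above; the paths are made-up examples, and the expected results follow directly from the glob-character check:

// containsGlobPatterns flags any path containing a glob character.
containsGlobPatterns("/app/secrets");     // false
containsGlobPatterns("/app/**/secrets");  // true
containsGlobPatterns("/app/secret-[ab]"); // true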
|
||||
|
||||
|
||||
/**
|
||||
* Returns an object containing secret [secret] but with its value, key, comment decrypted.
|
||||
*
|
||||
|
@ -5,6 +5,10 @@ import {
|
||||
Secret,
|
||||
Workspace,
|
||||
} from "../models";
|
||||
import {
|
||||
IPType,
|
||||
TrustedIP
|
||||
} from "../ee/models";
|
||||
import { createBot } from "../helpers/bot";
|
||||
import { EELicenseService } from "../ee/services";
|
||||
import { SecretService } from "../services";
|
||||
@ -40,6 +44,26 @@ export const createWorkspace = async ({
|
||||
await SecretService.createSecretBlindIndexData({
|
||||
workspaceId: workspace._id,
|
||||
});
|
||||
|
||||
// initialize default trusted IPv4 CIDR - 0.0.0.0/0
|
||||
await new TrustedIP({
|
||||
workspace: workspace._id,
|
||||
ipAddress: "0.0.0.0",
|
||||
type: IPType.IPV4,
|
||||
prefix: 0,
|
||||
isActive: true,
|
||||
comment: ""
|
||||
}).save()
|
||||
|
||||
// initialize default trusted IPv6 CIDR - ::/0
|
||||
await new TrustedIP({
|
||||
workspace: workspace._id,
|
||||
ipAddress: "::",
|
||||
type: IPType.IPV6,
|
||||
prefix: 0,
|
||||
isActive: true,
|
||||
comment: ""
|
||||
});
|
||||
|
||||
await EELicenseService.refreshPlan(organizationId);
|
||||
|
||||
|
@ -19,9 +19,11 @@ import {
|
||||
action as eeActionRouter,
|
||||
cloudProducts as eeCloudProductsRouter,
|
||||
organizations as eeOrganizationsRouter,
|
||||
sso as eeSSORouter,
|
||||
secret as eeSecretRouter,
|
||||
secretSnapshot as eeSecretSnapshotRouter,
|
||||
workspace as eeWorkspaceRouter
|
||||
users as eeUsersRouter,
|
||||
workspace as eeWorkspaceRouter,
|
||||
} from "./ee/routes/v1";
|
||||
import {
|
||||
auth as v1AuthRouter,
|
||||
@ -34,6 +36,7 @@ import {
|
||||
membership as v1MembershipRouter,
|
||||
organization as v1OrganizationRouter,
|
||||
password as v1PasswordRouter,
|
||||
secretImport as v1SecretImportRouter,
|
||||
secret as v1SecretRouter,
|
||||
secretScanning as v1SecretScanningRouter,
|
||||
secretsFolder as v1SecretsFolder,
|
||||
@ -41,22 +44,21 @@ import {
|
||||
signup as v1SignupRouter,
|
||||
userAction as v1UserActionRouter,
|
||||
user as v1UserRouter,
|
||||
workspace as v1WorkspaceRouter,
|
||||
webhooks as v1WebhooksRouter,
|
||||
secretImport as v1SecretImportRouter
|
||||
workspace as v1WorkspaceRouter
|
||||
} from "./routes/v1";
|
||||
import {
|
||||
auth as v2AuthRouter,
|
||||
environment as v2EnvironmentRouter,
|
||||
organizations as v2OrganizationsRouter,
|
||||
signup as v2SignupRouter,
|
||||
users as v2UsersRouter,
|
||||
workspace as v2WorkspaceRouter,
|
||||
secret as v2SecretRouter, // begin to phase out
|
||||
secrets as v2SecretsRouter,
|
||||
serviceTokenData as v2ServiceTokenDataRouter,
|
||||
serviceAccounts as v2ServiceAccountsRouter,
|
||||
environment as v2EnvironmentRouter,
|
||||
tags as v2TagsRouter
|
||||
serviceTokenData as v2ServiceTokenDataRouter,
|
||||
signup as v2SignupRouter,
|
||||
tags as v2TagsRouter,
|
||||
users as v2UsersRouter,
|
||||
workspace as v2WorkspaceRouter,
|
||||
} from "./routes/v2";
|
||||
import {
|
||||
auth as v3AuthRouter,
|
||||
@ -81,6 +83,7 @@ const main = async () => {
|
||||
const app = express();
|
||||
app.enable("trust proxy");
|
||||
app.use(express.json());
|
||||
app.use(express.urlencoded({ extended: false }));
|
||||
app.use(cookieParser());
|
||||
app.use(
|
||||
cors({
|
||||
@ -127,9 +130,11 @@ const main = async () => {
|
||||
// (EE) routes
|
||||
app.use("/api/v1/secret", eeSecretRouter);
|
||||
app.use("/api/v1/secret-snapshot", eeSecretSnapshotRouter);
|
||||
app.use("/api/v1/users", eeUsersRouter);
|
||||
app.use("/api/v1/workspace", eeWorkspaceRouter);
|
||||
app.use("/api/v1/action", eeActionRouter);
|
||||
app.use("/api/v1/organizations", eeOrganizationsRouter);
|
||||
app.use("/api/v1/sso", eeSSORouter);
|
||||
app.use("/api/v1/cloud-products", eeCloudProductsRouter);
|
||||
|
||||
// v1 routes (default)
|
||||
|
@ -1,16 +1,21 @@
|
||||
import { Octokit } from "@octokit/rest";
|
||||
import { IIntegrationAuth } from "../models";
|
||||
import { standardRequest } from "../config/request";
|
||||
import {
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_BITBUCKET,
|
||||
INTEGRATION_BITBUCKET_API_URL,
|
||||
INTEGRATION_CHECKLY,
|
||||
INTEGRATION_CHECKLY_API_URL,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_CIRCLECI_API_URL,
|
||||
INTEGRATION_CLOUDFLARE_PAGES,
|
||||
INTEGRATION_CLOUDFLARE_PAGES_API_URL,
|
||||
INTEGRATION_CLOUD_66,
|
||||
INTEGRATION_CLOUD_66_API_URL,
|
||||
INTEGRATION_CODEFRESH,
|
||||
INTEGRATION_CODEFRESH_API_URL,
|
||||
INTEGRATION_DIGITAL_OCEAN_API_URL,
|
||||
INTEGRATION_DIGITAL_OCEAN_APP_PLATFORM,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_FLYIO_API_URL,
|
||||
INTEGRATION_GITHUB,
|
||||
@ -22,21 +27,26 @@ import {
|
||||
INTEGRATION_LARAVELFORGE_API_URL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_NETLIFY_API_URL,
|
||||
INTEGRATION_NORTHFLANK,
|
||||
INTEGRATION_NORTHFLANK_API_URL,
|
||||
INTEGRATION_RAILWAY,
|
||||
INTEGRATION_RAILWAY_API_URL,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_RENDER_API_URL,
|
||||
INTEGRATION_SUPABASE,
|
||||
INTEGRATION_SUPABASE_API_URL,
|
||||
INTEGRATION_TERRAFORM_CLOUD,
|
||||
INTEGRATION_TERRAFORM_CLOUD_API_URL,
|
||||
INTEGRATION_TRAVISCI,
|
||||
INTEGRATION_TRAVISCI_API_URL,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_VERCEL_API_URL,
|
||||
INTEGRATION_CODEFRESH,
|
||||
INTEGRATION_CODEFRESH_API_URL,
|
||||
INTEGRATION_WINDMILL,
|
||||
INTEGRATION_WINDMILL_API_URL,
|
||||
} from "../variables";
|
||||
import { IIntegrationAuth } from "../models";
|
||||
import { Octokit } from "@octokit/rest";
|
||||
import { standardRequest } from "../config/request";
|
||||
|
||||
interface App {
|
||||
name: string;
|
||||
@ -58,11 +68,13 @@ const getApps = async ({
|
||||
accessToken,
|
||||
accessId,
|
||||
teamId,
|
||||
workspaceSlug,
|
||||
}: {
|
||||
integrationAuth: IIntegrationAuth;
|
||||
accessToken: string;
|
||||
accessId?: string;
|
||||
teamId?: string;
|
||||
workspaceSlug?: string;
|
||||
}) => {
|
||||
let apps: App[] = [];
|
||||
switch (integrationAuth.integration) {
|
||||
@ -128,6 +140,12 @@ const getApps = async ({
|
||||
serverId: accessId
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_TERRAFORM_CLOUD:
|
||||
apps = await getAppsTerraformCloud({
|
||||
accessToken,
|
||||
workspacesId: accessId,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_TRAVISCI:
|
||||
apps = await getAppsTravisCI({
|
||||
accessToken,
|
||||
@ -147,7 +165,18 @@ const getApps = async ({
|
||||
apps = await getAppsCloudflarePages({
|
||||
accessToken,
|
||||
accountId: accessId
|
||||
})
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_NORTHFLANK:
|
||||
apps = await getAppsNorthflank({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_BITBUCKET:
|
||||
apps = await getAppsBitBucket({
|
||||
accessToken,
|
||||
workspaceSlug
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_CODEFRESH:
|
||||
apps = await getAppsCodefresh({
|
||||
@ -156,6 +185,16 @@ const getApps = async ({
|
||||
break;
|
||||
case INTEGRATION_WINDMILL:
|
||||
apps = await getAppsWindmill({
|
||||
accessToken
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_DIGITAL_OCEAN_APP_PLATFORM:
|
||||
apps = await getAppsDigitalOceanAppPlatform({
|
||||
accessToken
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_CLOUD_66:
|
||||
apps = await getAppsCloud66({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
@ -546,6 +585,43 @@ const getAppsTravisCI = async ({ accessToken }: { accessToken: string }) => {
|
||||
return apps;
|
||||
};
|
||||
|
||||
/**
|
||||
* Return list of projects for Terraform Cloud integration
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.accessToken - access token for Terraform Cloud API
|
||||
* @param {String} obj.workspacesId - workspace id of Terraform Cloud projects
|
||||
* @returns {Object[]} apps - names and ids of Terraform Cloud projects
|
||||
 * @returns {String} apps.name - name of Terraform Cloud project
|
||||
*/
|
||||
const getAppsTerraformCloud = async ({
|
||||
accessToken,
|
||||
workspacesId
|
||||
}: {
|
||||
accessToken: string;
|
||||
workspacesId?: string;
|
||||
}) => {
|
||||
const res = (
|
||||
await standardRequest.get(`${INTEGRATION_TERRAFORM_CLOUD_API_URL}/api/v2/workspaces/${workspacesId}`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
Accept: "application/json",
|
||||
},
|
||||
})
|
||||
).data.data;
|
||||
|
||||
const apps = []
|
||||
|
||||
const appsObj = {
|
||||
name: res?.attributes.name,
|
||||
appId: res?.id,
|
||||
};
|
||||
|
||||
apps.push(appsObj)
|
||||
|
||||
return apps;
|
||||
};
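Given the single-workspace request above, the helper resolves to a one-element list; a hedged sketch of the resulting shape (values are illustrative):

// Illustrative return value of getAppsTerraformCloud.
const terraformApps: { name: string; appId: string }[] = [
  { name: "my-workspace", appId: "ws-abc123" }
];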
|
||||
|
||||
|
||||
/**
|
||||
* Return list of repositories for GitLab integration
|
||||
* @param {Object} obj
|
||||
@ -735,6 +811,113 @@ const getAppsCloudflarePages = async ({
|
||||
return apps;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return list of repositories for the BitBucket integration based on provided BitBucket workspace
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.accessToken - access token for BitBucket API
|
||||
* @param {String} obj.workspaceSlug - Workspace identifier for fetching BitBucket repositories
|
||||
* @returns {Object[]} apps - BitBucket repositories
|
||||
* @returns {String} apps.name - name of BitBucket repository
|
||||
*/
|
||||
const getAppsBitBucket = async ({
|
||||
accessToken,
|
||||
workspaceSlug,
|
||||
}: {
|
||||
accessToken: string;
|
||||
workspaceSlug?: string;
|
||||
}) => {
|
||||
interface RepositoriesResponse {
|
||||
size: number;
|
||||
page: number;
|
||||
pageLen: number;
|
||||
next: string;
|
||||
previous: string;
|
||||
values: Array<Repository>;
|
||||
}
|
||||
|
||||
interface Repository {
|
||||
type: string;
|
||||
uuid: string;
|
||||
name: string;
|
||||
is_private: boolean;
|
||||
created_on: string;
|
||||
updated_on: string;
|
||||
}
|
||||
|
||||
if (!workspaceSlug) {
|
||||
return []
|
||||
}
|
||||
|
||||
const repositories: Repository[] = [];
|
||||
let hasNextPage = true;
|
||||
let repositoriesUrl = `${INTEGRATION_BITBUCKET_API_URL}/2.0/repositories/${workspaceSlug}`
|
||||
|
||||
while (hasNextPage) {
|
||||
const { data }: { data: RepositoriesResponse } = await standardRequest.get(
|
||||
repositoriesUrl,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept": "application/json",
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
if (data?.values.length > 0) {
|
||||
data.values.forEach((repository) => {
|
||||
repositories.push(repository)
|
||||
})
|
||||
}
|
||||
|
||||
if (data.next) {
|
||||
repositoriesUrl = data.next
|
||||
} else {
|
||||
hasNextPage = false
|
||||
}
|
||||
}
|
||||
|
||||
const apps = repositories.map((repository) => {
|
||||
return {
|
||||
name: repository.name,
|
||||
appId: repository.uuid,
|
||||
};
|
||||
});
|
||||
return apps;
|
||||
}
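The cursor-style pagination above also appears in the BitBucket secret sync later in this diff; a generic sketch of the same pattern, assuming every paginated BitBucket response exposes values and next, and reusing the standardRequest client already imported in this file:

// Generic BitBucket pagination sketch (assumes responses expose `values` and `next`).
const paginateBitBucket = async <T>(startUrl: string, accessToken: string): Promise<T[]> => {
  const results: T[] = [];
  let url: string | undefined = startUrl;
  while (url) {
    const { data } = await standardRequest.get(url, {
      headers: { Authorization: `Bearer ${accessToken}`, Accept: "application/json" }
    });
    results.push(...(data?.values ?? []));
    url = data.next; // falsy once the last page has been fetched
  }
  return results;
};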
|
||||
|
||||
/** Return list of projects for Northflank integration
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.accessToken - access token for Northflank API
|
||||
* @returns {Object[]} apps - names of Northflank apps
|
||||
* @returns {String} apps.name - name of Northflank app
|
||||
*/
|
||||
const getAppsNorthflank = async ({ accessToken }: { accessToken: string }) => {
|
||||
const {
|
||||
data: {
|
||||
data: {
|
||||
projects
|
||||
}
|
||||
}
|
||||
} = await standardRequest.get(
|
||||
`${INTEGRATION_NORTHFLANK_API_URL}/v1/projects`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept-Encoding": "application/json",
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
const apps = projects.map((a: any) => {
|
||||
return {
|
||||
name: a.name,
|
||||
appId: a.id
|
||||
};
|
||||
});
|
||||
|
||||
return apps;
|
||||
};
|
||||
|
||||
/**
|
||||
* Return list of projects for Supabase integration
|
||||
* @param {Object} obj
|
||||
@ -742,7 +925,6 @@ const getAppsCloudflarePages = async ({
|
||||
* @returns {Object[]} apps - names of Supabase apps
|
||||
* @returns {String} apps.name - name of Supabase app
|
||||
*/
|
||||
|
||||
const getAppsCodefresh = async ({
|
||||
accessToken,
|
||||
}: {
|
||||
@ -862,9 +1044,109 @@ const getAppsWindmill = async ({ accessToken }: { accessToken: string }) => {
|
||||
appId: a.id,
|
||||
};
|
||||
});
|
||||
|
||||
return apps;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return list of applications for DigitalOcean App Platform integration
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.accessToken - personal access token for DigitalOcean
|
||||
* @returns {Object[]} apps - names of DigitalOcean apps
|
||||
* @returns {String} apps.name - name of DigitalOcean app
|
||||
* @returns {String} apps.appId - id of DigitalOcean app
|
||||
*/
|
||||
const getAppsDigitalOceanAppPlatform = async ({ accessToken }: { accessToken: string }) => {
|
||||
interface DigitalOceanApp {
|
||||
id: string;
|
||||
owner_uuid: string;
|
||||
spec: Spec;
|
||||
}
|
||||
|
||||
interface Spec {
|
||||
name: string;
|
||||
region: string;
|
||||
envs: Env[];
|
||||
}
|
||||
|
||||
interface Env {
|
||||
key: string;
|
||||
value: string;
|
||||
scope: string;
|
||||
}
|
||||
|
||||
const res = (
|
||||
await standardRequest.get(`${INTEGRATION_DIGITAL_OCEAN_API_URL}/v2/apps`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept-Encoding": "application/json"
|
||||
}
|
||||
})
|
||||
).data;
|
||||
|
||||
return (res.apps ?? []).map((a: DigitalOceanApp) => ({
|
||||
name: a.spec.name,
|
||||
appId: a.id
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Return list of applications for Cloud66 integration
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.accessToken - personal access token for Cloud66 API
|
||||
* @returns {Object[]} apps - Cloud66 apps
|
||||
* @returns {String} apps.name - name of Cloud66 app
|
||||
* @returns {String} apps.appId - uid of Cloud66 app
|
||||
*/
|
||||
const getAppsCloud66 = async ({ accessToken }: { accessToken: string }) => {
|
||||
interface Cloud66Apps {
|
||||
uid: string;
|
||||
name: string;
|
||||
account_id: number;
|
||||
git: string;
|
||||
git_branch: string;
|
||||
environment: string;
|
||||
cloud: string;
|
||||
fqdn: string;
|
||||
language: string;
|
||||
framework: string;
|
||||
status: number;
|
||||
health: number;
|
||||
last_activity: string;
|
||||
last_activity_iso: string;
|
||||
maintenance_mode: boolean;
|
||||
has_loadbalancer: boolean;
|
||||
created_at: string;
|
||||
updated_at: string;
|
||||
deploy_directory: string;
|
||||
cloud_status: string;
|
||||
backend: string;
|
||||
version: string;
|
||||
revision: string;
|
||||
is_busy: boolean;
|
||||
account_name: string;
|
||||
is_cluster: boolean;
|
||||
is_inside_cluster: boolean;
|
||||
cluster_name: any;
|
||||
application_address: string;
|
||||
configstore_namespace: string;
|
||||
}
|
||||
|
||||
const stacks = (
|
||||
await standardRequest.get(`${INTEGRATION_CLOUD_66_API_URL}/3/stacks`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept-Encoding": "application/json"
|
||||
}
|
||||
})
|
||||
).data.response as Cloud66Apps[]
|
||||
|
||||
const apps = stacks.map((app) => ({
|
||||
name: app.name,
|
||||
appId: app.uid
|
||||
}));
|
||||
|
||||
return apps;
|
||||
};
|
||||
|
||||
|
||||
export { getApps };
|
||||
|
@ -2,6 +2,8 @@ import { standardRequest } from "../config/request";
|
||||
import {
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_AZURE_TOKEN_URL,
|
||||
INTEGRATION_BITBUCKET,
|
||||
INTEGRATION_BITBUCKET_TOKEN_URL,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_GITHUB_TOKEN_URL,
|
||||
INTEGRATION_GITLAB,
|
||||
@ -15,11 +17,13 @@ import {
|
||||
} from "../variables";
|
||||
import {
|
||||
getClientIdAzure,
|
||||
getClientIdBitBucket,
|
||||
getClientIdGitHub,
|
||||
getClientIdGitLab,
|
||||
getClientIdNetlify,
|
||||
getClientIdVercel,
|
||||
getClientSecretAzure,
|
||||
getClientSecretBitBucket,
|
||||
getClientSecretGitHub,
|
||||
getClientSecretGitLab,
|
||||
getClientSecretHeroku,
|
||||
@ -78,6 +82,15 @@ interface ExchangeCodeGitlabResponse {
|
||||
created_at: number;
|
||||
}
|
||||
|
||||
interface ExchangeCodeBitBucketResponse {
|
||||
access_token: string;
|
||||
token_type: string;
|
||||
expires_in: number;
|
||||
refresh_token: string;
|
||||
scopes: string;
|
||||
state: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return [accessToken], [accessExpiresAt], and [refreshToken] for OAuth2
|
||||
* code-token exchange for integration named [integration]
|
||||
@ -129,6 +142,12 @@ const exchangeCode = async ({
|
||||
obj = await exchangeCodeGitlab({
|
||||
code,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_BITBUCKET:
|
||||
obj = await exchangeCodeBitBucket({
|
||||
code,
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
return obj;
|
||||
@ -347,4 +366,43 @@ const exchangeCodeGitlab = async ({ code }: { code: string }) => {
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Return [accessToken], [accessExpiresAt], and [refreshToken] for BitBucket
|
||||
* code-token exchange
|
||||
* @param {Object} obj1
|
||||
 * @param {String} obj1.code - code for code-token exchange
|
||||
* @returns {Object} obj2
|
||||
* @returns {String} obj2.accessToken - access token for BitBucket API
|
||||
* @returns {String} obj2.refreshToken - refresh token for BitBucket API
|
||||
* @returns {Date} obj2.accessExpiresAt - date of expiration for access token
|
||||
*/
|
||||
const exchangeCodeBitBucket = async ({ code }: { code: string }) => {
|
||||
const accessExpiresAt = new Date();
|
||||
const res: ExchangeCodeBitBucketResponse = (
|
||||
await standardRequest.post(
|
||||
INTEGRATION_BITBUCKET_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: "authorization_code",
|
||||
code: code,
|
||||
client_id: await getClientIdBitBucket(),
|
||||
client_secret: await getClientSecretBitBucket(),
|
||||
redirect_uri: `${await getSiteURL()}/integrations/bitbucket/oauth2/callback`,
|
||||
} as any),
|
||||
{
|
||||
headers: {
|
||||
"Accept-Encoding": "application/json",
|
||||
},
|
||||
}
|
||||
)
|
||||
).data;
|
||||
|
||||
accessExpiresAt.setSeconds(accessExpiresAt.getSeconds() + res.expires_in);
|
||||
|
||||
return {
|
||||
accessToken: res.access_token,
|
||||
refreshToken: res.refresh_token,
|
||||
accessExpiresAt,
|
||||
};
|
||||
};
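For orientation, the token response consumed above has roughly this shape; the field names come from ExchangeCodeBitBucketResponse and every value here is a placeholder:

// Placeholder BitBucket token response matching ExchangeCodeBitBucketResponse.
const exampleBitBucketTokenResponse: ExchangeCodeBitBucketResponse = {
  access_token: "<access-token>",
  token_type: "bearer",
  expires_in: 7200,
  refresh_token: "<refresh-token>",
  scopes: "<granted-scopes>",
  state: "<state>"
};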
|
||||
|
||||
export { exchangeCode };
|
||||
|
@ -2,6 +2,8 @@ import { standardRequest } from "../config/request";
|
||||
import { IIntegrationAuth } from "../models";
|
||||
import {
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_BITBUCKET,
|
||||
INTEGRATION_BITBUCKET_TOKEN_URL,
|
||||
INTEGRATION_GITLAB,
|
||||
INTEGRATION_HEROKU,
|
||||
} from "../variables";
|
||||
@ -13,8 +15,10 @@ import {
|
||||
import { IntegrationService } from "../services";
|
||||
import {
|
||||
getClientIdAzure,
|
||||
getClientIdBitBucket,
|
||||
getClientIdGitLab,
|
||||
getClientSecretAzure,
|
||||
getClientSecretBitBucket,
|
||||
getClientSecretGitLab,
|
||||
getClientSecretHeroku,
|
||||
getSiteURL,
|
||||
@ -46,6 +50,15 @@ interface RefreshTokenGitLabResponse {
|
||||
created_at: number;
|
||||
}
|
||||
|
||||
interface RefreshTokenBitBucketResponse {
|
||||
access_token: string;
|
||||
token_type: string;
|
||||
expires_in: number;
|
||||
refresh_token: string;
|
||||
scopes: string;
|
||||
state: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return new access token by exchanging refresh token [refreshToken] for integration
|
||||
* named [integration]
|
||||
@ -83,6 +96,11 @@ const exchangeRefresh = async ({
|
||||
refreshToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_BITBUCKET:
|
||||
tokenDetails = await exchangeRefreshBitBucket({
|
||||
refreshToken,
|
||||
});
|
||||
break;
|
||||
default:
|
||||
throw new Error("Failed to exchange token for incompatible integration");
|
||||
}
|
||||
@ -218,4 +236,46 @@ const exchangeRefreshGitLab = async ({
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Return new access token by exchanging refresh token [refreshToken] for the
|
||||
* BitBucket integration
|
||||
* @param {Object} obj
|
||||
* @param {String} obj.refreshToken - refresh token to use to get new access token for BitBucket
|
||||
 * @returns {Object} - new accessToken, refreshToken, and accessExpiresAt for BitBucket
|
||||
*/
|
||||
const exchangeRefreshBitBucket = async ({
|
||||
refreshToken,
|
||||
}: {
|
||||
refreshToken: string;
|
||||
}) => {
|
||||
const accessExpiresAt = new Date();
|
||||
const {
|
||||
data,
|
||||
}: {
|
||||
data: RefreshTokenBitBucketResponse;
|
||||
} = await standardRequest.post(
|
||||
INTEGRATION_BITBUCKET_TOKEN_URL,
|
||||
new URLSearchParams({
|
||||
grant_type: "refresh_token",
|
||||
refresh_token: refreshToken,
|
||||
client_id: await getClientIdBitBucket(),
|
||||
client_secret: await getClientSecretBitBucket(),
|
||||
redirect_uri: `${await getSiteURL()}/integrations/bitbucket/oauth2/callback`,
|
||||
} as any),
|
||||
{
|
||||
headers: {
|
||||
"Accept-Encoding": "application/json",
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
accessExpiresAt.setSeconds(accessExpiresAt.getSeconds() + data.expires_in);
|
||||
|
||||
return {
|
||||
accessToken: data.access_token,
|
||||
refreshToken: data.refresh_token,
|
||||
accessExpiresAt,
|
||||
};
|
||||
};
|
||||
|
||||
export { exchangeRefresh };
|
||||
|
@ -18,7 +18,6 @@ const revokeAccess = async ({
|
||||
integrationAuth: IIntegrationAuth;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
let deletedIntegrationAuth;
|
||||
// add any integration-specific revocation logic
|
||||
switch (integrationAuth.integration) {
|
||||
case INTEGRATION_HEROKU:
|
||||
@ -33,7 +32,7 @@ const revokeAccess = async ({
|
||||
break;
|
||||
}
|
||||
|
||||
deletedIntegrationAuth = await IntegrationAuth.findOneAndDelete({
|
||||
const deletedIntegrationAuth = await IntegrationAuth.findOneAndDelete({
|
||||
_id: integrationAuth._id,
|
||||
});
|
||||
|
||||
|
@ -1,25 +1,29 @@
|
||||
import _ from "lodash";
|
||||
import AWS from "aws-sdk";
|
||||
import {
|
||||
CreateSecretCommand,
|
||||
GetSecretValueCommand,
|
||||
ResourceNotFoundException,
|
||||
SecretsManagerClient,
|
||||
UpdateSecretCommand,
|
||||
UpdateSecretCommand
|
||||
} from "@aws-sdk/client-secrets-manager";
|
||||
import { Octokit } from "@octokit/rest";
|
||||
import sodium from "libsodium-wrappers";
|
||||
import { IIntegration, IIntegrationAuth } from "../models";
|
||||
import {
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_BITBUCKET,
|
||||
INTEGRATION_BITBUCKET_API_URL,
|
||||
INTEGRATION_CHECKLY,
|
||||
INTEGRATION_CHECKLY_API_URL,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_CIRCLECI_API_URL,
|
||||
INTEGRATION_CLOUDFLARE_PAGES,
|
||||
INTEGRATION_CLOUDFLARE_PAGES_API_URL,
|
||||
INTEGRATION_CLOUD_66,
|
||||
INTEGRATION_CLOUD_66_API_URL,
|
||||
INTEGRATION_CODEFRESH,
|
||||
INTEGRATION_CODEFRESH_API_URL,
|
||||
INTEGRATION_DIGITAL_OCEAN_API_URL,
|
||||
INTEGRATION_DIGITAL_OCEAN_APP_PLATFORM,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_FLYIO_API_URL,
|
||||
INTEGRATION_GITHUB,
|
||||
@ -32,23 +36,28 @@ import {
|
||||
INTEGRATION_LARAVELFORGE_API_URL,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_NETLIFY_API_URL,
|
||||
INTEGRATION_NORTHFLANK,
|
||||
INTEGRATION_NORTHFLANK_API_URL,
|
||||
INTEGRATION_RAILWAY,
|
||||
INTEGRATION_RAILWAY_API_URL,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_RENDER_API_URL,
|
||||
INTEGRATION_SUPABASE,
|
||||
INTEGRATION_SUPABASE_API_URL,
|
||||
INTEGRATION_TERRAFORM_CLOUD,
|
||||
INTEGRATION_TERRAFORM_CLOUD_API_URL,
|
||||
INTEGRATION_TRAVISCI,
|
||||
INTEGRATION_TRAVISCI_API_URL,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_VERCEL_API_URL,
|
||||
INTEGRATION_CODEFRESH,
|
||||
INTEGRATION_CODEFRESH_API_URL,
|
||||
INTEGRATION_WINDMILL,
|
||||
INTEGRATION_WINDMILL_API_URL,
|
||||
} from "../variables";
|
||||
import AWS from "aws-sdk";
|
||||
import { Octokit } from "@octokit/rest";
|
||||
import _ from "lodash";
|
||||
import sodium from "libsodium-wrappers";
|
||||
import { standardRequest } from "../config/request";
|
||||
import { handleAuthProviderCallback } from "../controllers/v1/authController";
|
||||
|
||||
/**
|
||||
* Sync/push [secrets] to [app] in integration named [integration]
|
||||
@ -186,34 +195,6 @@ const syncSecrets = async ({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_FLYIO:
|
||||
await syncSecretsFlyio({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_CIRCLECI:
|
||||
await syncSecretsCircleCI({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_TRAVISCI:
|
||||
await syncSecretsTravisCI({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_SUPABASE:
|
||||
await syncSecretsSupabase({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_CHECKLY:
|
||||
await syncSecretsCheckly({
|
||||
integration,
|
||||
@ -221,6 +202,13 @@ const syncSecrets = async ({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_TERRAFORM_CLOUD:
|
||||
await syncSecretsTerraformCloud({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_HASHICORP_VAULT:
|
||||
await syncSecretsHashiCorpVault({
|
||||
integration,
|
||||
@ -245,6 +233,34 @@ const syncSecrets = async ({
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_BITBUCKET:
|
||||
await syncSecretsBitBucket({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_DIGITAL_OCEAN_APP_PLATFORM:
|
||||
await syncSecretsDigitalOceanAppPlatform({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken,
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_CLOUD_66:
|
||||
await syncSecretsCloud66({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_NORTHFLANK:
|
||||
await syncSecretsNorthflank({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken
|
||||
});
|
||||
break;
|
||||
case INTEGRATION_WINDMILL:
|
||||
await syncSecretsWindmill({
|
||||
integration,
|
||||
@ -720,8 +736,6 @@ const syncSecretsVercel = async ({
|
||||
return true;
|
||||
});
|
||||
|
||||
// return secret.target.includes(integration.targetEnvironment);
|
||||
|
||||
const res: { [key: string]: VercelSecret } = {};
|
||||
|
||||
for await (const vercelSecret of vercelSecrets) {
|
||||
@ -1857,6 +1871,106 @@ const syncSecretsCheckly = async ({
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Sync/push [secrets] to Terraform Cloud project with id [integration.appId]
|
||||
* @param {Object} obj
|
||||
* @param {IIntegration} obj.integration - integration details
|
||||
* @param {Object} obj.secrets - secrets to push to integration (object where keys are secret keys and values are secret values)
|
||||
* @param {String} obj.accessToken - access token for Terraform Cloud API
|
||||
*/
|
||||
const syncSecretsTerraformCloud = async ({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken,
|
||||
}: {
|
||||
integration: IIntegration;
|
||||
secrets: any;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
// get secrets from Terraform Cloud
|
||||
const getSecretsRes = (
|
||||
await standardRequest.get(`${INTEGRATION_TERRAFORM_CLOUD_API_URL}/api/v2/workspaces/${integration.appId}/vars`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
Accept: "application/json",
|
||||
},
|
||||
}
|
||||
))
|
||||
.data
|
||||
.data
|
||||
.reduce((obj: any, secret: any) => ({
|
||||
...obj,
|
||||
[secret.attributes.key]: secret
|
||||
}), {});
|
||||
|
||||
// create or update secrets on Terraform Cloud
|
||||
for await (const key of Object.keys(secrets)) {
|
||||
if (!(key in getSecretsRes)) {
|
||||
// case: secret does not exist in Terraform Cloud
|
||||
// -> add secret
|
||||
await standardRequest.post(
|
||||
`${INTEGRATION_TERRAFORM_CLOUD_API_URL}/api/v2/workspaces/${integration.appId}/vars`,
|
||||
{
|
||||
data: {
|
||||
type: "vars",
|
||||
attributes: {
|
||||
key,
|
||||
value: secrets[key],
|
||||
category: integration.targetService,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Content-Type": "application/vnd.api+json",
|
||||
Accept: "application/vnd.api+json",
|
||||
},
|
||||
}
|
||||
);
|
||||
} else {
|
||||
// case: secret exists in Terraform Cloud
|
||||
if (secrets[key] !== getSecretsRes[key].attributes.value) {
|
||||
// -> update secret
|
||||
await standardRequest.patch(
|
||||
`${INTEGRATION_TERRAFORM_CLOUD_API_URL}/api/v2/workspaces/${integration.appId}/vars/${getSecretsRes[key].id}`,
|
||||
{
|
||||
data: {
|
||||
type: "vars",
|
||||
id: getSecretsRes[key].id,
|
||||
attributes: {
|
||||
...getSecretsRes[key],
|
||||
value: secrets[key]
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Content-Type": "application/vnd.api+json",
|
||||
Accept: "application/vnd.api+json",
|
||||
},
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for await (const key of Object.keys(getSecretsRes)) {
|
||||
if (!(key in secrets)) {
|
||||
// case: delete secret
|
||||
await standardRequest.delete(`${INTEGRATION_TERRAFORM_CLOUD_API_URL}/api/v2/workspaces/${integration.appId}/vars/${getSecretsRes[key].id}`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Content-Type": "application/vnd.api+json",
|
||||
Accept: "application/vnd.api+json",
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
};
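The create and update requests above wrap each variable in Terraform Cloud's JSON:API envelope; an illustrative create payload for one variable, with category taken from integration.targetService as in the code (key and value are placeholders):

// Illustrative create payload for a single Terraform Cloud variable.
const createVarPayload = {
  data: {
    type: "vars",
    attributes: {
      key: "EXAMPLE_KEY",      // placeholder key
      value: "example-value",  // placeholder value
      category: "env"          // integration.targetService in the code above
    }
  }
};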
|
||||
|
||||
/**
|
||||
* Sync/push [secrets] to HashiCorp Vault path
|
||||
* @param {Object} obj
|
||||
@ -1949,7 +2063,7 @@ const syncSecretsCloudflarePages = async ({
|
||||
}
|
||||
)
|
||||
)
|
||||
.data.result['deployment_configs'][integration.targetEnvironment]['env_vars'];
|
||||
.data.result["deployment_configs"][integration.targetEnvironment]["env_vars"];
|
||||
|
||||
// copy the secrets object, so we can set deleted keys to null
|
||||
const secretsObj: any = { ...secrets };
|
||||
@ -1989,9 +2103,125 @@ const syncSecretsCloudflarePages = async ({
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync/push [secrets] to Codefresh with name [integration.app]
|
||||
* Sync/push [secrets] to BitBucket repo with name [integration.app]
|
||||
* @param {Object} obj
|
||||
* @param {IIntegration} obj.integration - integration details
|
||||
* @param {IIntegrationAuth} obj.integrationAuth - integration auth details
|
||||
* @param {Object} obj.secrets - secrets to push to integration (object where keys are secret keys and values are secret values)
|
||||
* @param {String} obj.accessToken - access token for BitBucket integration
|
||||
*/
|
||||
const syncSecretsBitBucket = async ({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken,
|
||||
}: {
|
||||
integration: IIntegration;
|
||||
secrets: any;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
interface VariablesResponse {
|
||||
size: number;
|
||||
page: number;
|
||||
pageLen: number;
|
||||
next: string;
|
||||
previous: string;
|
||||
values: Array<BitbucketVariable>;
|
||||
}
|
||||
|
||||
interface BitbucketVariable {
|
||||
type: string;
|
||||
uuid: string;
|
||||
key: string;
|
||||
value: string;
|
||||
secured: boolean;
|
||||
}
|
||||
|
||||
const res: { [key: string]: BitbucketVariable } = {};
|
||||
|
||||
let hasNextPage = true;
|
||||
let variablesUrl = `${INTEGRATION_BITBUCKET_API_URL}/2.0/repositories/${integration.targetEnvironmentId}/${integration.appId}/pipelines_config/variables`
|
||||
|
||||
while (hasNextPage) {
|
||||
const { data }: { data: VariablesResponse } = await standardRequest.get(
|
||||
variablesUrl,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept": "application/json",
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
if (data?.values.length > 0) {
|
||||
data.values.forEach((variable) => {
|
||||
res[variable.key] = variable;
|
||||
});
|
||||
}
|
||||
|
||||
if (data.next) {
|
||||
variablesUrl = data.next
|
||||
} else {
|
||||
hasNextPage = false
|
||||
}
|
||||
}
|
||||
|
||||
for await (const key of Object.keys(secrets)) {
|
||||
if (key in res) {
|
||||
// update existing secret
|
||||
await standardRequest.put(
|
||||
`${variablesUrl}/${res[key].uuid}`,
|
||||
{
|
||||
key,
|
||||
value: secrets[key],
|
||||
secured: true
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept": "application/json",
|
||||
},
|
||||
}
|
||||
);
|
||||
} else {
|
||||
// create new secret
|
||||
await standardRequest.post(
|
||||
variablesUrl,
|
||||
{
|
||||
key,
|
||||
value: secrets[key],
|
||||
secured: true
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept": "application/json",
|
||||
},
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
for await (const key of Object.keys(res)) {
|
||||
if (!(key in secrets)) {
|
||||
// delete secret
|
||||
await standardRequest.delete(
|
||||
`${variablesUrl}/${res[key].uuid}`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept": "application/json",
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
}
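Each upsert above sends a small JSON body to the pipelines_config variables endpoint; an illustrative body for one secured variable (key and value are placeholders):

// Illustrative body for creating or updating one secured BitBucket pipeline variable.
const bitbucketVariableBody = {
  key: "EXAMPLE_KEY",   // placeholder key
  value: "<secret>",    // placeholder value
  secured: true         // mirrors the `secured: true` flag used above
};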
|
||||
|
||||
/**
|
||||
* Sync/push [secrets] to Codefresh project with name [integration.app]
|
||||
* @param {Object} obj
|
||||
* @param {IIntegration} obj.integration - integration details
|
||||
* @param {IIntegrationAuth} obj.integrationAuth - integration auth details
|
||||
* @param {Object} obj.secrets - secrets to push to integration (object where keys are secret keys and values are secret values)
|
||||
* @param {String} obj.accessToken - access token for Codefresh integration
|
||||
*/
|
||||
@ -2021,6 +2251,40 @@ const syncSecretsCodefresh = async ({
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* Sync/push [secrets] to DigitalOcean App Platform application with name [integration.app]
|
||||
* @param {Object} obj
|
||||
* @param {IIntegration} obj.integration - integration details
|
||||
* @param {IIntegrationAuth} obj.integrationAuth - integration auth details
|
||||
* @param {Object} obj.secrets - secrets to push to integration (object where keys are secret keys and values are secret values)
|
||||
* @param {String} obj.accessToken - access token for integration
|
||||
*/
|
||||
const syncSecretsDigitalOceanAppPlatform = async ({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken
|
||||
}: {
|
||||
integration: IIntegration;
|
||||
secrets: any;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
await standardRequest.put(
|
||||
`${INTEGRATION_DIGITAL_OCEAN_API_URL}/v2/apps/${integration.appId}`,
|
||||
{
|
||||
spec: {
|
||||
name: integration.app,
|
||||
envs: Object.entries(secrets).map(([key, value]) => ({ key, value }))
|
||||
}
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
Accept: "application/json"
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync/push [secrets] to Windmill with name [integration.app]
|
||||
* @param {Object} obj
|
||||
@ -2067,7 +2331,8 @@ const syncSecretsWindmill = async ({
|
||||
{}
|
||||
);
|
||||
|
||||
const pattern = /^(u\/|f\/)[a-zA-Z0-9_-]+\/([a-zA-Z0-9_-]+\/)*[a-zA-Z0-9_-]*[^\/]$/;
|
||||
// eslint-disable-next-line no-useless-escape
|
||||
const pattern = new RegExp("^(u\/|f\/)[a-zA-Z0-9_-]+\/([a-zA-Z0-9_-]+\/)*[a-zA-Z0-9_-]*[^\/]$");
|
||||
|
||||
for await (const key of Object.keys(secrets)) {
|
||||
if((key.startsWith("u/") || key.startsWith("f/")) && pattern.test(key)) {
|
||||
@ -2108,7 +2373,7 @@ const syncSecretsWindmill = async ({
|
||||
}
|
||||
);
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
for await (const key of Object.keys(res)) {
|
||||
@ -2126,6 +2391,140 @@ const syncSecretsWindmill = async ({
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync/push [secrets] to Cloud66 application with name [integration.app]
|
||||
* @param {Object} obj
|
||||
* @param {IIntegration} obj.integration - integration details
|
||||
* @param {IIntegrationAuth} obj.integrationAuth - integration auth details
|
||||
* @param {Object} obj.secrets - secrets to push to integration (object where keys are secret keys and values are secret values)
|
||||
* @param {String} obj.accessToken - access token for Cloud66 integration
|
||||
*/
|
||||
const syncSecretsCloud66 = async ({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken
|
||||
}: {
|
||||
integration: IIntegration;
|
||||
secrets: any;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
|
||||
interface Cloud66Secret {
|
||||
id: number;
|
||||
key: string;
|
||||
value: string;
|
||||
readonly: boolean;
|
||||
created_at: string;
|
||||
updated_at: string;
|
||||
is_password: boolean;
|
||||
is_generated: boolean;
|
||||
history: any[];
|
||||
}
|
||||
|
||||
// get all current secrets
|
||||
const res = (
|
||||
await standardRequest.get(
|
||||
`${INTEGRATION_CLOUD_66_API_URL}/3/stacks/${integration.appId}/environments`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
Accept: "application/json"
|
||||
}
|
||||
}
|
||||
)
|
||||
)
|
||||
.data
|
||||
.response
|
||||
.filter((secret: Cloud66Secret) => !secret.readonly || !secret.is_generated)
|
||||
.reduce(
|
||||
(obj: any, secret: any) => ({
|
||||
...obj,
|
||||
[secret.key]: secret
|
||||
}),
|
||||
{}
|
||||
);
|
||||
|
||||
for await (const key of Object.keys(secrets)) {
|
||||
if (key in res) {
|
||||
// update existing secret
|
||||
await standardRequest.put(
|
||||
`${INTEGRATION_CLOUD_66_API_URL}/3/stacks/${integration.appId}/environments/${key}`,
|
||||
{
|
||||
key,
|
||||
value: secrets[key]
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
Accept: "application/json"
|
||||
}
|
||||
}
|
||||
);
|
||||
} else {
|
||||
// create new secret
|
||||
await standardRequest.post(
|
||||
`${INTEGRATION_CLOUD_66_API_URL}/3/stacks/${integration.appId}/environments`,
|
||||
{
|
||||
key,
|
||||
value: secrets[key]
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
Accept: "application/json"
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
for await (const key of Object.keys(res)) {
|
||||
if (!(key in secrets)) {
|
||||
// delete secret
|
||||
await standardRequest.delete(
|
||||
`${INTEGRATION_CLOUD_66_API_URL}/3/stacks/${integration.appId}/environments/${key}`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
Accept: "application/json"
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/** Sync/push [secrets] to Northflank
|
||||
* @param {Object} obj
|
||||
* @param {IIntegration} obj.integration - integration details
|
||||
* @param {Object} obj.secrets - secrets to push to integration (object where keys are secret keys and values are secret values)
|
||||
* @param {String} obj.accessToken - access token for Northflank integration
|
||||
*/
|
||||
const syncSecretsNorthflank = async ({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken
|
||||
}: {
|
||||
integration: IIntegration;
|
||||
secrets: any;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
await standardRequest.patch(
|
||||
`${INTEGRATION_NORTHFLANK_API_URL}/v1/projects/${integration.appId}/secrets/${integration.targetServiceId}`,
|
||||
{
|
||||
secrets: {
|
||||
variables: secrets
|
||||
}
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept-Encoding": "application/json"
|
||||
}
|
||||
}
|
||||
);
|
||||
};
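The Northflank sync above replaces the whole variable set in a single PATCH; an illustrative payload (entries are placeholders):

// Illustrative PATCH body for the Northflank secrets endpoint used above.
const northflankPatchBody = {
  secrets: {
    variables: { EXAMPLE_KEY: "<secret>", ANOTHER_KEY: "<secret>" } // placeholder entries
  }
};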
|
||||
|
||||
export { syncSecrets };
|
||||
|
@ -1,4 +1,5 @@
|
||||
import jwt from "jsonwebtoken";
|
||||
import { Types } from "mongoose";
|
||||
import { NextFunction, Request, Response } from "express";
|
||||
import {
|
||||
getAuthAPIKeyPayload,
|
||||
@ -51,6 +52,10 @@ const requireAuth = ({
|
||||
});
|
||||
|
||||
let authPayload: IUser | IServiceAccount | IServiceTokenData;
|
||||
let authUserPayload: {
|
||||
user: IUser;
|
||||
tokenVersionId: Types.ObjectId;
|
||||
};
|
||||
switch (authMode) {
|
||||
case AUTH_MODE_SERVICE_ACCOUNT:
|
||||
authPayload = await getAuthSAAKPayload({
|
||||
@ -71,12 +76,12 @@ const requireAuth = ({
|
||||
req.user = authPayload;
|
||||
break;
|
||||
default:
|
||||
const { user, tokenVersionId } = await getAuthUserPayload({
|
||||
authUserPayload = await getAuthUserPayload({
|
||||
authTokenValue,
|
||||
});
|
||||
authPayload = user;
|
||||
req.user = user;
|
||||
req.tokenVersionId = tokenVersionId;
|
||||
authPayload = authUserPayload.user;
|
||||
req.user = authUserPayload.user;
|
||||
req.tokenVersionId = authUserPayload.tokenVersionId;
|
||||
break;
|
||||
}
|
||||
|
||||
|
@ -18,6 +18,7 @@ const requireWorkspaceAuth = ({
|
||||
requiredPermissions = [],
|
||||
requireBlindIndicesEnabled = false,
|
||||
requireE2EEOff = false,
|
||||
checkIPAllowlist = false
|
||||
}: {
|
||||
acceptedRoles: Array<"admin" | "member">;
|
||||
locationWorkspaceId: req;
|
||||
@ -25,6 +26,7 @@ const requireWorkspaceAuth = ({
|
||||
requiredPermissions?: string[];
|
||||
requireBlindIndicesEnabled?: boolean;
|
||||
requireE2EEOff?: boolean;
|
||||
checkIPAllowlist?: boolean;
|
||||
}) => {
|
||||
return async (req: Request, res: Response, next: NextFunction) => {
|
||||
const workspaceId = req[locationWorkspaceId]?.workspaceId;
|
||||
@ -39,6 +41,7 @@ const requireWorkspaceAuth = ({
|
||||
requiredPermissions,
|
||||
requireBlindIndicesEnabled,
|
||||
requireE2EEOff,
|
||||
checkIPAllowlist
|
||||
});
|
||||
|
||||
if (membership) {
|
||||
|
backend/src/models/botOrg.ts (new file, 98 lines)
@ -0,0 +1,98 @@
|
||||
import { Schema, Types, model } from "mongoose";
|
||||
import {
|
||||
ALGORITHM_AES_256_GCM,
|
||||
ENCODING_SCHEME_BASE64,
|
||||
ENCODING_SCHEME_UTF8,
|
||||
} from "../variables";
|
||||
|
||||
export interface IBotOrg {
|
||||
_id: Types.ObjectId;
|
||||
name: string;
|
||||
organization: Types.ObjectId;
|
||||
publicKey: string;
|
||||
encryptedSymmetricKey: string;
|
||||
symmetricKeyIV: string;
|
||||
symmetricKeyTag: string;
|
||||
symmetricKeyAlgorithm: "aes-256-gcm";
|
||||
symmetricKeyKeyEncoding: "base64" | "utf8";
|
||||
encryptedPrivateKey: string;
|
||||
privateKeyIV: string;
|
||||
privateKeyTag: string;
|
||||
privateKeyAlgorithm: "aes-256-gcm";
|
||||
privateKeyKeyEncoding: "base64" | "utf8";
|
||||
}
|
||||
|
||||
const botOrgSchema = new Schema<IBotOrg>(
|
||||
{
|
||||
name: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
organization: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: "Organization",
|
||||
required: true,
|
||||
},
|
||||
publicKey: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
encryptedSymmetricKey: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
symmetricKeyIV: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
symmetricKeyTag: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
symmetricKeyAlgorithm: {
|
||||
type: String,
|
||||
enum: [ALGORITHM_AES_256_GCM],
|
||||
required: true
|
||||
},
|
||||
symmetricKeyKeyEncoding: {
|
||||
type: String,
|
||||
enum: [
|
||||
ENCODING_SCHEME_UTF8,
|
||||
ENCODING_SCHEME_BASE64,
|
||||
],
|
||||
required: true
|
||||
},
|
||||
encryptedPrivateKey: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
privateKeyIV: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
privateKeyTag: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
privateKeyAlgorithm: {
|
||||
type: String,
|
||||
enum: [ALGORITHM_AES_256_GCM],
|
||||
required: true
|
||||
},
|
||||
privateKeyKeyEncoding: {
|
||||
type: String,
|
||||
enum: [
|
||||
ENCODING_SCHEME_UTF8,
|
||||
ENCODING_SCHEME_BASE64,
|
||||
],
|
||||
required: true
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
}
|
||||
);
|
||||
|
||||
const BotOrg = model<IBotOrg>("BotOrg", botOrgSchema);
|
||||
|
||||
export default BotOrg;
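A brief usage sketch for the new model, mirroring the lookup performed by getSymmetricKeyHelper earlier in this diff; the import path assumes the default export above:

// Hypothetical lookup of an organization's bot via the new BotOrg model.
import { Types } from "mongoose";
import BotOrg from "../models/botOrg";

const findOrgBot = async (organizationId: Types.ObjectId) =>
  BotOrg.findOne({ organization: organizationId });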
|
@ -1,5 +1,6 @@
|
||||
import BackupPrivateKey, { IBackupPrivateKey } from "./backupPrivateKey";
|
||||
import Bot, { IBot } from "./bot";
|
||||
import BotOrg, { IBotOrg } from "./botOrg";
|
||||
import BotKey, { IBotKey } from "./botKey";
|
||||
import IncidentContactOrg, { IIncidentContactOrg } from "./incidentContactOrg";
|
||||
import Integration, { IIntegration } from "./integration";
|
||||
@ -31,6 +32,8 @@ export {
|
||||
IBackupPrivateKey,
|
||||
Bot,
|
||||
IBot,
|
||||
BotOrg,
|
||||
IBotOrg,
|
||||
BotKey,
|
||||
IBotKey,
|
||||
IncidentContactOrg,
|
||||
|
@ -1,11 +1,14 @@
|
||||
import { Schema, Types, model } from "mongoose";
|
||||
import {
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_BITBUCKET,
|
||||
INTEGRATION_CHECKLY,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_CLOUDFLARE_PAGES,
|
||||
INTEGRATION_CLOUD_66,
|
||||
INTEGRATION_CODEFRESH,
|
||||
INTEGRATION_DIGITAL_OCEAN_APP_PLATFORM,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_GITLAB,
|
||||
@ -13,14 +16,16 @@ import {
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_LARAVELFORGE,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_NORTHFLANK,
|
||||
INTEGRATION_RAILWAY,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_SUPABASE,
|
||||
INTEGRATION_TERRAFORM_CLOUD,
|
||||
INTEGRATION_TRAVISCI,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_CODEFRESH,
|
||||
INTEGRATION_WINDMILL,
|
||||
INTEGRATION_WINDMILL
|
||||
} from "../variables";
|
||||
import { Schema, Types, model } from "mongoose";
|
||||
|
||||
export interface IIntegration {
|
||||
_id: Types.ObjectId;
|
||||
@ -55,9 +60,14 @@ export interface IIntegration {
|
||||
| "travisci"
|
||||
| "supabase"
|
||||
| "checkly"
|
||||
| "terraform-cloud"
|
||||
| "hashicorp-vault"
|
||||
| "cloudflare-pages"
|
||||
| "bitbucket"
|
||||
| "codefresh"
|
||||
| "digital-ocean-app-platform"
|
||||
| "cloud-66"
|
||||
| "northflank"
|
||||
| "windmill";
|
||||
integrationAuth: Types.ObjectId;
|
||||
}
|
||||
@ -146,10 +156,15 @@ const integrationSchema = new Schema<IIntegration>(
|
||||
INTEGRATION_TRAVISCI,
|
||||
INTEGRATION_SUPABASE,
|
||||
INTEGRATION_CHECKLY,
|
||||
INTEGRATION_TERRAFORM_CLOUD,
|
||||
INTEGRATION_HASHICORP_VAULT,
|
||||
INTEGRATION_CLOUDFLARE_PAGES,
|
||||
INTEGRATION_CODEFRESH,
|
||||
INTEGRATION_WINDMILL,
|
||||
INTEGRATION_BITBUCKET,
|
||||
INTEGRATION_DIGITAL_OCEAN_APP_PLATFORM,
|
||||
INTEGRATION_CLOUD_66,
|
||||
INTEGRATION_NORTHFLANK
|
||||
],
|
||||
required: true,
|
||||
},
|
||||
@ -162,7 +177,7 @@ const integrationSchema = new Schema<IIntegration>(
|
||||
type: String,
|
||||
required: true,
|
||||
default: "/",
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
|
@ -1,4 +1,3 @@
|
||||
import { Document, Schema, Types, model } from "mongoose";
|
||||
import {
|
||||
ALGORITHM_AES_256_GCM,
|
||||
ENCODING_SCHEME_BASE64,
|
||||
@ -6,8 +5,12 @@ import {
|
||||
INTEGRATION_AWS_PARAMETER_STORE,
|
||||
INTEGRATION_AWS_SECRET_MANAGER,
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_BITBUCKET,
|
||||
INTEGRATION_CIRCLECI,
|
||||
INTEGRATION_CLOUDFLARE_PAGES,
|
||||
INTEGRATION_CLOUD_66,
|
||||
INTEGRATION_CODEFRESH,
|
||||
INTEGRATION_DIGITAL_OCEAN_APP_PLATFORM,
|
||||
INTEGRATION_FLYIO,
|
||||
INTEGRATION_GITHUB,
|
||||
INTEGRATION_GITLAB,
|
||||
@ -15,19 +18,45 @@ import {
|
||||
INTEGRATION_HEROKU,
|
||||
INTEGRATION_LARAVELFORGE,
|
||||
INTEGRATION_NETLIFY,
|
||||
INTEGRATION_NORTHFLANK,
|
||||
INTEGRATION_RAILWAY,
|
||||
INTEGRATION_RENDER,
|
||||
INTEGRATION_SUPABASE,
|
||||
INTEGRATION_TERRAFORM_CLOUD,
|
||||
INTEGRATION_TRAVISCI,
|
||||
INTEGRATION_VERCEL,
|
||||
INTEGRATION_CODEFRESH,
|
||||
INTEGRATION_WINDMILL
|
||||
} from "../variables";
|
||||
import { Document, Schema, Types, model } from "mongoose";
|
||||
|
||||
export interface IIntegrationAuth extends Document {
|
||||
_id: Types.ObjectId;
|
||||
workspace: Types.ObjectId;
|
||||
integration: 'heroku' | 'vercel' | 'netlify' | 'github' | 'gitlab' | 'render' | 'railway' | 'flyio' | 'azure-key-vault' | 'laravel-forge' | 'circleci' | 'travisci' | 'supabase' | 'aws-parameter-store' | 'aws-secret-manager' | 'checkly' | 'cloudflare-pages' | 'windmill' | 'codefresh';
|
||||
integration:
|
||||
| "heroku"
|
||||
| "vercel"
|
||||
| "netlify"
|
||||
| "github"
|
||||
| "gitlab"
|
||||
| "render"
|
||||
| "railway"
|
||||
| "flyio"
|
||||
| "azure-key-vault"
|
||||
| "laravel-forge"
|
||||
| "circleci"
|
||||
| "travisci"
|
||||
| "supabase"
|
||||
| "aws-parameter-store"
|
||||
| "aws-secret-manager"
|
||||
| "checkly"
|
||||
| "cloudflare-pages"
|
||||
| "codefresh"
|
||||
| "digital-ocean-app-platform"
|
||||
| "bitbucket"
|
||||
| "cloud-66"
|
||||
| "terraform-cloud"
|
||||
| "northflank"
|
||||
| "windmill";
|
||||
teamId: string;
|
||||
accountId: string;
|
||||
url: string;
|
||||
@ -71,10 +100,15 @@ const integrationAuthSchema = new Schema<IIntegrationAuth>(
|
||||
INTEGRATION_LARAVELFORGE,
|
||||
INTEGRATION_TRAVISCI,
|
||||
INTEGRATION_SUPABASE,
|
||||
INTEGRATION_TERRAFORM_CLOUD,
|
||||
INTEGRATION_HASHICORP_VAULT,
|
||||
INTEGRATION_CLOUDFLARE_PAGES,
|
||||
INTEGRATION_CODEFRESH,
|
||||
INTEGRATION_WINDMILL,
|
||||
INTEGRATION_BITBUCKET,
|
||||
INTEGRATION_DIGITAL_OCEAN_APP_PLATFORM,
|
||||
INTEGRATION_CLOUD_66,
|
||||
INTEGRATION_NORTHFLANK
|
||||
],
|
||||
required: true,
|
||||
},
|
||||
|
@ -1,7 +1,9 @@
|
||||
import { Document, Schema, Types, model } from "mongoose";
|
||||
|
||||
export enum AuthProvider {
|
||||
EMAIL = "email",
|
||||
GOOGLE = "google",
|
||||
OKTA_SAML = "okta-saml"
|
||||
}
|
||||
|
||||
export interface IUser extends Document {
|
||||
|
@ -1,7 +1,6 @@
|
||||
import express from "express";
|
||||
const router = express.Router();
|
||||
import { body } from "express-validator";
|
||||
import passport from "passport";
|
||||
import { requireAuth, validateRequest } from "../../middleware";
|
||||
import { authController } from "../../controllers/v1";
|
||||
import { authLimiter } from "../../helpers/rateLimiter";
|
||||
@ -44,21 +43,6 @@ router.post(
|
||||
authController.checkAuth
|
||||
);
|
||||
|
||||
router.get(
|
||||
"/redirect/google",
|
||||
authLimiter,
|
||||
passport.authenticate("google", {
|
||||
scope: ["profile", "email"],
|
||||
session: false,
|
||||
}),
|
||||
);
|
||||
|
||||
router.get(
|
||||
"/callback/google",
|
||||
passport.authenticate("google", { failureRedirect: "/login/provider/error", session: false }),
|
||||
authController.handleAuthProviderCallback,
|
||||
);
|
||||
|
||||
router.get(
|
||||
"/common-passwords",
|
||||
authLimiter,
|
||||
|
@ -81,6 +81,7 @@ router.get(
|
||||
}),
|
||||
param("integrationAuthId"),
|
||||
query("teamId"),
|
||||
query("workspaceSlug"),
|
||||
validateRequest,
|
||||
integrationAuthController.getIntegrationAuthApps
|
||||
);
|
||||
@ -141,6 +142,33 @@ router.get(
|
||||
integrationAuthController.getIntegrationAuthRailwayServices
|
||||
);
|
||||
|
||||
router.get(
|
||||
"/:integrationAuthId/bitbucket/workspaces",
|
||||
requireAuth({
|
||||
acceptedAuthModes: [AUTH_MODE_JWT],
|
||||
}),
|
||||
requireIntegrationAuthorizationAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
}),
|
||||
param("integrationAuthId").exists().isString(),
|
||||
validateRequest,
|
||||
integrationAuthController.getIntegrationAuthBitBucketWorkspaces
|
||||
);
|
||||
|
||||
router.get(
|
||||
"/:integrationAuthId/northflank/secret-groups",
|
||||
requireAuth({
|
||||
acceptedAuthModes: [AUTH_MODE_JWT],
|
||||
}),
|
||||
requireIntegrationAuthorizationAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
}),
|
||||
param("integrationAuthId").exists().isString(),
|
||||
query("appId").exists().isString(),
|
||||
validateRequest,
|
||||
integrationAuthController.getIntegrationAuthNorthflankSecretGroups
|
||||
);
|
||||
|
||||
router.delete(
|
||||
"/:integrationAuthId",
|
||||
requireAuth({
|
||||
|
@ -46,7 +46,7 @@ router.delete(
|
||||
body("secretImportPath").isString().exists().trim(),
|
||||
body("secretImportEnv").isString().exists().trim(),
|
||||
validateRequest,
|
||||
secretImportController.updateSecretImport
|
||||
secretImportController.deleteSecretImport
|
||||
);
|
||||
|
||||
router.get(
|
||||
|
@ -2,7 +2,7 @@ import express from "express";
|
||||
const router = express.Router();
|
||||
import { requireAuth, requireWorkspaceAuth, validateRequest } from "../../middleware";
|
||||
import { body, param, query } from "express-validator";
|
||||
import { ADMIN, AUTH_MODE_JWT, AUTH_MODE_SERVICE_ACCOUNT, MEMBER } from "../../variables";
|
||||
import { ADMIN, AUTH_MODE_JWT, MEMBER } from "../../variables";
|
||||
import { webhookController } from "../../controllers/v1";
|
||||
|
||||
router.post(
|
||||
|
@ -10,6 +10,9 @@ import {
|
||||
AUTH_MODE_API_KEY,
|
||||
AUTH_MODE_JWT,
|
||||
} from "../../variables";
|
||||
import {
|
||||
AuthProvider
|
||||
} from "../../models";
|
||||
|
||||
router.get(
|
||||
"/me",
|
||||
@ -34,11 +37,25 @@ router.patch(
|
||||
requireAuth({
|
||||
acceptedAuthModes: [AUTH_MODE_JWT, AUTH_MODE_API_KEY],
|
||||
}),
|
||||
body("firstName").exists(),
|
||||
body("firstName").exists().isString(),
|
||||
body("lastName").isString(),
|
||||
validateRequest,
|
||||
usersController.updateName
|
||||
);
|
||||
|
||||
router.patch(
|
||||
"/me/auth-provider",
|
||||
requireAuth({
|
||||
acceptedAuthModes: [AUTH_MODE_JWT, AUTH_MODE_API_KEY],
|
||||
}),
|
||||
body("authProvider").exists().isString().isIn([
|
||||
AuthProvider.EMAIL,
|
||||
AuthProvider.GOOGLE
|
||||
]),
|
||||
validateRequest,
|
||||
usersController.updateAuthProvider
|
||||
);
|
||||
|
||||
router.get(
|
||||
"/me/organizations",
|
||||
requireAuth({
|
||||
@ -76,7 +93,7 @@ router.delete(
|
||||
usersController.deleteAPIKey
|
||||
);
|
||||
|
||||
router.get( // new
|
||||
router.get(
|
||||
"/me/sessions",
|
||||
requireAuth({
|
||||
acceptedAuthModes: [AUTH_MODE_JWT],
|
||||
@ -84,7 +101,7 @@ router.get( // new
|
||||
usersController.getMySessions
|
||||
);
|
||||
|
||||
router.delete( // new
|
||||
router.delete(
|
||||
"/me/sessions",
|
||||
requireAuth({
|
||||
acceptedAuthModes: [AUTH_MODE_JWT],
|
||||
|
@ -18,10 +18,10 @@ import {
|
||||
|
||||
router.get(
|
||||
"/raw",
|
||||
query("workspaceId").exists().isString().trim(),
|
||||
query("environment").exists().isString().trim(),
|
||||
query("workspaceId").optional().isString().trim(),
|
||||
query("environment").optional().isString().trim(),
|
||||
query("secretPath").default("/").isString().trim(),
|
||||
query("include_imports").isBoolean().default(false),
|
||||
query("include_imports").optional().isBoolean().default(false),
|
||||
validateRequest,
|
||||
requireAuth({
|
||||
acceptedAuthModes: [
|
||||
@ -31,14 +31,6 @@ router.get(
|
||||
AUTH_MODE_SERVICE_ACCOUNT
|
||||
]
|
||||
}),
|
||||
requireWorkspaceAuth({
|
||||
acceptedRoles: [ADMIN, MEMBER],
|
||||
locationWorkspaceId: "query",
|
||||
locationEnvironment: "query",
|
||||
requiredPermissions: [PERMISSION_READ_SECRETS],
|
||||
requireBlindIndicesEnabled: true,
|
||||
requireE2EEOff: true
|
||||
}),
|
||||
secretsController.getSecretsRaw
|
||||
);
|
||||
|
||||
@ -64,7 +56,8 @@ router.get(
|
||||
locationEnvironment: "query",
|
||||
requiredPermissions: [PERMISSION_READ_SECRETS],
|
||||
requireBlindIndicesEnabled: true,
|
||||
requireE2EEOff: true
|
||||
requireE2EEOff: true,
|
||||
checkIPAllowlist: false
|
||||
}),
|
||||
secretsController.getSecretByNameRaw
|
||||
);
|
||||
@ -92,7 +85,8 @@ router.post(
|
||||
locationEnvironment: "body",
|
||||
requiredPermissions: [PERMISSION_WRITE_SECRETS],
|
||||
requireBlindIndicesEnabled: true,
|
||||
requireE2EEOff: true
|
||||
requireE2EEOff: true,
|
||||
checkIPAllowlist: false
|
||||
}),
|
||||
secretsController.createSecretRaw
|
||||
);
|
||||
@ -120,7 +114,8 @@ router.patch(
|
||||
locationEnvironment: "body",
|
||||
requiredPermissions: [PERMISSION_WRITE_SECRETS],
|
||||
requireBlindIndicesEnabled: true,
|
||||
requireE2EEOff: true
|
||||
requireE2EEOff: true,
|
||||
checkIPAllowlist: false
|
||||
}),
|
||||
secretsController.updateSecretByNameRaw
|
||||
);
|
||||
@ -147,7 +142,8 @@ router.delete(
|
||||
locationEnvironment: "body",
|
||||
requiredPermissions: [PERMISSION_WRITE_SECRETS],
|
||||
requireBlindIndicesEnabled: true,
|
||||
requireE2EEOff: true
|
||||
requireE2EEOff: true,
|
||||
checkIPAllowlist: false
|
||||
}),
|
||||
secretsController.deleteSecretByNameRaw
|
||||
);
|
||||
@ -172,7 +168,8 @@ router.get(
|
||||
locationEnvironment: "query",
|
||||
requiredPermissions: [PERMISSION_READ_SECRETS],
|
||||
requireBlindIndicesEnabled: true,
|
||||
requireE2EEOff: false
|
||||
requireE2EEOff: false,
|
||||
checkIPAllowlist: false
|
||||
}),
|
||||
secretsController.getSecrets
|
||||
);
|
||||
@ -207,7 +204,8 @@ router.post(
|
||||
locationEnvironment: "body",
|
||||
requiredPermissions: [PERMISSION_WRITE_SECRETS],
|
||||
requireBlindIndicesEnabled: true,
|
||||
requireE2EEOff: false
|
||||
requireE2EEOff: false,
|
||||
checkIPAllowlist: false
|
||||
}),
|
||||
secretsController.createSecret
|
||||
);
|
||||
@ -233,7 +231,8 @@ router.get(
|
||||
locationWorkspaceId: "query",
|
||||
locationEnvironment: "query",
|
||||
requiredPermissions: [PERMISSION_READ_SECRETS],
|
||||
requireBlindIndicesEnabled: true
|
||||
requireBlindIndicesEnabled: true,
|
||||
checkIPAllowlist: false
|
||||
}),
|
||||
secretsController.getSecretByName
|
||||
);
|
||||
@ -263,7 +262,8 @@ router.patch(
|
||||
locationEnvironment: "body",
|
||||
requiredPermissions: [PERMISSION_WRITE_SECRETS],
|
||||
requireBlindIndicesEnabled: true,
|
||||
requireE2EEOff: false
|
||||
requireE2EEOff: false,
|
||||
checkIPAllowlist: false
|
||||
}),
|
||||
secretsController.updateSecretByName
|
||||
);
|
||||
@ -290,7 +290,8 @@ router.delete(
|
||||
locationEnvironment: "body",
|
||||
requiredPermissions: [PERMISSION_WRITE_SECRETS],
|
||||
requireBlindIndicesEnabled: true,
|
||||
requireE2EEOff: false
|
||||
requireE2EEOff: false,
|
||||
checkIPAllowlist: false
|
||||
}),
|
||||
secretsController.deleteSecretByName
|
||||
);
|
||||
|
@ -21,7 +21,8 @@ router.post(
|
||||
body("salt").exists().isString().trim().notEmpty(),
|
||||
body("verifier").exists().isString().trim().notEmpty(),
|
||||
body("organizationName").exists().isString().trim().notEmpty(),
|
||||
body("providerAuthToken").isString().trim().optional({nullable: true}),
|
||||
body("providerAuthToken").isString().trim().optional({ nullable: true }),
|
||||
body("attributionSource").optional().isString().trim(),
|
||||
validateRequest,
|
||||
signupController.completeAccountSignup,
|
||||
);
|
||||
|
12
backend/src/services/BotOrgService.ts
Normal file
@ -0,0 +1,12 @@
|
||||
import { Types } from "mongoose";
|
||||
import { getSymmetricKeyHelper } from "../helpers/botOrg";
|
||||
|
||||
// TODO: DOCstrings
|
||||
|
||||
class BotOrgService {
|
||||
static async getSymmetricKey(organizationId: Types.ObjectId) {
|
||||
return await getSymmetricKeyHelper(organizationId);
|
||||
}
|
||||
}
|
||||
|
||||
export default BotOrgService;
|
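For context, a minimal usage sketch of the new BotOrgService above; the import paths, the caller name, and the error message are illustrative assumptions rather than part of the change:

```typescript
import { Types } from "mongoose";
import { BotOrgService } from "../services";

// Hypothetical caller: fetch the organization's symmetric key before
// encrypting or decrypting org-scoped data (illustrative only).
export const getOrgSymmetricKeyOrThrow = async (organizationId: string) => {
  const key = await BotOrgService.getSymmetricKey(new Types.ObjectId(organizationId));
  if (!key) throw new Error(`No organization bot key found for ${organizationId}`); // assumed failure mode
  return key;
};
```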
@ -2,6 +2,7 @@ import DatabaseService from "./DatabaseService";
|
||||
// import { logTelemetryMessage, getPostHogClient } from './TelemetryService';
|
||||
import TelemetryService from "./TelemetryService";
|
||||
import BotService from "./BotService";
|
||||
import BotOrgService from "./BotOrgService";
|
||||
import EventService from "./EventService";
|
||||
import IntegrationService from "./IntegrationService";
|
||||
import TokenService from "./TokenService";
|
||||
@ -12,9 +13,10 @@ export {
|
||||
TelemetryService,
|
||||
DatabaseService,
|
||||
BotService,
|
||||
BotOrgService,
|
||||
EventService,
|
||||
IntegrationService,
|
||||
TokenService,
|
||||
SecretService,
|
||||
GithubSecretScanningService
|
||||
};
|
||||
}
|
||||
|
1
backend/src/types/express/index.d.ts
vendored
@ -20,6 +20,7 @@ declare global {
|
||||
workspace: any;
|
||||
membership: any;
|
||||
targetMembership: any;
|
||||
isUserCompleted: boolean;
|
||||
providerAuthToken: any;
|
||||
organization: any;
|
||||
membershipOrg: any;
|
||||
|
@ -4,8 +4,6 @@ const ALGORITHM = "aes-256-gcm";
|
||||
const BLOCK_SIZE_BYTES = 16;
|
||||
|
||||
export default class AesGCM {
|
||||
constructor() {}
|
||||
|
||||
static encrypt(
|
||||
text: string,
|
||||
secret: string
|
||||
|
@ -1,11 +1,14 @@
|
||||
import express from "express";
|
||||
import passport from "passport";
|
||||
import { Types } from "mongoose";
|
||||
import { AuthData } from "../interfaces/middleware";
|
||||
import {
|
||||
AuthProvider,
|
||||
MembershipOrg,
|
||||
Organization,
|
||||
ServiceAccount,
|
||||
ServiceTokenData,
|
||||
User,
|
||||
User
|
||||
} from "../models";
|
||||
import { createToken } from "../helpers/auth";
|
||||
import {
|
||||
@ -14,11 +17,15 @@ import {
|
||||
getJwtProviderAuthLifetime,
|
||||
getJwtProviderAuthSecret,
|
||||
} from "../config";
|
||||
import { getSSOConfigHelper } from "../ee/helpers/organizations";
|
||||
import { InternalServerError, OrganizationNotFoundError } from "./errors";
|
||||
import { INVITED, MEMBER } from "../variables";
|
||||
import { getSiteURL } from "../config";
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const GoogleStrategy = require("passport-google-oauth20").Strategy;
|
||||
|
||||
// TODO: find a more optimal folder structure to store these types of functions
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const { MultiSamlStrategy } = require("@node-saml/passport-saml");
|
||||
|
||||
/**
|
||||
* Returns an object containing the id of the authentication data payload
|
||||
@ -39,7 +46,6 @@ const getAuthDataPayloadIdObj = (authData: AuthData) => {
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Returns an object containing the user associated with the authentication data payload
|
||||
* @param {AuthData} authData - authentication data object
|
||||
@ -56,7 +62,7 @@ const getAuthDataPayloadUserObj = (authData: AuthData) => {
|
||||
}
|
||||
|
||||
if (authData.authPayload instanceof ServiceTokenData) {
|
||||
return { user: authData.authPayload.user };
|
||||
return { user: authData.authPayload.user };
|
||||
}
|
||||
}
|
||||
|
||||
@ -68,47 +74,148 @@ const initializePassport = async () => {
|
||||
passReqToCallback: true,
|
||||
clientID: googleClientId,
|
||||
clientSecret: googleClientSecret,
|
||||
callbackURL: "/api/v1/auth/callback/google",
|
||||
callbackURL: "/api/v1/sso/google",
|
||||
scope: ["profile", " email"],
|
||||
}, async (
|
||||
req: express.Request,
|
||||
accessToken: string,
|
||||
refreshToken: string,
|
||||
profile: any,
|
||||
cb: any
|
||||
done: any
|
||||
) => {
|
||||
try {
|
||||
const email = profile.emails[0].value;
|
||||
const firstName = profile.name.givenName;
|
||||
const lastName = profile.name.familyName;
|
||||
|
||||
let user = await User.findOne({
|
||||
authProvider: AuthProvider.GOOGLE,
|
||||
authId: profile.id,
|
||||
}).select("+publicKey")
|
||||
email
|
||||
}).select("+publicKey");
|
||||
|
||||
if (user && user.authProvider !== AuthProvider.GOOGLE) {
|
||||
done(InternalServerError());
|
||||
}
|
||||
|
||||
if (!user) {
|
||||
user = await new User({
|
||||
email,
|
||||
authProvider: AuthProvider.GOOGLE,
|
||||
authId: profile.id,
|
||||
firstName,
|
||||
lastName
|
||||
}).save();
|
||||
}
|
||||
|
||||
const isUserCompleted = !!user.publicKey;
|
||||
const providerAuthToken = createToken({
|
||||
payload: {
|
||||
userId: user._id.toString(),
|
||||
email: user.email,
|
||||
firstName,
|
||||
lastName,
|
||||
authProvider: user.authProvider,
|
||||
isUserCompleted: !!user.publicKey,
|
||||
isUserCompleted,
|
||||
...(req.query.state ? {
|
||||
callbackPort: req.query.state as string
|
||||
} : {})
|
||||
},
|
||||
expiresIn: await getJwtProviderAuthLifetime(),
|
||||
secret: await getJwtProviderAuthSecret(),
|
||||
});
|
||||
|
||||
req.isUserCompleted = isUserCompleted;
|
||||
req.providerAuthToken = providerAuthToken;
|
||||
cb(null, profile);
|
||||
done(null, profile);
|
||||
} catch (err) {
|
||||
cb(null, false);
|
||||
done(null, false);
|
||||
}
|
||||
}));
|
||||
|
||||
passport.use("saml", new MultiSamlStrategy(
|
||||
{
|
||||
passReqToCallback: true,
|
||||
getSamlOptions: async (req: any, done: any) => {
|
||||
const { ssoIdentifier } = req.params;
|
||||
|
||||
const ssoConfig = await getSSOConfigHelper({
|
||||
ssoConfigId: new Types.ObjectId(ssoIdentifier)
|
||||
});
|
||||
|
||||
const samlConfig = ({
|
||||
path: "/api/v1/auth/callback/saml",
|
||||
callbackURL: `${await getSiteURL()}/api/v1/auth/callback/saml`,
|
||||
entryPoint: ssoConfig.entryPoint,
|
||||
issuer: ssoConfig.issuer,
|
||||
cert: ssoConfig.cert,
|
||||
audience: ssoConfig.audience
|
||||
});
|
||||
|
||||
req.ssoConfig = ssoConfig;
|
||||
|
||||
done(null, samlConfig);
|
||||
},
|
||||
},
|
||||
async (req: any, profile: any, done: any) => {
|
||||
if (!req.ssoConfig.isActive) return done(InternalServerError());
|
||||
|
||||
const organization = await Organization.findById(req.ssoConfig.organization);
|
||||
|
||||
if (!organization) return done(OrganizationNotFoundError());
|
||||
|
||||
const email = profile.email;
|
||||
const firstName = profile.firstName;
|
||||
const lastName = profile.lastName;
|
||||
|
||||
let user = await User.findOne({
|
||||
email
|
||||
}).select("+publicKey");
|
||||
|
||||
if (user && user.authProvider !== AuthProvider.OKTA_SAML) {
|
||||
done(InternalServerError());
|
||||
}
|
||||
|
||||
if (!user) {
|
||||
user = await new User({
|
||||
email,
|
||||
authProvider: AuthProvider.OKTA_SAML,
|
||||
authId: profile.id,
|
||||
firstName,
|
||||
lastName
|
||||
}).save();
|
||||
|
||||
await new MembershipOrg({
|
||||
inviteEmail: email,
|
||||
user: user._id,
|
||||
organization: organization?._id,
|
||||
role: MEMBER,
|
||||
status: INVITED
|
||||
}).save();
|
||||
}
|
||||
|
||||
const isUserCompleted = !!user.publicKey;
|
||||
const providerAuthToken = createToken({
|
||||
payload: {
|
||||
userId: user._id.toString(),
|
||||
email: user.email,
|
||||
firstName,
|
||||
lastName,
|
||||
organizationName: organization?.name,
|
||||
authProvider: user.authProvider,
|
||||
isUserCompleted,
|
||||
...(req.body.RelayState ? {
|
||||
callbackPort: req.body.RelayState as string
|
||||
} : {})
|
||||
},
|
||||
expiresIn: await getJwtProviderAuthLifetime(),
|
||||
secret: await getJwtProviderAuthSecret(),
|
||||
});
|
||||
|
||||
req.isUserCompleted = isUserCompleted;
|
||||
req.providerAuthToken = providerAuthToken;
|
||||
|
||||
done(null, profile);
|
||||
}
|
||||
));
|
||||
}
|
||||
|
||||
export {
|
||||
|
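For orientation, a sketch of the options object that `getSamlOptions` above hands back to the MultiSamlStrategy; every value below is a placeholder standing in for what an Okta setup would supply:

```typescript
// Illustrative only: the SAML options shape assembled above from the stored
// SSO config (all values are placeholders, not real endpoints or certs).
const exampleSamlOptions = {
  path: "/api/v1/auth/callback/saml",
  callbackURL: "https://app.infisical.com/api/v1/auth/callback/saml",
  entryPoint: "https://example.okta.com/app/exampleappid/sso/saml", // IDP Single Sign-On URL
  issuer: "http://www.okta.com/exampleissuer",                      // IDP Issuer
  cert: "-----BEGIN CERTIFICATE-----<redacted>-----END CERTIFICATE-----",
  audience: "https://app.infisical.com"                             // SP Entity ID
};
```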
@ -46,7 +46,7 @@ export const BadRequestError = (error?: Partial<RequestErrorContext>) => new Req
|
||||
stack: error?.stack,
|
||||
});
|
||||
|
||||
export const ResourceNotFound = (error?: Partial<RequestErrorContext>) => new RequestError({
|
||||
export const ResourceNotFoundError = (error?: Partial<RequestErrorContext>) => new RequestError({
|
||||
logLevel: error?.logLevel ?? LogLevel.INFO,
|
||||
statusCode: error?.statusCode ?? 404,
|
||||
type: error?.type ?? "resource_not_found",
|
||||
|
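A brief usage sketch of the renamed error factory above; the import path, the guard function, and the message are assumptions for illustration:

```typescript
import { ResourceNotFoundError } from "../utils/errors";

// Illustrative guard in a service/controller-style function.
const getSecretOrThrow = <T>(secret: T | null) => {
  if (!secret) {
    throw ResourceNotFoundError({ message: "Secret not found" }); // placeholder message
  }
  return secret;
};
```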
1
backend/src/utils/ip/index.ts
Normal file
@ -0,0 +1 @@
|
||||
export * from "./ip";
|
101
backend/src/utils/ip/ip.ts
Normal file
@ -0,0 +1,101 @@
|
||||
import net from "net";
|
||||
import { IPType } from "../../ee/models";
|
||||
import { InternalServerError } from "../errors";
|
||||
|
||||
/**
|
||||
* Return details of IP [ip]:
|
||||
* - If [ip] is a specific IP address then return the IPv4/IPv6 address
|
||||
* - If [ip] is a subnet then return the network IPv4/IPv6 address and prefix
|
||||
* @param {String} ip - ip whose details to return
|
||||
* @returns
|
||||
*/
|
||||
export const extractIPDetails = (ip: string) => {
|
||||
if (net.isIPv4(ip)) return ({
|
||||
ipAddress: ip,
|
||||
type: IPType.IPV4
|
||||
});
|
||||
|
||||
if (net.isIPv6(ip)) return ({
|
||||
ipAddress: ip,
|
||||
type: IPType.IPV6
|
||||
});
|
||||
|
||||
const [ipNet, prefix] = ip.split("/");
|
||||
|
||||
let type;
|
||||
switch (net.isIP(ipNet)) {
|
||||
case 4:
|
||||
type = IPType.IPV4;
|
||||
break;
|
||||
case 6:
|
||||
type = IPType.IPV6;
|
||||
break;
|
||||
default:
|
||||
throw InternalServerError({
|
||||
message: "Failed to extract IP details"
|
||||
});
|
||||
}
|
||||
|
||||
return ({
|
||||
ipAddress: ipNet,
|
||||
type,
|
||||
prefix: parseInt(prefix, 10)
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a given string is a valid CIDR block.
|
||||
*
|
||||
* The function checks if the input string is a valid IPv4 or IPv6 address in CIDR notation.
|
||||
*
|
||||
* CIDR notation includes a network address followed by a slash ('/') and a prefix length.
|
||||
* For IPv4, the prefix length must be between 0 and 32. For IPv6, it must be between 0 and 128.
|
||||
* If the input string is not a valid CIDR block, the function returns `false`.
|
||||
*
|
||||
* @param {string} cidr - string in CIDR notation
|
||||
* @returns {boolean} Returns `true` if the string is a valid CIDR block, `false` otherwise.
|
||||
*
|
||||
*/
|
||||
export const isValidCidr = (cidr: string): boolean => {
|
||||
const [ip, prefix] = cidr.split("/");
|
||||
|
||||
const prefixNum = parseInt(prefix, 10);
|
||||
|
||||
// ensure prefix exists and is a number within the appropriate range for each IP version
|
||||
if (!prefix || isNaN(prefixNum) ||
|
||||
(net.isIPv4(ip) && (prefixNum < 0 || prefixNum > 32)) ||
|
||||
(net.isIPv6(ip) && (prefixNum < 0 || prefixNum > 128))) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// ensure the IP portion of the CIDR block is a valid IPv4 or IPv6 address
|
||||
if (!net.isIPv4(ip) && !net.isIPv6(ip)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a given string is a valid IPv4/IPv6 address or a valid CIDR block.
|
||||
*
|
||||
* If the string contains a slash ('/'), it treats the input as a CIDR block and checks its validity.
|
||||
* Otherwise, it treats the string as a standalone IP address (either IPv4 or IPv6) and checks its validity.
|
||||
*
|
||||
* @param {string} input - The string to be checked. It could be an IP address or a CIDR block.
|
||||
* @returns {boolean} Returns `true` if the string is a valid IP address (either IPv4 or IPv6) or a valid CIDR block, `false` otherwise.
|
||||
*
|
||||
*/
|
||||
export const isValidIpOrCidr = (ip: string): boolean => {
|
||||
// if the string contains a slash, treat it as a CIDR block
|
||||
if (ip.includes("/")) {
|
||||
return isValidCidr(ip);
|
||||
}
|
||||
|
||||
// otherwise, treat it as a standalone IP address
|
||||
if (net.isIPv4(ip) || net.isIPv6(ip)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
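A quick sketch of how the two helpers above compose; the import path and the sample inputs are assumptions for illustration:

```typescript
import { extractIPDetails, isValidIpOrCidr } from "../utils/ip";

// Validate user-supplied entries before treating them as trusted IPs
// (sample values only).
const entries = ["192.0.2.1", "2001:db8::/32", "10.0.0.0/8", "not-an-ip"];

for (const entry of entries) {
  if (!isValidIpOrCidr(entry)) {
    console.log(`${entry}: rejected`);
    continue;
  }

  // Plain addresses come back without a prefix; CIDR blocks include the prefix length
  const details = extractIPDetails(entry);
  const suffix = "prefix" in details ? `/${details.prefix}` : "";
  console.log(`${entry}: ${details.type} ${details.ipAddress}${suffix}`);
}
```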
@ -3,13 +3,21 @@ import crypto from "crypto";
|
||||
import { Types } from "mongoose";
|
||||
import { encryptSymmetric128BitHexKeyUTF8 } from "../crypto";
|
||||
import { EESecretService } from "../../ee/services";
|
||||
import { ISecretVersion, SecretSnapshot, SecretVersion } from "../../ee/models";
|
||||
import {
|
||||
IPType,
|
||||
ISecretVersion,
|
||||
SecretSnapshot,
|
||||
SecretVersion,
|
||||
TrustedIP
|
||||
} from "../../ee/models";
|
||||
import {
|
||||
BackupPrivateKey,
|
||||
Bot,
|
||||
BotOrg,
|
||||
ISecret,
|
||||
Integration,
|
||||
IntegrationAuth,
|
||||
Organization,
|
||||
Secret,
|
||||
SecretBlindIndexData,
|
||||
ServiceTokenData,
|
||||
@ -137,6 +145,101 @@ export const backfillBots = async () => {
|
||||
await Bot.insertMany(botsToInsert);
|
||||
};
|
||||
|
||||
/**
|
||||
* Backfill organization bots to ensure that every organization has a bot
|
||||
*/
|
||||
export const backfillBotOrgs = async () => {
|
||||
const encryptionKey = await getEncryptionKey();
|
||||
const rootEncryptionKey = await getRootEncryptionKey();
|
||||
|
||||
const organizationIdsWithBot = await BotOrg.distinct("organization");
|
||||
const organizationIdsToAddBot = await Organization.distinct("_id", {
|
||||
_id: {
|
||||
$nin: organizationIdsWithBot
|
||||
}
|
||||
});
|
||||
|
||||
if (organizationIdsToAddBot.length === 0) return;
|
||||
|
||||
const botsToInsert = await Promise.all(
|
||||
organizationIdsToAddBot.map(async (organizationToAddBot) => {
|
||||
const { publicKey, privateKey } = generateKeyPair();
|
||||
|
||||
const key = client.createSymmetricKey();
|
||||
|
||||
if (rootEncryptionKey) {
|
||||
const {
|
||||
ciphertext: encryptedPrivateKey,
|
||||
iv: privateKeyIV,
|
||||
tag: privateKeyTag
|
||||
} = client.encryptSymmetric(privateKey, rootEncryptionKey);
|
||||
|
||||
const {
|
||||
ciphertext: encryptedSymmetricKey,
|
||||
iv: symmetricKeyIV,
|
||||
tag: symmetricKeyTag
|
||||
} = client.encryptSymmetric(key, rootEncryptionKey);
|
||||
|
||||
return new BotOrg({
|
||||
name: "Infisical Bot",
|
||||
organization: organizationToAddBot,
|
||||
publicKey,
|
||||
encryptedSymmetricKey,
|
||||
symmetricKeyIV,
|
||||
symmetricKeyTag,
|
||||
symmetricKeyAlgorithm: ALGORITHM_AES_256_GCM,
|
||||
symmetricKeyKeyEncoding: ENCODING_SCHEME_BASE64,
|
||||
encryptedPrivateKey,
|
||||
privateKeyIV,
|
||||
privateKeyTag,
|
||||
privateKeyAlgorithm: ALGORITHM_AES_256_GCM,
|
||||
privateKeyKeyEncoding: ENCODING_SCHEME_BASE64
|
||||
});
|
||||
} else if (encryptionKey) {
|
||||
const {
|
||||
ciphertext: encryptedPrivateKey,
|
||||
iv: privateKeyIV,
|
||||
tag: privateKeyTag
|
||||
} = encryptSymmetric128BitHexKeyUTF8({
|
||||
plaintext: privateKey,
|
||||
key: encryptionKey
|
||||
});
|
||||
|
||||
const {
|
||||
ciphertext: encryptedSymmetricKey,
|
||||
iv: symmetricKeyIV,
|
||||
tag: symmetricKeyTag
|
||||
} = encryptSymmetric128BitHexKeyUTF8({
|
||||
plaintext: key,
|
||||
key: encryptionKey
|
||||
});
|
||||
|
||||
return new BotOrg({
|
||||
name: "Infisical Bot",
|
||||
organization: organizationToAddBot,
|
||||
publicKey,
|
||||
encryptedSymmetricKey,
|
||||
symmetricKeyIV,
|
||||
symmetricKeyTag,
|
||||
symmetricKeyAlgorithm: ALGORITHM_AES_256_GCM,
|
||||
symmetricKeyKeyEncoding: ENCODING_SCHEME_UTF8,
|
||||
encryptedPrivateKey,
|
||||
privateKeyIV,
|
||||
privateKeyTag,
|
||||
privateKeyAlgorithm: ALGORITHM_AES_256_GCM,
|
||||
privateKeyKeyEncoding: ENCODING_SCHEME_UTF8
|
||||
});
|
||||
}
|
||||
|
||||
throw InternalServerError({
|
||||
message: "Failed to backfill organization bots due to missing encryption key"
|
||||
});
|
||||
})
|
||||
);
|
||||
|
||||
await BotOrg.insertMany(botsToInsert);
|
||||
};
|
||||
|
||||
/**
|
||||
* Backfill secret blind index data to ensure that every workspace
|
||||
* has a secret blind index data
|
||||
@ -452,3 +555,79 @@ export const backfillServiceTokenMultiScope = async () => {
|
||||
|
||||
console.log("Migration: Service token migration v2 complete");
|
||||
};
|
||||
|
||||
/**
|
||||
* Backfill each workspace without any registered trusted IPs to
|
||||
* have default trusted ip of 0.0.0.0/0
|
||||
*/
|
||||
export const backfillTrustedIps = async () => {
|
||||
const workspaceIdsWithTrustedIps = await TrustedIP.distinct("workspace");
|
||||
const workspaceIdsToAddTrustedIp = await Workspace.distinct("_id", {
|
||||
_id: {
|
||||
$nin: workspaceIdsWithTrustedIps
|
||||
}
|
||||
});
|
||||
|
||||
if (workspaceIdsToAddTrustedIp.length > 0) {
|
||||
const operations: {
|
||||
updateOne: {
|
||||
filter: {
|
||||
workspace: Types.ObjectId;
|
||||
ipAddress: string;
|
||||
},
|
||||
update: {
|
||||
workspace: Types.ObjectId;
|
||||
ipAddress: string;
|
||||
type: string;
|
||||
prefix: number;
|
||||
isActive: boolean;
|
||||
comment: string;
|
||||
},
|
||||
upsert: boolean;
|
||||
}
|
||||
}[] = [];
|
||||
|
||||
workspaceIdsToAddTrustedIp.forEach((workspaceId) => {
|
||||
// default IPv4 trusted CIDR
|
||||
operations.push({
|
||||
updateOne: {
|
||||
filter: {
|
||||
workspace: workspaceId,
|
||||
ipAddress: "0.0.0.0"
|
||||
},
|
||||
update: {
|
||||
workspace: workspaceId,
|
||||
ipAddress: "0.0.0.0",
|
||||
type: IPType.IPV4.toString(),
|
||||
prefix: 0,
|
||||
isActive: true,
|
||||
comment: ""
|
||||
},
|
||||
upsert: true
|
||||
}
|
||||
});
|
||||
|
||||
// default IPv6 trusted CIDR
|
||||
operations.push({
|
||||
updateOne: {
|
||||
filter: {
|
||||
workspace: workspaceId,
|
||||
ipAddress: "::"
|
||||
},
|
||||
update: {
|
||||
workspace: workspaceId,
|
||||
ipAddress: "::",
|
||||
type: IPType.IPV6.toString(),
|
||||
prefix: 0,
|
||||
isActive: true,
|
||||
comment: ""
|
||||
},
|
||||
upsert: true
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
await TrustedIP.bulkWrite(operations);
|
||||
console.log("Backfill: Trusted IPs complete");
|
||||
}
|
||||
}
|
||||
|
@ -7,6 +7,7 @@ import { createTestUserForDevelopment } from "../addDevelopmentUser";
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
import { validateEncryptionKeysConfig } from "./validateConfig";
|
||||
import {
|
||||
backfillBotOrgs,
|
||||
backfillBots,
|
||||
backfillEncryptionMetadata,
|
||||
backfillIntegration,
|
||||
@ -14,9 +15,14 @@ import {
|
||||
backfillSecretFolders,
|
||||
backfillSecretVersions,
|
||||
backfillServiceToken,
|
||||
backfillServiceTokenMultiScope
|
||||
backfillServiceTokenMultiScope,
|
||||
backfillTrustedIps
|
||||
} from "./backfillData";
|
||||
import { reencryptBotPrivateKeys, reencryptSecretBlindIndexDataSalts } from "./reencryptData";
|
||||
import {
|
||||
reencryptBotOrgKeys,
|
||||
reencryptBotPrivateKeys,
|
||||
reencryptSecretBlindIndexDataSalts
|
||||
} from "./reencryptData";
|
||||
import {
|
||||
getClientIdGoogle,
|
||||
getClientSecretGoogle,
|
||||
@ -72,16 +78,19 @@ export const setup = async () => {
|
||||
// backfilling data to catch up with new collections and updated fields
|
||||
await backfillSecretVersions();
|
||||
await backfillBots();
|
||||
await backfillBotOrgs();
|
||||
await backfillSecretBlindIndexData();
|
||||
await backfillEncryptionMetadata();
|
||||
await backfillSecretFolders();
|
||||
await backfillServiceToken();
|
||||
await backfillIntegration();
|
||||
await backfillServiceTokenMultiScope();
|
||||
await backfillTrustedIps();
|
||||
|
||||
// re-encrypt any data previously encrypted under server hex 128-bit ENCRYPTION_KEY
|
||||
// to base64 256-bit ROOT_ENCRYPTION_KEY
|
||||
await reencryptBotPrivateKeys();
|
||||
await reencryptBotOrgKeys();
|
||||
await reencryptSecretBlindIndexDataSalts();
|
||||
|
||||
// initializing Sentry
|
||||
|
@ -1,6 +1,8 @@
|
||||
import {
|
||||
Bot,
|
||||
BotOrg,
|
||||
IBot,
|
||||
IBotOrg,
|
||||
ISecretBlindIndexData,
|
||||
SecretBlindIndexData,
|
||||
} from "../../models";
|
||||
@ -17,7 +19,7 @@ import {
|
||||
} from "../../variables";
|
||||
|
||||
/**
|
||||
* Re-encrypt bot private keys from hex 128-bit ENCRYPTION_KEY
|
||||
* Re-encrypt bot private keys from under hex 128-bit ENCRYPTION_KEY
|
||||
* to base64 256-bit ROOT_ENCRYPTION_KEY
|
||||
*/
|
||||
export const reencryptBotPrivateKeys = async () => {
|
||||
@ -70,6 +72,79 @@ export const reencryptBotPrivateKeys = async () => {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Re-encrypt organization bot keys (symmetric and private) from under hex 128-bit ENCRYPTION_KEY
|
||||
* to base64 256-bit ROOT_ENCRYPTION_KEY
|
||||
*/
|
||||
export const reencryptBotOrgKeys = async () => {
|
||||
const encryptionKey = await getEncryptionKey();
|
||||
const rootEncryptionKey = await getRootEncryptionKey();
|
||||
|
||||
if (encryptionKey && rootEncryptionKey) {
|
||||
// 1: re-encrypt organization bot keys under ROOT_ENCRYPTION_KEY
|
||||
const botOrgs = await BotOrg.find({
|
||||
symmetricKeyAlgorithm: ALGORITHM_AES_256_GCM,
|
||||
symmetricKeyKeyEncoding: ENCODING_SCHEME_UTF8,
|
||||
privateKeyAlgorithm: ALGORITHM_AES_256_GCM,
|
||||
privateKeyKeyEncoding: ENCODING_SCHEME_UTF8
|
||||
}).select("+encryptedPrivateKey iv tag algorithm keyEncoding");
|
||||
|
||||
if (botOrgs.length === 0) return;
|
||||
|
||||
const operationsBotOrg = await Promise.all(
|
||||
botOrgs.map(async (botOrg: IBotOrg) => {
|
||||
const privateKey = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: botOrg.encryptedPrivateKey,
|
||||
iv: botOrg.privateKeyIV,
|
||||
tag: botOrg.privateKeyTag,
|
||||
key: encryptionKey
|
||||
});
|
||||
|
||||
const {
|
||||
ciphertext: encryptedPrivateKey,
|
||||
iv: privateKeyIV,
|
||||
tag: privateKeyTag,
|
||||
} = client.encryptSymmetric(privateKey, rootEncryptionKey);
|
||||
|
||||
const symmetricKey = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: botOrg.encryptedSymmetricKey,
|
||||
iv: botOrg.symmetricKeyIV,
|
||||
tag: botOrg.symmetricKeyTag,
|
||||
key: encryptionKey
|
||||
});
|
||||
|
||||
const {
|
||||
ciphertext: encryptedSymmetricKey,
|
||||
iv: symmetricKeyIV,
|
||||
tag: symmetricKeyTag,
|
||||
} = client.encryptSymmetric(symmetricKey, rootEncryptionKey);
|
||||
|
||||
return ({
|
||||
updateOne: {
|
||||
filter: {
|
||||
_id: botOrg._id,
|
||||
},
|
||||
update: {
|
||||
encryptedSymmetricKey,
|
||||
symmetricKeyIV,
|
||||
symmetricKeyTag,
|
||||
symmetricKeyAlgorithm: ALGORITHM_AES_256_GCM,
|
||||
symmetricKeyKeyEncoding: ENCODING_SCHEME_BASE64,
|
||||
encryptedPrivateKey,
|
||||
privateKeyIV,
|
||||
privateKeyTag,
|
||||
privateKeyAlgorithm: ALGORITHM_AES_256_GCM,
|
||||
privateKeyKeyEncoding: ENCODING_SCHEME_BASE64,
|
||||
},
|
||||
},
|
||||
})
|
||||
})
|
||||
);
|
||||
|
||||
await BotOrg.bulkWrite(operationsBotOrg);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Re-encrypt secret blind index data salts from hex 128-bit ENCRYPTION_KEY
|
||||
* to base64 256-bit ROOT_ENCRYPTION_KEY
|
||||
|
@ -1,14 +1,15 @@
|
||||
import net from "net";
|
||||
import { Types } from "mongoose";
|
||||
import {
|
||||
IServiceAccount,
|
||||
IServiceTokenData,
|
||||
IUser,
|
||||
SecretBlindIndexData,
|
||||
ServiceAccount,
|
||||
ServiceTokenData,
|
||||
User,
|
||||
Workspace,
|
||||
} from "../models";
|
||||
import {
|
||||
TrustedIP
|
||||
} from "../ee/models";
|
||||
import { validateServiceAccountClientForWorkspace } from "./serviceAccount";
|
||||
import { validateUserClientForWorkspace } from "./user";
|
||||
import { validateServiceTokenDataClientForWorkspace } from "./serviceTokenData";
|
||||
@ -24,6 +25,8 @@ import {
|
||||
AUTH_MODE_SERVICE_TOKEN,
|
||||
} from "../variables";
|
||||
import { BotService } from "../services";
|
||||
import { AuthData } from "../interfaces/middleware";
|
||||
import { extractIPDetails } from "../utils/ip";
|
||||
|
||||
/**
|
||||
* Validate authenticated clients for workspace with id [workspaceId] based
|
||||
@ -43,17 +46,16 @@ export const validateClientForWorkspace = async ({
|
||||
requiredPermissions,
|
||||
requireBlindIndicesEnabled,
|
||||
requireE2EEOff,
|
||||
checkIPAllowlist
|
||||
}: {
|
||||
authData: {
|
||||
authMode: string;
|
||||
authPayload: IUser | IServiceAccount | IServiceTokenData;
|
||||
};
|
||||
authData: AuthData;
|
||||
workspaceId: Types.ObjectId;
|
||||
environment?: string;
|
||||
acceptedRoles: Array<"admin" | "member">;
|
||||
requiredPermissions?: string[];
|
||||
requireBlindIndicesEnabled: boolean;
|
||||
requireE2EEOff: boolean;
|
||||
checkIPAllowlist: boolean;
|
||||
}) => {
|
||||
const workspace = await Workspace.findById(workspaceId);
|
||||
|
||||
@ -82,6 +84,8 @@ export const validateClientForWorkspace = async ({
|
||||
message: "Failed workspace authorization due to end-to-end encryption not being disabled",
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
|
||||
if (authData.authMode === AUTH_MODE_JWT && authData.authPayload instanceof User) {
|
||||
const membership = await validateUserClientForWorkspace({
|
||||
@ -107,6 +111,40 @@ export const validateClientForWorkspace = async ({
|
||||
}
|
||||
|
||||
if (authData.authMode === AUTH_MODE_SERVICE_TOKEN && authData.authPayload instanceof ServiceTokenData) {
|
||||
if (checkIPAllowlist) {
|
||||
const trustedIps = await TrustedIP.find({
|
||||
workspace: workspaceId
|
||||
});
|
||||
|
||||
if (trustedIps.length > 0) {
|
||||
// case: check the IP address of the inbound request against trusted IPs
|
||||
|
||||
const blockList = new net.BlockList();
|
||||
|
||||
for (const trustedIp of trustedIps) {
|
||||
if (trustedIp.prefix !== undefined) {
|
||||
blockList.addSubnet(
|
||||
trustedIp.ipAddress,
|
||||
trustedIp.prefix,
|
||||
trustedIp.type
|
||||
);
|
||||
} else {
|
||||
blockList.addAddress(
|
||||
trustedIp.ipAddress,
|
||||
trustedIp.type
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const { type } = extractIPDetails(authData.authIP);
|
||||
const check = blockList.check(authData.authIP, type);
|
||||
|
||||
if (!check) throw UnauthorizedRequestError({
|
||||
message: "Failed workspace authorization"
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
await validateServiceTokenDataClientForWorkspace({
|
||||
serviceTokenData: authData.authPayload,
|
||||
workspaceId,
|
||||
|
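For reference, a self-contained sketch of the Node `net.BlockList` pattern the trusted-IP check above relies on; the addresses and subnets are placeholder values:

```typescript
import net from "net";

// Build an allowlist of trusted addresses/subnets, then test a client IP
// against it (sample data only).
const blockList = new net.BlockList();
blockList.addAddress("192.0.2.1", "ipv4");
blockList.addSubnet("10.0.0.0", 8, "ipv4");
blockList.addSubnet("2001:db8::", 32, "ipv6");

const clientIp = "10.12.0.5";
const allowed = blockList.check(clientIp, net.isIPv6(clientIp) ? "ipv6" : "ipv4");

console.log(allowed ? "client IP is trusted" : "client IP is rejected");
```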
@ -1,5 +1,6 @@
|
||||
import {
|
||||
getClientIdAzure,
|
||||
getClientIdBitBucket,
|
||||
getClientIdGitHub,
|
||||
getClientIdGitLab,
|
||||
getClientIdHeroku,
|
||||
@ -24,10 +25,15 @@ export const INTEGRATION_CIRCLECI = "circleci";
|
||||
export const INTEGRATION_TRAVISCI = "travisci";
|
||||
export const INTEGRATION_SUPABASE = "supabase";
|
||||
export const INTEGRATION_CHECKLY = "checkly";
|
||||
export const INTEGRATION_TERRAFORM_CLOUD = "terraform-cloud";
|
||||
export const INTEGRATION_HASHICORP_VAULT = "hashicorp-vault";
|
||||
export const INTEGRATION_CLOUDFLARE_PAGES = "cloudflare-pages";
|
||||
export const INTEGRATION_BITBUCKET = "bitbucket";
|
||||
export const INTEGRATION_CODEFRESH = "codefresh";
|
||||
export const INTEGRATION_WINDMILL = "windmill";
|
||||
export const INTEGRATION_DIGITAL_OCEAN_APP_PLATFORM = "digital-ocean-app-platform";
|
||||
export const INTEGRATION_CLOUD_66 = "cloud-66";
|
||||
export const INTEGRATION_NORTHFLANK = "northflank";
|
||||
export const INTEGRATION_SET = new Set([
|
||||
INTEGRATION_AZURE_KEY_VAULT,
|
||||
INTEGRATION_HEROKU,
|
||||
@ -42,10 +48,15 @@ export const INTEGRATION_SET = new Set([
|
||||
INTEGRATION_TRAVISCI,
|
||||
INTEGRATION_SUPABASE,
|
||||
INTEGRATION_CHECKLY,
|
||||
INTEGRATION_TERRAFORM_CLOUD,
|
||||
INTEGRATION_HASHICORP_VAULT,
|
||||
INTEGRATION_CLOUDFLARE_PAGES,
|
||||
INTEGRATION_CODEFRESH,
|
||||
INTEGRATION_WINDMILL
|
||||
INTEGRATION_WINDMILL,
|
||||
INTEGRATION_BITBUCKET,
|
||||
INTEGRATION_DIGITAL_OCEAN_APP_PLATFORM,
|
||||
INTEGRATION_CLOUD_66,
|
||||
INTEGRATION_NORTHFLANK
|
||||
]);
|
||||
|
||||
// integration types
|
||||
@ -60,6 +71,7 @@ export const INTEGRATION_NETLIFY_TOKEN_URL = "https://api.netlify.com/oauth/toke
|
||||
export const INTEGRATION_GITHUB_TOKEN_URL =
|
||||
"https://github.com/login/oauth/access_token";
|
||||
export const INTEGRATION_GITLAB_TOKEN_URL = "https://gitlab.com/oauth/token";
|
||||
export const INTEGRATION_BITBUCKET_TOKEN_URL = "https://bitbucket.org/site/oauth2/access_token"
|
||||
|
||||
// integration apps endpoints
|
||||
export const INTEGRATION_HEROKU_API_URL = "https://api.heroku.com";
|
||||
@ -74,9 +86,14 @@ export const INTEGRATION_TRAVISCI_API_URL = "https://api.travis-ci.com";
|
||||
export const INTEGRATION_SUPABASE_API_URL = "https://api.supabase.com";
|
||||
export const INTEGRATION_LARAVELFORGE_API_URL = "https://forge.laravel.com";
|
||||
export const INTEGRATION_CHECKLY_API_URL = "https://api.checklyhq.com";
|
||||
export const INTEGRATION_TERRAFORM_CLOUD_API_URL = "https://app.terraform.io";
|
||||
export const INTEGRATION_CLOUDFLARE_PAGES_API_URL = "https://api.cloudflare.com";
|
||||
export const INTEGRATION_BITBUCKET_API_URL = "https://api.bitbucket.org";
|
||||
export const INTEGRATION_CODEFRESH_API_URL = "https://g.codefresh.io/api";
|
||||
export const INTEGRATION_WINDMILL_API_URL = "https://app.windmill.dev/api";
|
||||
export const INTEGRATION_DIGITAL_OCEAN_API_URL = "https://api.digitalocean.com";
|
||||
export const INTEGRATION_CLOUD_66_API_URL = "https://app.cloud66.com/api";
|
||||
export const INTEGRATION_NORTHFLANK_API_URL = "https://api.northflank.com";
|
||||
|
||||
export const getIntegrationOptions = async () => {
|
||||
const INTEGRATION_OPTIONS = [
|
||||
@ -198,6 +215,15 @@ export const getIntegrationOptions = async () => {
|
||||
clientId: await getClientIdGitLab(),
|
||||
docsLink: "",
|
||||
},
|
||||
{
|
||||
name: "Terraform Cloud",
|
||||
slug: "terraform-cloud",
|
||||
image: "Terraform Cloud.png",
|
||||
isAvailable: true,
|
||||
type: "pat",
|
||||
clientId: "",
|
||||
docsLink: "",
|
||||
},
|
||||
{
|
||||
name: "Travis CI",
|
||||
slug: "travisci",
|
||||
@ -252,6 +278,15 @@ export const getIntegrationOptions = async () => {
|
||||
clientId: "",
|
||||
docsLink: ""
|
||||
},
|
||||
{
|
||||
name: "BitBucket",
|
||||
slug: "bitbucket",
|
||||
image: "BitBucket.png",
|
||||
isAvailable: true,
|
||||
type: "oauth",
|
||||
clientId: await getClientIdBitBucket(),
|
||||
docsLink: ""
|
||||
},
|
||||
{
|
||||
name: "Codefresh",
|
||||
slug: "codefresh",
|
||||
@ -268,9 +303,35 @@ export const getIntegrationOptions = async () => {
|
||||
isAvailable: true,
|
||||
type: "pat",
|
||||
clientId: "",
|
||||
docsLink: "",
|
||||
},
|
||||
{
|
||||
name: "Digital Ocean App Platform",
|
||||
slug: "digital-ocean-app-platform",
|
||||
image: "Digital Ocean.png",
|
||||
isAvailable: true,
|
||||
type: "pat",
|
||||
clientId: "",
|
||||
docsLink: "",
|
||||
},
|
||||
{
|
||||
name: "Cloud 66",
|
||||
slug: "cloud-66",
|
||||
image: "Cloud 66.png",
|
||||
isAvailable: true,
|
||||
type: "pat",
|
||||
clientId: "",
|
||||
docsLink: "",
|
||||
},
|
||||
{
|
||||
name: "Northflank",
|
||||
slug: "northflank",
|
||||
image: "Northflank.png",
|
||||
isAvailable: true,
|
||||
type: "pat",
|
||||
clientId: "",
|
||||
docsLink: ""
|
||||
},
|
||||
|
||||
]
|
||||
|
||||
return INTEGRATION_OPTIONS;
|
||||
|
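A small sketch of how the extended integration allowlist above is typically consulted; the import path and the guard function are assumptions:

```typescript
import { INTEGRATION_BITBUCKET, INTEGRATION_SET } from "../variables";

// Reject unknown integration slugs before doing any further work
// (illustrative guard only).
const assertSupportedIntegration = (integration: string) => {
  if (!INTEGRATION_SET.has(integration)) {
    throw new Error(`Unsupported integration: ${integration}`); // assumed error handling
  }
};

assertSupportedIntegration(INTEGRATION_BITBUCKET); // passes with the new BitBucket entry
```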
@ -143,13 +143,13 @@ var runCmd = &cobra.Command{
|
||||
|
||||
err = executeMultipleCommandWithEnvs(command, len(secretsByKey), env)
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to execute your chained command")
|
||||
fmt.Println(err)
|
||||
}
|
||||
|
||||
} else {
|
||||
err = executeSingleCommandWithEnvs(args, len(secretsByKey), env)
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to execute your single command")
|
||||
fmt.Println(err)
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -7,8 +7,7 @@ in plaintext. Effectively, this means each such secret operation only requires 1
|
||||
|
||||
<AccordionGroup>
|
||||
<Accordion title="Retrieve secrets">
|
||||
Retrieve all secrets for an Infisical project and environment.
|
||||
|
||||
Retrieve all secrets for an Infisical project and environment.
|
||||
<Tabs>
|
||||
<Tab title="cURL">
|
||||
```bash
|
||||
@ -18,7 +17,12 @@ in plaintext. Effectively, this means each such secret operation only requires 1
|
||||
```
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
####
|
||||
<Info>
|
||||
When using a [service token](../../../documentation/platform/token) with access to a single environment and path, you don't need to provide request parameters because the server will automatically scope the request to the defined environment/secrets path of the service token used.
|
||||
For all other cases, request parameters are required.
|
||||
</Info>
|
||||
####
|
||||
<ParamField query="workspaceId" type="string" required>
|
||||
The ID of the workspace
|
||||
</ParamField>
|
||||
|
@ -6,19 +6,26 @@ The changelog below reflects new product developments and updates on a monthly b
|
||||
|
||||
## July 2023
|
||||
|
||||
- Released [secret referencing and importing](https://infisical.com/docs/documentation/platform/secret-reference) across folders and environments.
|
||||
- Added the [intergation with Laravel Forge](https://infisical.com/docs/integrations/cloud/laravel-forge).
|
||||
- Released [secret referencing and importing](https://infisical.com/docs/documentation/platform/secret-reference) across folders and environments.
|
||||
- Redesigned the project/organization experience.
|
||||
- Added native [Laravel Forge integration](https://infisical.com/docs/integrations/cloud/laravel-forge).
|
||||
- Added native [Codefresh integration](https://infisical.com/docs/integrations/cicd/codefresh)
|
||||
- Added native [Bitbucket integration](https://infisical.com/docs/integrations/cicd/bitbucket)
|
||||
- Added native [DigitalOcean App Platform integration](https://infisical.com/docs/integrations/cloud/digital-ocean-app-platform)
|
||||
- Added native [Cloud66 integration](https://infisical.com/docs/integrations/cloud/cloud-66)
|
||||
- Added support for Google SSO.
|
||||
- Added support for [Okta SAML 2.0 authentication](https://infisical.com/docs/documentation/platform/saml)
|
||||
- Released [folders / path-based secret storage](https://infisical.com/docs/documentation/platform/folder)
|
||||
- Released [webhooks](https://infisical.com/docs/documentation/platform/webhooks)
|
||||
|
||||
## June 2023
|
||||
|
||||
- Released the [Terraform Provider](https://infisical.com/docs/integrations/frameworks/terraform#5-run-terraform).
|
||||
- Updated the usage and billing page. Added the free trial for the professional tier.
|
||||
- Added the integration with [Checkly](https://infisical.com/docs/integrations/cloud/checkly), [Hashicorp Vault](https://infisical.com/docs/integrations/cloud/hashicorp-vault), and [Cloudflare Pages](https://infisical.com/docs/integrations/cloud/cloudflare-pages).
|
||||
- Comleted a penetration test with a `very good` result.
|
||||
- Completed a penetration test with a `very good` result.
|
||||
- Added support for multi-line secrets.
|
||||
|
||||
|
||||
## May 2023
|
||||
|
||||
- Released secret scanning capability for the CLI.
|
||||
@ -26,7 +33,7 @@ The changelog below reflects new product developments and updates on a monthly b
|
||||
- Completed penetration test.
|
||||
- Released new landing page.
|
||||
- Started SOC 2 (Type II) compliance certification preparation.
|
||||
- Released new deployment options for Fly.io, Digital Ocean and Render.
|
||||
- Released new deployment options for Fly.io, Digital Ocean and Render.
|
||||
|
||||
## April 2023
|
||||
|
||||
|
@ -3,7 +3,8 @@ title: 'Install'
|
||||
description: "Infisical's CLI is one of the best way to manage environments and secrets. Install it here"
|
||||
---
|
||||
|
||||
The Infisical CLI can be used to access secrets across various environments, whether it's local development, CI/CD, staging, or production.
|
||||
The Infisical CLI is a powerful command line tool that can be used to retrieve, modify, export, and inject secrets into any process or application as environment variables.
|
||||
You can use it across various environments, whether it's local development, CI/CD, staging, or production.
|
||||
|
||||
## Installation
|
||||
|
||||
@ -57,7 +58,10 @@ The Infisical CLI can be used to access secrets across various environments, whe
|
||||
```bash
|
||||
apk update && sudo apk add infisical
|
||||
```
|
||||
|
||||
###
|
||||
<Tip>
|
||||
If you are installing the CLI in production environments, we highly recommend pinning the CLI to a specific version. This will help keep your CLI version consistent across reinstalls. [View versions](https://cloudsmith.io/~infisical/repos/infisical-cli/packages/)
|
||||
</Tip>
|
||||
</Tab>
|
||||
<Tab title="RedHat/CentOs/Amazon">
|
||||
Add Infisical repository
|
||||
@ -71,7 +75,10 @@ The Infisical CLI can be used to access secrets across various environments, whe
|
||||
```bash
|
||||
sudo yum install infisical
|
||||
```
|
||||
|
||||
###
|
||||
<Tip>
|
||||
If you are installing the CLI in production environments, we highly recommend pinning the CLI to a specific version. This will help keep your CLI version consistent across reinstalls. [View versions](https://cloudsmith.io/~infisical/repos/infisical-cli/packages/)
|
||||
</Tip>
|
||||
</Tab>
|
||||
<Tab title="Debian/Ubuntu">
|
||||
Add Infisical repository
|
||||
@ -86,7 +93,10 @@ The Infisical CLI can be used to access secrets across various environments, whe
|
||||
```bash
|
||||
sudo apt-get update && sudo apt-get install -y infisical
|
||||
```
|
||||
|
||||
###
|
||||
<Tip>
|
||||
If you are installing the CLI in production environments, we highly recommend pinning the CLI to a specific version. This will help keep your CLI version consistent across reinstalls. [View versions](https://cloudsmith.io/~infisical/repos/infisical-cli/packages/)
|
||||
</Tip>
|
||||
</Tab>
|
||||
<Tab title="Arch Linux">
|
||||
Use the `yay` package manager to install from the [Arch User Repository](https://aur.archlinux.org/packages/infisical-bin)
|
||||
@ -95,6 +105,9 @@ The Infisical CLI can be used to access secrets across various environments, whe
|
||||
yay -S infisical-bin
|
||||
```
|
||||
|
||||
###
|
||||
<Tip>
|
||||
If you are installing the CLI in production environments, we highly recommend pinning the CLI to a specific version. This will help keep your CLI version consistent across reinstalls. [View versions](https://cloudsmith.io/~infisical/repos/infisical-cli/packages/)
|
||||
</Tip>
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
|
@ -8,7 +8,7 @@ The distinguishing factor, however, is the authentication method used.
|
||||
|
||||
<Tabs>
|
||||
<Tab title="Local development">
|
||||
To use the Infisical CLI in your development environment, simply run the following command and follow the interactive guide.
|
||||
To use the Infisical CLI in your development environment, simply run the command below and follow the interactive guide.
|
||||
|
||||
```bash
|
||||
infisical login
|
||||
|
@ -82,4 +82,28 @@ Password: `testInfisical1`
|
||||
```bash
|
||||
# To stop environment use Control+C (on Mac) CTRL+C (on Win) or
|
||||
docker-compose -f docker-compose.dev.yml down
|
||||
```
|
||||
|
||||
## Starting Infisical docs locally
|
||||
|
||||
We use [Mintlify](https://mintlify.com/) for our docs.
|
||||
|
||||
#### Install Mintlify CLI.
|
||||
|
||||
```bash
|
||||
npm i -g mintlify
|
||||
```
|
||||
|
||||
or
|
||||
|
||||
```bash
|
||||
yarn global add mintlify
|
||||
```
|
||||
|
||||
#### Running the docs
|
||||
Go to the `docs` directory and run `mintlify dev`. This will start up the docs on `localhost:3000`.
|
||||
|
||||
```bash
|
||||
# From the root directory
|
||||
cd docs; mintlify dev;
|
||||
```
|
@ -17,7 +17,7 @@ Start syncing environment variables with [Infisical Cloud](https://app.infisical
|
||||
Store secrets like API keys, database credentials, environment variables with Infisical
|
||||
</Card>
|
||||
|
||||
## Integrate with Infisical
|
||||
## Access secrets
|
||||
|
||||
<CardGroup cols={2}>
|
||||
<Card href="../../cli/overview" title="Command Line Interface (CLI)" icon="square-terminal" color="#3775a9">
|
||||
@ -31,11 +31,11 @@ Start syncing environment variables with [Infisical Cloud](https://app.infisical
|
||||
>
|
||||
Fetch secrets with any programming language on demand
|
||||
</Card>
|
||||
<Card href="/documentation/getting-started/docker" title="Docker" icon="docker" color="#0078d3">
|
||||
<Card href="../../integrations/platforms/docker-intro" title="Docker" icon="docker" color="#0078d3">
|
||||
Inject secrets into Docker containers
|
||||
</Card>
|
||||
<Card
|
||||
href="/documentation/getting-started/kubernetes"
|
||||
href="../../integrations/platforms/kubernetes"
|
||||
title="Kubernetes"
|
||||
icon="server"
|
||||
color="#3775a9"
|
||||
|
24
docs/documentation/platform/ip-allowlisting.mdx
Normal file
@ -0,0 +1,24 @@
|
||||
---
|
||||
title: "IP Allowlisting"
|
||||
description: "Restrict access to your secrets in Infisical using trusted IPs"
|
||||
---
|
||||
|
||||
Projects in Infisical can be configured to restrict client access to specific IP addresses or CIDR ranges. This applies to any client using service tokens and
|
||||
can be useful, for example, for limiting access to traffic coming from corporate networks.
|
||||
|
||||
By default, each project is initialized with the `0.0.0.0/0` entry, representing all possible IPv4 addresses.
|
||||
For enhanced security, we strongly recommend replacing the default entry with your client IPs to tighten access to your secrets.
|
||||
|
||||
<Note>
|
||||
You must be a project `admin` to manage your project's IP whitelist.
|
||||
</Note>
|
||||
|
||||

|
||||
|
||||
## Creating a trusted IP entry
|
||||
|
||||
To create a trusted IP entry, head over to the **IP Whitelist** tab in your project. When creating an entry,
|
||||
you can specify either a specific IP address like `192.0.2.1` or a CIDR range like `2001:db8::/32`; both IPv4 and IPv6
|
||||
formats are accepted.
|
||||
|
||||

|
100
docs/documentation/platform/saml.mdx
Normal file
@ -0,0 +1,100 @@
|
||||
---
|
||||
title: "SSO"
|
||||
description: "Log in to Infisical via SSO protocols"
|
||||
---
|
||||
|
||||
<Warning>
|
||||
Infisical currently only supports SAML SSO authentication with [Okta as the
|
||||
identity provider (IDP)](https://www.okta.com/). We're expanding support for
|
||||
other IDPs in the coming months; you can follow the tracking issue
|
||||
[here](https://github.com/Infisical/infisical/issues/442).
|
||||
</Warning>
|
||||
|
||||
You can configure your organization in Infisical to have members authenticate with the platform via protocols like [SAML 2.0](https://en.wikipedia.org/wiki/SAML_2.0).
|
||||
|
||||
Note that configuring SSO retains the end-to-end encrypted architecture of Infisical because we decouple the **authentication** and **decryption** steps. In all SSO login implementations,
|
||||
your IDP cannot and will not have access to the decryption key needed to decrypt your secrets.
|
||||
|
||||
## Configuration
|
||||
|
||||
Head over to your organization Settings > Authentication > SAML SSO Configuration.
|
||||
|
||||
Next, press "Set up SAML SSO" in the SAML SSO and follow the instructions
|
||||
below to configure SSO for your identity provider:
|
||||
|
||||
<Note>
|
||||
Note that only members with the `owner` or `admin` roles in an organization
|
||||
can configure SSO for it.
|
||||
</Note>
|
||||
|
||||
<AccordionGroup>
|
||||
<Accordion title="Okta SAML 2.0">
|
||||
1. In the Okta Admin Portal, select Applications > Applications from the
|
||||
navigation. On the Applications screen, select the Create App Integration
|
||||
button.
|
||||
|
||||

|
||||
|
||||
2. In the Create a New Application Integration dialog, select the SAML 2.0 radio button:
|
||||
|
||||

|
||||
|
||||
3. On the General Settings screen, give the application a unique, Infisical-specific name and select Next.
|
||||
|
||||
4. On the Configure SAML screen, configure the following fields:
|
||||
|
||||
- Single sign on URL: `https://app.infisical.com/api/v1/sso/saml2/:identifier`; we'll update the `:identifier` part later in step 6.
|
||||
- Audience URI (SP Entity ID): `https://app.infisical.com`
|
||||
|
||||

|
||||
|
||||
<Note>
|
||||
If you're self-hosting Infisical, then you will want to replace `https://app.infisical.com` with your own domain.
|
||||
</Note>
|
||||
|
||||
4. Also on the Configure SAML screen, configure the Attribute Statements to map:
|
||||
|
||||
- `id -> user.id`,
|
||||
- `email -> user.email`,
|
||||
- `firstName -> user.firstName`
|
||||
- `lastName -> user.lastName`
|
||||
|
||||

|
||||
|
||||
Once configured, select the Next button to proceed to the Feedback screen and select Finish.
|
||||
|
||||
5. Get IDP values
|
||||
|
||||
Once your application is created, select the Sign On tab for the app and select the View Setup Instructions button located on the right side of the screen:
|
||||
|
||||
Copy the Identity Provider Single Sign-On URL, the Identity Provider Issuer, and the X.509 Certificate to be pasted into your Infisical SAML SSO configuration details with the following map:
|
||||
|
||||
- `Audience -> Okta Audience URI (SP Entity ID)`
|
||||
- `Entrypoint -> Okta Identity Provider Single Sign-On URL`
|
||||
- `Issuer -> Identity Provider Issuer`
|
||||
- `Certificate -> X.509 Certificate`.
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
6. Create the SSO configuration and copy your SSO identifier in Infisical; update `:identifier` from step 4 earlier to be this value.
|
||||
|
||||

|
||||
|
||||
7. Assignments
|
||||
|
||||
Finally, navigate to the Assignments tab and select the Assign button:
|
||||
|
||||
You can assign access to the application on a user-by-user basis using the Assign to People option, or in bulk using the Assign to Groups option.
|
||||
|
||||

|
||||
|
||||
At this point, you have configured everything you need within the context of the Okta Admin Portal.
|
||||
|
||||
8. Return to Infisical and enable SAML SSO.
|
||||
|
||||
Enabling SAML SSO requires all members of your organization to log into Infisical exclusively via Okta.
|
||||
|
||||
</Accordion>
|
||||
</AccordionGroup>
|
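As a reference for the attribute mapping above, a sketch of the SAML profile fields the backend strategy earlier in this diff reads once Okta maps id, email, firstName, and lastName; all values are placeholders:

```typescript
// Placeholder values only: the shape the SAML callback receives after the
// Okta attribute statements above are applied.
type OktaSamlProfile = {
  id: string;
  email: string;
  firstName: string;
  lastName: string;
};

const exampleProfile: OktaSamlProfile = {
  id: "00uexampleoktauserid",
  email: "jane.doe@example.com",
  firstName: "Jane",
  lastName: "Doe"
};
```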
@ -45,8 +45,8 @@ To add an import, simply click on the `Add import` button and provide the enviro
|
||||

|
||||
|
||||
The hierarchy of importing secrets is governed by a "last-one-wins" rule. This means the sequence in which you import matters: the final folder imported will override secrets from any prior folders.
|
||||
Moreover, any secrets you define directly in your environment will take precedence over secrets from any imported folders.
|
||||
Additionally, any secrets you define directly in your environment will override any secrets that are imported with the same name.
|
||||
|
||||
You can modify this sequence by dragging and rearranging the folders using the `Change Order` drag handle.
|
||||
You can modify the order of folders to control overrides using the `Change Order` drag handle.
|
||||
|
||||

|
||||
|
BIN
docs/images/integrations-bitbucket-auth.png
Normal file
After Width: | Height: | Size: 245 KiB |
BIN
docs/images/integrations-bitbucket-create.png
Normal file
After Width: | Height: | Size: 472 KiB |
BIN
docs/images/integrations-bitbucket.png
Normal file
After Width: | Height: | Size: 664 KiB |
BIN
docs/images/integrations-cloud-66-access-token.png
Normal file
After Width: | Height: | Size: 890 KiB |
BIN
docs/images/integrations-cloud-66-copy-pat.png
Normal file
After Width: | Height: | Size: 814 KiB |
BIN
docs/images/integrations-cloud-66-create.png
Normal file
After Width: | Height: | Size: 1.1 MiB |
BIN
docs/images/integrations-cloud-66-dashboard.png
Normal file
After Width: | Height: | Size: 742 KiB |