Compare commits

...

23 Commits

Author SHA1 Message Date
57a6d1fff6 fix syntax error in helm chart 2023-08-19 14:47:46 -04:00
554f0c79a4 update redis doc 2023-08-19 14:31:28 -04:00
2af88d4c99 Merge pull request #843 from Infisical/add-bull-queue
add bull queue
2023-08-19 14:13:34 -04:00
fc8b567352 fix syntax error in /api/status 2023-08-19 14:03:02 -04:00
ec234e198a Merge branch 'main' into add-bull-queue 2023-08-19 13:46:26 -04:00
6e1cc12e3a update redis banner text 2023-08-19 13:43:01 -04:00
1b4b7a967b fix docs typos 2023-08-19 13:42:33 -04:00
45a13d06b5 add redis why docs & update redis notice 2023-08-18 21:20:20 -04:00
4a48c088df Merge pull request #868 from daninge98/custom-environment-sorting
Adds user customizable environment ordering
2023-08-18 17:05:37 -07:00
2b65f65063 Rename things and fix bug in error checking 2023-08-18 17:33:59 -04:00
065e150847 update status api 2023-08-18 09:42:33 -04:00
ab72eb1178 added scrollbar to modal 2023-08-17 14:03:56 -07:00
c0ce92cf3d Formattting fix 2023-08-16 17:42:39 -04:00
0073fe459e Fix typo 2023-08-16 17:37:41 -04:00
a7f52a9298 Small formatting fixes 2023-08-16 17:36:07 -04:00
29c0d8ab57 Enable users to change the ordering of environments 2023-08-16 17:30:50 -04:00
90517258a2 added redis note 2023-08-14 18:30:40 -07:00
d78b37c632 add redis docs 2023-08-14 16:25:16 -04:00
4a6fc9e84f remove console.log and add redis to /status api 2023-08-14 16:24:43 -04:00
8030104c02 update helm read me with redis config details 2023-08-14 15:02:22 -04:00
01e613301a console.log queue errors 2023-08-11 19:47:15 -04:00
b11cd29943 close all queues 2023-08-10 19:13:09 -04:00
dfe95ac773 add bull queue 2023-08-10 17:22:20 -04:00
44 changed files with 5233 additions and 402 deletions

View File

@ -25,6 +25,9 @@ JWT_PROVIDER_AUTH_LIFETIME=
# Required
MONGO_URL=mongodb://root:example@mongo:27017/?authSource=admin
# Redis
REDIS_URL=redis://redis:6379
# Optional credentials for MongoDB container instance and Mongo-Express
MONGO_USERNAME=root
MONGO_PASSWORD=example

View File

@ -1,3 +0,0 @@
{
"workbench.editor.wrapTabs": true
}

4214
backend/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -84,6 +84,7 @@
"@posthog/plugin-scaffold": "^1.3.4",
"@types/bcrypt": "^5.0.0",
"@types/bcryptjs": "^2.4.2",
"@types/bull": "^4.10.0",
"@types/cookie-parser": "^1.4.3",
"@types/cors": "^2.8.12",
"@types/express": "^4.17.14",

View File

@ -68,6 +68,8 @@ export const getSecretScanningWebhookSecret = async () => (await client.getSecre
export const getSecretScanningGitAppId = async () => (await client.getSecret("SECRET_SCANNING_GIT_APP_ID")).secretValue;
export const getSecretScanningPrivateKey = async () => (await client.getSecret("SECRET_SCANNING_PRIVATE_KEY")).secretValue;
export const getRedisUrl = async () => (await client.getSecret("REDIS_URL")).secretValue;
export const getLicenseKey = async () => {
const secretValue = (await client.getSecret("LICENSE_KEY")).secretValue;
return secretValue === "" ? undefined : secretValue;

View File

@ -85,6 +85,43 @@ export const createWorkspaceEnvironment = async (
});
};
/**
* Swaps the ordering of two environments in the database. This is purely for aesthetic purposes.
* @param req
* @param res
* @returns
*/
/**
 * Swap the positions of two environments in a workspace's environment list.
 * This is purely cosmetic display ordering — no secrets or permissions change.
 * @param req - expects `workspaceId` param plus both environments' name/slug in the body
 * @param res
 * @returns 200 with a confirmation message and the workspace id
 */
export const reorderWorkspaceEnvironments = async (
  req: Request,
  res: Response
) => {
  const { workspaceId } = req.params;
  const { environmentSlug, environmentName, otherEnvironmentSlug, otherEnvironmentName } = req.body;

  // NOTE(review): findById + save is a read-modify-write, not truly atomic —
  // concurrent reorders could race; confirm whether an atomic update is needed.
  const workspace = await Workspace.findById(workspaceId).exec();
  if (!workspace) {
    throw BadRequestError({message: "Couldn't load workspace"});
  }

  // Locate both environments by name+slug pair.
  const indexOf = (name: string, slug: string) =>
    workspace.environments.findIndex((env) => env.name === name && env.slug === slug);

  const firstIndex = indexOf(environmentName, environmentSlug);
  const secondIndex = indexOf(otherEnvironmentName, otherEnvironmentSlug);
  if (firstIndex === -1 || secondIndex === -1) {
    throw BadRequestError({message: "environment or otherEnvironment couldn't be found"})
  }

  // Swap in place, then persist the new ordering.
  const held = workspace.environments[firstIndex];
  workspace.environments[firstIndex] = workspace.environments[secondIndex];
  workspace.environments[secondIndex] = held;
  await workspace.save()

  return res.status(200).send({
    message: "Successfully reordered environments",
    workspace: workspaceId,
  });
};
/**
* Rename workspace environment with new name and slug of a workspace with [workspaceId]
* Old slug [oldEnvironmentSlug] must be provided
@ -124,7 +161,7 @@ export const renameWorkspaceEnvironment = async (
if (envIndex === -1) {
throw new Error("Invalid environment given");
}
const oldEnvironment = workspace.environments[envIndex];
workspace.environments[envIndex].name = environmentName;
@ -159,7 +196,7 @@ export const renameWorkspaceEnvironment = async (
{ $set: { "deniedPermissions.$[element].environmentSlug": environmentSlug } },
{ arrayFilters: [{ "element.environmentSlug": oldEnvironmentSlug }] }
);
await EEAuditLogService.createAuditLog(
req.authData,
{
@ -210,7 +247,7 @@ export const deleteWorkspaceEnvironment = async (
if (envIndex === -1) {
throw new Error("Invalid environment given");
}
const oldEnvironment = workspace.environments[envIndex];
workspace.environments.splice(envIndex, 1);

View File

@ -1,58 +1,29 @@
import { Probot } from "probot";
import { exec } from "child_process";
import { mkdir, readFile, rm, writeFile } from "fs";
import { tmpdir } from "os";
import { join } from "path"
import GitRisks from "../../models/gitRisks";
import GitAppOrganizationInstallation from "../../models/gitAppOrganizationInstallation";
import MembershipOrg from "../../../models/membershipOrg";
import { ADMIN, OWNER } from "../../../variables";
import User from "../../../models/user";
import { sendMail } from "../../../helpers";
import TelemetryService from "../../../services/TelemetryService";
type SecretMatch = {
Description: string;
StartLine: number;
EndLine: number;
StartColumn: number;
EndColumn: number;
Match: string;
Secret: string;
File: string;
SymlinkFile: string;
Commit: string;
Entropy: number;
Author: string;
Email: string;
Date: string;
Message: string;
Tags: string[];
RuleID: string;
Fingerprint: string;
FingerPrintWithoutCommitId: string
};
import { scanGithubPushEventForSecretLeaks } from "../../../queues/secret-scanning/githubScanPushEvent";
export default async (app: Probot) => {
app.on("installation.deleted", async (context) => {
const { payload } = context;
const { installation, repositories } = payload;
if (installation.repository_selection == "all") {
await GitRisks.deleteMany({ installationId: installation.id })
await GitAppOrganizationInstallation.deleteOne({ installationId: installation.id })
} else {
if (repositories) {
for (const repository of repositories) {
await GitRisks.deleteMany({ repositoryId: repository.id })
}
if (repositories) {
for (const repository of repositories) {
await GitRisks.deleteMany({ repositoryId: repository.id })
}
await GitAppOrganizationInstallation.deleteOne({ installationId: installation.id })
}
})
app.on("installation", async (context) => {
const { payload } = context;
payload.repositories
const { installation, repositories } = payload;
// TODO: start full repo scans
})
app.on("push", async (context) => {
const { payload } = context;
const { commits, repository, installation, pusher } = payload;
const [owner, repo] = repository.full_name.split("/");
if (!commits || !repository || !installation || !pusher) {
return
@ -63,188 +34,12 @@ export default async (app: Probot) => {
return
}
const allFindingsByFingerprint: { [key: string]: SecretMatch; } = {}
for (const commit of commits) {
for (const filepath of [...commit.added, ...commit.modified]) {
try {
const fileContentsResponse = await context.octokit.repos.getContent({
owner,
repo,
path: filepath,
});
const data: any = fileContentsResponse.data;
const fileContent = Buffer.from(data.content, "base64").toString();
const findings = await scanContentAndGetFindings(`\n${fileContent}`) // extra line to count lines correctly
for (const finding of findings) {
const fingerPrintWithCommitId = `${commit.id}:${filepath}:${finding.RuleID}:${finding.StartLine}`
const fingerPrintWithoutCommitId = `${filepath}:${finding.RuleID}:${finding.StartLine}`
finding.Fingerprint = fingerPrintWithCommitId
finding.FingerPrintWithoutCommitId = fingerPrintWithoutCommitId
finding.Commit = commit.id
finding.File = filepath
finding.Author = commit.author.name
finding.Email = commit?.author?.email ? commit?.author?.email : ""
allFindingsByFingerprint[fingerPrintWithCommitId] = finding
}
} catch (error) {
console.error(`Error fetching content for ${filepath}`, error); // eslint-disable-line
}
}
}
// change to update
for (const key in allFindingsByFingerprint) {
const risk = await GitRisks.findOneAndUpdate({ fingerprint: allFindingsByFingerprint[key].Fingerprint },
{
...convertKeysToLowercase(allFindingsByFingerprint[key]),
installationId: installation.id,
organization: installationLinkToOrgExists.organizationId,
repositoryFullName: repository.full_name,
repositoryId: repository.id
}, {
upsert: true
}).lean()
}
// get emails of admins
const adminsOfWork = await MembershipOrg.find({
organization: installationLinkToOrgExists.organizationId,
$or: [
{ role: OWNER },
{ role: ADMIN }
]
}).lean()
const userEmails = await User.find({
_id: {
$in: [adminsOfWork.map(orgMembership => orgMembership.user)]
}
}).select("email").lean()
const adminOrOwnerEmails = userEmails.map(userObject => userObject.email)
const usersToNotify = pusher?.email ? [pusher.email, ...adminOrOwnerEmails] : [...adminOrOwnerEmails]
if (Object.keys(allFindingsByFingerprint).length) {
await sendMail({
template: "secretLeakIncident.handlebars",
subjectLine: `Incident alert: leaked secrets found in Github repository ${repository.full_name}`,
recipients: usersToNotify,
substitutions: {
numberOfSecrets: Object.keys(allFindingsByFingerprint).length,
pusher_email: pusher.email,
pusher_name: pusher.name
}
});
}
const postHogClient = await TelemetryService.getPostHogClient();
if (postHogClient) {
postHogClient.capture({
event: "cloud secret scan",
distinctId: pusher.email,
properties: {
numberOfCommitsScanned: commits.length,
numberOfRisksFound: Object.keys(allFindingsByFingerprint).length,
}
});
}
scanGithubPushEventForSecretLeaks({
commits: commits,
pusher: { name: pusher.name, email: pusher.email },
repository: { fullName: repository.full_name, id: repository.id },
organizationId: installationLinkToOrgExists.organizationId,
installationId: installation.id
})
});
};
async function scanContentAndGetFindings(textContent: string): Promise<SecretMatch[]> {
const tempFolder = await createTempFolder();
const filePath = join(tempFolder, "content.txt");
const findingsPath = join(tempFolder, "findings.json");
try {
await writeTextToFile(filePath, textContent);
await runInfisicalScan(filePath, findingsPath);
const findingsData = await readFindingsFile(findingsPath);
return JSON.parse(findingsData);
} finally {
await deleteTempFolder(tempFolder);
}
}
function createTempFolder(): Promise<string> {
return new Promise((resolve, reject) => {
const tempDir = tmpdir()
const tempFolderName = Math.random().toString(36).substring(2);
const tempFolderPath = join(tempDir, tempFolderName);
mkdir(tempFolderPath, (err: any) => {
if (err) {
reject(err);
} else {
resolve(tempFolderPath);
}
});
});
}
function writeTextToFile(filePath: string, content: string): Promise<void> {
return new Promise((resolve, reject) => {
writeFile(filePath, content, (err) => {
if (err) {
reject(err);
} else {
resolve();
}
});
});
}
function runInfisicalScan(inputPath: string, outputPath: string): Promise<void> {
return new Promise((resolve, reject) => {
const command = `cat "${inputPath}" | infisical scan --exit-code=77 --pipe -r "${outputPath}"`;
exec(command, (error) => {
if (error && error.code != 77) {
reject(error);
} else {
resolve();
}
});
});
}
function readFindingsFile(filePath: string): Promise<string> {
return new Promise((resolve, reject) => {
readFile(filePath, "utf8", (err, data) => {
if (err) {
reject(err);
} else {
resolve(data);
}
});
});
}
function deleteTempFolder(folderPath: string): Promise<void> {
return new Promise((resolve, reject) => {
rm(folderPath, { recursive: true }, (err) => {
if (err) {
reject(err);
} else {
resolve();
}
});
});
}
function convertKeysToLowercase<T>(obj: T): T {
const convertedObj = {} as T;
for (const key in obj) {
if (Object.prototype.hasOwnProperty.call(obj, key)) {
const lowercaseKey = key.charAt(0).toLowerCase() + key.slice(1);
convertedObj[lowercaseKey as keyof T] = obj[key];
}
}
return convertedObj;
}

View File

@ -0,0 +1,125 @@
import { exec } from "child_process";
import { mkdir, readFile, rm, writeFile } from "fs";
import { tmpdir } from "os";
import { join } from "path"
import { SecretMatch } from "./types";
import { Octokit } from "@octokit/rest";
/**
 * Writes textContent to a temp file, runs the infisical CLI scanner over it,
 * and returns the parsed findings. The temp folder is always cleaned up,
 * even when the scan or parse fails.
 */
export async function scanContentAndGetFindings(textContent: string): Promise<SecretMatch[]> {
  const workDir = await createTempFolder();
  const contentFile = join(workDir, "content.txt");
  const reportFile = join(workDir, "findings.json");
  try {
    await writeTextToFile(contentFile, textContent);
    await runInfisicalScan(contentFile, reportFile);
    const rawReport = await readFindingsFile(reportFile);
    return JSON.parse(rawReport);
  } finally {
    await deleteTempFolder(workDir);
  }
}
/**
 * Creates a uniquely-named folder under the OS temp directory.
 * Resolves with the absolute folder path.
 */
export function createTempFolder(): Promise<string> {
  return new Promise((resolve, reject) => {
    // Random base-36 suffix keeps concurrent scans from colliding.
    const folderName = Math.random().toString(36).substring(2);
    const folderPath = join(tmpdir(), folderName);
    mkdir(folderPath, (err: any) => {
      if (err) {
        return reject(err);
      }
      resolve(folderPath);
    });
  });
}
/**
 * Promise wrapper around fs.writeFile for plain text content.
 */
export function writeTextToFile(filePath: string, content: string): Promise<void> {
  return new Promise((resolve, reject) => {
    writeFile(filePath, content, (err) => {
      err ? reject(err) : resolve();
    });
  });
}
/**
 * Pipes the file at inputPath through the `infisical scan` CLI, writing the
 * findings report to outputPath. The scanner exits with code 77 when leaks
 * are found; that is still a successful scan run (findings come from the report).
 */
export function runInfisicalScan(inputPath: string, outputPath: string): Promise<void> {
  return new Promise((resolve, reject) => {
    const command = `cat "${inputPath}" | infisical scan --exit-code=77 --pipe -r "${outputPath}"`;
    exec(command, (error) => {
      // Exit code 77 means "leaks detected" — treat it as success.
      const leaksFound = error && error.code == 77;
      if (!error || leaksFound) {
        resolve();
      } else {
        reject(error);
      }
    });
  });
}
/**
 * Reads the findings report file as UTF-8 text.
 */
export function readFindingsFile(filePath: string): Promise<string> {
  return new Promise((resolve, reject) => {
    readFile(filePath, "utf8", (err, data) => {
      err ? reject(err) : resolve(data);
    });
  });
}
/**
 * Recursively removes the given folder and everything inside it.
 */
export function deleteTempFolder(folderPath: string): Promise<void> {
  return new Promise((resolve, reject) => {
    rm(folderPath, { recursive: true }, (err) => {
      err ? reject(err) : resolve();
    });
  });
}
/**
 * Returns a copy of obj with the first character of every own enumerable key
 * lowercased (e.g. "StartLine" -> "startLine"). Values are copied by reference.
 */
export function convertKeysToLowercase<T>(obj: T): T {
  const result = {} as T;
  for (const [key, value] of Object.entries(obj as object)) {
    const camelKey = key.charAt(0).toLowerCase() + key.slice(1);
    (result as any)[camelKey] = value;
  }
  return result;
}
/**
 * Lists every commit in a repository by paging through the GitHub API,
 * 100 commits per request.
 * @param octokit - authenticated Octokit client
 * @param owner - repository owner login
 * @param repo - repository name
 * @returns all commits in API default ordering
 */
export async function getCommits(octokit: Octokit, owner: string, repo: string) {
  const perPage = 100;
  let commits: { sha: string }[] = [];
  let page = 1;
  while (true) {
    const response = await octokit.repos.listCommits({
      owner,
      repo,
      per_page: perPage,
      page,
    });
    commits = commits.concat(response.data);
    // A short page means we've reached the end. The previous `length == 0`
    // check always cost one extra, guaranteed-empty API request.
    if (response.data.length < perPage) break;
    page++;
  }
  return commits;
}
/**
 * Fetches the file entries touched by a single commit.
 * Bug fix: the previous version made the API call but returned nothing,
 * so callers could never see the result.
 * @returns the commit's `files` array (undefined if the API omits it)
 */
export async function getFilesFromCommit(octokit: any, owner: string, repo: string, sha: string) {
  const response = await octokit.repos.getCommit({
    owner,
    repo,
    ref: sha,
  });
  return response.data.files;
}

View File

@ -0,0 +1,21 @@
// A single secret-leak finding parsed from the `infisical scan` CLI's JSON
// report. PascalCase field names mirror the scanner's output so the report
// can be JSON.parsed straight into this type.
export type SecretMatch = {
  Description: string;
  StartLine: number;
  EndLine: number;
  StartColumn: number;
  EndColumn: number;
  Match: string; // full text that matched the rule
  Secret: string; // the secret portion of the match
  File: string; // path of the file containing the leak
  SymlinkFile: string;
  Commit: string; // commit SHA the finding was detected in
  Entropy: number; // presumably the scanner's entropy score — TODO confirm
  Author: string;
  Email: string;
  Date: string;
  Message: string;
  Tags: string[];
  RuleID: string; // id of the scanner rule that fired
  Fingerprint: string; // "<commitId>:<filepath>:<ruleId>:<startLine>" — set by callers
  FingerPrintWithoutCommitId: string // same fingerprint minus the commit id
};

View File

@ -32,7 +32,7 @@ export const handleEventHelper = async ({ event }: { event: Event }) => {
switch (event.name) {
case EVENT_PUSH_SECRETS:
if (bot) {
await IntegrationService.syncIntegrations({
IntegrationService.syncIntegrations({
workspaceId,
environment
});

View File

@ -1,6 +1,6 @@
import { Types } from "mongoose";
import { Bot, Integration, IntegrationAuth } from "../models";
import { exchangeCode, exchangeRefresh, syncSecrets } from "../integrations";
import { Bot, IntegrationAuth } from "../models";
import { exchangeCode, exchangeRefresh } from "../integrations";
import { BotService } from "../services";
import {
ALGORITHM_AES_256_GCM,
@ -9,7 +9,7 @@ import {
INTEGRATION_VERCEL
} from "../variables";
import { UnauthorizedRequestError } from "../utils/errors";
import * as Sentry from "@sentry/node";
import { syncSecretsToActiveIntegrationsQueue } from "../queues/integrations/syncSecretsToThirdPartyServices"
interface Update {
workspace: string;
@ -102,69 +102,6 @@ export const handleOAuthExchangeHelper = async ({
return integrationAuth;
};
/**
* Sync/push environment variables in workspace with id [workspaceId] to
* all active integrations for that workspace
* @param {Object} obj
* @param {Object} obj.workspaceId - id of workspace
*/
export const syncIntegrationsHelper = async ({
workspaceId,
environment
}: {
workspaceId: Types.ObjectId;
environment?: string;
}) => {
try {
const integrations = await Integration.find({
workspace: workspaceId,
...(environment
? {
environment
}
: {}),
isActive: true,
app: { $ne: null }
});
// for each workspace integration, sync/push secrets
// to that integration
for await (const integration of integrations) {
// get workspace, environment (shared) secrets
const secrets = await BotService.getSecrets({
workspaceId: integration.workspace,
environment: integration.environment,
secretPath: integration.secretPath
});
const integrationAuth = await IntegrationAuth.findById(integration.integrationAuth);
if (!integrationAuth) throw new Error("Failed to find integration auth");
// get integration auth access token
const access = await getIntegrationAuthAccessHelper({
integrationAuthId: integration.integrationAuth
});
// sync secrets to integration
await syncSecrets({
integration,
integrationAuth,
secrets,
accessId: access.accessId === undefined ? null : access.accessId,
accessToken: access.accessToken
});
}
} catch (err) {
Sentry.captureException(err);
// eslint-disable-next-line
console.log(
`syncIntegrationsHelper: failed with [workspaceId=${workspaceId}] [environment=${environment}]`,
err
); // eslint-disable-line no-use-before-define
throw err;
}
};
/**
* Return decrypted refresh token using the bot's copy

View File

@ -6,7 +6,7 @@ require("express-async-errors");
import helmet from "helmet";
import cors from "cors";
import { DatabaseService } from "./services";
import { EELicenseService, GithubSecretScanningService} from "./ee/services";
import { EELicenseService, GithubSecretScanningService } from "./ee/services";
import { setUpHealthEndpoint } from "./services/health";
import cookieParser from "cookie-parser";
import swaggerUi = require("swagger-ui-express");
@ -72,6 +72,8 @@ import { RouteNotFoundError } from "./utils/errors";
import { requestErrorHandler } from "./middleware/requestErrorHandler";
import { getNodeEnv, getPort, getSecretScanningGitAppId, getSecretScanningPrivateKey, getSecretScanningWebhookProxy, getSecretScanningWebhookSecret, getSiteURL } from "./config";
import { setup } from "./utils/setup";
import { syncSecretsToThirdPartyServices } from "./queues/integrations/syncSecretsToThirdPartyServices";
import { githubPushEventSecretScan } from "./queues/secret-scanning/githubScanPushEvent";
const SmeeClient = require('smee-client') // eslint-disable-line
const main = async () => {
@ -205,6 +207,8 @@ const main = async () => {
server.on("close", async () => {
await DatabaseService.closeDatabase();
syncSecretsToThirdPartyServices.close()
githubPushEventSecretScan.close()
});
return server;

View File

@ -2,7 +2,6 @@ import { exchangeCode } from "./exchange";
import { exchangeRefresh } from "./refresh";
import { getApps } from "./apps";
import { getTeams } from "./teams";
import { syncSecrets } from "./sync";
import { revokeAccess } from "./revoke";
export {
@ -10,6 +9,5 @@ export {
exchangeRefresh,
getApps,
getTeams,
syncSecrets,
revokeAccess,
}

View File

@ -0,0 +1,76 @@
import Queue, { Job } from "bull";
import Integration from "../../models/integration";
import IntegrationAuth from "../../models/integrationAuth";
import { BotService } from "../../services";
import { getIntegrationAuthAccessHelper } from "../../helpers";
import { syncSecrets } from "../../integrations/sync"
// Job payload: which workspace (and optionally which environment) to sync.
type TSyncSecretsToThirdPartyServices = {
  workspaceId: string
  environment?: string
}

export const syncSecretsToThirdPartyServices = new Queue("sync-secrets-to-third-party-services", process.env.REDIS_URL as string);

/**
 * Job processor: pushes the workspace's (optionally environment-scoped)
 * secrets to every active integration of that workspace.
 */
syncSecretsToThirdPartyServices.process(async (job: Job) => {
  const { workspaceId, environment }: TSyncSecretsToThirdPartyServices = job.data

  const integrations = await Integration.find({
    workspace: workspaceId,
    ...(environment
      ? {
        environment
      }
      : {}),
    isActive: true,
    app: { $ne: null }
  });

  // Sync secrets to EVERY active integration. The previous version
  // `return`ed inside the loop, so only the first integration was ever
  // synced per job (the pre-queue helper awaited each one in turn).
  for (const integration of integrations) {
    // get workspace, environment (shared) secrets
    const secrets = await BotService.getSecrets({
      workspaceId: integration.workspace,
      environment: integration.environment,
      secretPath: integration.secretPath
    });

    const integrationAuth = await IntegrationAuth.findById(integration.integrationAuth);
    if (!integrationAuth) throw new Error("Failed to find integration auth");

    // get integration auth access token
    const access = await getIntegrationAuthAccessHelper({
      integrationAuthId: integration.integrationAuth
    });

    // sync secrets to integration
    await syncSecrets({
      integration,
      integrationAuth,
      secrets,
      accessId: access.accessId === undefined ? null : access.accessId,
      accessToken: access.accessToken
    });
  }
})
// Surface queue-level errors (e.g. a lost Redis connection) in the logs.
syncSecretsToThirdPartyServices.on("error", (error) => {
  console.log("QUEUE ERROR:", error) // eslint-disable-line
})

/**
 * Enqueues a sync job for the given workspace/environment.
 * Retries up to 5 times with exponential backoff starting at 3s;
 * completed jobs are dropped, only the last 20 failures are kept.
 */
export const syncSecretsToActiveIntegrationsQueue = (jobDetails: TSyncSecretsToThirdPartyServices) => {
  syncSecretsToThirdPartyServices.add(jobDetails, {
    attempts: 5,
    backoff: {
      type: "exponential",
      delay: 3000
    },
    removeOnComplete: true,
    removeOnFail: {
      count: 20 // keep the most recent 20 jobs
    }
  })
}

View File

@ -0,0 +1,201 @@
// import Queue, { Job } from "bull";
// import { ProbotOctokit } from "probot"
// import { Commit, Committer, Repository } from "@octokit/webhooks-types";
// import TelemetryService from "../../services/TelemetryService";
// import { sendMail } from "../../helpers";
// import GitRisks from "../../ee/models/gitRisks";
// import { MembershipOrg, User } from "../../models";
// import { OWNER, ADMIN } from "../../variables";
// import { convertKeysToLowercase, getFilesFromCommit, scanContentAndGetFindings } from "../../ee/services/GithubSecretScanning/helper";
// import { getSecretScanningGitAppId, getSecretScanningPrivateKey } from "../../config";
// const githubFullRepositoryScan = new Queue('github-historical-secret-scanning', 'redis://redis:6379');
// type TScanFullRepositoryDetails = {
// organizationId: string,
// repositories: {
// id: number;
// node_id: string;
// name: string;
// full_name: string;
// private: boolean;
// }[] | undefined
// installationId: number
// }
// type SecretMatch = {
// Description: string;
// StartLine: number;
// EndLine: number;
// StartColumn: number;
// EndColumn: number;
// Match: string;
// Secret: string;
// File: string;
// SymlinkFile: string;
// Commit: string;
// Entropy: number;
// Author: string;
// Email: string;
// Date: string;
// Message: string;
// Tags: string[];
// RuleID: string;
// Fingerprint: string;
// FingerPrintWithoutCommitId: string
// };
// type Helllo = {
// url: string;
// sha: string;
// node_id: string;
// html_url: string;
// comments_url: string;
// commit: {
// url: string;
// author: {
// name?: string | undefined;
// email?: string | undefined;
// date?: string | undefined;
// } | null;
// verification?: {
// } | undefined;
// };
// files?: {}[] | undefined;
// }[]
// githubFullRepositoryScan.process(async (job: Job, done: Queue.DoneCallback) => {
// const { organizationId, repositories, installationId }: TScanFullRepositoryDetails = job.data
// const repositoryFullNamesList = repositories ? repositories.map(repoDetails => repoDetails.full_name) : []
// const octokit = new ProbotOctokit({
// auth: {
// appId: await getSecretScanningGitAppId(),
// privateKey: await getSecretScanningPrivateKey(),
// installationId: installationId
// },
// });
// for (const repositoryFullName of repositoryFullNamesList) {
// const [owner, repo] = repositoryFullName.split("/");
// let page = 1;
// while (true) {
// // octokit.repos.getco
// const { data } = await octokit.repos.listCommits({
// owner,
// repo,
// per_page: 100,
// page
// });
// await getFilesFromCommit(octokit, owner, repo, "646b386605177ed0a2cc0a596eeee0cf57666342")
// page++;
// }
// }
// done()
// // const allFindingsByFingerprint: { [key: string]: SecretMatch; } = {}
// // for (const commit of commits) {
// // for (const filepath of [...commit.added, ...commit.modified]) {
// // try {
// // const fileContentsResponse = await octokit.repos.getContent({
// // owner,
// // repo,
// // path: filepath,
// // });
// // const data: any = fileContentsResponse.data;
// // const fileContent = Buffer.from(data.content, "base64").toString();
// // const findings = await scanContentAndGetFindings(`\n${fileContent}`) // extra line to count lines correctly
// // for (const finding of findings) {
// // const fingerPrintWithCommitId = `${commit.id}:${filepath}:${finding.RuleID}:${finding.StartLine}`
// // const fingerPrintWithoutCommitId = `${filepath}:${finding.RuleID}:${finding.StartLine}`
// // finding.Fingerprint = fingerPrintWithCommitId
// // finding.FingerPrintWithoutCommitId = fingerPrintWithoutCommitId
// // finding.Commit = commit.id
// // finding.File = filepath
// // finding.Author = commit.author.name
// // finding.Email = commit?.author?.email ? commit?.author?.email : ""
// // allFindingsByFingerprint[fingerPrintWithCommitId] = finding
// // }
// // } catch (error) {
// // done(new Error(`gitHubHistoricalScanning.process: unable to fetch content for [filepath=${filepath}] because [error=${error}]`), null)
// // }
// // }
// // }
// // // change to update
// // for (const key in allFindingsByFingerprint) {
// // await GitRisks.findOneAndUpdate({ fingerprint: allFindingsByFingerprint[key].Fingerprint },
// // {
// // ...convertKeysToLowercase(allFindingsByFingerprint[key]),
// // installationId: installationId,
// // organization: organizationId,
// // repositoryFullName: repository.fullName,
// // repositoryId: repository.id
// // }, {
// // upsert: true
// // }).lean()
// // }
// // // get emails of admins
// // const adminsOfWork = await MembershipOrg.find({
// // organization: organizationId,
// // $or: [
// // { role: OWNER },
// // { role: ADMIN }
// // ]
// // }).lean()
// // const userEmails = await User.find({
// // _id: {
// // $in: [adminsOfWork.map(orgMembership => orgMembership.user)]
// // }
// // }).select("email").lean()
// // const adminOrOwnerEmails = userEmails.map(userObject => userObject.email)
// // const usersToNotify = pusher?.email ? [pusher.email, ...adminOrOwnerEmails] : [...adminOrOwnerEmails]
// // if (Object.keys(allFindingsByFingerprint).length) {
// // await sendMail({
// // template: "secretLeakIncident.handlebars",
// // subjectLine: `Incident alert: leaked secrets found in Github repository ${repository.fullName}`,
// // recipients: usersToNotify,
// // substitutions: {
// // numberOfSecrets: Object.keys(allFindingsByFingerprint).length,
// // pusher_email: pusher.email,
// // pusher_name: pusher.name
// // }
// // });
// // }
// // const postHogClient = await TelemetryService.getPostHogClient();
// // if (postHogClient) {
// // postHogClient.capture({
// // event: "cloud secret scan",
// // distinctId: pusher.email,
// // properties: {
// // numberOfCommitsScanned: commits.length,
// // numberOfRisksFound: Object.keys(allFindingsByFingerprint).length,
// // }
// // });
// // }
// // done(null, allFindingsByFingerprint)
// })
// export const scanGithubFullRepositoryForSecretLeaks = (scanFullRepositoryDetails: TScanFullRepositoryDetails) => {
// console.log("full repo scan started")
// githubFullRepositoryScan.add(scanFullRepositoryDetails)
// }

View File

@ -0,0 +1,148 @@
import Queue, { Job } from "bull";
import { ProbotOctokit } from "probot"
import { Commit, Committer, Repository } from "@octokit/webhooks-types";
import TelemetryService from "../../services/TelemetryService";
import { sendMail } from "../../helpers";
import GitRisks from "../../ee/models/gitRisks";
import { MembershipOrg, User } from "../../models";
import { OWNER, ADMIN } from "../../variables";
import { convertKeysToLowercase, scanContentAndGetFindings } from "../../ee/services/GithubSecretScanning/helper";
import { getSecretScanningGitAppId, getSecretScanningPrivateKey } from "../../config";
import { SecretMatch } from "../../ee/services/GithubSecretScanning/types";
// Use the configured Redis connection (REDIS_URL) rather than a hard-coded
// host — matches the sync-secrets queue and the documented REDIS_URL env var.
export const githubPushEventSecretScan = new Queue('github-push-event-secret-scanning', process.env.REDIS_URL as string);
// Payload for one push-event scan job (serialized into the Bull queue).
type TScanPushEventQueueDetails = {
  organizationId: string, // Infisical organization linked to the GitHub app installation
  commits: Commit[]
  pusher: {
    name: string,
    email: string | null // may be null when GitHub withholds the pusher's email
  },
  repository: {
    id: number,
    fullName: string, // "owner/repo"
  },
  installationId: number // GitHub app installation id used to authenticate the scan
}
/**
 * Scans every file added/modified by the pushed commits for leaked secrets,
 * upserts findings as GitRisks, emails the pusher plus org admins/owners on
 * any finding, and reports scan telemetry.
 */
githubPushEventSecretScan.process(async (job: Job, done: Queue.DoneCallback) => {
  const { organizationId, commits, pusher, repository, installationId }: TScanPushEventQueueDetails = job.data
  const [owner, repo] = repository.fullName.split("/");
  const octokit = new ProbotOctokit({
    auth: {
      appId: await getSecretScanningGitAppId(),
      privateKey: await getSecretScanningPrivateKey(),
      installationId: installationId
    },
  });

  const allFindingsByFingerprint: { [key: string]: SecretMatch; } = {}

  for (const commit of commits) {
    for (const filepath of [...commit.added, ...commit.modified]) {
      try {
        const fileContentsResponse = await octokit.repos.getContent({
          owner,
          repo,
          path: filepath,
        });

        const data: any = fileContentsResponse.data;
        const fileContent = Buffer.from(data.content, "base64").toString();

        const findings = await scanContentAndGetFindings(`\n${fileContent}`) // extra line to count lines correctly

        for (const finding of findings) {
          const fingerPrintWithCommitId = `${commit.id}:${filepath}:${finding.RuleID}:${finding.StartLine}`
          const fingerPrintWithoutCommitId = `${filepath}:${finding.RuleID}:${finding.StartLine}`
          finding.Fingerprint = fingerPrintWithCommitId
          finding.FingerPrintWithoutCommitId = fingerPrintWithoutCommitId
          finding.Commit = commit.id
          finding.File = filepath
          finding.Author = commit.author.name
          finding.Email = commit?.author?.email ? commit?.author?.email : ""

          allFindingsByFingerprint[fingerPrintWithCommitId] = finding
        }
      } catch (error) {
        // Bug fix: previously `done(err)` was called here WITHOUT returning,
        // so the loop kept going and `done` fired a second time at the end.
        return done(new Error(`gitHubHistoricalScanning.process: unable to fetch content for [filepath=${filepath}] because [error=${error}]`), null)
      }
    }
  }

  // Upsert by fingerprint so re-scans update existing risks instead of duplicating.
  for (const key in allFindingsByFingerprint) {
    await GitRisks.findOneAndUpdate({ fingerprint: allFindingsByFingerprint[key].Fingerprint },
      {
        ...convertKeysToLowercase(allFindingsByFingerprint[key]),
        installationId: installationId,
        organization: organizationId,
        repositoryFullName: repository.fullName,
        repositoryId: repository.id
      }, {
      upsert: true
    }).lean()
  }

  // get emails of admins
  const adminsOfWork = await MembershipOrg.find({
    organization: organizationId,
    $or: [
      { role: OWNER },
      { role: ADMIN }
    ]
  }).lean()

  const userEmails = await User.find({
    _id: {
      $in: [adminsOfWork.map(orgMembership => orgMembership.user)]
    }
  }).select("email").lean()

  const adminOrOwnerEmails = userEmails.map(userObject => userObject.email)
  const usersToNotify = pusher?.email ? [pusher.email, ...adminOrOwnerEmails] : [...adminOrOwnerEmails]

  // Only send the incident email when at least one secret was found.
  if (Object.keys(allFindingsByFingerprint).length) {
    await sendMail({
      template: "secretLeakIncident.handlebars",
      subjectLine: `Incident alert: leaked secrets found in Github repository ${repository.fullName}`,
      recipients: usersToNotify,
      substitutions: {
        numberOfSecrets: Object.keys(allFindingsByFingerprint).length,
        pusher_email: pusher.email,
        pusher_name: pusher.name
      }
    });
  }

  const postHogClient = await TelemetryService.getPostHogClient();
  if (postHogClient) {
    postHogClient.capture({
      event: "cloud secret scan",
      distinctId: pusher.email,
      properties: {
        numberOfCommitsScanned: commits.length,
        numberOfRisksFound: Object.keys(allFindingsByFingerprint).length,
      }
    });
  }

  done(null, allFindingsByFingerprint)
})
/**
 * Enqueue a GitHub push event for asynchronous secret scanning.
 *
 * Jobs are retried up to 3 times with exponential backoff (5s base delay);
 * completed jobs are removed immediately and only the 20 most recent failed
 * jobs are retained for inspection.
 *
 * @param pushEventPayload - details of the push event to scan
 */
export const scanGithubPushEventForSecretLeaks = (pushEventPayload: TScanPushEventQueueDetails) => {
  const jobOptions = {
    attempts: 3,
    backoff: {
      type: "exponential",
      delay: 5000
    },
    removeOnComplete: true,
    removeOnFail: {
      count: 20 // keep the most recent 20 failed jobs
    }
  };

  githubPushEventSecretScan.add(pushEventPayload, jobOptions);
}

View File

@ -1,17 +1,26 @@
import express, { Request, Response } from "express";
import { getInviteOnlySignup, getSecretScanningGitAppId, getSecretScanningPrivateKey, getSecretScanningWebhookSecret, getSmtpConfigured } from "../../config";
import { getInviteOnlySignup, getRedisUrl, getSecretScanningGitAppId, getSecretScanningPrivateKey, getSecretScanningWebhookSecret, getSmtpConfigured } from "../../config";
const router = express.Router();
router.get(
"/status",
async (req: Request, res: Response) => {
const gitAppId = await getSecretScanningGitAppId()
const gitSecretScanningWebhookSecret = await getSecretScanningWebhookSecret()
const gitSecretScanningPrivateKey = await getSecretScanningPrivateKey()
let secretScanningConfigured = false
if (gitAppId && gitSecretScanningPrivateKey && gitSecretScanningWebhookSecret) {
secretScanningConfigured = true
}
res.status(200).json({
date: new Date(),
message: "Ok",
emailConfigured: await getSmtpConfigured(),
secretScanningConfigured: await getSecretScanningGitAppId() && await getSecretScanningWebhookSecret() && await getSecretScanningPrivateKey(),
inviteOnlySignup: await getInviteOnlySignup()
inviteOnlySignup: await getInviteOnlySignup(),
redisConfigured: await getRedisUrl() !== "" && await getRedisUrl() !== undefined,
secretScanningConfigured: secretScanningConfigured,
})
}
);

View File

@ -46,6 +46,24 @@ router.put(
environmentController.renameWorkspaceEnvironment
);
// PATCH /:workspaceId/environments
// Swaps the display order of two environments in a workspace by exchanging
// their name/slug pairs. Requires a JWT-authenticated user who is an ADMIN
// or MEMBER of the workspace; all four environment fields are mandatory.
router.patch(
  "/:workspaceId/environments",
  requireAuth({
    acceptedAuthModes: [AuthMode.JWT],
  }),
  requireWorkspaceAuth({
    acceptedRoles: [ADMIN, MEMBER],
    locationWorkspaceId: "params",
  }),
  param("workspaceId").exists().trim(),
  body("environmentSlug").exists().isString().trim(),
  body("environmentName").exists().isString().trim(),
  body("otherEnvironmentSlug").exists().isString().trim(),
  body("otherEnvironmentName").exists().isString().trim(),
  validateRequest,
  environmentController.reorderWorkspaceEnvironments
);
router.delete(
"/:workspaceId/environments",
requireAuth({

View File

@ -1,4 +1,4 @@
import express from "express";
import express, { Request, Response } from "express";
const router = express.Router();
import { requireAuth, requireWorkspaceAuth, validateRequest } from "../../middleware";
import { body, param, query } from "express-validator";

View File

@ -1,18 +1,18 @@
import { Types } from "mongoose";
import {
import {
getIntegrationAuthAccessHelper,
getIntegrationAuthRefreshHelper,
handleOAuthExchangeHelper,
setIntegrationAuthAccessHelper,
setIntegrationAuthRefreshHelper,
syncIntegrationsHelper,
} from "../helpers/integration";
import { syncSecretsToActiveIntegrationsQueue } from "../queues/integrations/syncSecretsToThirdPartyServices";
/**
* Class to handle integrations
*/
class IntegrationService {
/**
* Perform OAuth2 code-token exchange for workspace with id [workspaceId] and integration
* named [integration]
@ -26,12 +26,12 @@ class IntegrationService {
* @param {String} obj1.code - code
* @returns {IntegrationAuth} integrationAuth - integration authorization after OAuth2 code-token exchange
*/
static async handleOAuthExchange({
static async handleOAuthExchange({
workspaceId,
integration,
code,
environment,
}: {
}: {
workspaceId: string;
integration: string;
code: string;
@ -44,25 +44,23 @@ class IntegrationService {
environment,
});
}
/**
* Sync/push environment variables in workspace with id [workspaceId] to
* all associated integrations
* @param {Object} obj
* @param {Object} obj.workspaceId - id of workspace
*/
static async syncIntegrations({
static syncIntegrations({
workspaceId,
environment,
}: {
workspaceId: Types.ObjectId;
environment?: string;
}) {
return await syncIntegrationsHelper({
workspaceId,
});
syncSecretsToActiveIntegrationsQueue({ workspaceId: workspaceId.toString(), environment: environment })
}
/**
* Return decrypted refresh token for integration auth
* with id [integrationAuthId]
@ -70,12 +68,12 @@ class IntegrationService {
* @param {String} obj.integrationAuthId - id of integration auth
* @param {String} refreshToken - decrypted refresh token
*/
static async getIntegrationAuthRefresh({ integrationAuthId }: { integrationAuthId: Types.ObjectId}) {
static async getIntegrationAuthRefresh({ integrationAuthId }: { integrationAuthId: Types.ObjectId }) {
return await getIntegrationAuthRefreshHelper({
integrationAuthId,
});
}
/**
* Return decrypted access token for integration auth
* with id [integrationAuthId]
@ -98,11 +96,11 @@ class IntegrationService {
* @param {String} obj.refreshToken - refresh token
* @returns {IntegrationAuth} integrationAuth - updated integration auth
*/
static async setIntegrationAuthRefresh({
static async setIntegrationAuthRefresh({
integrationAuthId,
refreshToken,
}: {
integrationAuthId: string;
refreshToken,
}: {
integrationAuthId: string;
refreshToken: string;
}) {
return await setIntegrationAuthRefreshHelper({
@ -122,12 +120,12 @@ class IntegrationService {
* @param {Date} obj.accessExpiresAt - expiration date of access token
* @returns {IntegrationAuth} - updated integration auth
*/
static async setIntegrationAuthAccess({
static async setIntegrationAuthAccess({
integrationAuthId,
accessId,
accessToken,
accessExpiresAt,
}: {
}: {
integrationAuthId: string;
accessId: string | null;
accessToken: string;

View File

@ -19,7 +19,7 @@ import {
backfillTrustedIps,
backfillUserAuthMethods
} from "./backfillData";
import {
import {
reencryptBotOrgKeys,
reencryptBotPrivateKeys,
reencryptSecretBlindIndexDataSalts
@ -27,6 +27,7 @@ import {
import {
getMongoURL,
getNodeEnv,
getRedisUrl,
getSentryDSN
} from "../../config";
import { initializePassport } from "../auth";
@ -42,6 +43,10 @@ import { initializePassport } from "../auth";
* - Re-encrypting data
*/
export const setup = async () => {
if (await getRedisUrl() === undefined || await getRedisUrl() === "") {
console.error("WARNING: Redis is not yet configured. Infisical may not function as expected without it.")
}
await validateEncryptionKeysConfig();
await TelemetryService.logTelemetryMessage();

View File

@ -2,6 +2,8 @@ import { Server } from "http";
import main from "../src";
import { afterAll, beforeAll, describe, expect, it } from "@jest/globals";
import request from "supertest";
import { githubPushEventSecretScan } from "../src/queues/secret-scanning/githubScanPushEvent";
import { syncSecretsToThirdPartyServices } from "../src/queues/integrations/syncSecretsToThirdPartyServices";
let server: Server;
@ -11,6 +13,8 @@ beforeAll(async () => {
afterAll(async () => {
server.close();
githubPushEventSecretScan.close()
syncSecretsToThirdPartyServices.close()
});
describe("Healthcheck endpoint", () => {

View File

@ -21,6 +21,7 @@ services:
depends_on:
- mongo
- smtp-server
- redis
build:
context: ./backend
dockerfile: Dockerfile
@ -99,9 +100,36 @@ services:
networks:
- infisical-dev
redis:
image: redis
container_name: infisical-dev-redis
environment:
- ALLOW_EMPTY_PASSWORD=yes
ports:
- 6379:6379
volumes:
- redis_data:/data
networks:
- infisical-dev
redis-commander:
container_name: infisical-dev-redis-commander
image: rediscommander/redis-commander
restart: always
depends_on:
- redis
environment:
- REDIS_HOSTS=local:redis:6379
ports:
- "8085:8081"
networks:
- infisical-dev
volumes:
mongo-data:
driver: local
redis_data:
driver: local
networks:
infisical-dev:

View File

@ -41,19 +41,17 @@ services:
networks:
- infisical
# secret-scanning-git-app:
# container_name: infisical-secret-scanning-git-app
# restart: unless-stopped
# depends_on:
# - backend
# - frontend
# - mongo
# ports:
# - "3000:3001"
# image: infisical/staging_deployment_secret-scanning-git-app
# env_file: .env
# networks:
# - infisical
redis:
image: redis
container_name: infisical-dev-redis
environment:
- ALLOW_EMPTY_PASSWORD=yes
ports:
- 6379:6379
networks:
- infisical
volumes:
- redis_data:/data
mongo:
container_name: infisical-mongo
@ -71,6 +69,8 @@ services:
volumes:
mongo-data:
driver: local
redis_data:
driver: local
networks:
infisical:

View File

@ -144,13 +144,12 @@
"self-hosting/deployment-options/aws-ec2",
"self-hosting/deployment-options/docker-compose",
"self-hosting/deployment-options/standalone-infisical",
"self-hosting/deployment-options/fly.io",
"self-hosting/deployment-options/render",
"self-hosting/deployment-options/digital-ocean-marketplace"
]
},
"self-hosting/configuration/envars",
"self-hosting/configuration/email",
"self-hosting/configuration/redis",
"self-hosting/faq"
]
},

View File

@ -49,10 +49,15 @@ Other environment variables are listed below to increase the functionality of yo
16`
</ParamField>
<ParamField query="MONGO_URL" type="string" default="none" required>
*TLS based connection string is not yet supported
</ParamField>
</Tab>
<ParamField query="MONGO_URL" type="string" default="none" required>
*TLS based connection string is not yet supported
</ParamField>
<ParamField query="REDIS_URL" type="string" default="none" required>
Redis connection string
</ParamField>
</Tab>
<Tab title="Email service">
<Info>When email service is not configured, Infisical will have limited functionality</Info>

View File

@ -0,0 +1,83 @@
---
title: "Configure Redis"
description: "Learn to configure Redis with your self hosted Infisical"
---
## Why Redis?
As the features and use cases of Infisical have grown, the need for a fast and reliable in-memory data store has become clear.
By adding Redis to Infisical, we can now support more complex workflows such as a queuing system to run long-running asynchronous tasks, cron jobs, and a reliable cache to speed up access to frequently used resources.
<Info>
Starting with Infisical version v0.31.0, Redis will be required to fully use Infisical
</Info>
### Adding Redis to your self hosted instance of Infisical
To add Redis to your self hosted instance, follow the instructions for the deployment method you used.
<Tabs>
<Tab title="Kubernetes Helm chart">
### In cluster Redis
By default, new versions of the Infisical Helm chart already come with an in-cluster Redis instance. To deploy an in-cluster Redis instance along with your Infisical instance, update your Infisical chart and then redeploy/upgrade your release.
This will spin up a Redis instance and automatically configure it with your Infisical backend.
1. Update Infisical Helm chart
```bash
helm repo update
```
2. Upgrade Infisical release
```bash
helm upgrade <infisical release name> infisical-helm-charts/infisical --values <path to your values file>
```
### External Redis
If you want to use an external Redis instance, add a Redis connection URL under the backend environment variables and then upgrade/redeploy your Infisical instance.
1. Update your helm values file
```yaml your-values.yaml
backendEnvironmentVariables:
REDIS_URL=<your redis connection string>
```
2. Upgrade Infisical release
```bash
helm upgrade <infisical release name> infisical-helm-charts/infisical --values <path to your values file>
```
</Tab>
<Tab title="Docker compose">
### Internal Redis service
By default, new versions of the docker compose file already come with a Redis service. To use the pre-configured Redis service, update your docker compose file to the latest version.
1. Download the new docker compose file
```
wget -O docker-compose.yml https://raw.githubusercontent.com/Infisical/infisical/main/docker-compose.yml
```
2. Add Redis environment variable to your .env file
```.env .env
REDIS_URL=redis://redis:6379
```
3. Restart your docker compose services
</Tab>
<Tab title="Standalone Docker image">
This standalone version of Infisical does not have an internal Redis service. To configure Redis with your Infisical instance, you must connect to an external Redis service by setting the connection string as an environment variable.
Example:
```bash
docker run -p 80:80 \
-e ENCRYPTION_KEY=f40c9178624764ad85a6830b37ce239a \
-e JWT_SIGNUP_SECRET=38ea90fb7998b92176080f457d890392 \
-e JWT_REFRESH_SECRET=7764c7bbf3928ad501591a3e005eb364 \
-e JWT_AUTH_SECRET=5239fea3a4720c0e524f814a540e14a2 \
-e JWT_SERVICE_SECRET=8509fb8b90c9b53e9e61d1e35826dcb5 \
-e REDIS_URL=<> \
-e MONGO_URL="<>" \
infisical/infisical:latest
```
Redis environment variable name: `REDIS_URL`
</Tab>
</Tabs>
## Support
If you have questions or need support, please join our [Slack channel](https://infisical-users.slack.com) and one of our teammates will be happy to guide you.

View File

@ -32,7 +32,7 @@ However, it's important to specify a particular version of Infisical during inst
View [properties for frontend and backend](https://github.com/Infisical/infisical/tree/main/helm-charts/infisical#parameters).
<Tip>
To find the latest version number of Infisical, follow the links bellow
To find the latest version number of Infisical, follow the links below
- [frontend Docker image](https://hub.docker.com/r/infisical/frontend/tags)
- [backend Docker image](https://hub.docker.com/r/infisical/backend/tags)
</Tip>
@ -92,7 +92,7 @@ ingress:
#### Database
Infisical uses a document database as its persistence layer. With this Helm chart, you can spin up a MongoDB instance powered by Bitnami alongside other Infisical services in your cluster.
When persistence is enabled, the data will be stored a Kubernetes Persistence Volume. View all [properties for mongodb](https://github.com/Infisical/infisical/tree/main/helm-charts/infisical).
When persistence is enabled, the data will be stored in a Kubernetes Persistent Volume. View all [properties for mongodb](https://github.com/Infisical/infisical/tree/main/helm-charts/infisical).
```yaml simple-values-example.yaml
mongodb:

View File

@ -46,6 +46,10 @@ Add the required environment variables listed below to your docker run command.
Must be a random 16 byte hex string. Can be generated with `openssl rand -hex 16`
</ParamField>
<ParamField query="REDIS_URL" type="string" default="none" required>
Redis connection string
</ParamField>
<ParamField query="MONGO_URL" type="string" default="none" required>
*TLS based connection string is not yet supported
</ParamField>
@ -61,6 +65,7 @@ docker run -p 80:80 \
-e JWT_AUTH_SECRET=5239fea3a4720c0e524f814a540e14a2 \
-e JWT_SERVICE_SECRET=8509fb8b90c9b53e9e61d1e35826dcb5 \
-e MONGO_URL="<>" \
-e REDIS_URL="<>" \
infisical/infisical:latest
```

View File

@ -22,12 +22,6 @@ Choose from a variety of deployment options listed below to get started.
>
Automatically create and deploy Infisical on to a Kubernetes cluster
</Card>
<Card title="Fly.io" color="#dc2626" href="deployment-options/fly.io">
Use our standalone docker image to deploy on Fly.io
</Card>
<Card title="Render.com" color="#dc2626" href="deployment-options/render">
Install on Render using our standalone docker image
</Card>
<Card title="AWS EC2" color="#0285c7" href="deployment-options/aws-ec2">
Install infisical with just a few clicks using our Cloud Formation template
</Card>

View File

@ -29,7 +29,7 @@ export const ModalContent = forwardRef<HTMLDivElement, ModalContentProps>(
<Card
isRounded
className={twMerge(
"fixed top-1/2 left-1/2 z-[90] max-w-lg -translate-y-2/4 -translate-x-2/4 animate-popIn border border-mineshaft-600 drop-shadow-2xl",
"fixed top-1/2 left-1/2 z-[90] dark:[color-scheme:dark] max-h-screen overflow-y-auto thin-scrollbar max-w-lg -translate-y-2/4 -translate-x-2/4 animate-popIn border border-mineshaft-600 drop-shadow-2xl",
className
)}
>

View File

@ -4,4 +4,5 @@ export type ServerStatus = {
emailConfigured: boolean;
inviteOnlySignup: boolean;
secretScanningConfigured: boolean
redisConfigured: boolean
};

View File

@ -16,6 +16,7 @@ export {
useGetWorkspaceUsers,
useNameWorkspaceSecrets,
useRenameWorkspace,
useReorderWsEnvironment,
useToggleAutoCapitalization,
useUpdateUserWorkspaceRole,
useUpdateWsEnvironment} from "./queries";

View File

@ -13,6 +13,7 @@ import {
GetWsEnvironmentDTO,
NameWorkspaceSecretsDTO,
RenameWorkspaceDTO,
ReorderEnvironmentsDTO,
ToggleAutoCapitalizationDTO,
UpdateEnvironmentDTO,
Workspace,
@ -244,6 +245,21 @@ export const useCreateWsEnvironment = () => {
});
};
/**
 * Mutation hook that swaps the ordering of two workspace environments via
 * the v2 workspace API. On success the cached workspace list is invalidated
 * so consumers refetch the new environment order.
 */
export const useReorderWsEnvironment = () => {
  const queryClient = useQueryClient();

  return useMutation<{}, {}, ReorderEnvironmentsDTO>({
    mutationFn: (dto) => {
      const { workspaceID, environmentSlug, environmentName, otherEnvironmentSlug, otherEnvironmentName } = dto;
      const payload = { environmentSlug, environmentName, otherEnvironmentSlug, otherEnvironmentName };
      return apiRequest.patch(`/api/v2/workspace/${workspaceID}/environments`, payload);
    },
    onSuccess: () => {
      queryClient.invalidateQueries(workspaceKeys.getAllUserWorkspace);
    }
  });
};
export const useUpdateWsEnvironment = () => {
const queryClient = useQueryClient();

View File

@ -46,6 +46,15 @@ export type CreateEnvironmentDTO = {
environmentName: string;
};
// Payload for swapping the positions of two environments within a workspace.
export type ReorderEnvironmentsDTO = {
  workspaceID: string;
  // environment being moved
  environmentSlug: string;
  environmentName: string;
  // environment currently occupying the target position (the swap partner)
  otherEnvironmentSlug: string;
  otherEnvironmentName: string;
};
export type UpdateEnvironmentDTO = {
workspaceID: string;
oldEnvironmentSlug: string;

View File

@ -4,6 +4,7 @@ import { useEffect, useState } from "react";
import { Controller, useForm } from "react-hook-form";
import { useTranslation } from "react-i18next";
import Head from "next/head";
import Link from "next/link";
import { useRouter } from "next/router";
import { IconProp } from "@fortawesome/fontawesome-svg-core";
import { faSlack } from "@fortawesome/free-brands-svg-icons";
@ -11,6 +12,7 @@ import { faFolderOpen } from "@fortawesome/free-regular-svg-icons";
import {
faArrowRight,
faCheckCircle,
faExclamationCircle,
faHandPeace,
faMagnifyingGlass,
faNetworkWired,
@ -37,6 +39,7 @@ import {
import { TabsObject } from "@app/components/v2/Tabs";
import { useSubscription, useUser, useWorkspace } from "@app/context";
import { fetchOrgUsers, useAddUserToWs, useCreateWorkspace, useRegisterUserAction,useUploadWsKey } from "@app/hooks/api";
import { useFetchServerStatus } from "@app/hooks/api/serverDetails";
import { usePopUp } from "@app/hooks/usePopUp";
import { encryptAssymmetric } from "../../../../components/utilities/cryptography/crypto";
@ -269,6 +272,8 @@ export default function Organization() {
const createWs = useCreateWorkspace();
const { user } = useUser();
const uploadWsKey = useUploadWsKey();
const { data: serverDetails } = useFetchServerStatus();
const onCreateProject = async ({ name, addMembers }: TAddProjectFormData) => {
// type check
@ -340,6 +345,18 @@ export default function Organization() {
<title>{t("common.head-title", { title: t("settings.members.title") })}</title>
<link rel="icon" href="/infisical.ico" />
</Head>
{!serverDetails?.redisConfigured && <div className="mb-4 flex flex-col items-start justify-start px-6 py-6 pb-0 text-3xl">
<p className="mr-4 mb-4 font-semibold text-white">Announcements</p>
<div className="w-full border border-blue-400/70 rounded-md bg-blue-900/70 p-2 text-base text-mineshaft-100 flex items-center">
<FontAwesomeIcon icon={faExclamationCircle} className="text-2xl mr-4 p-4 text-mineshaft-50"/>
Attention: Updated versions of Infisical now require Redis for full functionality. Learn how to configure it
<Link href="https://infisical.com/docs/self-hosting/configuration/redis" target="_blank">
<span className="pl-1 text-white underline underline-offset-2 hover:decoration-blue-400 hover:text-blue-200 duration-100 cursor-pointer">
here
</span>
</Link>.
</div>
</div>}
<div className="mb-4 flex flex-col items-start justify-start px-6 py-6 pb-0 text-3xl">
<p className="mr-4 font-semibold text-white">Projects</p>
<div className="mt-6 flex w-full flex-row">

View File

@ -41,7 +41,7 @@ export default function SecretScanning() {
const generateNewIntegrationSession = async () => {
const session = await createNewIntegrationSession(String(localStorage.getItem("orgData.id")))
router.push(`https://github.com/apps/infisical-radar/installations/new?state=${session.sessionId}`)
router.push(`https://github.com/apps/infisical-radar-dev/installations/new?state=${session.sessionId}`)
}
return (

View File

@ -1,6 +1,7 @@
import { faPencil, faXmark } from "@fortawesome/free-solid-svg-icons";
import { faArrowDown,faArrowUp, faPencil, faXmark } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { useNotificationContext } from "@app/components/context/Notifications/NotificationProvider";
import {
EmptyState,
IconButton,
@ -14,6 +15,9 @@ import {
Tr
} from "@app/components/v2";
import { useWorkspace } from "@app/context";
import {
useReorderWsEnvironment
} from "@app/hooks/api";
import { UsePopUpState } from "@app/hooks/usePopUp";
type Props = {
@ -31,6 +35,43 @@ type Props = {
export const EnvironmentTable = ({ handlePopUpOpen }: Props) => {
const { currentWorkspace, isLoading } = useWorkspace();
const { createNotification } = useNotificationContext();
const reorderWsEnvironment = useReorderWsEnvironment();
// Move an environment one position up or down by swapping it with its
// neighbor through the reorder API, then notify the user of the outcome.
const handleReorderEnv = async (shouldMoveUp: boolean, name: string, slug: string) => {
  try {
    if (!currentWorkspace?._id) return;

    const indexOfEnv = currentWorkspace.environments.findIndex(
      (env) => env.name === name && env.slug === slug
    );

    // environment not found in the workspace — nothing to reorder
    // (without this guard, indexToSwap would resolve to 0 or -2 and could
    // swap the wrong environments)
    if (indexOfEnv === -1) return;

    // check that this reordering is possible (not already at an edge)
    if ((indexOfEnv === 0 && shouldMoveUp) || (indexOfEnv === currentWorkspace.environments.length - 1 && !shouldMoveUp)) {
      return;
    }

    const indexToSwap = shouldMoveUp ? indexOfEnv - 1 : indexOfEnv + 1;

    await reorderWsEnvironment.mutateAsync({
      workspaceID: currentWorkspace._id,
      environmentSlug: slug,
      environmentName: name,
      otherEnvironmentSlug: currentWorkspace.environments[indexToSwap].slug,
      otherEnvironmentName: currentWorkspace.environments[indexToSwap].name
    });

    createNotification({
      text: "Successfully re-ordered environments",
      type: "success"
    });
  } catch (err) {
    console.error(err);
    createNotification({
      text: "Failed to re-order environments",
      type: "error"
    });
  }
};
return (
<TableContainer>
<Table>
@ -45,11 +86,35 @@ export const EnvironmentTable = ({ handlePopUpOpen }: Props) => {
{isLoading && <TableSkeleton columns={3} innerKey="project-envs" />}
{!isLoading &&
currentWorkspace &&
currentWorkspace.environments.map(({ name, slug }) => (
currentWorkspace.environments.map(({ name, slug }, pos) => (
<Tr key={name}>
<Td>{name}</Td>
<Td>{slug}</Td>
<Td className="flex items-center justify-end">
<IconButton
className="mr-3 py-2"
onClick={() => {
handleReorderEnv(false, name, slug)
}}
colorSchema="primary"
variant="plain"
ariaLabel="update"
isDisabled={pos === currentWorkspace.environments.length - 1}
>
<FontAwesomeIcon icon={faArrowDown} />
</IconButton>
<IconButton
className="mr-3 py-2"
onClick={() => {
handleReorderEnv(true, name, slug)
}}
colorSchema="primary"
variant="plain"
ariaLabel="update"
isDisabled={pos === 0}
>
<FontAwesomeIcon icon={faArrowUp} />
</IconButton>
<IconButton
className="mr-3 py-2"
onClick={() => {

View File

@ -5,8 +5,11 @@ dependencies:
- name: mailhog
repository: https://codecentric.github.io/helm-charts
version: 5.2.3
- name: redis
repository: https://charts.bitnami.com/bitnami
version: 17.15.0
- name: ingress-nginx
repository: https://kubernetes.github.io/ingress-nginx
version: 4.0.13
digest: sha256:d1a679e6c30e37da96b7a4b6115e285f61e6ce0dd921ffbe2cf557418c229f33
generated: "2023-04-08T15:59:12.950942-07:00"
digest: sha256:1762132c45000bb6d410c6da2291ac5c65f91331550a473b370374ba042d0744
generated: "2023-08-10T15:03:12.219788-04:00"

View File

@ -7,7 +7,7 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.2.1
version: 0.3.0
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
@ -24,6 +24,10 @@ dependencies:
version: "~5.2.3"
repository: "https://codecentric.github.io/helm-charts"
condition: mailhog.enabled
- name: redis
version: 17.15.0
repository: https://charts.bitnami.com/bitnami
condition: redis.enabled
- name: ingress-nginx
version: 4.0.13
repository: https://kubernetes.github.io/ingress-nginx

View File

@ -7,6 +7,7 @@ This is the Infisical application Helm chart. This chart includes the following
| `frontend` | Infisical's Web UI |
| `backend` | Infisical's API |
| `mongodb` | Infisical's database |
| `redis` | Infisical's cache service |
| `mailhog` | Infisical's development SMTP server |
## Installation
@ -58,7 +59,6 @@ kubectl get secrets -n <namespace> <secret-name> \
| `nameOverride` | Override release name | `""` |
| `fullnameOverride` | Override release fullname | `""` |
### Infisical frontend parameters
| Name | Description | Value |
@ -78,41 +78,41 @@ kubectl get secrets -n <namespace> <secret-name> \
| `frontend.service.nodePort` | Backend service nodePort (used if above type is `NodePort`) | `""` |
| `frontendEnvironmentVariables.SITE_URL` | Absolute URL including the protocol (e.g. https://app.infisical.com) | `infisical.local` |
### Infisical backend parameters
| Name | Description | Value |
| ------------------------------------------------ | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------- |
| `backend.enabled` | Enable backend | `true` |
| `backend.name` | Backend name | `backend` |
| `backend.fullnameOverride` | Backend fullnameOverride | `""` |
| `backend.podAnnotations` | Backend pod annotations | `{}` |
| `backend.deploymentAnnotations` | Backend deployment annotations | `{}` |
| `backend.replicaCount` | Backend replica count | `2` |
| `backend.image.repository` | Backend image repository | `infisical/backend` |
| `backend.image.tag` | Backend image tag | `latest` |
| `backend.image.pullPolicy` | Backend image pullPolicy | `IfNotPresent` |
| `backend.kubeSecretRef` | Backend secret resource reference name (containing required [backend configuration variables](https://infisical.com/docs/self-hosting/configuration/envars)) | `""` |
| `backend.service.annotations` | Backend service annotations | `{}` |
| `backend.service.type` | Backend service type | `ClusterIP` |
| `backend.service.nodePort` | Backend service nodePort (used if above type is `NodePort`) | `""` |
| `backendEnvironmentVariables.ENCRYPTION_KEY` | **Required** Backend encryption key (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.JWT_SIGNUP_SECRET` | **Required** Secrets to sign JWT tokens (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.JWT_REFRESH_SECRET` | **Required** Secrets to sign JWT tokens (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.JWT_AUTH_SECRET` | **Required** Secrets to sign JWT tokens (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.JWT_SERVICE_SECRET` | **Required** Secrets to sign JWT tokens (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.JWT_MFA_SECRET` | **Required** Secrets to sign JWT tokens (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.SMTP_HOST` | **Required** Hostname to connect to for establishing SMTP connections | `""` |
| `backendEnvironmentVariables.SMTP_PORT` | Port to connect to for establishing SMTP connections | `587` |
| `backendEnvironmentVariables.SMTP_SECURE` | If true, use TLS when connecting to host. If false, TLS will be used if STARTTLS is supported | `false` |
| `backendEnvironmentVariables.SMTP_FROM_NAME` | Name label to be used in From field (e.g. Infisical) | `Infisical` |
| `backendEnvironmentVariables.SMTP_FROM_ADDRESS` | **Required** Email address to be used for sending emails (e.g. dev@infisical.com) | `""` |
| `backendEnvironmentVariables.SMTP_USERNAME` | **Required** Credential to connect to host (e.g. team@infisical.com) | `""` |
| `backendEnvironmentVariables.SMTP_PASSWORD` | **Required** Credential to connect to host | `""` |
| `backendEnvironmentVariables.SITE_URL` | Absolute URL including the protocol (e.g. https://app.infisical.com) | `infisical.local` |
| `backendEnvironmentVariables.INVITE_ONLY_SIGNUP` | To disable account creation from the login page (invites only) | `false` |
| `backendEnvironmentVariables.MONGO_URL` | MongoDB connection string (external or internal)</br>Leave it empty for auto-generated connection string | `""` |
| Name | Description | Value |
| ------------------------------------------------------ | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------- |
| `backend.enabled` | Enable backend | `true` |
| `backend.name` | Backend name | `backend` |
| `backend.fullnameOverride` | Backend fullnameOverride | `""` |
| `backend.podAnnotations` | Backend pod annotations | `{}` |
| `backend.deploymentAnnotations` | Backend deployment annotations | `{}` |
| `backend.replicaCount` | Backend replica count | `2` |
| `backend.image.repository` | Backend image repository | `infisical/backend` |
| `backend.image.tag` | Backend image tag | `latest` |
| `backend.image.pullPolicy` | Backend image pullPolicy | `IfNotPresent` |
| `backend.kubeSecretRef` | Backend secret resource reference name (containing required [backend configuration variables](https://infisical.com/docs/self-hosting/configuration/envars)) | `""` |
| `backend.service.annotations` | Backend service annotations | `{}` |
| `backend.service.type` | Backend service type | `ClusterIP` |
| `backend.service.nodePort` | Backend service nodePort (used if above type is `NodePort`) | `""` |
| `backendEnvironmentVariables.ENCRYPTION_KEY` | **Required** Backend encryption key (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.JWT_SIGNUP_SECRET` | **Required** Secrets to sign JWT tokens (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.JWT_REFRESH_SECRET` | **Required** Secrets to sign JWT tokens (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.JWT_AUTH_SECRET` | **Required** Secrets to sign JWT tokens (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.JWT_SERVICE_SECRET` | **Required** Secrets to sign JWT tokens (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.JWT_MFA_SECRET` | **Required** Secrets to sign JWT tokens (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.JWT_PROVIDER_AUTH_SECRET` | **Required** Secrets to sign JWT OAuth tokens (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.SMTP_HOST` | **Required** Hostname to connect to for establishing SMTP connections | `""` |
| `backendEnvironmentVariables.SMTP_PORT` | Port to connect to for establishing SMTP connections | `587` |
| `backendEnvironmentVariables.SMTP_SECURE` | If true, use TLS when connecting to host. If false, TLS will be used if STARTTLS is supported | `false` |
| `backendEnvironmentVariables.SMTP_FROM_NAME` | Name label to be used in From field (e.g. Infisical) | `Infisical` |
| `backendEnvironmentVariables.SMTP_FROM_ADDRESS` | **Required** Email address to be used for sending emails (e.g. dev@infisical.com) | `""` |
| `backendEnvironmentVariables.SMTP_USERNAME` | **Required** Credential to connect to host (e.g. team@infisical.com) | `""` |
| `backendEnvironmentVariables.SMTP_PASSWORD` | **Required** Credential to connect to host | `""` |
| `backendEnvironmentVariables.SITE_URL` | Absolute URL including the protocol (e.g. https://app.infisical.com) | `infisical.local` |
| `backendEnvironmentVariables.INVITE_ONLY_SIGNUP` | To disable account creation from the login page (invites only) | `false` |
| `backendEnvironmentVariables.MONGO_URL` | MongoDB connection string (external or internal)</br>Leave it empty for auto-generated connection string | `""` |
| `backendEnvironmentVariables.REDIS_URL` | | `redis://redis-master:6379` |
### MongoDB(&reg;) parameters
@ -154,18 +154,17 @@ kubectl get secrets -n <namespace> <secret-name> \
| `mongodb.persistence.size` | Persistent storage request size | `8Gi` |
| `mongodbConnection.externalMongoDBConnectionString` | Deprecated :warning: External MongoDB connection string</br>Use backendEnvironmentVariables.MONGO_URL instead | `""` |
### Ingress parameters
| Name | Description | Value |
| -------------------------- | ------------------------------------------------------------------------ | ------- |
| `ingress.enabled` | Enable ingress | `true` |
| `ingress.ingressClassName` | Ingress class name | `nginx` |
| `ingress.nginx.enabled` | Ingress controller | `false` |
| `ingress.annotations` | Ingress annotations | `{}` |
| `ingress.hostName` | Ingress hostname (your custom domain name, e.g. `infisical.example.org`) | `""` |
| `ingress.tls` | Ingress TLS hosts (matching above hostName) | `[]` |
### Mailhog parameters
| Name | Description | Value |
@ -184,6 +183,10 @@ kubectl get secrets -n <namespace> <secret-name> \
| `mailhog.ingress.labels` | Ingress labels | `{}` |
| `mailhog.ingress.hosts[0].host` | Mailhog host | `mailhog.infisical.local` |
### Redis parameters
## Persistence

View File

@ -126,5 +126,4 @@ Create the mongodb connection string.
{{- if .Values.mongodbConnection.externalMongoDBConnectionString -}}
{{- $connectionString = .Values.mongodbConnection.externalMongoDBConnectionString -}}
{{- end -}}
{{- printf "%s" $connectionString -}}
{{- end -}}
{{- end -}}

View File

@ -83,6 +83,7 @@ stringData:
"JWT_SERVICE_SECRET" (randAlphaNum 32 | lower)
"JWT_MFA_SECRET" (randAlphaNum 32 | lower)
"JWT_PROVIDER_AUTH_SECRET" (randAlphaNum 32 | lower)
"REDIS_URL" (include "infisical.redis.connectionString" .)
"MONGO_URL" (include "infisical.mongodb.connectionString" .) }}
{{- $secretObj := (lookup "v1" "Secret" .Release.Namespace (include "infisical.backend.fullname" .)) | default dict }}
{{- $secretData := (get $secretObj "data") | default dict }}

View File

@ -169,6 +169,10 @@ backendEnvironmentVariables:
##
MONGO_URL: ""
## @param backendEnvironmentVariables.REDIS_URL Redis connection string (external or internal)
## By default, the backend uses the Redis instance that is automatically deployed alongside Infisical
REDIS_URL: "redis://redis-master:6379"
## @section MongoDB(&reg;) parameters
## Documentation : https://github.com/bitnami/charts/blob/main/bitnami/mongodb/values.yaml
##
@ -419,3 +423,16 @@ mailhog:
paths:
- path: "/"
pathType: Prefix
## @section Redis parameters
## Documentation : https://github.com/bitnami/charts/tree/main/bitnami/redis#parameters
##
## @skip redis
##
# Configuration for the bundled Bitnami Redis(R) subchart.
redis:
# Logical name for the Redis deployment.
name: "redis"
# Pin the release name so generated resources are predictable
# (presumably so the backend's default REDIS_URL, redis://redis-master:6379,
# resolves to the chart-managed service — TODO confirm service naming).
fullnameOverride: "redis"
# Deploy Redis as part of this chart; set to false when pointing
# backendEnvironmentVariables.REDIS_URL at an external Redis.
enabled: true
# Single standalone node (no replication/sentinel).
architecture: standalone
auth:
# Redis AUTH disabled — NOTE(review): relies on in-cluster network
# isolation; consider enabling a password for production.
enabled: false