Merge remote-tracking branch 'origin' into email-case-sensitive

Committed by Tuan Dang on 2023-08-23 17:50:46 +07:00
73 changed files with 5642 additions and 649 deletions

View File

@@ -25,6 +25,9 @@ JWT_PROVIDER_AUTH_LIFETIME=
# Required
MONGO_URL=mongodb://root:example@mongo:27017/?authSource=admin
# Redis
REDIS_URL=redis://redis:6379
# Optional credentials for MongoDB container instance and Mongo-Express
MONGO_USERNAME=root
MONGO_PASSWORD=example

View File

@@ -1,3 +0,0 @@
{
"workbench.editor.wrapTabs": true
}

backend/package-lock.json (generated)

File diff suppressed because it is too large.

View File

@@ -50,7 +50,7 @@
"typescript": "^4.9.3",
"utility-types": "^3.10.0",
"winston": "^3.8.2",
"winston-loki": "^6.0.6"
"winston-loki": "^6.0.7"
},
"name": "infisical-api",
"version": "1.0.0",
@@ -84,6 +84,7 @@
"@posthog/plugin-scaffold": "^1.3.4",
"@types/bcrypt": "^5.0.0",
"@types/bcryptjs": "^2.4.2",
"@types/bull": "^4.10.0",
"@types/cookie-parser": "^1.4.3",
"@types/cors": "^2.8.12",
"@types/express": "^4.17.14",

View File

@@ -68,6 +68,8 @@ export const getSecretScanningWebhookSecret = async () => (await client.getSecre
export const getSecretScanningGitAppId = async () => (await client.getSecret("SECRET_SCANNING_GIT_APP_ID")).secretValue;
export const getSecretScanningPrivateKey = async () => (await client.getSecret("SECRET_SCANNING_PRIVATE_KEY")).secretValue;
export const getRedisUrl = async () => (await client.getSecret("REDIS_URL")).secretValue;
export const getLicenseKey = async () => {
const secretValue = (await client.getSecret("LICENSE_KEY")).secretValue;
return secretValue === "" ? undefined : secretValue;

View File

@@ -8,6 +8,7 @@ import { getFolderByPath } from "../../services/FolderService";
import { BadRequestError } from "../../utils/errors";
import { EEAuditLogService } from "../../ee/services";
import { EventType } from "../../ee/models";
import { syncSecretsToActiveIntegrationsQueue } from "../../queues/integrations/syncSecretsToThirdPartyServices";
/**
* Create/initialize an (empty) integration for integration authorization
@@ -76,7 +77,7 @@ export const createIntegration = async (req: Request, res: Response) => {
})
});
}
await EEAuditLogService.createAuditLog(
req.authData,
{
@@ -218,3 +219,15 @@ export const deleteIntegration = async (req: Request, res: Response) => {
integration
});
};
// Will trigger sync for all integrations within the given env and workspace id
export const manualSync = async (req: Request, res: Response) => {
const { workspaceId, environment } = req.body;
syncSecretsToActiveIntegrationsQueue({
workspaceId,
environment
})
res.status(200).send()
};

View File

@@ -85,6 +85,43 @@ export const createWorkspaceEnvironment = async (
});
};
/**
* Swaps the ordering of two environments in the database. This is purely for aesthetic purposes.
* @param req
* @param res
* @returns
*/
export const reorderWorkspaceEnvironments = async (
req: Request,
res: Response
) => {
const { workspaceId } = req.params;
const { environmentSlug, environmentName, otherEnvironmentSlug, otherEnvironmentName } = req.body;
// atomic update the env to avoid conflict
const workspace = await Workspace.findById(workspaceId).exec();
if (!workspace) {
throw BadRequestError({message: "Couldn't load workspace"});
}
const environmentIndex = workspace.environments.findIndex((env) => env.name === environmentName && env.slug === environmentSlug)
const otherEnvironmentIndex = workspace.environments.findIndex((env) => env.name === otherEnvironmentName && env.slug === otherEnvironmentSlug)
if (environmentIndex === -1 || otherEnvironmentIndex === -1) {
throw BadRequestError({message: "environment or otherEnvironment couldn't be found"})
}
// swap the order of the environments
[workspace.environments[environmentIndex], workspace.environments[otherEnvironmentIndex]] = [workspace.environments[otherEnvironmentIndex], workspace.environments[environmentIndex]]
await workspace.save()
return res.status(200).send({
message: "Successfully reordered environments",
workspace: workspaceId,
});
};
/**
* Rename workspace environment with new name and slug of a workspace with [workspaceId]
* Old slug [oldEnvironmentSlug] must be provided
@@ -124,7 +161,7 @@ export const renameWorkspaceEnvironment = async (
if (envIndex === -1) {
throw new Error("Invalid environment given");
}
const oldEnvironment = workspace.environments[envIndex];
workspace.environments[envIndex].name = environmentName;
@@ -159,7 +196,7 @@ export const renameWorkspaceEnvironment = async (
{ $set: { "deniedPermissions.$[element].environmentSlug": environmentSlug } },
{ arrayFilters: [{ "element.environmentSlug": oldEnvironmentSlug }] }
);
await EEAuditLogService.createAuditLog(
req.authData,
{
@@ -210,7 +247,7 @@ export const deleteWorkspaceEnvironment = async (
if (envIndex === -1) {
throw new Error("Invalid environment given");
}
const oldEnvironment = workspace.environments[envIndex];
workspace.environments.splice(envIndex, 1);

View File

@@ -956,7 +956,7 @@ export const getSecrets = async (req: Request, res: Response) => {
type: EventType.GET_SECRETS,
metadata: {
environment,
secretPath: secretPath as string,
secretPath: (secretPath as string) ?? "/",
numberOfSecrets: secrets.length
}
},

View File

@@ -47,17 +47,16 @@ export const login1 = async (req: Request, res: Response) => {
clientPublicKey: string,
providerAuthToken?: string;
} = req.body;
const user = await User.findOne({
email,
}).select("+salt +verifier");
if (!user) throw new Error("Failed to find user");
if (!user.authMethods.includes(AuthMethod.EMAIL)) {
await validateProviderAuthToken({
email,
user,
providerAuthToken,
});
}
@@ -109,7 +108,6 @@ export const login2 = async (req: Request, res: Response) => {
if (!user.authMethods.includes(AuthMethod.EMAIL)) {
await validateProviderAuthToken({
email,
user,
providerAuthToken,
})
}

View File

@@ -71,8 +71,7 @@ export const completeAccountSignup = async (req: Request, res: Response) => {
if (providerAuthToken) {
await validateProviderAuthToken({
email,
providerAuthToken,
user,
providerAuthToken
});
} else {
const [AUTH_TOKEN_TYPE, AUTH_TOKEN_VALUE] = <[string, string]>req.headers["authorization"]?.split(" ", 2) ?? [null, null]
@@ -117,16 +116,8 @@ export const completeAccountSignup = async (req: Request, res: Response) => {
if (!user)
throw new Error("Failed to complete account for non-existent user"); // ensure user is non-null
const hasSamlEnabled = user.authMethods
.some(
(authMethod: AuthMethod) =>
[
AuthMethod.OKTA_SAML,
AuthMethod.AZURE_SAML,
AuthMethod.JUMPCLOUD_SAML
].includes(authMethod)
);
const hasSamlEnabled = user.authMethods.some((authMethod: AuthMethod) => [AuthMethod.OKTA_SAML, AuthMethod.AZURE_SAML, AuthMethod.JUMPCLOUD_SAML].includes(authMethod));
if (!hasSamlEnabled) { // TODO: modify this part
// initialize default organization and workspace
await initializeDefaultOrg({

View File

@@ -1,58 +1,29 @@
import { Probot } from "probot";
import { exec } from "child_process";
import { mkdir, readFile, rm, writeFile } from "fs";
import { tmpdir } from "os";
import { join } from "path"
import GitRisks from "../../models/gitRisks";
import GitAppOrganizationInstallation from "../../models/gitAppOrganizationInstallation";
import MembershipOrg from "../../../models/membershipOrg";
import { ADMIN, OWNER } from "../../../variables";
import User from "../../../models/user";
import { sendMail } from "../../../helpers";
import TelemetryService from "../../../services/TelemetryService";
type SecretMatch = {
Description: string;
StartLine: number;
EndLine: number;
StartColumn: number;
EndColumn: number;
Match: string;
Secret: string;
File: string;
SymlinkFile: string;
Commit: string;
Entropy: number;
Author: string;
Email: string;
Date: string;
Message: string;
Tags: string[];
RuleID: string;
Fingerprint: string;
FingerPrintWithoutCommitId: string
};
import { scanGithubPushEventForSecretLeaks } from "../../../queues/secret-scanning/githubScanPushEvent";
export default async (app: Probot) => {
app.on("installation.deleted", async (context) => {
const { payload } = context;
const { installation, repositories } = payload;
if (installation.repository_selection == "all") {
await GitRisks.deleteMany({ installationId: installation.id })
await GitAppOrganizationInstallation.deleteOne({ installationId: installation.id })
} else {
if (repositories) {
for (const repository of repositories) {
await GitRisks.deleteMany({ repositoryId: repository.id })
}
if (repositories) {
for (const repository of repositories) {
await GitRisks.deleteMany({ repositoryId: repository.id })
}
await GitAppOrganizationInstallation.deleteOne({ installationId: installation.id })
}
})
app.on("installation", async (context) => {
const { payload } = context;
payload.repositories
const { installation, repositories } = payload;
// TODO: start full repo scans
})
app.on("push", async (context) => {
const { payload } = context;
const { commits, repository, installation, pusher } = payload;
const [owner, repo] = repository.full_name.split("/");
if (!commits || !repository || !installation || !pusher) {
return
@@ -63,188 +34,12 @@ export default async (app: Probot) => {
return
}
const allFindingsByFingerprint: { [key: string]: SecretMatch; } = {}
for (const commit of commits) {
for (const filepath of [...commit.added, ...commit.modified]) {
try {
const fileContentsResponse = await context.octokit.repos.getContent({
owner,
repo,
path: filepath,
});
const data: any = fileContentsResponse.data;
const fileContent = Buffer.from(data.content, "base64").toString();
const findings = await scanContentAndGetFindings(`\n${fileContent}`) // extra line to count lines correctly
for (const finding of findings) {
const fingerPrintWithCommitId = `${commit.id}:${filepath}:${finding.RuleID}:${finding.StartLine}`
const fingerPrintWithoutCommitId = `${filepath}:${finding.RuleID}:${finding.StartLine}`
finding.Fingerprint = fingerPrintWithCommitId
finding.FingerPrintWithoutCommitId = fingerPrintWithoutCommitId
finding.Commit = commit.id
finding.File = filepath
finding.Author = commit.author.name
finding.Email = commit?.author?.email ? commit?.author?.email : ""
allFindingsByFingerprint[fingerPrintWithCommitId] = finding
}
} catch (error) {
console.error(`Error fetching content for ${filepath}`, error); // eslint-disable-line
}
}
}
// change to update
for (const key in allFindingsByFingerprint) {
const risk = await GitRisks.findOneAndUpdate({ fingerprint: allFindingsByFingerprint[key].Fingerprint },
{
...convertKeysToLowercase(allFindingsByFingerprint[key]),
installationId: installation.id,
organization: installationLinkToOrgExists.organizationId,
repositoryFullName: repository.full_name,
repositoryId: repository.id
}, {
upsert: true
}).lean()
}
// get emails of admins
const adminsOfWork = await MembershipOrg.find({
organization: installationLinkToOrgExists.organizationId,
$or: [
{ role: OWNER },
{ role: ADMIN }
]
}).lean()
const userEmails = await User.find({
_id: {
$in: [adminsOfWork.map(orgMembership => orgMembership.user)]
}
}).select("email").lean()
const adminOrOwnerEmails = userEmails.map(userObject => userObject.email)
const usersToNotify = pusher?.email ? [pusher.email, ...adminOrOwnerEmails] : [...adminOrOwnerEmails]
if (Object.keys(allFindingsByFingerprint).length) {
await sendMail({
template: "secretLeakIncident.handlebars",
subjectLine: `Incident alert: leaked secrets found in Github repository ${repository.full_name}`,
recipients: usersToNotify,
substitutions: {
numberOfSecrets: Object.keys(allFindingsByFingerprint).length,
pusher_email: pusher.email,
pusher_name: pusher.name
}
});
}
const postHogClient = await TelemetryService.getPostHogClient();
if (postHogClient) {
postHogClient.capture({
event: "cloud secret scan",
distinctId: pusher.email,
properties: {
numberOfCommitsScanned: commits.length,
numberOfRisksFound: Object.keys(allFindingsByFingerprint).length,
}
});
}
scanGithubPushEventForSecretLeaks({
commits: commits,
pusher: { name: pusher.name, email: pusher.email },
repository: { fullName: repository.full_name, id: repository.id },
organizationId: installationLinkToOrgExists.organizationId,
installationId: installation.id
})
});
};
async function scanContentAndGetFindings(textContent: string): Promise<SecretMatch[]> {
const tempFolder = await createTempFolder();
const filePath = join(tempFolder, "content.txt");
const findingsPath = join(tempFolder, "findings.json");
try {
await writeTextToFile(filePath, textContent);
await runInfisicalScan(filePath, findingsPath);
const findingsData = await readFindingsFile(findingsPath);
return JSON.parse(findingsData);
} finally {
await deleteTempFolder(tempFolder);
}
}
function createTempFolder(): Promise<string> {
return new Promise((resolve, reject) => {
const tempDir = tmpdir()
const tempFolderName = Math.random().toString(36).substring(2);
const tempFolderPath = join(tempDir, tempFolderName);
mkdir(tempFolderPath, (err: any) => {
if (err) {
reject(err);
} else {
resolve(tempFolderPath);
}
});
});
}
function writeTextToFile(filePath: string, content: string): Promise<void> {
return new Promise((resolve, reject) => {
writeFile(filePath, content, (err) => {
if (err) {
reject(err);
} else {
resolve();
}
});
});
}
function runInfisicalScan(inputPath: string, outputPath: string): Promise<void> {
return new Promise((resolve, reject) => {
const command = `cat "${inputPath}" | infisical scan --exit-code=77 --pipe -r "${outputPath}"`;
exec(command, (error) => {
if (error && error.code != 77) {
reject(error);
} else {
resolve();
}
});
});
}
function readFindingsFile(filePath: string): Promise<string> {
return new Promise((resolve, reject) => {
readFile(filePath, "utf8", (err, data) => {
if (err) {
reject(err);
} else {
resolve(data);
}
});
});
}
function deleteTempFolder(folderPath: string): Promise<void> {
return new Promise((resolve, reject) => {
rm(folderPath, { recursive: true }, (err) => {
if (err) {
reject(err);
} else {
resolve();
}
});
});
}
function convertKeysToLowercase<T>(obj: T): T {
const convertedObj = {} as T;
for (const key in obj) {
if (Object.prototype.hasOwnProperty.call(obj, key)) {
const lowercaseKey = key.charAt(0).toLowerCase() + key.slice(1);
convertedObj[lowercaseKey as keyof T] = obj[key];
}
}
return convertedObj;
}

View File

@@ -0,0 +1,125 @@
import { exec } from "child_process";
import { mkdir, readFile, rm, writeFile } from "fs";
import { tmpdir } from "os";
import { join } from "path"
import { SecretMatch } from "./types";
import { Octokit } from "@octokit/rest";
export async function scanContentAndGetFindings(textContent: string): Promise<SecretMatch[]> {
const tempFolder = await createTempFolder();
const filePath = join(tempFolder, "content.txt");
const findingsPath = join(tempFolder, "findings.json");
try {
await writeTextToFile(filePath, textContent);
await runInfisicalScan(filePath, findingsPath);
const findingsData = await readFindingsFile(findingsPath);
return JSON.parse(findingsData);
} finally {
await deleteTempFolder(tempFolder);
}
}
export function createTempFolder(): Promise<string> {
return new Promise((resolve, reject) => {
const tempDir = tmpdir()
const tempFolderName = Math.random().toString(36).substring(2);
const tempFolderPath = join(tempDir, tempFolderName);
mkdir(tempFolderPath, (err: any) => {
if (err) {
reject(err);
} else {
resolve(tempFolderPath);
}
});
});
}
export function writeTextToFile(filePath: string, content: string): Promise<void> {
return new Promise((resolve, reject) => {
writeFile(filePath, content, (err) => {
if (err) {
reject(err);
} else {
resolve();
}
});
});
}
export function runInfisicalScan(inputPath: string, outputPath: string): Promise<void> {
return new Promise((resolve, reject) => {
const command = `cat "${inputPath}" | infisical scan --exit-code=77 --pipe -r "${outputPath}"`;
exec(command, (error) => {
if (error && error.code != 77) {
reject(error);
} else {
resolve();
}
});
});
}
export function readFindingsFile(filePath: string): Promise<string> {
return new Promise((resolve, reject) => {
readFile(filePath, "utf8", (err, data) => {
if (err) {
reject(err);
} else {
resolve(data);
}
});
});
}
export function deleteTempFolder(folderPath: string): Promise<void> {
return new Promise((resolve, reject) => {
rm(folderPath, { recursive: true }, (err) => {
if (err) {
reject(err);
} else {
resolve();
}
});
});
}
export function convertKeysToLowercase<T>(obj: T): T {
const convertedObj = {} as T;
for (const key in obj) {
if (Object.prototype.hasOwnProperty.call(obj, key)) {
const lowercaseKey = key.charAt(0).toLowerCase() + key.slice(1);
convertedObj[lowercaseKey as keyof T] = obj[key];
}
}
return convertedObj;
}
export async function getCommits(octokit: Octokit, owner: string, repo: string) {
let commits: { sha: string }[] = [];
let page = 1;
while (true) {
const response = await octokit.repos.listCommits({
owner,
repo,
per_page: 100,
page,
});
commits = commits.concat(response.data);
if (response.data.length == 0) break;
page++;
}
return commits;
}
export async function getFilesFromCommit(octokit: any, owner: string, repo: string, sha: string) {
const response = await octokit.repos.getCommit({
owner,
repo,
ref: sha,
});
}
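Note on the helper above: convertKeysToLowercase only lowercases the first character of each key, which is how the capitalised scanner fields end up as the lowercase field names upserted into GitRisks elsewhere in this commit. A minimal sketch with hypothetical values:

// Sketch: only the first character of each key is lowercased, e.g. "RuleID" -> "ruleID".
const stored = convertKeysToLowercase({
  RuleID: "generic-api-key",
  StartLine: 12,
  FingerPrintWithoutCommitId: "src/config.ts:generic-api-key:12"
});
// stored => { ruleID: "generic-api-key", startLine: 12,
//             fingerPrintWithoutCommitId: "src/config.ts:generic-api-key:12" }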

View File

@@ -0,0 +1,21 @@
export type SecretMatch = {
Description: string;
StartLine: number;
EndLine: number;
StartColumn: number;
EndColumn: number;
Match: string;
Secret: string;
File: string;
SymlinkFile: string;
Commit: string;
Entropy: number;
Author: string;
Email: string;
Date: string;
Message: string;
Tags: string[];
RuleID: string;
Fingerprint: string;
FingerPrintWithoutCommitId: string
};

View File

@@ -392,11 +392,9 @@ export const createToken = ({
export const validateProviderAuthToken = async ({
email,
user,
providerAuthToken,
}: {
email: string;
user: IUser,
providerAuthToken?: string;
}) => {
if (!providerAuthToken) {
@@ -407,10 +405,7 @@ export const validateProviderAuthToken = async ({
jwt.verify(providerAuthToken, await getJwtProviderAuthSecret())
);
if (
!user.authMethods.includes(decodedToken.authMethod) ||
decodedToken.email !== email
) {
if (decodedToken.email !== email) {
throw new Error("Invalid authentication credentials.")
}
}

View File

@@ -32,7 +32,7 @@ export const handleEventHelper = async ({ event }: { event: Event }) => {
switch (event.name) {
case EVENT_PUSH_SECRETS:
if (bot) {
await IntegrationService.syncIntegrations({
IntegrationService.syncIntegrations({
workspaceId,
environment
});

View File

@@ -1,6 +1,6 @@
import { Types } from "mongoose";
import { Bot, Integration, IntegrationAuth } from "../models";
import { exchangeCode, exchangeRefresh, syncSecrets } from "../integrations";
import { Bot, IntegrationAuth } from "../models";
import { exchangeCode, exchangeRefresh } from "../integrations";
import { BotService } from "../services";
import {
ALGORITHM_AES_256_GCM,
@@ -9,7 +9,7 @@ import {
INTEGRATION_VERCEL
} from "../variables";
import { UnauthorizedRequestError } from "../utils/errors";
import * as Sentry from "@sentry/node";
import { syncSecretsToActiveIntegrationsQueue } from "../queues/integrations/syncSecretsToThirdPartyServices"
interface Update {
workspace: string;
@@ -102,69 +102,6 @@ export const handleOAuthExchangeHelper = async ({
return integrationAuth;
};
/**
* Sync/push environment variables in workspace with id [workspaceId] to
* all active integrations for that workspace
* @param {Object} obj
* @param {Object} obj.workspaceId - id of workspace
*/
export const syncIntegrationsHelper = async ({
workspaceId,
environment
}: {
workspaceId: Types.ObjectId;
environment?: string;
}) => {
try {
const integrations = await Integration.find({
workspace: workspaceId,
...(environment
? {
environment
}
: {}),
isActive: true,
app: { $ne: null }
});
// for each workspace integration, sync/push secrets
// to that integration
for await (const integration of integrations) {
// get workspace, environment (shared) secrets
const secrets = await BotService.getSecrets({
workspaceId: integration.workspace,
environment: integration.environment,
secretPath: integration.secretPath
});
const integrationAuth = await IntegrationAuth.findById(integration.integrationAuth);
if (!integrationAuth) throw new Error("Failed to find integration auth");
// get integration auth access token
const access = await getIntegrationAuthAccessHelper({
integrationAuthId: integration.integrationAuth
});
// sync secrets to integration
await syncSecrets({
integration,
integrationAuth,
secrets,
accessId: access.accessId === undefined ? null : access.accessId,
accessToken: access.accessToken
});
}
} catch (err) {
Sentry.captureException(err);
// eslint-disable-next-line
console.log(
`syncIntegrationsHelper: failed with [workspaceId=${workspaceId}] [environment=${environment}]`,
err
); // eslint-disable-line no-use-before-define
throw err;
}
};
/**
* Return decrypted refresh token using the bot's copy

View File

@@ -11,7 +11,7 @@ import {
IServiceTokenData,
Secret,
SecretBlindIndexData,
ServiceTokenData,
ServiceTokenData
} from "../models";
import { EventType, SecretVersion } from "../ee/models";
import {
@@ -463,8 +463,8 @@ export const createSecretHelper = async ({
});
const postHogClient = await TelemetryService.getPostHogClient();
if (postHogClient && (metadata?.source !== "signup")) {
if (postHogClient && metadata?.source !== "signup") {
postHogClient.capture({
event: "secrets added",
distinctId: await TelemetryService.getDistinctId({
@@ -549,7 +549,7 @@ export const getSecretsHelper = async ({
channel: authData.userAgentType,
ipAddress: authData.ipAddress
}));
await EEAuditLogService.createAuditLog(
authData,
{
@@ -659,7 +659,7 @@ export const getSecretHelper = async ({
ipAddress: authData.ipAddress
}));
await EEAuditLogService.createAuditLog(
await EEAuditLogService.createAuditLog(
authData,
{
type: EventType.GET_SECRET,
@@ -824,8 +824,8 @@ export const updateSecretHelper = async ({
channel: authData.userAgentType,
ipAddress: authData.ipAddress
}));
await EEAuditLogService.createAuditLog(
await EEAuditLogService.createAuditLog(
authData,
{
type: EventType.UPDATE_SECRET,
@@ -908,14 +908,14 @@ export const deleteSecretHelper = async ({
if (type === SECRET_SHARED) {
secrets = await Secret.find({
secretBlindIndex,
workspaceId: new Types.ObjectId(workspaceId),
workspace: new Types.ObjectId(workspaceId),
environment,
folder: folderId
}).lean();
secret = await Secret.findOneAndDelete({
secretBlindIndex,
workspaceId: new Types.ObjectId(workspaceId),
workspace: new Types.ObjectId(workspaceId),
environment,
type,
folder: folderId
@@ -931,7 +931,7 @@ export const deleteSecretHelper = async ({
secret = await Secret.findOneAndDelete({
secretBlindIndex,
folder: folderId,
workspaceId: new Types.ObjectId(workspaceId),
workspace: new Types.ObjectId(workspaceId),
environment,
type,
...getAuthDataPayloadUserObj(authData)
@@ -1088,7 +1088,8 @@ const recursivelyExpandSecret = async (
let interpolatedValue = interpolatedSec[key];
if (!interpolatedValue) {
throw new Error(`Couldn't find referenced value - ${key}`);
console.error(`Couldn't find referenced value - ${key}`);
return "";
}
const refs = interpolatedValue.match(INTERPOLATION_SYNTAX_REG);

View File

@@ -6,7 +6,7 @@ require("express-async-errors");
import helmet from "helmet";
import cors from "cors";
import { DatabaseService } from "./services";
import { EELicenseService, GithubSecretScanningService} from "./ee/services";
import { EELicenseService, GithubSecretScanningService } from "./ee/services";
import { setUpHealthEndpoint } from "./services/health";
import cookieParser from "cookie-parser";
import swaggerUi = require("swagger-ui-express");
@@ -72,6 +72,8 @@ import { RouteNotFoundError } from "./utils/errors";
import { requestErrorHandler } from "./middleware/requestErrorHandler";
import { getNodeEnv, getPort, getSecretScanningGitAppId, getSecretScanningPrivateKey, getSecretScanningWebhookProxy, getSecretScanningWebhookSecret, getSiteURL } from "./config";
import { setup } from "./utils/setup";
import { syncSecretsToThirdPartyServices } from "./queues/integrations/syncSecretsToThirdPartyServices";
import { githubPushEventSecretScan } from "./queues/secret-scanning/githubScanPushEvent";
const SmeeClient = require('smee-client') // eslint-disable-line
const main = async () => {
@@ -205,6 +207,8 @@ const main = async () => {
server.on("close", async () => {
await DatabaseService.closeDatabase();
syncSecretsToThirdPartyServices.close()
githubPushEventSecretScan.close()
});
return server;

View File

@@ -2,7 +2,6 @@ import { exchangeCode } from "./exchange";
import { exchangeRefresh } from "./refresh";
import { getApps } from "./apps";
import { getTeams } from "./teams";
import { syncSecrets } from "./sync";
import { revokeAccess } from "./revoke";
export {
@@ -10,6 +9,5 @@ export {
exchangeRefresh,
getApps,
getTeams,
syncSecrets,
revokeAccess,
}

View File

@@ -0,0 +1,76 @@
import Queue, { Job } from "bull";
import Integration from "../../models/integration";
import IntegrationAuth from "../../models/integrationAuth";
import { BotService } from "../../services";
import { getIntegrationAuthAccessHelper } from "../../helpers";
import { syncSecrets } from "../../integrations/sync"
type TSyncSecretsToThirdPartyServices = {
workspaceId: string
environment?: string
}
export const syncSecretsToThirdPartyServices = new Queue("sync-secrets-to-third-party-services", process.env.REDIS_URL as string);
syncSecretsToThirdPartyServices.process(async (job: Job) => {
const { workspaceId, environment }: TSyncSecretsToThirdPartyServices = job.data
const integrations = await Integration.find({
workspace: workspaceId,
...(environment
? {
environment
}
: {}),
isActive: true,
app: { $ne: null }
});
// for each workspace integration, sync/push secrets
// to that integration
for (const integration of integrations) {
// get workspace, environment (shared) secrets
const secrets = await BotService.getSecrets({
workspaceId: integration.workspace,
environment: integration.environment,
secretPath: integration.secretPath
});
const integrationAuth = await IntegrationAuth.findById(integration.integrationAuth);
if (!integrationAuth) throw new Error("Failed to find integration auth");
// get integration auth access token
const access = await getIntegrationAuthAccessHelper({
integrationAuthId: integration.integrationAuth
});
// sync secrets to integration
await syncSecrets({
integration,
integrationAuth,
secrets,
accessId: access.accessId === undefined ? null : access.accessId,
accessToken: access.accessToken
});
}
})
syncSecretsToThirdPartyServices.on("error", (error) => {
console.log("QUEUE ERROR:", error) // eslint-disable-line
})
export const syncSecretsToActiveIntegrationsQueue = (jobDetails: TSyncSecretsToThirdPartyServices) => {
syncSecretsToThirdPartyServices.add(jobDetails, {
attempts: 5,
backoff: {
type: "exponential",
delay: 3000
},
removeOnComplete: true,
removeOnFail: {
count: 20 // keep the most recent 20 jobs
}
})
}
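For context, callers only use the exported producer above; with attempts: 5 and an exponential backoff starting at 3000 ms, Bull retries a failed job after roughly 3 s, 6 s, 12 s and 24 s before keeping it among the 20 most recent failed jobs. A hypothetical call (the workspace id is made up):

// Sketch: enqueue a sync of the "dev" environment for one workspace.
syncSecretsToActiveIntegrationsQueue({
  workspaceId: "64e5f0c2a1b2c3d4e5f6a7b8", // hypothetical Mongo ObjectId string
  environment: "dev"
});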

View File

@@ -0,0 +1,201 @@
// import Queue, { Job } from "bull";
// import { ProbotOctokit } from "probot"
// import { Commit, Committer, Repository } from "@octokit/webhooks-types";
// import TelemetryService from "../../services/TelemetryService";
// import { sendMail } from "../../helpers";
// import GitRisks from "../../ee/models/gitRisks";
// import { MembershipOrg, User } from "../../models";
// import { OWNER, ADMIN } from "../../variables";
// import { convertKeysToLowercase, getFilesFromCommit, scanContentAndGetFindings } from "../../ee/services/GithubSecretScanning/helper";
// import { getSecretScanningGitAppId, getSecretScanningPrivateKey } from "../../config";
// const githubFullRepositoryScan = new Queue('github-historical-secret-scanning', 'redis://redis:6379');
// type TScanFullRepositoryDetails = {
// organizationId: string,
// repositories: {
// id: number;
// node_id: string;
// name: string;
// full_name: string;
// private: boolean;
// }[] | undefined
// installationId: number
// }
// type SecretMatch = {
// Description: string;
// StartLine: number;
// EndLine: number;
// StartColumn: number;
// EndColumn: number;
// Match: string;
// Secret: string;
// File: string;
// SymlinkFile: string;
// Commit: string;
// Entropy: number;
// Author: string;
// Email: string;
// Date: string;
// Message: string;
// Tags: string[];
// RuleID: string;
// Fingerprint: string;
// FingerPrintWithoutCommitId: string
// };
// type Helllo = {
// url: string;
// sha: string;
// node_id: string;
// html_url: string;
// comments_url: string;
// commit: {
// url: string;
// author: {
// name?: string | undefined;
// email?: string | undefined;
// date?: string | undefined;
// } | null;
// verification?: {
// } | undefined;
// };
// files?: {}[] | undefined;
// }[]
// githubFullRepositoryScan.process(async (job: Job, done: Queue.DoneCallback) => {
// const { organizationId, repositories, installationId }: TScanFullRepositoryDetails = job.data
// const repositoryFullNamesList = repositories ? repositories.map(repoDetails => repoDetails.full_name) : []
// const octokit = new ProbotOctokit({
// auth: {
// appId: await getSecretScanningGitAppId(),
// privateKey: await getSecretScanningPrivateKey(),
// installationId: installationId
// },
// });
// for (const repositoryFullName of repositoryFullNamesList) {
// const [owner, repo] = repositoryFullName.split("/");
// let page = 1;
// while (true) {
// // octokit.repos.getco
// const { data } = await octokit.repos.listCommits({
// owner,
// repo,
// per_page: 100,
// page
// });
// await getFilesFromCommit(octokit, owner, repo, "646b386605177ed0a2cc0a596eeee0cf57666342")
// page++;
// }
// }
// done()
// // const allFindingsByFingerprint: { [key: string]: SecretMatch; } = {}
// // for (const commit of commits) {
// // for (const filepath of [...commit.added, ...commit.modified]) {
// // try {
// // const fileContentsResponse = await octokit.repos.getContent({
// // owner,
// // repo,
// // path: filepath,
// // });
// // const data: any = fileContentsResponse.data;
// // const fileContent = Buffer.from(data.content, "base64").toString();
// // const findings = await scanContentAndGetFindings(`\n${fileContent}`) // extra line to count lines correctly
// // for (const finding of findings) {
// // const fingerPrintWithCommitId = `${commit.id}:${filepath}:${finding.RuleID}:${finding.StartLine}`
// // const fingerPrintWithoutCommitId = `${filepath}:${finding.RuleID}:${finding.StartLine}`
// // finding.Fingerprint = fingerPrintWithCommitId
// // finding.FingerPrintWithoutCommitId = fingerPrintWithoutCommitId
// // finding.Commit = commit.id
// // finding.File = filepath
// // finding.Author = commit.author.name
// // finding.Email = commit?.author?.email ? commit?.author?.email : ""
// // allFindingsByFingerprint[fingerPrintWithCommitId] = finding
// // }
// // } catch (error) {
// // done(new Error(`gitHubHistoricalScanning.process: unable to fetch content for [filepath=${filepath}] because [error=${error}]`), null)
// // }
// // }
// // }
// // // change to update
// // for (const key in allFindingsByFingerprint) {
// // await GitRisks.findOneAndUpdate({ fingerprint: allFindingsByFingerprint[key].Fingerprint },
// // {
// // ...convertKeysToLowercase(allFindingsByFingerprint[key]),
// // installationId: installationId,
// // organization: organizationId,
// // repositoryFullName: repository.fullName,
// // repositoryId: repository.id
// // }, {
// // upsert: true
// // }).lean()
// // }
// // // get emails of admins
// // const adminsOfWork = await MembershipOrg.find({
// // organization: organizationId,
// // $or: [
// // { role: OWNER },
// // { role: ADMIN }
// // ]
// // }).lean()
// // const userEmails = await User.find({
// // _id: {
// // $in: [adminsOfWork.map(orgMembership => orgMembership.user)]
// // }
// // }).select("email").lean()
// // const adminOrOwnerEmails = userEmails.map(userObject => userObject.email)
// // const usersToNotify = pusher?.email ? [pusher.email, ...adminOrOwnerEmails] : [...adminOrOwnerEmails]
// // if (Object.keys(allFindingsByFingerprint).length) {
// // await sendMail({
// // template: "secretLeakIncident.handlebars",
// // subjectLine: `Incident alert: leaked secrets found in Github repository ${repository.fullName}`,
// // recipients: usersToNotify,
// // substitutions: {
// // numberOfSecrets: Object.keys(allFindingsByFingerprint).length,
// // pusher_email: pusher.email,
// // pusher_name: pusher.name
// // }
// // });
// // }
// // const postHogClient = await TelemetryService.getPostHogClient();
// // if (postHogClient) {
// // postHogClient.capture({
// // event: "cloud secret scan",
// // distinctId: pusher.email,
// // properties: {
// // numberOfCommitsScanned: commits.length,
// // numberOfRisksFound: Object.keys(allFindingsByFingerprint).length,
// // }
// // });
// // }
// // done(null, allFindingsByFingerprint)
// })
// export const scanGithubFullRepositoryForSecretLeaks = (scanFullRepositoryDetails: TScanFullRepositoryDetails) => {
// console.log("full repo scan started")
// githubFullRepositoryScan.add(scanFullRepositoryDetails)
// }

View File

@@ -0,0 +1,148 @@
import Queue, { Job } from "bull";
import { ProbotOctokit } from "probot"
import { Commit, Committer, Repository } from "@octokit/webhooks-types";
import TelemetryService from "../../services/TelemetryService";
import { sendMail } from "../../helpers";
import GitRisks from "../../ee/models/gitRisks";
import { MembershipOrg, User } from "../../models";
import { OWNER, ADMIN } from "../../variables";
import { convertKeysToLowercase, scanContentAndGetFindings } from "../../ee/services/GithubSecretScanning/helper";
import { getSecretScanningGitAppId, getSecretScanningPrivateKey } from "../../config";
import { SecretMatch } from "../../ee/services/GithubSecretScanning/types";
export const githubPushEventSecretScan = new Queue('github-push-event-secret-scanning', 'redis://redis:6379');
type TScanPushEventQueueDetails = {
organizationId: string,
commits: Commit[]
pusher: {
name: string,
email: string | null
},
repository: {
id: number,
fullName: string,
},
installationId: number
}
githubPushEventSecretScan.process(async (job: Job, done: Queue.DoneCallback) => {
const { organizationId, commits, pusher, repository, installationId }: TScanPushEventQueueDetails = job.data
const [owner, repo] = repository.fullName.split("/");
const octokit = new ProbotOctokit({
auth: {
appId: await getSecretScanningGitAppId(),
privateKey: await getSecretScanningPrivateKey(),
installationId: installationId
},
});
const allFindingsByFingerprint: { [key: string]: SecretMatch; } = {}
for (const commit of commits) {
for (const filepath of [...commit.added, ...commit.modified]) {
try {
const fileContentsResponse = await octokit.repos.getContent({
owner,
repo,
path: filepath,
});
const data: any = fileContentsResponse.data;
const fileContent = Buffer.from(data.content, "base64").toString();
const findings = await scanContentAndGetFindings(`\n${fileContent}`) // extra line to count lines correctly
for (const finding of findings) {
const fingerPrintWithCommitId = `${commit.id}:${filepath}:${finding.RuleID}:${finding.StartLine}`
const fingerPrintWithoutCommitId = `${filepath}:${finding.RuleID}:${finding.StartLine}`
finding.Fingerprint = fingerPrintWithCommitId
finding.FingerPrintWithoutCommitId = fingerPrintWithoutCommitId
finding.Commit = commit.id
finding.File = filepath
finding.Author = commit.author.name
finding.Email = commit?.author?.email ? commit?.author?.email : ""
allFindingsByFingerprint[fingerPrintWithCommitId] = finding
}
} catch (error) {
done(new Error(`gitHubHistoricalScanning.process: unable to fetch content for [filepath=${filepath}] because [error=${error}]`), null)
}
}
}
// change to update
for (const key in allFindingsByFingerprint) {
await GitRisks.findOneAndUpdate({ fingerprint: allFindingsByFingerprint[key].Fingerprint },
{
...convertKeysToLowercase(allFindingsByFingerprint[key]),
installationId: installationId,
organization: organizationId,
repositoryFullName: repository.fullName,
repositoryId: repository.id
}, {
upsert: true
}).lean()
}
// get emails of admins
const adminsOfWork = await MembershipOrg.find({
organization: organizationId,
$or: [
{ role: OWNER },
{ role: ADMIN }
]
}).lean()
const userEmails = await User.find({
_id: {
$in: [adminsOfWork.map(orgMembership => orgMembership.user)]
}
}).select("email").lean()
const adminOrOwnerEmails = userEmails.map(userObject => userObject.email)
const usersToNotify = pusher?.email ? [pusher.email, ...adminOrOwnerEmails] : [...adminOrOwnerEmails]
if (Object.keys(allFindingsByFingerprint).length) {
await sendMail({
template: "secretLeakIncident.handlebars",
subjectLine: `Incident alert: leaked secrets found in Github repository ${repository.fullName}`,
recipients: usersToNotify,
substitutions: {
numberOfSecrets: Object.keys(allFindingsByFingerprint).length,
pusher_email: pusher.email,
pusher_name: pusher.name
}
});
}
const postHogClient = await TelemetryService.getPostHogClient();
if (postHogClient) {
postHogClient.capture({
event: "cloud secret scan",
distinctId: pusher.email,
properties: {
numberOfCommitsScanned: commits.length,
numberOfRisksFound: Object.keys(allFindingsByFingerprint).length,
}
});
}
done(null, allFindingsByFingerprint)
})
export const scanGithubPushEventForSecretLeaks = (pushEventPayload: TScanPushEventQueueDetails) => {
githubPushEventSecretScan.add(pushEventPayload, {
attempts: 3,
backoff: {
type: "exponential",
delay: 5000
},
removeOnComplete: true,
removeOnFail: {
count: 20 // keep the most recent 20 jobs
}
})
}
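To make the fingerprinting above concrete: each finding is keyed by commit id, file path, rule id and start line, while the variant without the commit id identifies the same leak across commits. A hypothetical example (all values made up):

// Sketch: fingerprints for one hypothetical finding.
const commitId = "646b3866";
const filepath = "src/config.ts";
const ruleId = "generic-api-key";
const startLine = 42;
const fingerprint = `${commitId}:${filepath}:${ruleId}:${startLine}`;
// => "646b3866:src/config.ts:generic-api-key:42"
const fingerprintWithoutCommitId = `${filepath}:${ruleId}:${startLine}`;
// => "src/config.ts:generic-api-key:42"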

View File

@@ -1,17 +1,26 @@
import express, { Request, Response } from "express";
import { getInviteOnlySignup, getSecretScanningGitAppId, getSecretScanningPrivateKey, getSecretScanningWebhookSecret, getSmtpConfigured } from "../../config";
import { getInviteOnlySignup, getRedisUrl, getSecretScanningGitAppId, getSecretScanningPrivateKey, getSecretScanningWebhookSecret, getSmtpConfigured } from "../../config";
const router = express.Router();
router.get(
"/status",
async (req: Request, res: Response) => {
const gitAppId = await getSecretScanningGitAppId()
const gitSecretScanningWebhookSecret = await getSecretScanningWebhookSecret()
const gitSecretScanningPrivateKey = await getSecretScanningPrivateKey()
let secretScanningConfigured = false
if (gitAppId && gitSecretScanningPrivateKey && gitSecretScanningWebhookSecret) {
secretScanningConfigured = true
}
res.status(200).json({
date: new Date(),
message: "Ok",
emailConfigured: await getSmtpConfigured(),
secretScanningConfigured: await getSecretScanningGitAppId() && await getSecretScanningWebhookSecret() && await getSecretScanningPrivateKey(),
inviteOnlySignup: await getInviteOnlySignup()
inviteOnlySignup: await getInviteOnlySignup(),
redisConfigured: await getRedisUrl() !== "" && await getRedisUrl() !== undefined,
secretScanningConfigured: secretScanningConfigured,
})
}
);
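After this change, a /status response with Redis configured but secret scanning unconfigured would look roughly as follows (shape only; values are illustrative):

// Illustrative response shape; actual values depend on the deployment's configuration.
const exampleStatusResponse = {
  date: "2023-08-23T10:50:46.000Z",
  message: "Ok",
  emailConfigured: true,
  inviteOnlySignup: false,
  redisConfigured: true,           // REDIS_URL is set and non-empty
  secretScanningConfigured: false  // needs git app id, webhook secret and private key
};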

View File

@@ -4,6 +4,7 @@ import {
requireAuth,
requireIntegrationAuth,
requireIntegrationAuthorizationAuth,
requireWorkspaceAuth,
validateRequest,
} from "../../middleware";
import {
@@ -73,4 +74,19 @@ router.delete(
integrationController.deleteIntegration
);
router.post(
"/manual-sync",
requireAuth({
acceptedAuthModes: [AuthMode.JWT]
}),
requireWorkspaceAuth({
acceptedRoles: [ADMIN, MEMBER],
locationWorkspaceId: "body",
}),
body("environment").isString().exists().trim(),
body("workspaceId").exists().trim(),
validateRequest,
integrationController.manualSync
);
export default router;
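For reference, a hypothetical client call to the new manual-sync endpoint; the /api/v1/integration prefix and host are assumptions, as the router mount point is not part of this diff:

// Sketch (inside an async function): trigger a manual sync for one workspace/environment.
const jwtToken = "<user JWT>"; // hypothetical token; the route requires JWT auth
await fetch("https://app.example.com/api/v1/integration/manual-sync", {
  method: "POST",
  headers: { "Authorization": `Bearer ${jwtToken}`, "Content-Type": "application/json" },
  body: JSON.stringify({ workspaceId: "64e5f0c2a1b2c3d4e5f6a7b8", environment: "dev" })
});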

View File

@@ -46,6 +46,24 @@ router.put(
environmentController.renameWorkspaceEnvironment
);
router.patch(
"/:workspaceId/environments",
requireAuth({
acceptedAuthModes: [AuthMode.JWT],
}),
requireWorkspaceAuth({
acceptedRoles: [ADMIN, MEMBER],
locationWorkspaceId: "params",
}),
param("workspaceId").exists().trim(),
body("environmentSlug").exists().isString().trim(),
body("environmentName").exists().isString().trim(),
body("otherEnvironmentSlug").exists().isString().trim(),
body("otherEnvironmentName").exists().isString().trim(),
validateRequest,
environmentController.reorderWorkspaceEnvironments
);
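For reference, a hypothetical request to the PATCH route added above, mirroring its body validators; the /api/v1/workspace prefix and host are assumptions, as the mount point is not part of this diff:

// Sketch (inside an async function): swap the display order of two environments.
const workspaceId = "64e5f0c2a1b2c3d4e5f6a7b8"; // hypothetical
const jwtToken = "<user JWT>";                   // hypothetical; the route requires JWT auth
await fetch(`https://app.example.com/api/v1/workspace/${workspaceId}/environments`, {
  method: "PATCH",
  headers: { "Authorization": `Bearer ${jwtToken}`, "Content-Type": "application/json" },
  body: JSON.stringify({
    environmentSlug: "dev",
    environmentName: "Development",
    otherEnvironmentSlug: "staging",
    otherEnvironmentName: "Staging"
  })
});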
router.delete(
"/:workspaceId/environments",
requireAuth({

View File

@@ -1,4 +1,4 @@
import express from "express";
import express, { Request, Response } from "express";
const router = express.Router();
import { requireAuth, requireWorkspaceAuth, validateRequest } from "../../middleware";
import { body, param, query } from "express-validator";

View File

@@ -1,18 +1,18 @@
import { Types } from "mongoose";
import {
import {
getIntegrationAuthAccessHelper,
getIntegrationAuthRefreshHelper,
handleOAuthExchangeHelper,
setIntegrationAuthAccessHelper,
setIntegrationAuthRefreshHelper,
syncIntegrationsHelper,
} from "../helpers/integration";
import { syncSecretsToActiveIntegrationsQueue } from "../queues/integrations/syncSecretsToThirdPartyServices";
/**
* Class to handle integrations
*/
class IntegrationService {
/**
* Perform OAuth2 code-token exchange for workspace with id [workspaceId] and integration
* named [integration]
@@ -26,12 +26,12 @@ class IntegrationService {
* @param {String} obj1.code - code
* @returns {IntegrationAuth} integrationAuth - integration authorization after OAuth2 code-token exchange
*/
static async handleOAuthExchange({
static async handleOAuthExchange({
workspaceId,
integration,
code,
environment,
}: {
}: {
workspaceId: string;
integration: string;
code: string;
@@ -44,25 +44,23 @@ class IntegrationService {
environment,
});
}
/**
* Sync/push environment variables in workspace with id [workspaceId] to
* all associated integrations
* @param {Object} obj
* @param {Object} obj.workspaceId - id of workspace
*/
static async syncIntegrations({
static syncIntegrations({
workspaceId,
environment,
}: {
workspaceId: Types.ObjectId;
environment?: string;
}) {
return await syncIntegrationsHelper({
workspaceId,
});
syncSecretsToActiveIntegrationsQueue({ workspaceId: workspaceId.toString(), environment: environment })
}
/**
* Return decrypted refresh token for integration auth
* with id [integrationAuthId]
@@ -70,12 +68,12 @@ class IntegrationService {
* @param {String} obj.integrationAuthId - id of integration auth
* @param {String} refreshToken - decrypted refresh token
*/
static async getIntegrationAuthRefresh({ integrationAuthId }: { integrationAuthId: Types.ObjectId}) {
static async getIntegrationAuthRefresh({ integrationAuthId }: { integrationAuthId: Types.ObjectId }) {
return await getIntegrationAuthRefreshHelper({
integrationAuthId,
});
}
/**
* Return decrypted access token for integration auth
* with id [integrationAuthId]
@@ -98,11 +96,11 @@ class IntegrationService {
* @param {String} obj.refreshToken - refresh token
* @returns {IntegrationAuth} integrationAuth - updated integration auth
*/
static async setIntegrationAuthRefresh({
static async setIntegrationAuthRefresh({
integrationAuthId,
refreshToken,
}: {
integrationAuthId: string;
refreshToken,
}: {
integrationAuthId: string;
refreshToken: string;
}) {
return await setIntegrationAuthRefreshHelper({
@@ -122,12 +120,12 @@ class IntegrationService {
* @param {Date} obj.accessExpiresAt - expiration date of access token
* @returns {IntegrationAuth} - updated integration auth
*/
static async setIntegrationAuthAccess({
static async setIntegrationAuthAccess({
integrationAuthId,
accessId,
accessToken,
accessExpiresAt,
}: {
}: {
integrationAuthId: string;
accessId: string | null;
accessToken: string;

View File

@@ -19,7 +19,7 @@ import {
backfillTrustedIps,
backfillUserAuthMethods
} from "./backfillData";
import {
import {
reencryptBotOrgKeys,
reencryptBotPrivateKeys,
reencryptSecretBlindIndexDataSalts
@@ -27,6 +27,7 @@ import {
import {
getMongoURL,
getNodeEnv,
getRedisUrl,
getSentryDSN
} from "../../config";
import { initializePassport } from "../auth";
@@ -42,6 +43,10 @@ import { initializePassport } from "../auth";
* - Re-encrypting data
*/
export const setup = async () => {
if (await getRedisUrl() === undefined || await getRedisUrl() === "") {
console.error("WARNING: Redis is not yet configured. Infisical may not function as expected without it.")
}
await validateEncryptionKeysConfig();
await TelemetryService.logTelemetryMessage();

View File

@@ -2,6 +2,8 @@ import { Server } from "http";
import main from "../src";
import { afterAll, beforeAll, describe, expect, it } from "@jest/globals";
import request from "supertest";
import { githubPushEventSecretScan } from "../src/queues/secret-scanning/githubScanPushEvent";
import { syncSecretsToThirdPartyServices } from "../src/queues/integrations/syncSecretsToThirdPartyServices";
let server: Server;
@@ -11,6 +13,8 @@ beforeAll(async () => {
afterAll(async () => {
server.close();
githubPushEventSecretScan.close()
syncSecretsToThirdPartyServices.close()
});
describe("Healthcheck endpoint", () => {

View File

@@ -22,7 +22,7 @@ require (
github.com/spf13/viper v1.8.1
github.com/stretchr/testify v1.8.1
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d
golang.org/x/term v0.9.0
golang.org/x/term v0.11.0
)
require (
@@ -31,6 +31,7 @@ require (
github.com/chzyer/readline v1.5.1 // indirect
github.com/danieljoos/wincred v1.2.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dvsekhvalnov/jose2go v1.5.0 // indirect
github.com/fsnotify/fsnotify v1.4.9 // indirect
github.com/go-openapi/errors v0.20.2 // indirect
github.com/go-openapi/strfmt v0.21.3 // indirect
@@ -41,6 +42,7 @@ require (
github.com/mattn/go-colorable v0.1.9 // indirect
github.com/mattn/go-runewidth v0.0.14 // indirect
github.com/mitchellh/mapstructure v1.4.1 // indirect
github.com/mtibben/percent v0.2.1 // indirect
github.com/muesli/mango v0.1.0 // indirect
github.com/muesli/mango-pflag v0.1.0 // indirect
github.com/muesli/termenv v0.11.1-0.20220204035834-5ac8409525e0 // indirect
@@ -56,9 +58,8 @@ require (
go.mongodb.org/mongo-driver v1.10.0 // indirect
golang.org/x/net v0.7.0 // indirect
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect
golang.org/x/sys v0.9.0 // indirect
golang.org/x/sys v0.11.0 // indirect
golang.org/x/text v0.7.0 // indirect
gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b // indirect
gopkg.in/ini.v1 v1.62.0 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
@@ -73,3 +74,5 @@ require (
github.com/spf13/pflag v1.0.5 // indirect
github.com/zalando/go-keyring v0.2.3
)
replace github.com/zalando/go-keyring => github.com/Infisical/go-keyring v1.0.1

View File

@@ -39,6 +39,8 @@ cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/Infisical/go-keyring v1.0.1 h1:E8XpqoT0H1G9C1kgxU+NeReXOeobmH7LbBHNpcOI380=
github.com/Infisical/go-keyring v1.0.1/go.mod h1:LWOnn/sw9FxDW/0VY+jHFAfOFEe03xmwBVSfJnBowto=
github.com/alessio/shellescape v1.4.1 h1:V7yhSDDn8LP4lc4jS8pFkt0zCnzVJlG5JXy9BVKJUX0=
github.com/alessio/shellescape v1.4.1/go.mod h1:PZAiSCk0LJaZkiCSkPv8qIobYglO3FPpyFjDCtHLS30=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
@@ -77,6 +79,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/denisbrodbeck/machineid v1.0.1 h1:geKr9qtkB876mXguW2X6TU4ZynleN6ezuMSRhl4D7AQ=
github.com/denisbrodbeck/machineid v1.0.1/go.mod h1:dJUwb7PTidGDeYyUBmXZ2GphQBbjJCrnectwCyxcUSI=
github.com/dvsekhvalnov/jose2go v1.5.0 h1:3j8ya4Z4kMCwT5nXIKFSV84YS+HdqSSO0VsTQxaLAeM=
github.com/dvsekhvalnov/jose2go v1.5.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
@@ -251,6 +255,8 @@ github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJ
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
github.com/mtibben/percent v0.2.1 h1:5gssi8Nqo8QU/r2pynCm+hBQHpkB/uNK7BJCFogWdzs=
github.com/mtibben/percent v0.2.1/go.mod h1:KG9uO+SZkUp+VkRHsCdYQV3XSZrrSpR3O9ibNBTZrns=
github.com/muesli/ansi v0.0.0-20221106050444-61f0cd9a192a h1:jlDOeO5TU0pYlbc/y6PFguab5IjANI0Knrpg3u/ton4=
github.com/muesli/ansi v0.0.0-20221106050444-61f0cd9a192a/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo=
github.com/muesli/mango v0.1.0 h1:DZQK45d2gGbql1arsYA4vfg4d7I9Hfx5rX/GCmzsAvI=
@@ -348,8 +354,6 @@ github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/zalando/go-keyring v0.2.3 h1:v9CUu9phlABObO4LPWycf+zwMG7nlbb3t/B5wa97yms=
github.com/zalando/go-keyring v0.2.3/go.mod h1:HL4k+OXQfJUWaMnqyuSOc0drfGPX2b51Du6K+MRgZMk=
go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs=
go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g=
go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ=
@@ -524,11 +528,11 @@ golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.9.0 h1:KS/R3tvhPqvJvwcKfnBHJwwthS11LRhmM5D59eEXa0s=
golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.11.0 h1:eG7RXZHdqOJ1i+0lgLgCpSXAp6M3LYlAo6osgSi0xOM=
golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.9.0 h1:GRRCnKYhdQrD8kfRAdQ6Zcw1P0OcELxGLKJvtjVMZ28=
golang.org/x/term v0.9.0/go.mod h1:M6DEAAIenWoTxdKrOltXcmDY3rSplQUkrvaDU5FcQyo=
golang.org/x/term v0.11.0 h1:F9tnn/DA/Im8nCwm+fX+1/eBwi4qFjRT++MhtVC4ZX0=
golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=

View File

@@ -1,68 +0,0 @@
package keyringwrapper
import (
"time"
"github.com/zalando/go-keyring"
)
const MAIN_KEYRING_SERVICE = "infisical-cli"
type TimeoutError struct {
message string
}
func (e *TimeoutError) Error() string {
return e.message
}
func Set(key, value string) error {
ch := make(chan error, 1)
go func() {
defer close(ch)
ch <- keyring.Set(MAIN_KEYRING_SERVICE, key, value)
}()
select {
case err := <-ch:
return err
case <-time.After(3 * time.Second):
return &TimeoutError{"timeout while trying to set secret in keyring"}
}
}
func Get(key string) (string, error) {
ch := make(chan struct {
val string
err error
}, 1)
go func() {
defer close(ch)
val, err := keyring.Get(MAIN_KEYRING_SERVICE, key)
ch <- struct {
val string
err error
}{val, err}
}()
select {
case res := <-ch:
return res.val, res.err
case <-time.After(3 * time.Second):
return "", &TimeoutError{"timeout while trying to get secret from keyring"}
}
}
func Delete(key string) error {
ch := make(chan error, 1)
go func() {
defer close(ch)
ch <- keyring.Delete(MAIN_KEYRING_SERVICE, key)
}()
select {
case err := <-ch:
return err
case <-time.After(3 * time.Second):
return &TimeoutError{"timeout while trying to delete secret from keyring"}
}
}

View File

@@ -57,14 +57,14 @@ func CallLogin1V2(httpClient *resty.Client, request GetLoginOneV2Request) (GetLo
SetResult(&loginOneV2Response).
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
Post(fmt.Sprintf("%v/v2/auth/login1", config.INFISICAL_URL))
Post(fmt.Sprintf("%v/v3/auth/login1", config.INFISICAL_URL))
if err != nil {
return GetLoginOneV2Response{}, fmt.Errorf("CallLogin1V2: Unable to complete api request [err=%s]", err)
return GetLoginOneV2Response{}, fmt.Errorf("CallLogin1V3: Unable to complete api request [err=%s]", err)
}
if response.IsError() {
return GetLoginOneV2Response{}, fmt.Errorf("CallLogin1V2: Unsuccessful response: [response=%s]", response)
return GetLoginOneV2Response{}, fmt.Errorf("CallLogin1V3: Unsuccessful response: [response=%s]", response)
}
return loginOneV2Response, nil
@@ -115,7 +115,7 @@ func CallLogin2V2(httpClient *resty.Client, request GetLoginTwoV2Request) (GetLo
SetResult(&loginTwoV2Response).
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
Post(fmt.Sprintf("%v/v2/auth/login2", config.INFISICAL_URL))
Post(fmt.Sprintf("%v/v3/auth/login2", config.INFISICAL_URL))
cookies := response.Cookies()
// Find a cookie by name
@@ -134,11 +134,11 @@ func CallLogin2V2(httpClient *resty.Client, request GetLoginTwoV2Request) (GetLo
}
if err != nil {
return GetLoginTwoV2Response{}, fmt.Errorf("CallLogin2V2: Unable to complete api request [err=%s]", err)
return GetLoginTwoV2Response{}, fmt.Errorf("CallLogin2V3: Unable to complete api request [err=%s]", err)
}
if response.IsError() {
return GetLoginTwoV2Response{}, fmt.Errorf("CallLogin2V2: Unsuccessful response: [response=%s]", response)
return GetLoginTwoV2Response{}, fmt.Errorf("CallLogin2V3: Unsuccessful response: [response=%s]", response)
}
return loginTwoV2Response, nil

View File

@@ -107,7 +107,7 @@ var loginCmd = &cobra.Command{
//call browser login function
if !interactiveLogin {
fmt.Printf("\nLogging in via browser... Hit '%s' to cancel\n", QUIT_BROWSER_LOGIN)
fmt.Println("Logging in via browser... To login via interactive mode run [infisical login -i]")
userCredentialsToBeStored, err = browserCliLogin()
if err != nil {
//default to cli login on error
@@ -540,7 +540,12 @@ func browserCliLogin() (models.UserCredentials, error) {
quit := make(chan bool)
//terminal state
var oldState term.State
oldState, err := term.GetState(int(os.Stdin.Fd()))
if err != nil {
return models.UserCredentials{}, err
}
defer restoreTerminal(oldState)
//create handler
c := cors.New(cors.Options{
@@ -553,29 +558,25 @@ func browserCliLogin() (models.UserCredentials, error) {
corsHandler := c.Handler(browserLoginHandler(success, failure))
log.Debug().Msgf("Callback server listening on port %d", callbackPort)
go quitBrowserLogin(quit, &oldState)
go http.Serve(listener, corsHandler)
for {
select {
case loginResponse := <-success:
err = closeListener(&listener)
restoreTerminal(&oldState)
_ = closeListener(&listener)
return loginResponse, nil
case err = <-failure:
case <-failure:
err = closeListener(&listener)
restoreTerminal(&oldState)
return models.UserCredentials{}, err
case _ = <-timeout:
err = closeListener(&listener)
restoreTerminal(&oldState)
case <-timeout:
_ = closeListener(&listener)
return models.UserCredentials{}, errors.New("server timeout")
case _ = <-quit:
case <-quit:
return models.UserCredentials{}, errors.New("quitting browser login, defaulting to cli...")
}
}
}
@@ -584,25 +585,24 @@ func restoreTerminal(oldState *term.State) {
term.Restore(int(os.Stdin.Fd()), oldState)
}
// listens to 'q' input on terminal and
// sends 'true' to 'quit' channel
func quitBrowserLogin(quit chan bool, oState *term.State) {
//
oldState, err := term.MakeRaw(int(os.Stdin.Fd()))
if err != nil {
return
}
*oState = *oldState
defer restoreTerminal(oldState)
b := make([]byte, 1)
for {
_, _ = os.Stdin.Read(b)
if string(b) == QUIT_BROWSER_LOGIN {
quit <- true
break
}
}
}
// // listens to 'q' input on terminal and
// // sends 'true' to 'quit' channel
// func quitBrowserLogin(quit chan bool, oState *term.State) {
// oldState, err := term.MakeRaw(int(os.Stdin.Fd()))
// if err != nil {
// return
// }
// *oState = *oldState
// defer restoreTerminal(oldState)
// b := make([]byte, 1)
// for {
// _, _ = os.Stdin.Read(b)
// if string(b) == QUIT_BROWSER_LOGIN {
// quit <- true
// break
// }
// }
// }
func closeListener(listener *net.Listener) error {
err := (*listener).Close()

View File

@@ -18,6 +18,12 @@ var resetCmd = &cobra.Command{
Example: "infisical reset",
Args: cobra.NoArgs,
Run: func(cmd *cobra.Command, args []string) {
// delete keyring item of current logged in user
configFile, _ := util.GetConfigFile()
// delete from keyring
util.DeleteValueInKeyring(configFile.LoggedInUserEmail)
// delete config
_, pathToDir, err := util.GetFullConfigFilePath()
if err != nil {

View File

@@ -64,11 +64,22 @@ var secretsCmd = &cobra.Command{
util.HandleError(err, "Unable to parse flag")
}
secretOverriding, err := cmd.Flags().GetBool("secret-overriding")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs, SecretsPath: secretsPath, IncludeImport: includeImports})
if err != nil {
util.HandleError(err)
}
if secretOverriding {
secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_PERSONAL)
} else {
secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_SHARED)
}
if shouldExpandSecrets {
secrets = util.ExpandSecrets(secrets, infisicalToken)
}
@@ -641,6 +652,7 @@ func init() {
secretsGetCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
secretsCmd.AddCommand(secretsGetCmd)
secretsCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
secretsCmd.AddCommand(secretsSetCmd)
secretsSetCmd.Flags().String("path", "/", "get secrets within a folder path")

93
cli/packages/cmd/vault.go Normal file
View File

@@ -0,0 +1,93 @@
/*
Copyright (c) 2023 Infisical Inc.
*/
package cmd
import (
"fmt"
"strings"
"github.com/Infisical/infisical-merge/packages/util"
"github.com/posthog/posthog-go"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
)
var AvailableVaultsAndDescriptions = []string{"auto (automatically select native vault on system)", "file (encrypted file vault)"}
var AvailableVaults = []string{"auto", "file"}
var vaultSetCmd = &cobra.Command{
Example: `infisical vault set file`,
Use: "set [vault-name]",
Short: "Used to set the vault backend to store your login details securely at rest",
DisableFlagsInUseLine: true,
Args: cobra.MinimumNArgs(1),
Run: func(cmd *cobra.Command, args []string) {
wantedVaultTypeName := args[0]
currentVaultBackend, err := util.GetCurrentVaultBackend()
if err != nil {
log.Error().Msgf("Unable to set vault to [%s] because of [err=%s]", wantedVaultTypeName, err)
return
}
if wantedVaultTypeName == string(currentVaultBackend) {
log.Error().Msgf("You are already on vault backend [%s]", currentVaultBackend)
return
}
if wantedVaultTypeName == "auto" || wantedVaultTypeName == "file" {
configFile, err := util.GetConfigFile()
if err != nil {
log.Error().Msgf("Unable to set vault to [%s] because of [err=%s]", wantedVaultTypeName, err)
return
}
configFile.VaultBackendType = wantedVaultTypeName // save selected vault
configFile.LoggedInUserEmail = "" // reset the logged in user to prompt them to re login
err = util.WriteConfigFile(&configFile)
if err != nil {
log.Error().Msgf("Unable to set vault to [%s] because an error occurred when saving the config file [err=%s]", wantedVaultTypeName, err)
return
}
fmt.Printf("\nSuccessfully, switched vault backend from [%s] to [%s]. Please login in again to store your login details in the new vault with [infisical login]\n", currentVaultBackend, wantedVaultTypeName)
Telemetry.CaptureEvent("cli-command:vault set", posthog.NewProperties().Set("currentVault", currentVaultBackend).Set("wantedVault", wantedVaultTypeName).Set("version", util.CLI_VERSION))
} else {
log.Error().Msgf("The requested vault type [%s] is not available on this system. Only the following vault backends are available for you system: %s", wantedVaultTypeName, strings.Join(AvailableVaults, ", "))
}
},
}
// vaultCmd represents the vault command
var vaultCmd = &cobra.Command{
Use: "vault",
Short: "Used to manage where your Infisical login token is saved on your machine",
DisableFlagsInUseLine: true,
Args: cobra.NoArgs,
Run: func(cmd *cobra.Command, args []string) {
printAvailableVaultBackends()
},
}
func printAvailableVaultBackends() {
fmt.Printf("Vaults are used to securely store your login details locally. Available vaults:")
for _, backend := range AvailableVaultsAndDescriptions {
fmt.Printf("\n- %s", backend)
}
currentVaultBackend, err := util.GetCurrentVaultBackend()
if err != nil {
log.Error().Msgf("printAvailableVaultBackends: unable to print the available vault backend because of error [err=%s]", err)
}
Telemetry.CaptureEvent("cli-command:vault", posthog.NewProperties().Set("currentVault", currentVaultBackend).Set("version", util.CLI_VERSION))
fmt.Printf("\n\nYou are currently using [%s] vault to store your login credentials\n", string(currentVaultBackend))
}
func init() {
vaultCmd.AddCommand(vaultSetCmd)
rootCmd.AddCommand(vaultCmd)
}

View File

@@ -12,6 +12,7 @@ type ConfigFile struct {
LoggedInUserEmail string `json:"loggedInUserEmail"`
LoggedInUserDomain string `json:"LoggedInUserDomain,omitempty"`
LoggedInUsers []LoggedInUser `json:"loggedInUsers,omitempty"`
VaultBackendType string `json:"vaultBackendType,omitempty"`
}
type LoggedInUser struct {

View File

@@ -53,6 +53,7 @@ func WriteInitalConfig(userCredentials *models.UserCredentials) error {
LoggedInUserEmail: userCredentials.Email,
LoggedInUserDomain: config.INFISICAL_URL,
LoggedInUsers: existingConfigFile.LoggedInUsers,
VaultBackendType: existingConfigFile.VaultBackendType,
}
configFileMarshalled, err := json.Marshal(configFile)

View File

@@ -6,7 +6,6 @@ import (
"fmt"
"strings"
keyringwrapper "github.com/Infisical/infisical-merge/internal"
"github.com/Infisical/infisical-merge/packages/api"
"github.com/Infisical/infisical-merge/packages/config"
"github.com/Infisical/infisical-merge/packages/models"
@@ -26,7 +25,7 @@ func StoreUserCredsInKeyRing(userCred *models.UserCredentials) error {
return fmt.Errorf("StoreUserCredsInKeyRing: something went wrong when marshalling user creds [err=%s]", err)
}
err = keyringwrapper.Set(userCred.Email, string(userCredMarshalled))
err = SetValueInKeyring(userCred.Email, string(userCredMarshalled))
if err != nil {
return fmt.Errorf("StoreUserCredsInKeyRing: unable to store user credentials because [err=%s]", err)
}
@@ -35,7 +34,7 @@ func StoreUserCredsInKeyRing(userCred *models.UserCredentials) error {
}
func GetUserCredsFromKeyRing(userEmail string) (credentials models.UserCredentials, err error) {
credentialsValue, err := keyringwrapper.Get(userEmail)
credentialsValue, err := GetValueInKeyring(userEmail)
if err != nil {
if err == keyring.ErrUnsupportedPlatform {
return models.UserCredentials{}, errors.New("your OS does not support keyring. Consider using a service token https://infisical.com/docs/documentation/platform/token")

View File

@@ -0,0 +1,42 @@
package util
import (
"github.com/zalando/go-keyring"
)
const MAIN_KEYRING_SERVICE = "infisical-cli"
type TimeoutError struct {
message string
}
func (e *TimeoutError) Error() string {
return e.message
}
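// SetValueInKeyring stores a value under the given key, scoped to the currently selected vault backend and the infisical-cli keyring service.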
func SetValueInKeyring(key, value string) error {
currentVaultBackend, err := GetCurrentVaultBackend()
if err != nil {
PrintErrorAndExit(1, err, "Unable to get current vault. Tip: run [infisical reset] then try again")
}
return keyring.Set(currentVaultBackend, MAIN_KEYRING_SERVICE, key, value)
}
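// GetValueInKeyring reads the value stored under the given key for the currently selected vault backend.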
func GetValueInKeyring(key string) (string, error) {
currentVaultBackend, err := GetCurrentVaultBackend()
if err != nil {
PrintErrorAndExit(1, err, "Unable to get current vault. Tip: run [infisical reset] then try again")
}
return keyring.Get(currentVaultBackend, MAIN_KEYRING_SERVICE, key)
}
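// DeleteValueInKeyring removes the entry stored under the given key for the currently selected vault backend.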
func DeleteValueInKeyring(key string) error {
currentVaultBackend, err := GetCurrentVaultBackend()
if err != nil {
return err
}
return keyring.Delete(currentVaultBackend, MAIN_KEYRING_SERVICE, key)
}

View File

@@ -0,0 +1,22 @@
package util
import (
"fmt"
)
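// GetCurrentVaultBackend returns the vault backend configured in the CLI config file, defaulting to "auto" when it is unset or unrecognized.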
func GetCurrentVaultBackend() (string, error) {
configFile, err := GetConfigFile()
if err != nil {
return "", fmt.Errorf("getCurrentVaultBackend: unable to get config file [err=%s]", err)
}
if configFile.VaultBackendType == "" {
return "auto", nil
}
if configFile.VaultBackendType != "auto" && configFile.VaultBackendType != "file" {
return "auto", nil
}
return configFile.VaultBackendType, nil
}

View File

@@ -21,6 +21,7 @@ services:
depends_on:
- mongo
- smtp-server
- redis
build:
context: ./backend
dockerfile: Dockerfile
@@ -99,9 +100,36 @@ services:
networks:
- infisical-dev
redis:
image: redis
container_name: infisical-dev-redis
environment:
- ALLOW_EMPTY_PASSWORD=yes
ports:
- 6379:6379
volumes:
- redis_data:/data
networks:
- infisical-dev
redis-commander:
container_name: infisical-dev-redis-commander
image: rediscommander/redis-commander
restart: always
depends_on:
- redis
environment:
- REDIS_HOSTS=local:redis:6379
ports:
- "8085:8081"
networks:
- infisical-dev
volumes:
mongo-data:
driver: local
redis_data:
driver: local
networks:
infisical-dev:

View File

@@ -41,19 +41,17 @@ services:
networks:
- infisical
# secret-scanning-git-app:
# container_name: infisical-secret-scanning-git-app
# restart: unless-stopped
# depends_on:
# - backend
# - frontend
# - mongo
# ports:
# - "3000:3001"
# image: infisical/staging_deployment_secret-scanning-git-app
# env_file: .env
# networks:
# - infisical
redis:
image: redis
container_name: infisical-dev-redis
environment:
- ALLOW_EMPTY_PASSWORD=yes
ports:
- 6379:6379
networks:
- infisical
volumes:
- redis_data:/data
mongo:
container_name: infisical-mongo
@@ -71,6 +69,8 @@ services:
volumes:
mongo-data:
driver: local
redis_data:
driver: local
networks:
infisical:

View File

@@ -9,12 +9,11 @@ description: "Change the vault type in Infisical"
infisical vault
# Example output
The following vaults are available on your system:
- keychain
- pass
- file
Vaults are used to securely store your login details locally. Available vaults:
- auto (automatically select native vault on system)
- file (encrypted file vault)
You are currently using [keychain] vault to store your login credentials
You are currently using [file] vault to store your login credentials
```
</Tab>
@@ -31,20 +30,7 @@ description: "Change the vault type in Infisical"
## Description
To ensure secure storage of your login credentials when using the CLI, Infisical stores login credentials securely in a system vault or encrypted text file with a passphrase known only by the user.
<Accordion title="Supported vaults">
By default, the most appropriate vault is chosen to store your login credentials.
For example, if you are on macOS, KeyChain will be automatically selected.
- [macOS Keychain](https://support.apple.com/en-au/guide/keychain-access/welcome/mac)
- [Windows Credential Manager](https://support.microsoft.com/en-au/help/4026814/windows-accessing-credential-manager)
- Secret Service ([Gnome Keyring](https://wiki.gnome.org/Projects/GnomeKeyring), [KWallet](https://kde.org/applications/system/org.kde.kwalletmanager5))
- [KWallet](https://kde.org/applications/system/org.kde.kwalletmanager5)
- [Pass](https://www.passwordstore.org/)
- [KeyCtl]()
- Encrypted file (JWT)
</Accordion>
To safeguard your login details when using the CLI, Infisical places them in a system vault or an encrypted text file, protected by a passphrase that only the user knows.
<Tip>To avoid constantly entering your passphrase when using the `file` vault type, set the `INFISICAL_VAULT_FILE_PASSPHRASE` environment variable with your password in your shell</Tip>

View File

@@ -144,13 +144,12 @@
"self-hosting/deployment-options/aws-ec2",
"self-hosting/deployment-options/docker-compose",
"self-hosting/deployment-options/standalone-infisical",
"self-hosting/deployment-options/fly.io",
"self-hosting/deployment-options/render",
"self-hosting/deployment-options/digital-ocean-marketplace"
]
},
"self-hosting/configuration/envars",
"self-hosting/configuration/email",
"self-hosting/configuration/redis",
"self-hosting/faq"
]
},

View File

@@ -49,10 +49,15 @@ Other environment variables are listed below to increase the functionality of yo
16`
</ParamField>
<ParamField query="MONGO_URL" type="string" default="none" required>
*TLS based connection string is not yet supported
</ParamField>
</Tab>
<ParamField query="MONGO_URL" type="string" default="none" required>
*TLS based connection string is not yet supported
</ParamField>
<ParamField query="REDIS_URL" type="string" default="none" required>
Redis connection string
</ParamField>
</Tab>
<Tab title="Email service">
<Info>When email service is not configured, Infisical will have limited functionality</Info>

View File

@@ -0,0 +1,83 @@
---
title: "Configure Redis"
description: "Learn to configure Redis with your self hosted Infisical"
---
## Why Redis?
As the features and use cases of Infisical have grown, the need for a fast and reliable in-memory data store has become clear.
By adding Redis to Infisical, we can now support more complex workflows such as a queuing system for long-running asynchronous tasks, cron jobs, and a reliable cache to speed up access to frequently used resources.
<Info>
Starting with Infisical version v0.31.0, Redis will be required to fully use Infisical
</Info>
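To make the workflows above concrete, below is a minimal sketch of a Redis-backed queue with one long-running job and one cron-style repeatable job. It assumes the Bull library and the `REDIS_URL` environment variable described on this page; the queue name, payload shape, and handler are hypothetical and are not Infisical's actual backend code.
```ts
// Minimal sketch: a Redis-backed queue for a long-running task plus a repeatable (cron) job.
// Assumes the "bull" package and a REDIS_URL environment variable (e.g. redis://redis:6379).
import Queue from "bull";

const redisUrl = process.env.REDIS_URL ?? "redis://redis:6379";

// Hypothetical queue name and payload; not Infisical's real queue.
const syncQueue = new Queue<{ workspaceId: string }>("example-sync", redisUrl);

// Worker: processes jobs asynchronously, off the request/response path.
syncQueue.process(async (job) => {
  console.log(`syncing workspace ${job.data.workspaceId}`);
});

// Enqueue a one-off job with retries and a fixed backoff.
void syncQueue.add({ workspaceId: "abc123" }, { attempts: 3, backoff: 5000 });

// Enqueue a repeatable job that runs at the top of every hour.
void syncQueue.add({ workspaceId: "abc123" }, { repeat: { cron: "0 * * * *" } });
```
Both the producer and the worker only need the Redis connection string, which is why `REDIS_URL` is the one new variable this page asks you to configure.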
### Adding Redis to your self hosted instance of Infisical
To add Redis to your self hosted instance, follow the instructions for the deployment method you used.
<Tabs>
<Tab title="Kubernetes Helm chart">
### In cluster Redis
By default, new versions of the Infisical Helm chart already come with an in-cluster Redis instance. To deploy an in-cluster Redis instance along with your Infisical instance, update your Infisical chart and then redeploy/upgrade your release.
This will spin up a Redis instance and automatically configure it with your Infisical backend.
1. Update Infisical Helm chart
```bash
helm repo update
```
2. Upgrade Infisical release
```bash
helm upgrade <infisical release name> infisical-helm-charts/infisical --values <path to your values file>
```
### External Redis
If you want to use an external Redis instance, please add a Redis connection URL under the backend environment variables and then upgrade/redeploy your Infisical instance.
1. Update your helm values file
```yaml your-values.yaml
backendEnvironmentVariables:
REDIS_URL: <your redis connection string>
```
2. Upgrade Infisical release
```bash
helm upgrade <infisical release name> infisical-helm-charts/infisical --values <path to your values file>
```
</Tab>
<Tab title="Docker compose">
### Internal Redis service
By default, new versions of the docker compose file already come with a Redis service. To use the pre-configured Redis service, please update your docker compose file to the latest version.
1. Download the new docker compose file
```
wget -O docker-compose.yml https://raw.githubusercontent.com/Infisical/infisical/main/docker-compose.yml
```
2. Add Redis environment variable to your .env file
```.env .env
REDIS_URL=redis://redis:6379
```
3. Restart your docker compose services
</Tab>
<Tab title="Standalone Docker image">
This standalone version of Infisical does not have an internal Redis service. To configure Redis with your Infisical instance, you must connect to an external Redis service by setting the connection string as an environment variable.
Example:
```bash
docker run -p 80:80 \
-e ENCRYPTION_KEY=f40c9178624764ad85a6830b37ce239a \
-e JWT_SIGNUP_SECRET=38ea90fb7998b92176080f457d890392 \
-e JWT_REFRESH_SECRET=7764c7bbf3928ad501591a3e005eb364 \
-e JWT_AUTH_SECRET=5239fea3a4720c0e524f814a540e14a2 \
-e JWT_SERVICE_SECRET=8509fb8b90c9b53e9e61d1e35826dcb5 \
-e REDIS_URL=<> \
-e MONGO_URL="<>" \
infisical/infisical:latest
```
Redis environment variable name: `REDIS_URL`
</Tab>
</Tabs>
## Support
If you have questions or need support, please join our [slack channel](https://infisical-users.slack.com) and one of our teammates will be happy to guide you.

View File

@@ -32,7 +32,7 @@ However, it's important to specify a particular version of Infisical during inst
View [properties for frontend and backend](https://github.com/Infisical/infisical/tree/main/helm-charts/infisical#parameters).
<Tip>
To find the latest version number of Infisical, follow the links bellow
To find the latest version number of Infisical, follow the links below
- [frontend Docker image](https://hub.docker.com/r/infisical/frontend/tags)
- [backend Docker image](https://hub.docker.com/r/infisical/backend/tags)
</Tip>
@@ -92,7 +92,7 @@ ingress:
#### Database
Infisical uses a document database as its persistence layer. With this Helm chart, you spin up a MongoDB instance powered by Bitnami alongside other Infisical services in your cluster.
When persistence is enabled, the data will be stored a Kubernetes Persistence Volume. View all [properties for mongodb](https://github.com/Infisical/infisical/tree/main/helm-charts/infisical).
When persistence is enabled, the data will be stored in a Kubernetes Persistent Volume. View all [properties for mongodb](https://github.com/Infisical/infisical/tree/main/helm-charts/infisical).
```yaml simple-values-example.yaml
mongodb:
@@ -214,4 +214,7 @@ Allow 3-5 minutes for the deployment to complete. Once done, you should now be a
<Info>
Once installation is complete, you will have to create the first account. No default account is provided.
</Info>
</Info>
## Related blogs
- [Set up Infisical in a development cluster](https://iamunnip.hashnode.dev/infisical-open-source-secretops-kubernetes-setup)

View File

@@ -46,6 +46,10 @@ Add the required environment variables listed below to your docker run command.
Must be a random 16 byte hex string. Can be generated with `openssl rand -hex 16`
</ParamField>
<ParamField query="REDIS_URL" type="string" default="none" required>
Redis connection string
</ParamField>
<ParamField query="MONGO_URL" type="string" default="none" required>
*TLS based connection string is not yet supported
</ParamField>
@@ -61,6 +65,7 @@ docker run -p 80:80 \
-e JWT_AUTH_SECRET=5239fea3a4720c0e524f814a540e14a2 \
-e JWT_SERVICE_SECRET=8509fb8b90c9b53e9e61d1e35826dcb5 \
-e MONGO_URL="<>" \
-e REDIS_URL="<>" \
infisical/infisical:latest
```

View File

@@ -22,12 +22,6 @@ Choose from a variety of deployment options listed below to get started.
>
Automatically create and deploy Infisical on to a Kubernetes cluster
</Card>
<Card title="Fly.io" color="#dc2626" href="deployment-options/fly.io">
Use our standalone docker image to deploy on Fly.io
</Card>
<Card title="Render.com" color="#dc2626" href="deployment-options/render">
Install on Render using our standalone docker image
</Card>
<Card title="AWS EC2" color="#0285c7" href="deployment-options/aws-ec2">
Install infisical with just a few clicks using our Cloud Formation template
</Card>

View File

@@ -57,7 +57,8 @@ COPY --chown=nextjs:nodejs --chmod=555 scripts ./scripts
COPY --from=builder /app/public ./public
RUN chown nextjs:nodejs ./public/data
COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static
COPY --from=builder --chown=nextjs:nodejs --chmod=777 /app/.next/static ./.next/static
RUN chmod -R 777 /app/.next/server
USER nextjs

View File

@@ -29,7 +29,7 @@ export const ModalContent = forwardRef<HTMLDivElement, ModalContentProps>(
<Card
isRounded
className={twMerge(
"fixed top-1/2 left-1/2 z-[90] max-w-lg -translate-y-2/4 -translate-x-2/4 animate-popIn border border-mineshaft-600 drop-shadow-2xl",
"fixed top-1/2 left-1/2 z-[90] dark:[color-scheme:dark] max-h-screen overflow-y-auto thin-scrollbar max-w-lg -translate-y-2/4 -translate-x-2/4 animate-popIn border border-mineshaft-600 drop-shadow-2xl",
className
)}
>

View File

@@ -4,4 +4,5 @@ export type ServerStatus = {
emailConfigured: boolean;
inviteOnlySignup: boolean;
secretScanningConfigured: boolean
redisConfigured: boolean
};

View File

@@ -16,6 +16,7 @@ export {
useGetWorkspaceUsers,
useNameWorkspaceSecrets,
useRenameWorkspace,
useReorderWsEnvironment,
useToggleAutoCapitalization,
useUpdateUserWorkspaceRole,
useUpdateWsEnvironment} from "./queries";

View File

@@ -13,6 +13,7 @@ import {
GetWsEnvironmentDTO,
NameWorkspaceSecretsDTO,
RenameWorkspaceDTO,
ReorderEnvironmentsDTO,
ToggleAutoCapitalizationDTO,
UpdateEnvironmentDTO,
Workspace,
@@ -244,6 +245,21 @@ export const useCreateWsEnvironment = () => {
});
};
export const useReorderWsEnvironment = () => {
const queryClient = useQueryClient();
return useMutation<{}, {}, ReorderEnvironmentsDTO>({
mutationFn: ({ workspaceID, environmentSlug, environmentName, otherEnvironmentSlug, otherEnvironmentName}) => {
return apiRequest.patch(`/api/v2/workspace/${workspaceID}/environments`, {
environmentSlug, environmentName, otherEnvironmentSlug, otherEnvironmentName
});
},
onSuccess: () => {
queryClient.invalidateQueries(workspaceKeys.getAllUserWorkspace);
}
});
};
export const useUpdateWsEnvironment = () => {
const queryClient = useQueryClient();

View File

@@ -46,6 +46,15 @@ export type CreateEnvironmentDTO = {
environmentName: string;
};
export type ReorderEnvironmentsDTO = {
workspaceID: string;
environmentSlug: string;
environmentName: string;
otherEnvironmentSlug: string;
otherEnvironmentName: string;
};
export type UpdateEnvironmentDTO = {
workspaceID: string;
oldEnvironmentSlug: string;

View File

@@ -483,7 +483,7 @@ export const AppLayout = ({ children }: LayoutProps) => {
</MenuItem>
</a>
</Link>
{/* <Link href={`/project/${currentWorkspace?._id}/audit-logs`} passHref>
<Link href={`/project/${currentWorkspace?._id}/audit-logs`} passHref>
<a>
<MenuItem
isSelected={
@@ -491,11 +491,11 @@ export const AppLayout = ({ children }: LayoutProps) => {
}
icon="system-outline-168-view-headline"
>
Audit Logs V2
Audit Logs
</MenuItem>
</a>
</Link> */}
<Link href={`/project/${currentWorkspace?._id}/logs`} passHref>
</Link>
{/* <Link href={`/project/${currentWorkspace?._id}/logs`} passHref>
<a>
<MenuItem
isSelected={
@@ -506,7 +506,7 @@ export const AppLayout = ({ children }: LayoutProps) => {
Audit Logs
</MenuItem>
</a>
</Link>
</Link> */}
{/* <Link href={`/project/${currentWorkspace?._id}/secret-scanning`} passHref>
<a>
<MenuItem

View File

@@ -4,6 +4,7 @@ import { useEffect, useState } from "react";
import { Controller, useForm } from "react-hook-form";
import { useTranslation } from "react-i18next";
import Head from "next/head";
import Link from "next/link";
import { useRouter } from "next/router";
import { IconProp } from "@fortawesome/fontawesome-svg-core";
import { faSlack } from "@fortawesome/free-brands-svg-icons";
@@ -11,6 +12,7 @@ import { faFolderOpen } from "@fortawesome/free-regular-svg-icons";
import {
faArrowRight,
faCheckCircle,
faExclamationCircle,
faHandPeace,
faMagnifyingGlass,
faNetworkWired,
@@ -37,6 +39,7 @@ import {
import { TabsObject } from "@app/components/v2/Tabs";
import { useSubscription, useUser, useWorkspace } from "@app/context";
import { fetchOrgUsers, useAddUserToWs, useCreateWorkspace, useRegisterUserAction,useUploadWsKey } from "@app/hooks/api";
import { useFetchServerStatus } from "@app/hooks/api/serverDetails";
import { usePopUp } from "@app/hooks/usePopUp";
import { encryptAssymmetric } from "../../../../components/utilities/cryptography/crypto";
@@ -269,6 +272,8 @@ export default function Organization() {
const createWs = useCreateWorkspace();
const { user } = useUser();
const uploadWsKey = useUploadWsKey();
const { data: serverDetails } = useFetchServerStatus();
const onCreateProject = async ({ name, addMembers }: TAddProjectFormData) => {
// type check
@@ -340,6 +345,18 @@ export default function Organization() {
<title>{t("common.head-title", { title: t("settings.members.title") })}</title>
<link rel="icon" href="/infisical.ico" />
</Head>
{!serverDetails?.redisConfigured && <div className="mb-4 flex flex-col items-start justify-start px-6 py-6 pb-0 text-3xl">
<p className="mr-4 mb-4 font-semibold text-white">Announcements</p>
<div className="w-full border border-blue-400/70 rounded-md bg-blue-900/70 p-2 text-base text-mineshaft-100 flex items-center">
<FontAwesomeIcon icon={faExclamationCircle} className="text-2xl mr-4 p-4 text-mineshaft-50"/>
Attention: Updated versions of Infisical now require Redis for full functionality. Learn how to configure it
<Link href="https://infisical.com/docs/self-hosting/configuration/redis" target="_blank">
<span className="pl-1 text-white underline underline-offset-2 hover:decoration-blue-400 hover:text-blue-200 duration-100 cursor-pointer">
here
</span>
</Link>.
</div>
</div>}
<div className="mb-4 flex flex-col items-start justify-start px-6 py-6 pb-0 text-3xl">
<p className="mr-4 font-semibold text-white">Projects</p>
<div className="mt-6 flex w-full flex-row">

View File

@@ -9,7 +9,7 @@ const Logs = () => {
return (
<div className="h-full bg-bunker-800">
<Head>
<title>{t("common.head-title", { title: t("billing.title") })}</title>
<title>{t("common.head-title", { title: t("settings.project.title") })}</title>
<link rel="icon" href="/infisical.ico" />
<meta property="og:image" content="/images/message.png" />
</Head>

View File

@@ -110,6 +110,11 @@ export const SecretInputRow = memo(
append
} = useFieldArray({ control, name: `secrets.${index}.tags` });
const tagColorByTagId = new Map((wsTags || []).map((wsTag, i) => [wsTag._id, tagColors[i % tagColors.length]]))
// display the tags in alphabetical order
secretTags.sort((a, b) => a.name.localeCompare(b.name))
// to get details on a secret
const overrideAction = useWatch({
control,
@@ -321,19 +326,22 @@ export const SecretInputRow = memo(
</td>
<td className="min-w-sm flex">
<div className="flex h-8 items-center pl-2">
{secretTags.map(({ id, slug }, i) => (
<Tag
className={cx(
tagColors[i % tagColors.length].bg,
tagColors[i % tagColors.length].text
)}
isDisabled={isReadOnly || isAddOnly || isRollbackMode}
onClose={() => remove(i)}
key={id}
>
{slug}
</Tag>
))}
{secretTags.map(({ id, _id, slug }, i) => {
// This map lookup shouldn't ever fail, but if it does we default to the first color
const tagColor = tagColorByTagId.get(_id) || tagColors[0]
return (
<Tag
className={cx(
tagColor.bg,
tagColor.text
)}
isDisabled={isReadOnly || isAddOnly || isRollbackMode}
onClose={() => remove(i)}
key={id}
>
{slug}
</Tag>)
})}
<div className="w-0 overflow-hidden group-hover:w-6">
<Tooltip content="Copy value">
<IconButton

View File

@@ -40,7 +40,8 @@ export const PasswordStep = ({
authMethod
} = jwt_decode(providerAuthToken) as any;
const handleLogin = async () => {
const handleLogin = async (e:React.FormEvent) => {
e.preventDefault()
try {
setIsLoading(true);
@@ -119,10 +120,12 @@ export const PasswordStep = ({
console.error(err);
}
};
return (
<form
onSubmit={(e) => e.preventDefault()}
onSubmit={handleLogin}
className="h-full mx-auto w-full max-w-md px-6 pt-8"
>
<div className="mb-8">
@@ -153,9 +156,9 @@ export const PasswordStep = ({
</div>
<div className='lg:w-1/6 w-1/4 w-full mx-auto flex items-center justify-center min-w-[22rem] text-center rounded-md mt-4'>
<Button
type="submit"
colorSchema="primary"
variant="outline_bg"
onClick={async () => handleLogin()}
isFullWidth
isLoading={isLoading}
className="h-14"

View File

@@ -15,11 +15,19 @@ export const SAMLSSOStep = ({
const queryParams = new URLSearchParams(window.location.search);
const handleSubmission = (e:React.FormEvent) => {
e.preventDefault()
const callbackPort = queryParams.get("callback_port");
window.open(`/api/v1/sso/redirect/saml2/${ssoIdentifier}${callbackPort ? `?callback_port=${callbackPort}` : ""}`);
window.close();
}
return (
<div className="mx-auto w-full max-w-md md:px-6">
<p className="mx-auto mb-6 flex w-max justify-center text-xl font-medium text-transparent bg-clip-text bg-gradient-to-b from-white to-bunker-200 text-center mb-8">
What&apos;s your SSO Identifier?
</p>
<form onSubmit={handleSubmission}>
<div className="relative flex items-center justify-center lg:w-1/6 w-1/4 min-w-[20rem] md:min-w-[22rem] mx-auto w-full rounded-lg max-h-24 md:max-h-28">
<div className="flex items-center justify-center w-full rounded-lg max-h-24 md:max-h-28">
<Input
@@ -36,19 +44,16 @@ export const SAMLSSOStep = ({
</div>
<div className='lg:w-1/6 w-1/4 w-full mx-auto flex items-center justify-center min-w-[20rem] md:min-w-[22rem] text-center rounded-md mt-4'>
<Button
type="submit"
colorSchema="primary"
variant="outline_bg"
onClick={() => {
const callbackPort = queryParams.get("callback_port");
window.open(`/api/v1/sso/redirect/saml2/${ssoIdentifier}${callbackPort ? `?callback_port=${callbackPort}` : ""}`);
window.close();
}}
isFullWidth
className="h-14"
>
{t("login.login")}
</Button>
</div>
</form>
<div className="flex flex-row items-center justify-center mt-4">
<button
onClick={() => {

View File

@@ -74,8 +74,7 @@ export const LogsFilter = ({
className="w-40 mr-4"
>
<Select
placeholder="Select"
// {...(field.value ? { value: field.value } : { placeholder: "Select" })}
{...(field.value ? { value: field.value } : { placeholder: "Select" })}
{...field}
onValueChange={(e) => onChange(e)}
className="w-full bg-mineshaft-700 border border-mineshaft-500 text-mineshaft-100"

View File

@@ -1,13 +1,25 @@
import { useEffect } from "react";
import { useForm } from "react-hook-form";
import { useRouter } from "next/router";
import { yupResolver } from "@hookform/resolvers/yup";
import { UpgradePlanModal } from "@app/components/v2";
import { useSubscription } from "@app/context";
import { EventType, UserAgentType } from "@app/hooks/api/auditLogs/enums";
import { usePopUp } from "@app/hooks/usePopUp";
import { LogsFilter } from "./LogsFilter";
import { LogsTable } from "./LogsTable";
import { AuditLogFilterFormData, auditLogFilterFormSchema } from "./types";
export const LogsSection = () => {
const { subscription } = useSubscription();
const router = useRouter();
const { popUp, handlePopUpOpen, handlePopUpToggle } = usePopUp([
"upgradePlan"
] as const);
const {
control,
reset,
@@ -20,6 +32,12 @@ export const LogsSection = () => {
perPage: 10
}
});
useEffect(() => {
if (subscription && !subscription.auditLogs) {
handlePopUpOpen("upgradePlan");
}
}, [subscription]);
const eventType = watch("eventType") as EventType | undefined;
const userAgentType = watch("userAgentType") as UserAgentType | undefined;
@@ -54,6 +72,19 @@ export const LogsSection = () => {
perPage={perPage}
setValue={setValue}
/>
<UpgradePlanModal
isOpen={popUp.upgradePlan.isOpen}
onOpenChange={(isOpen) => {
if (!isOpen) {
router.back();
return;
}
handlePopUpToggle("upgradePlan", isOpen)
}}
text="You can use audit logs if you switch to a paid Infisical plan."
/>
</div>
);
}

View File

@@ -52,7 +52,7 @@ export const SecretOverviewTableRow = ({
<div className="text-blue-300/70">
<FontAwesomeIcon icon={isFormExpanded ? faAngleDown : faKey} />
</div>
<div>{secretKey}</div>
<div title={secretKey}>{secretKey}</div>
</div>
</div>
</Td>
@@ -73,7 +73,9 @@ export const SecretOverviewTableRow = ({
>
<div className="h-full w-full border-r border-mineshaft-600 py-[0.85rem] px-5">
<div className="flex justify-center">
{!isSecretEmpty && <FontAwesomeIcon icon={isSecretPresent ? faCheck : faXmark} />}
{!isSecretEmpty && <Tooltip content={isSecretPresent ? "Present secret" : "Missing secret"}>
<FontAwesomeIcon icon={isSecretPresent ? faCheck : faXmark} />
</Tooltip>}
{isSecretEmpty && (
<Tooltip content="Empty value">
<FontAwesomeIcon icon={faCircle} />

View File

@@ -1,6 +1,7 @@
import { faPencil, faXmark } from "@fortawesome/free-solid-svg-icons";
import { faArrowDown,faArrowUp, faPencil, faXmark } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { useNotificationContext } from "@app/components/context/Notifications/NotificationProvider";
import {
EmptyState,
IconButton,
@@ -14,6 +15,9 @@ import {
Tr
} from "@app/components/v2";
import { useWorkspace } from "@app/context";
import {
useReorderWsEnvironment
} from "@app/hooks/api";
import { UsePopUpState } from "@app/hooks/usePopUp";
type Props = {
@@ -31,6 +35,43 @@ type Props = {
export const EnvironmentTable = ({ handlePopUpOpen }: Props) => {
const { currentWorkspace, isLoading } = useWorkspace();
const { createNotification } = useNotificationContext();
const reorderWsEnvironment = useReorderWsEnvironment();
const handleReorderEnv= async (shouldMoveUp: boolean, name: string, slug: string) => {
try {
if (!currentWorkspace?._id) return;
const indexOfEnv = currentWorkspace.environments.findIndex((env) => env.name === name && env.slug === slug);
// check that this reordering is possible
if (indexOfEnv === 0 && shouldMoveUp || indexOfEnv === currentWorkspace.environments.length - 1 && !shouldMoveUp) {
return
}
const indexToSwap = shouldMoveUp ? indexOfEnv - 1 : indexOfEnv + 1
await reorderWsEnvironment.mutateAsync({
workspaceID: currentWorkspace._id,
environmentSlug: slug,
environmentName: name,
otherEnvironmentSlug: currentWorkspace.environments[indexToSwap].slug,
otherEnvironmentName: currentWorkspace.environments[indexToSwap].name
});
createNotification({
text: "Successfully re-ordered environments",
type: "success"
});
} catch (err) {
console.error(err);
createNotification({
text: "Failed to re-order environments",
type: "error"
});
}
};
return (
<TableContainer>
<Table>
@@ -45,11 +86,35 @@ export const EnvironmentTable = ({ handlePopUpOpen }: Props) => {
{isLoading && <TableSkeleton columns={3} innerKey="project-envs" />}
{!isLoading &&
currentWorkspace &&
currentWorkspace.environments.map(({ name, slug }) => (
currentWorkspace.environments.map(({ name, slug }, pos) => (
<Tr key={name}>
<Td>{name}</Td>
<Td>{slug}</Td>
<Td className="flex items-center justify-end">
<IconButton
className="mr-3 py-2"
onClick={() => {
handleReorderEnv(false, name, slug)
}}
colorSchema="primary"
variant="plain"
ariaLabel="update"
isDisabled={pos === currentWorkspace.environments.length - 1}
>
<FontAwesomeIcon icon={faArrowDown} />
</IconButton>
<IconButton
className="mr-3 py-2"
onClick={() => {
handleReorderEnv(true, name, slug)
}}
colorSchema="primary"
variant="plain"
ariaLabel="update"
isDisabled={pos === 0}
>
<FontAwesomeIcon icon={faArrowUp} />
</IconButton>
<IconButton
className="mr-3 py-2"
onClick={() => {

View File

@@ -11,16 +11,13 @@ import {
useNameWorkspaceSecrets
} from "@app/hooks/api";
// TODO: add check so that this only shows up if user is
// an admin in the workspace
export const ProjectIndexSecretsSection = () => {
const { currentWorkspace } = useWorkspace();
const { data: isBlindIndexed, isLoading: isBlindIndexedLoading } = useGetWorkspaceIndexStatus(currentWorkspace?._id ?? "");
const { data: latestFileKey } = useGetUserWsKey(currentWorkspace?._id ?? "");
const { data: encryptedSecrets } = useGetWorkspaceSecrets(currentWorkspace?._id ?? "");
const nameWorkspaceSecrets = useNameWorkspaceSecrets();
const onEnableBlindIndices = async () => {
if (!currentWorkspace?._id) return;
if (!encryptedSecrets) return;
@@ -53,7 +50,7 @@ export const ProjectIndexSecretsSection = () => {
});
};
return (!isBlindIndexedLoading && !isBlindIndexed) ? (
return (!isBlindIndexedLoading && (isBlindIndexed === false)) ? (
<div className="mb-6 p-4 bg-mineshaft-900 rounded-lg border border-mineshaft-600">
<p className="mb-3 text-xl font-semibold">Blind Indices</p>
<p className="text-gray-400 mb-8">

View File

@@ -5,8 +5,11 @@ dependencies:
- name: mailhog
repository: https://codecentric.github.io/helm-charts
version: 5.2.3
- name: redis
repository: https://charts.bitnami.com/bitnami
version: 17.15.0
- name: ingress-nginx
repository: https://kubernetes.github.io/ingress-nginx
version: 4.0.13
digest: sha256:d1a679e6c30e37da96b7a4b6115e285f61e6ce0dd921ffbe2cf557418c229f33
generated: "2023-04-08T15:59:12.950942-07:00"
digest: sha256:1762132c45000bb6d410c6da2291ac5c65f91331550a473b370374ba042d0744
generated: "2023-08-10T15:03:12.219788-04:00"

View File

@@ -7,7 +7,7 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.2.1
version: 0.3.1
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
@@ -24,6 +24,10 @@ dependencies:
version: "~5.2.3"
repository: "https://codecentric.github.io/helm-charts"
condition: mailhog.enabled
- name: redis
version: 17.15.0
repository: https://charts.bitnami.com/bitnami
condition: redis.enabled
- name: ingress-nginx
version: 4.0.13
repository: https://kubernetes.github.io/ingress-nginx

View File

@@ -7,6 +7,7 @@ This is the Infisical application Helm chart. This chart includes the following
| `frontend` | Infisical's Web UI |
| `backend` | Infisical's API |
| `mongodb` | Infisical's database |
| `redis` | Infisical's cache service |
| `mailhog` | Infisical's development SMTP server |
## Installation
@@ -58,7 +59,6 @@ kubectl get secrets -n <namespace> <secret-name> \
| `nameOverride` | Override release name | `""` |
| `fullnameOverride` | Override release fullname | `""` |
### Infisical frontend parameters
| Name | Description | Value |
@@ -78,41 +78,41 @@ kubectl get secrets -n <namespace> <secret-name> \
| `frontend.service.nodePort` | Backend service nodePort (used if above type is `NodePort`) | `""` |
| `frontendEnvironmentVariables.SITE_URL` | Absolute URL including the protocol (e.g. https://app.infisical.com) | `infisical.local` |
### Infisical backend parameters
| Name | Description | Value |
| ------------------------------------------------ | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------- |
| `backend.enabled` | Enable backend | `true` |
| `backend.name` | Backend name | `backend` |
| `backend.fullnameOverride` | Backend fullnameOverride | `""` |
| `backend.podAnnotations` | Backend pod annotations | `{}` |
| `backend.deploymentAnnotations` | Backend deployment annotations | `{}` |
| `backend.replicaCount` | Backend replica count | `2` |
| `backend.image.repository` | Backend image repository | `infisical/backend` |
| `backend.image.tag` | Backend image tag | `latest` |
| `backend.image.pullPolicy` | Backend image pullPolicy | `IfNotPresent` |
| `backend.kubeSecretRef` | Backend secret resource reference name (containing required [backend configuration variables](https://infisical.com/docs/self-hosting/configuration/envars)) | `""` |
| `backend.service.annotations` | Backend service annotations | `{}` |
| `backend.service.type` | Backend service type | `ClusterIP` |
| `backend.service.nodePort` | Backend service nodePort (used if above type is `NodePort`) | `""` |
| `backendEnvironmentVariables.ENCRYPTION_KEY` | **Required** Backend encryption key (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.JWT_SIGNUP_SECRET` | **Required** Secrets to sign JWT tokens (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.JWT_REFRESH_SECRET` | **Required** Secrets to sign JWT tokens (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.JWT_AUTH_SECRET` | **Required** Secrets to sign JWT tokens (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.JWT_SERVICE_SECRET` | **Required** Secrets to sign JWT tokens (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.JWT_MFA_SECRET` | **Required** Secrets to sign JWT tokens (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.SMTP_HOST` | **Required** Hostname to connect to for establishing SMTP connections | `""` |
| `backendEnvironmentVariables.SMTP_PORT` | Port to connect to for establishing SMTP connections | `587` |
| `backendEnvironmentVariables.SMTP_SECURE` | If true, use TLS when connecting to host. If false, TLS will be used if STARTTLS is supported | `false` |
| `backendEnvironmentVariables.SMTP_FROM_NAME` | Name label to be used in From field (e.g. Infisical) | `Infisical` |
| `backendEnvironmentVariables.SMTP_FROM_ADDRESS` | **Required** Email address to be used for sending emails (e.g. dev@infisical.com) | `""` |
| `backendEnvironmentVariables.SMTP_USERNAME` | **Required** Credential to connect to host (e.g. team@infisical.com) | `""` |
| `backendEnvironmentVariables.SMTP_PASSWORD` | **Required** Credential to connect to host | `""` |
| `backendEnvironmentVariables.SITE_URL` | Absolute URL including the protocol (e.g. https://app.infisical.com) | `infisical.local` |
| `backendEnvironmentVariables.INVITE_ONLY_SIGNUP` | To disable account creation from the login page (invites only) | `false` |
| `backendEnvironmentVariables.MONGO_URL` | MongoDB connection string (external or internal)</br>Leave it empty for auto-generated connection string | `""` |
| Name | Description | Value |
| ------------------------------------------------------ | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------- |
| `backend.enabled` | Enable backend | `true` |
| `backend.name` | Backend name | `backend` |
| `backend.fullnameOverride` | Backend fullnameOverride | `""` |
| `backend.podAnnotations` | Backend pod annotations | `{}` |
| `backend.deploymentAnnotations` | Backend deployment annotations | `{}` |
| `backend.replicaCount` | Backend replica count | `2` |
| `backend.image.repository` | Backend image repository | `infisical/backend` |
| `backend.image.tag` | Backend image tag | `latest` |
| `backend.image.pullPolicy` | Backend image pullPolicy | `IfNotPresent` |
| `backend.kubeSecretRef` | Backend secret resource reference name (containing required [backend configuration variables](https://infisical.com/docs/self-hosting/configuration/envars)) | `""` |
| `backend.service.annotations` | Backend service annotations | `{}` |
| `backend.service.type` | Backend service type | `ClusterIP` |
| `backend.service.nodePort` | Backend service nodePort (used if above type is `NodePort`) | `""` |
| `backendEnvironmentVariables.ENCRYPTION_KEY` | **Required** Backend encryption key (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.JWT_SIGNUP_SECRET` | **Required** Secrets to sign JWT tokens (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.JWT_REFRESH_SECRET` | **Required** Secrets to sign JWT tokens (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.JWT_AUTH_SECRET` | **Required** Secrets to sign JWT tokens (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.JWT_SERVICE_SECRET` | **Required** Secrets to sign JWT tokens (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.JWT_MFA_SECRET` | **Required** Secrets to sign JWT tokens (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.JWT_PROVIDER_AUTH_SECRET` | **Required** Secrets to sign JWT OAuth tokens (128-bit hex value, 32-characters hex, [example](https://stackoverflow.com/a/34329057))</br><kbd>auto-generated</kbd> variable (if not provided, and not found in an existing secret) | `""` |
| `backendEnvironmentVariables.SMTP_HOST` | **Required** Hostname to connect to for establishing SMTP connections | `""` |
| `backendEnvironmentVariables.SMTP_PORT` | Port to connect to for establishing SMTP connections | `587` |
| `backendEnvironmentVariables.SMTP_SECURE` | If true, use TLS when connecting to host. If false, TLS will be used if STARTTLS is supported | `false` |
| `backendEnvironmentVariables.SMTP_FROM_NAME` | Name label to be used in From field (e.g. Infisical) | `Infisical` |
| `backendEnvironmentVariables.SMTP_FROM_ADDRESS` | **Required** Email address to be used for sending emails (e.g. dev@infisical.com) | `""` |
| `backendEnvironmentVariables.SMTP_USERNAME` | **Required** Credential to connect to host (e.g. team@infisical.com) | `""` |
| `backendEnvironmentVariables.SMTP_PASSWORD` | **Required** Credential to connect to host | `""` |
| `backendEnvironmentVariables.SITE_URL` | Absolute URL including the protocol (e.g. https://app.infisical.com) | `infisical.local` |
| `backendEnvironmentVariables.INVITE_ONLY_SIGNUP` | To disable account creation from the login page (invites only) | `false` |
| `backendEnvironmentVariables.MONGO_URL` | MongoDB connection string (external or internal)</br>Leave it empty for auto-generated connection string | `""` |
| `backendEnvironmentVariables.REDIS_URL` | Redis connection string (points to the in-cluster Redis deployed by this chart by default) | `redis://redis-master:6379` |
### MongoDB(&reg;) parameters
@@ -154,18 +154,17 @@ kubectl get secrets -n <namespace> <secret-name> \
| `mongodb.persistence.size` | Persistent storage request size | `8Gi` |
| `mongodbConnection.externalMongoDBConnectionString` | Deprecated :warning: External MongoDB connection string</br>Use backendEnvironmentVariables.MONGO_URL instead | `""` |
### Ingress parameters
| Name | Description | Value |
| -------------------------- | ------------------------------------------------------------------------ | ------- |
| `ingress.enabled` | Enable ingress | `true` |
| `ingress.ingressClassName` | Ingress class name | `nginx` |
| `ingress.nginx.enabled` | Ingress controller | `false` |
| `ingress.annotations` | Ingress annotations | `{}` |
| `ingress.hostName` | Ingress hostname (your custom domain name, e.g. `infisical.example.org`) | `""` |
| `ingress.tls` | Ingress TLS hosts (matching above hostName) | `[]` |
### Mailhog parameters
| Name | Description | Value |
@@ -184,6 +183,10 @@ kubectl get secrets -n <namespace> <secret-name> \
| `mailhog.ingress.labels` | Ingress labels | `{}` |
| `mailhog.ingress.hosts[0].host` | Mailhog host | `mailhog.infisical.local` |
### Redis parameters
## Persistence

View File

@@ -126,5 +126,4 @@ Create the mongodb connection string.
{{- if .Values.mongodbConnection.externalMongoDBConnectionString -}}
{{- $connectionString = .Values.mongodbConnection.externalMongoDBConnectionString -}}
{{- end -}}
{{- printf "%s" $connectionString -}}
{{- end -}}
{{- end -}}

View File

@@ -169,6 +169,10 @@ backendEnvironmentVariables:
##
MONGO_URL: ""
## @param backendEnvironmentVariables.REDIS_URL
## By default, the backend will use the Redis that is auto deployed along with Infisical
REDIS_URL: "redis://redis-master:6379"
## @section MongoDB(&reg;) parameters
## Documentation : https://github.com/bitnami/charts/blob/main/bitnami/mongodb/values.yaml
##
@@ -419,3 +423,16 @@ mailhog:
paths:
- path: "/"
pathType: Prefix
## @section Redis parameters
## Documentation : https://github.com/bitnami/charts/tree/main/bitnami/redis#parameters
##
## @skip redis
##
redis:
name: "redis"
fullnameOverride: "redis"
enabled: true
architecture: standalone
auth:
enabled: false