Compare commits

..

8 Commits

143 changed files with 2389 additions and 1614 deletions

View File

@@ -1,8 +1,6 @@
name: Build Binaries and Deploy
on:
push:
workflow_dispatch:
inputs:
version:
@@ -53,20 +51,20 @@ jobs:
if [ "${{ matrix.os }}" != "linux" ]; then
pkg --no-bytecode --public-packages "*" --public --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }} .
else
pkg --no-bytecode --public-packages "*" --public --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }} .
pkg --no-bytecode --public-packages "*" --public --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core .
fi
# Set up .deb package structure (Debian/Ubuntu only)
- name: Set up .deb package structure
if: matrix.os == 'linux ------------------------------------------------------------------- TEST'
if: matrix.os == 'linux'
run: |
mkdir -p infisical-core/DEBIAN
mkdir -p infisical-core/usr/local/bin
cp ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }} infisical-core/usr/local/bin/infisical-core
cp ./binary/infisical-core infisical-core/usr/local/bin/
chmod +x infisical-core/usr/local/bin/infisical-core
- name: Create control file
if: matrix.os == 'linux ----------------------------------------- TEST'
if: matrix.os == 'linux'
run: |
cat <<EOF > infisical-core/DEBIAN/control
Package: infisical-core
@@ -80,7 +78,7 @@ jobs:
# Build .deb file (Debian/Ubunutu only)
- name: Build .deb package
if: matrix.os == 'linux ------------------------------------------- TEST'
if: matrix.os == 'linux'
run: |
dpkg-deb --build infisical-core
mv infisical-core.deb ./binary/infisical-core-${{matrix.arch}}.deb
@@ -95,18 +93,12 @@ jobs:
# Publish .deb file to Cloudsmith (Debian/Ubuntu only)
- name: Publish to Cloudsmith (Debian/Ubuntu)
if: matrix.os == 'linux ------------------------------------------- TEST'
if: matrix.os == 'linux'
working-directory: ./backend
run: cloudsmith push deb --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.deb
# Raw package for x64 and arm64 ubuntu (needed for Infisical Omnibus to avoid having to build from source)
- name: Publish to Cloudsmith (Debian/Ubuntu [] RAW)
if: matrix.os == 'linux' && matrix.arch == 'x64'
working-directory: ./backend
run: cloudsmith push raw infisical/infisical-core ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }} --republish --no-wait-for-sync --version 1.0.1 --api-key ${{ secrets.CLOUDSMITH_API_KEY }}
# Publish .exe file to Cloudsmith (Windows only)
- name: Publish to Cloudsmith (Windows [] RAW)
- name: Publish to Cloudsmith (Windows)
if: matrix.os == 'win'
working-directory: ./backend
run: cloudsmith push raw infisical/infisical-core ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }}.exe --republish --no-wait-for-sync --version 1.0.1 --api-key ${{ secrets.CLOUDSMITH_API_KEY }}
run: cloudsmith push raw infisical/infisical-core ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }}.exe --republish --no-wait-for-sync --version ${{ github.event.inputs.version }} --api-key ${{ secrets.CLOUDSMITH_API_KEY }}

View File

@@ -50,6 +50,6 @@ jobs:
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
# INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
run: go test -v -count=1 ./test

View File

@@ -15,16 +15,3 @@ up-prod:
down:
docker compose -f docker-compose.dev.yml down
reviewable-ui:
cd frontend && \
npm run lint:fix && \
npm run type:check
reviewable-api:
cd backend && \
npm run lint:fix && \
npm run type:check
reviewable: reviewable-ui reviewable-api

View File

@@ -25,7 +25,6 @@
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@peculiar/asn1-schema": "^2.3.8",
@@ -7813,45 +7812,19 @@
}
},
"node_modules/@octokit/plugin-retry": {
"version": "5.0.5",
"resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-5.0.5.tgz",
"integrity": "sha512-sB1RWMhSrre02Atv95K6bhESlJ/sPdZkK/wE/w1IdSCe0yM6FxSjksLa6T7aAvxvxlLKzQEC4KIiqpqyov1Tbg==",
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-6.0.1.tgz",
"integrity": "sha512-SKs+Tz9oj0g4p28qkZwl/topGcb0k0qPNX/i7vBKmDsjoeqnVfFUquqrE/O9oJY7+oLzdCtkiWSXLpLjvl6uog==",
"dependencies": {
"@octokit/request-error": "^4.0.1",
"@octokit/types": "^10.0.0",
"@octokit/request-error": "^5.0.0",
"@octokit/types": "^12.0.0",
"bottleneck": "^2.15.3"
},
"engines": {
"node": ">= 18"
},
"peerDependencies": {
"@octokit/core": ">=3"
}
},
"node_modules/@octokit/plugin-retry/node_modules/@octokit/openapi-types": {
"version": "18.1.1",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-18.1.1.tgz",
"integrity": "sha512-VRaeH8nCDtF5aXWnjPuEMIYf1itK/s3JYyJcWFJT8X9pSNnBtriDf7wlEWsGuhPLl4QIH4xM8fqTXDwJ3Mu6sw=="
},
"node_modules/@octokit/plugin-retry/node_modules/@octokit/request-error": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-4.0.2.tgz",
"integrity": "sha512-uqwUEmZw3x4I9DGYq9fODVAAvcLsPQv97NRycP6syEFu5916M189VnNBW2zANNwqg3OiligNcAey7P0SET843w==",
"dependencies": {
"@octokit/types": "^10.0.0",
"deprecation": "^2.0.0",
"once": "^1.4.0"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/@octokit/plugin-retry/node_modules/@octokit/types": {
"version": "10.0.0",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-10.0.0.tgz",
"integrity": "sha512-Vm8IddVmhCgU1fxC1eyinpwqzXPEYu0NrYzD3YZjlGjyftdLBTeqNblRC0jmJmgxbJIsQlyogVeGnrNaaMVzIg==",
"dependencies": {
"@octokit/openapi-types": "^18.0.0"
"@octokit/core": ">=5"
}
},
"node_modules/@octokit/plugin-throttling": {
@@ -17423,22 +17396,6 @@
"node": ">=18"
}
},
"node_modules/probot/node_modules/@octokit/plugin-retry": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-6.0.1.tgz",
"integrity": "sha512-SKs+Tz9oj0g4p28qkZwl/topGcb0k0qPNX/i7vBKmDsjoeqnVfFUquqrE/O9oJY7+oLzdCtkiWSXLpLjvl6uog==",
"dependencies": {
"@octokit/request-error": "^5.0.0",
"@octokit/types": "^12.0.0",
"bottleneck": "^2.15.3"
},
"engines": {
"node": ">= 18"
},
"peerDependencies": {
"@octokit/core": ">=5"
}
},
"node_modules/probot/node_modules/commander": {
"version": "11.1.0",
"resolved": "https://registry.npmjs.org/commander/-/commander-11.1.0.tgz",

View File

@@ -121,7 +121,6 @@
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@peculiar/asn1-schema": "^2.3.8",

View File

@@ -7,33 +7,14 @@ const prompt = promptSync({
sigint: true
});
type ComponentType = 1 | 2 | 3;
console.log(`
Component List
--------------
0. Exit
1. Service component
2. DAL component
3. Router component
`);
function getComponentType(): ComponentType {
while (true) {
const input = prompt("Select a component (0-3): ");
const componentType = parseInt(input, 10);
if (componentType === 0) {
console.log("Exiting the program. Goodbye!");
process.exit(0);
} else if (componentType === 1 || componentType === 2 || componentType === 3) {
return componentType;
} else {
console.log("Invalid input. Please enter 0, 1, 2, or 3.");
}
}
}
const componentType = getComponentType();
const componentType = parseInt(prompt("Select a component: "), 10);
if (componentType === 1) {
const componentName = prompt("Enter service name: ");

View File

@@ -18,7 +18,6 @@ import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-ser
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
import { RateLimitConfiguration } from "@app/ee/services/rate-limit/rate-limit-types";
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
@@ -90,7 +89,6 @@ declare module "fastify" {
id: string;
orgId: string;
};
rateLimits: RateLimitConfiguration;
// passport data
passportUser: {
isUserCompleted: string;

View File

@@ -0,0 +1,117 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
const hasActiveCaCertIdColumn = await knex.schema.hasColumn(TableName.CertificateAuthority, "activeCaCertId");
if (!hasActiveCaCertIdColumn) {
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
t.uuid("activeCaCertId").nullable();
t.foreign("activeCaCertId").references("id").inTable(TableName.CertificateAuthorityCert);
});
await knex.raw(`
UPDATE "${TableName.CertificateAuthority}" ca
SET "activeCaCertId" = cac.id
FROM "${TableName.CertificateAuthorityCert}" cac
WHERE ca.id = cac."caId"
`);
}
}
if (await knex.schema.hasTable(TableName.CertificateAuthorityCert)) {
const hasVersionColumn = await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "version");
if (!hasVersionColumn) {
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.integer("version").nullable();
t.dropUnique(["caId"]);
});
await knex(TableName.CertificateAuthorityCert).update({ version: 1 }).whereNull("version");
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.integer("version").notNullable().alter();
});
}
const hasCaSecretIdColumn = await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "caSecretId");
if (!hasCaSecretIdColumn) {
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.uuid("caSecretId").nullable();
t.foreign("caSecretId").references("id").inTable(TableName.CertificateAuthoritySecret).onDelete("CASCADE");
});
await knex.raw(`
UPDATE "${TableName.CertificateAuthorityCert}" cert
SET "caSecretId" = (
SELECT sec.id
FROM "${TableName.CertificateAuthoritySecret}" sec
WHERE sec."caId" = cert."caId"
)
`);
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.uuid("caSecretId").notNullable().alter();
});
}
}
if (await knex.schema.hasTable(TableName.CertificateAuthoritySecret)) {
await knex.schema.alterTable(TableName.CertificateAuthoritySecret, (t) => {
t.dropUnique(["caId"]);
});
}
if (await knex.schema.hasTable(TableName.Certificate)) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.uuid("caCertId").nullable();
t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert);
});
await knex.raw(`
UPDATE "${TableName.Certificate}" cert
SET "caCertId" = (
SELECT caCert.id
FROM "${TableName.CertificateAuthorityCert}" caCert
WHERE caCert."caId" = cert."caId"
)
`);
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.uuid("caCertId").notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
if (await knex.schema.hasColumn(TableName.CertificateAuthority, "activeCaCertId")) {
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
t.dropColumn("activeCaCertId");
});
}
}
if (await knex.schema.hasTable(TableName.CertificateAuthorityCert)) {
if (await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "version")) {
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.dropColumn("version");
});
}
if (await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "caSecretId")) {
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.dropColumn("caSecretId");
});
}
}
if (await knex.schema.hasTable(TableName.Certificate)) {
if (await knex.schema.hasColumn(TableName.Certificate, "caCertId")) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.dropColumn("caCertId");
});
}
}
}
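For context on the data-model change this migration introduces: a CA row now points at its current certificate through activeCaCertId, and certificate rows carry a version and a caSecretId. A minimal consumption sketch, assuming a configured Knex instance; the snake-case table names and the getActiveCaCert helper are illustrative stand-ins, not part of the migration:

import { Knex } from "knex";

// Hypothetical helper: resolve a CA's current certificate row through the
// "activeCaCertId" column added by the migration above. Table names are
// assumed placeholders for the TableName enum values used in the migration.
export const getActiveCaCert = async (db: Knex, caId: string) => {
  const ca = await db("certificate_authorities").where({ id: caId }).first();
  if (!ca?.activeCaCertId) return null; // no certificate installed yet
  return db("certificate_authority_certs").where({ id: ca.activeCaCertId }).first();
};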

View File

@@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasCreationLimitCol = await knex.schema.hasColumn(TableName.RateLimit, "creationLimit");
await knex.schema.alterTable(TableName.RateLimit, (t) => {
if (hasCreationLimitCol) {
t.dropColumn("creationLimit");
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasCreationLimitCol = await knex.schema.hasColumn(TableName.RateLimit, "creationLimit");
await knex.schema.alterTable(TableName.RateLimit, (t) => {
if (!hasCreationLimitCol) {
t.integer("creationLimit").defaultTo(30).notNullable();
}
});
}

View File

@@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasNameField = await knex.schema.hasColumn(TableName.SecretTag, "name");
if (hasNameField) {
await knex.schema.alterTable(TableName.SecretTag, (t) => {
t.dropColumn("name");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasNameField = await knex.schema.hasColumn(TableName.SecretTag, "name");
if (!hasNameField) {
await knex.schema.alterTable(TableName.SecretTag, (t) => {
t.string("name");
});
}
}

View File

@@ -27,7 +27,8 @@ export const CertificateAuthoritiesSchema = z.object({
maxPathLength: z.number().nullable().optional(),
keyAlgorithm: z.string(),
notBefore: z.date().nullable().optional(),
notAfter: z.date().nullable().optional()
notAfter: z.date().nullable().optional(),
activeCaCertId: z.string().uuid().nullable().optional()
});
export type TCertificateAuthorities = z.infer<typeof CertificateAuthoritiesSchema>;

View File

@@ -15,7 +15,9 @@ export const CertificateAuthorityCertsSchema = z.object({
updatedAt: z.date(),
caId: z.string().uuid(),
encryptedCertificate: zodBuffer,
encryptedCertificateChain: zodBuffer
encryptedCertificateChain: zodBuffer,
version: z.number(),
caSecretId: z.string().uuid()
});
export type TCertificateAuthorityCerts = z.infer<typeof CertificateAuthorityCertsSchema>;

View File

@@ -20,7 +20,8 @@ export const CertificatesSchema = z.object({
notAfter: z.date(),
revokedAt: z.date().nullable().optional(),
revocationReason: z.number().nullable().optional(),
altNames: z.string().default("").nullable().optional()
altNames: z.string().default("").nullable().optional(),
caCertId: z.string().uuid()
});
export type TCertificates = z.infer<typeof CertificatesSchema>;

View File

@@ -15,6 +15,7 @@ export const RateLimitSchema = z.object({
authRateLimit: z.number().default(60),
inviteUserRateLimit: z.number().default(30),
mfaRateLimit: z.number().default(20),
creationLimit: z.number().default(30),
publicEndpointLimit: z.number().default(30),
createdAt: z.date(),
updatedAt: z.date()

View File

@@ -9,6 +9,7 @@ import { TImmutableDBKeys } from "./models";
export const SecretTagsSchema = z.object({
id: z.string().uuid(),
name: z.string(),
slug: z.string(),
color: z.string().nullable().optional(),
createdAt: z.date(),

View File

@@ -131,7 +131,7 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide
.default("/")
.transform(removeTrailingSlash)
.describe(DYNAMIC_SECRET_LEASES.RENEW.path),
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.RENEW.environmentSlug)
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.RENEW.ttl)
}),
response: {
200: z.object({

View File

@@ -58,6 +58,7 @@ export const registerRateLimitRouter = async (server: FastifyZodProvider) => {
authRateLimit: z.number(),
inviteUserRateLimit: z.number(),
mfaRateLimit: z.number(),
creationLimit: z.number(),
publicEndpointLimit: z.number()
}),
response: {

View File

@@ -75,16 +75,15 @@ export const auditLogDALFactory = (db: TDbClient) => {
.del()
.returning("id");
numberOfRetryOnFailure = 0; // reset
// eslint-disable-next-line no-await-in-loop
await new Promise((resolve) => {
setTimeout(resolve, 100); // time to breathe for db
});
} catch (error) {
numberOfRetryOnFailure += 1;
logger.error(error, "Failed to delete audit log on pruning");
} finally {
// eslint-disable-next-line no-await-in-loop
await new Promise((resolve) => {
setTimeout(resolve, 10); // time to breathe for db
});
}
} while (deletedAuditLogIds.length > 0 || numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
} while (deletedAuditLogIds.length > 0 && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
};
return { ...auditLogOrm, pruneAuditLog, find };
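The loop-condition change above is the substantive fix: with ||, a successful pass that deleted nothing still satisfied numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE, so pruning never exited once the table was drained; with && it stops as soon as a pass deletes no rows or the retry budget is exhausted. A minimal sketch of the corrected semantics, where deleteExpiredBatch is a hypothetical stand-in for the batched .del().returning("id") query in pruneAuditLog:

// Sketch only: prune in batches until drained or too many consecutive failures.
const pruneUntilDrained = async (deleteExpiredBatch: () => Promise<string[]>) => {
  const MAX_RETRY_ON_FAILURE = 3; // assumed constant
  let numberOfRetryOnFailure = 0;
  let deletedIds: string[] = [];
  do {
    try {
      deletedIds = await deleteExpiredBatch(); // ids pruned in this pass
      numberOfRetryOnFailure = 0; // reset after a successful pass
    } catch {
      numberOfRetryOnFailure += 1;
    } finally {
      // eslint-disable-next-line no-await-in-loop
      await new Promise((resolve) => {
        setTimeout(resolve, 10); // time to breathe for db
      });
    }
  } while (deletedIds.length > 0 && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
};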

View File

@@ -130,7 +130,9 @@ export enum EventType {
GET_CA = "get-certificate-authority",
UPDATE_CA = "update-certificate-authority",
DELETE_CA = "delete-certificate-authority",
RENEW_CA = "renew-certificate-authority",
GET_CA_CSR = "get-certificate-authority-csr",
GET_CA_CERTS = "get-certificate-authority-certs",
GET_CA_CERT = "get-certificate-authority-cert",
SIGN_INTERMEDIATE = "sign-intermediate",
IMPORT_CA_CERT = "import-certificate-authority-cert",
@@ -338,7 +340,6 @@ interface DeleteIntegrationEvent {
targetServiceId?: string;
path?: string;
region?: string;
shouldDeleteIntegrationSecrets?: boolean;
};
}
@@ -1096,6 +1097,14 @@ interface DeleteCa {
};
}
interface RenewCa {
type: EventType.RENEW_CA;
metadata: {
caId: string;
dn: string;
};
}
interface GetCaCsr {
type: EventType.GET_CA_CSR;
metadata: {
@@ -1104,6 +1113,14 @@ interface GetCaCsr {
};
}
interface GetCaCerts {
type: EventType.GET_CA_CERTS;
metadata: {
caId: string;
dn: string;
};
}
interface GetCaCert {
type: EventType.GET_CA_CERT;
metadata: {
@@ -1349,7 +1366,9 @@ export type Event =
| GetCa
| UpdateCa
| DeleteCa
| RenewCa
| GetCaCsr
| GetCaCerts
| GetCaCert
| SignIntermediate
| ImportCaCert

View File

@@ -40,12 +40,7 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
secretRotation: true,
caCrl: false,
instanceUserManagement: false,
externalKms: false,
rateLimits: {
readLimit: 60,
writeLimit: 200,
secretsLimit: 40
}
externalKms: false
});
export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {

View File

@@ -58,11 +58,6 @@ export type TFeatureSet = {
caCrl: false;
instanceUserManagement: false;
externalKms: false;
rateLimits: {
readLimit: number;
writeLimit: number;
secretsLimit: number;
};
};
export type TOrgPlansTableDTO = {

View File

@@ -4,16 +4,17 @@ import { logger } from "@app/lib/logger";
import { TLicenseServiceFactory } from "../license/license-service";
import { TRateLimitDALFactory } from "./rate-limit-dal";
import { RateLimitConfiguration, TRateLimit, TRateLimitUpdateDTO } from "./rate-limit-types";
import { TRateLimit, TRateLimitUpdateDTO } from "./rate-limit-types";
let rateLimitMaxConfiguration: RateLimitConfiguration = {
let rateLimitMaxConfiguration = {
readLimit: 60,
publicEndpointLimit: 30,
writeLimit: 200,
secretsLimit: 60,
authRateLimit: 60,
inviteUserRateLimit: 30,
mfaRateLimit: 20
mfaRateLimit: 20,
creationLimit: 30
};
Object.freeze(rateLimitMaxConfiguration);
@@ -66,7 +67,8 @@ export const rateLimitServiceFactory = ({ rateLimitDAL, licenseService }: TRateL
secretsLimit: rateLimit.secretsRateLimit,
authRateLimit: rateLimit.authRateLimit,
inviteUserRateLimit: rateLimit.inviteUserRateLimit,
mfaRateLimit: rateLimit.mfaRateLimit
mfaRateLimit: rateLimit.mfaRateLimit,
creationLimit: rateLimit.creationLimit
};
logger.info(`syncRateLimitConfiguration: rate limit configuration: %o`, newRateLimitMaxConfiguration);

View File

@@ -5,6 +5,7 @@ export type TRateLimitUpdateDTO = {
authRateLimit: number;
inviteUserRateLimit: number;
mfaRateLimit: number;
creationLimit: number;
publicEndpointLimit: number;
};
@@ -13,13 +14,3 @@ export type TRateLimit = {
createdAt: Date;
updatedAt: Date;
} & TRateLimitUpdateDTO;
export type RateLimitConfiguration = {
readLimit: number;
publicEndpointLimit: number;
writeLimit: number;
secretsLimit: number;
authRateLimit: number;
inviteUserRateLimit: number;
mfaRateLimit: number;
};

View File

@@ -81,13 +81,15 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
.select({
secVerTagId: "secVerTag.id",
secVerTagColor: "secVerTag.color",
secVerTagSlug: "secVerTag.slug"
secVerTagSlug: "secVerTag.slug",
secVerTagName: "secVerTag.name"
})
.select(
db.ref("id").withSchema(TableName.SecretTag).as("tagId"),
db.ref("id").withSchema(TableName.SecretApprovalRequestSecretTag).as("tagJnId"),
db.ref("color").withSchema(TableName.SecretTag).as("tagColor"),
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug")
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"),
db.ref("name").withSchema(TableName.SecretTag).as("tagName")
)
.select(
db.ref("secretBlindIndex").withSchema(TableName.Secret).as("orgSecBlindIndex"),
@@ -122,9 +124,9 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
{
key: "tagJnId",
label: "tags" as const,
mapper: ({ tagId: id, tagSlug: slug, tagColor: color }) => ({
mapper: ({ tagId: id, tagName: name, tagSlug: slug, tagColor: color }) => ({
id,
name: slug,
name,
slug,
color
})
@@ -198,11 +200,11 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
{
key: "secVerTagId",
label: "tags" as const,
mapper: ({ secVerTagId: id, secVerTagSlug: slug, secVerTagColor: color }) => ({
mapper: ({ secVerTagId: id, secVerTagName: name, secVerTagSlug: slug, secVerTagColor: color }) => ({
// eslint-disable-next-line
id,
// eslint-disable-next-line
name: slug,
name,
// eslint-disable-next-line
slug,
// eslint-disable-next-line
@@ -260,13 +262,15 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
.select({
secVerTagId: "secVerTag.id",
secVerTagColor: "secVerTag.color",
secVerTagSlug: "secVerTag.slug"
secVerTagSlug: "secVerTag.slug",
secVerTagName: "secVerTag.name"
})
.select(
db.ref("id").withSchema(TableName.SecretTag).as("tagId"),
db.ref("id").withSchema(TableName.SecretApprovalRequestSecretTagV2).as("tagJnId"),
db.ref("color").withSchema(TableName.SecretTag).as("tagColor"),
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug")
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"),
db.ref("name").withSchema(TableName.SecretTag).as("tagName")
)
.select(
db.ref("version").withSchema(TableName.SecretV2).as("orgSecVersion"),
@@ -288,9 +292,9 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
{
key: "tagJnId",
label: "tags" as const,
mapper: ({ tagId: id, tagSlug: slug, tagColor: color }) => ({
mapper: ({ tagId: id, tagName: name, tagSlug: slug, tagColor: color }) => ({
id,
name: slug,
name,
slug,
color
})
@@ -326,11 +330,11 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
{
key: "secVerTagId",
label: "tags" as const,
mapper: ({ secVerTagId: id, secVerTagSlug: slug, secVerTagColor: color }) => ({
mapper: ({ secVerTagId: id, secVerTagName: name, secVerTagSlug: slug, secVerTagColor: color }) => ({
// eslint-disable-next-line
id,
// eslint-disable-next-line
name: slug,
name,
// eslint-disable-next-line
slug,
// eslint-disable-next-line

View File

@@ -224,10 +224,12 @@ export const secretApprovalRequestServiceFactory = ({
secretKey: el.key,
id: el.id,
version: el.version,
secretValue: el.encryptedValue ? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString() : "",
secretValue: el.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString()
: undefined,
secretComment: el.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString()
: "",
: undefined,
secret: el.secret
? {
secretKey: el.secret.key,
@@ -235,10 +237,10 @@ export const secretApprovalRequestServiceFactory = ({
version: el.secret.version,
secretValue: el.secret.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secret.encryptedValue }).toString()
: "",
: undefined,
secretComment: el.secret.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.secret.encryptedComment }).toString()
: ""
: undefined
}
: undefined,
secretVersion: el.secretVersion
@@ -248,10 +250,10 @@ export const secretApprovalRequestServiceFactory = ({
version: el.secretVersion.version,
secretValue: el.secretVersion.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedValue }).toString()
: "",
: undefined,
secretComment: el.secretVersion.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedComment }).toString()
: ""
: undefined
}
: undefined
}));
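The "" to undefined switch above is not cosmetic: secretValue and secretComment are optional in the response schema (see the secretRawSchema change further down), so a value the caller cannot read is now omitted from the payload rather than surfacing as an empty string that is indistinguishable from a genuinely empty secret. A minimal zod illustration of the distinction:

import { z } from "zod";

const schema = z.object({ secretValue: z.string().optional() });

schema.parse({}); // => {} — an undefined field is simply omitted
schema.parse({ secretValue: "" }); // => { secretValue: "" } — reads as a real, empty value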

View File

@@ -257,7 +257,7 @@ export const secretReplicationServiceFactory = ({
secretDAL: secretV2BridgeDAL,
folderDAL,
secretImportDAL,
decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : "")
decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : undefined)
});
// secrets that gets replicated across imports
const sourceDecryptedLocalSecrets = sourceLocalSecrets.map((el) => ({
@@ -449,7 +449,7 @@
});
}
if (locallyDeletedSecrets.length) {
await secretV2BridgeDAL.delete(
await secretDAL.delete(
{
$in: {
id: locallyDeletedSecrets.map(({ id }) => id)

View File

@@ -164,10 +164,10 @@ export const secretSnapshotServiceFactory = ({
secretKey: el.key,
secretValue: el.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString()
: "",
: undefined,
secretComment: el.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString()
: ""
: undefined
}))
};
} else {

View File

@@ -100,7 +100,8 @@ export const snapshotDALFactory = (db: TDbClient) => {
db.ref("id").withSchema(TableName.SecretTag).as("tagId"),
db.ref("id").withSchema(TableName.SecretVersionTag).as("tagVersionId"),
db.ref("color").withSchema(TableName.SecretTag).as("tagColor"),
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug")
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"),
db.ref("name").withSchema(TableName.SecretTag).as("tagName")
);
return sqlNestRelationships({
data,
@@ -131,9 +132,9 @@ export const snapshotDALFactory = (db: TDbClient) => {
{
key: "tagVersionId",
label: "tags" as const,
mapper: ({ tagId: id, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
mapper: ({ tagId: id, tagName: name, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
id,
name: slug,
name,
slug,
color,
vId
@@ -194,7 +195,8 @@ export const snapshotDALFactory = (db: TDbClient) => {
db.ref("id").withSchema(TableName.SecretTag).as("tagId"),
db.ref("id").withSchema(TableName.SecretVersionV2Tag).as("tagVersionId"),
db.ref("color").withSchema(TableName.SecretTag).as("tagColor"),
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug")
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"),
db.ref("name").withSchema(TableName.SecretTag).as("tagName")
);
return sqlNestRelationships({
data,
@@ -225,9 +227,9 @@ export const snapshotDALFactory = (db: TDbClient) => {
{
key: "tagVersionId",
label: "tags" as const,
mapper: ({ tagId: id, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
mapper: ({ tagId: id, tagName: name, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
id,
name: slug,
name,
slug,
color,
vId
@@ -351,7 +353,8 @@ export const snapshotDALFactory = (db: TDbClient) => {
db.ref("id").withSchema(TableName.SecretTag).as("tagId"),
db.ref("id").withSchema(TableName.SecretVersionTag).as("tagVersionId"),
db.ref("color").withSchema(TableName.SecretTag).as("tagColor"),
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug")
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"),
db.ref("name").withSchema(TableName.SecretTag).as("tagName")
);
const formated = sqlNestRelationships({
@@ -374,9 +377,9 @@ export const snapshotDALFactory = (db: TDbClient) => {
{
key: "tagVersionId",
label: "tags" as const,
mapper: ({ tagId: id, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
mapper: ({ tagId: id, tagName: name, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
id,
name: slug,
name,
slug,
color,
vId
@@ -505,7 +508,8 @@ export const snapshotDALFactory = (db: TDbClient) => {
db.ref("id").withSchema(TableName.SecretTag).as("tagId"),
db.ref("id").withSchema(TableName.SecretVersionV2Tag).as("tagVersionId"),
db.ref("color").withSchema(TableName.SecretTag).as("tagColor"),
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug")
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"),
db.ref("name").withSchema(TableName.SecretTag).as("tagName")
);
const formated = sqlNestRelationships({
@@ -528,9 +532,9 @@ export const snapshotDALFactory = (db: TDbClient) => {
{
key: "tagVersionId",
label: "tags" as const,
mapper: ({ tagId: id, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
mapper: ({ tagId: id, tagName: name, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
id,
name: slug,
name,
slug,
color,
vId

View File

@@ -596,8 +596,7 @@ export const RAW_SECRETS = {
"The slug of the project to list secrets from. This parameter is only applicable by machine identities.",
environment: "The slug of the environment to list secrets from.",
secretPath: "The secret path to list secrets from.",
includeImports: "Weather to include imported secrets or not.",
tagSlugs: "The comma separated tag slugs to filter secrets"
includeImports: "Weather to include imported secrets or not."
},
CREATE: {
secretName: "The name of the secret to create.",
@@ -1049,12 +1048,27 @@ export const CERTIFICATE_AUTHORITIES = {
caId: "The ID of the CA to generate CSR from",
csr: "The generated CSR from the CA"
},
RENEW_CA_CERT: {
caId: "The ID of the CA to renew the CA certificate for",
type: "The type of behavior to use for the renewal operation. Currently Infisical is only able to renew a CA certificate with the same key pair.",
notAfter: "The expiry date and time for the renewed CA certificate in YYYY-MM-DDTHH:mm:ss.sssZ format",
certificate: "The renewed CA certificate body",
certificateChain: "The certificate chain of the CA",
serialNumber: "The serial number of the renewed CA certificate"
},
GET_CERT: {
caId: "The ID of the CA to get the certificate body and certificate chain from",
certificate: "The certificate body of the CA",
certificateChain: "The certificate chain of the CA",
serialNumber: "The serial number of the CA certificate"
},
GET_CA_CERTS: {
caId: "The ID of the CA to get the CA certificates for",
certificate: "The certificate body of the CA certificate",
certificateChain: "The certificate chain of the CA certificate",
serialNumber: "The serial number of the CA certificate",
version: "The version of the CA certificate. The version is incremented for each CA renewal operation."
},
SIGN_INTERMEDIATE: {
caId: "The ID of the CA to sign the intermediate certificate with",
csr: "The pem-encoded CSR to sign with the CA",

View File

@@ -128,16 +128,6 @@ export const ormify = <DbOps extends object, Tname extends keyof Tables>(db: Kne
throw new DatabaseError({ error, name: "Create" });
}
},
// This spilit the insert into multiple chunk
batchInsert: async (data: readonly Tables[Tname]["insert"][], tx?: Knex) => {
try {
if (!data.length) return [];
const res = await (tx || db).batchInsert(tableName, data as never).returning("*");
return res as Tables[Tname]["base"][];
} catch (error) {
throw new DatabaseError({ error, name: "batchInsert" });
}
},
upsert: async (data: readonly Tables[Tname]["insert"][], onConflictField: keyof Tables[Tname]["base"], tx?: Knex) => {
try {
if (!data.length) return [];

View File

@@ -1,6 +1,7 @@
import type { RateLimitOptions, RateLimitPluginOptions } from "@fastify/rate-limit";
import { Redis } from "ioredis";
import { getRateLimiterConfig } from "@app/ee/services/rate-limit/rate-limit-service";
import { getConfig } from "@app/lib/config/env";
export const globalRateLimiterCfg = (): RateLimitPluginOptions => {
@@ -21,16 +22,14 @@
// GET endpoints
export const readLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
hook: "preValidation",
max: (req) => req.rateLimits.readLimit,
max: () => getRateLimiterConfig().readLimit,
keyGenerator: (req) => req.realIp
};
// POST, PATCH, PUT, DELETE endpoints
export const writeLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
hook: "preValidation",
max: (req) => req.rateLimits.writeLimit,
max: () => getRateLimiterConfig().writeLimit,
keyGenerator: (req) => req.realIp
};
@@ -38,40 +37,42 @@ export const writeLimit: RateLimitOptions = {
export const secretsLimit: RateLimitOptions = {
// secrets, folders, secret imports
timeWindow: 60 * 1000,
hook: "preValidation",
max: (req) => req.rateLimits.secretsLimit,
max: () => getRateLimiterConfig().secretsLimit,
keyGenerator: (req) => req.realIp
};
export const authRateLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
hook: "preValidation",
max: (req) => req.rateLimits.authRateLimit,
max: () => getRateLimiterConfig().authRateLimit,
keyGenerator: (req) => req.realIp
};
export const inviteUserRateLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
hook: "preValidation",
max: (req) => req.rateLimits.inviteUserRateLimit,
max: () => getRateLimiterConfig().inviteUserRateLimit,
keyGenerator: (req) => req.realIp
};
export const mfaRateLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
hook: "preValidation",
max: (req) => req.rateLimits.mfaRateLimit,
max: () => getRateLimiterConfig().mfaRateLimit,
keyGenerator: (req) => {
return req.headers.authorization?.split(" ")[1] || req.realIp;
}
};
export const creationLimit: RateLimitOptions = {
// identity, project, org
timeWindow: 60 * 1000,
max: () => getRateLimiterConfig().creationLimit,
keyGenerator: (req) => req.realIp
};
// Public endpoints to avoid brute force attacks
export const publicEndpointLimit: RateLimitOptions = {
// Read Shared Secrets
timeWindow: 60 * 1000,
hook: "preValidation",
max: (req) => req.rateLimits.publicEndpointLimit,
max: () => getRateLimiterConfig().publicEndpointLimit,
keyGenerator: (req) => req.realIp
};
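With the preValidation hooks and per-request req.rateLimits lookups gone, every limiter now draws its max from the instance-wide getRateLimiterConfig() snapshot. Routes keep opting in the same way, by passing one of these presets through config.rateLimit, as the identity, org, and project routers below do; a minimal sketch with an assumed server and placeholder handler:

// Sketch: wiring a route to the restored creationLimit preset.
server.route({
  method: "POST",
  url: "/",
  config: {
    rateLimit: creationLimit // 60s window, max = getRateLimiterConfig().creationLimit
  },
  handler: async () => ({ ok: true })
});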

View File

@@ -1,38 +0,0 @@
import fp from "fastify-plugin";
import { getRateLimiterConfig } from "@app/ee/services/rate-limit/rate-limit-service";
import { getConfig } from "@app/lib/config/env";
export const injectRateLimits = fp(async (server) => {
server.decorateRequest("rateLimits", null);
server.addHook("onRequest", async (req) => {
const appCfg = getConfig();
const instanceRateLimiterConfig = getRateLimiterConfig();
if (!req.auth?.orgId) {
// for public endpoints, we always use the instance-wide default rate limits
req.rateLimits = instanceRateLimiterConfig;
return;
}
const { rateLimits, customRateLimits } = await server.services.license.getPlan(req.auth.orgId);
if (customRateLimits && !appCfg.isCloud) {
// we do this because for self-hosted/dedicated instances, we want custom rate limits to be based on admin configuration
// note that the syncing of custom rate limit happens on the instanceRateLimiterConfig object
req.rateLimits = instanceRateLimiterConfig;
return;
}
// we're using the null coalescing operator in order to handle outdated licenses
req.rateLimits = {
readLimit: rateLimits?.readLimit ?? instanceRateLimiterConfig.readLimit,
writeLimit: rateLimits?.writeLimit ?? instanceRateLimiterConfig.writeLimit,
secretsLimit: rateLimits?.secretsLimit ?? instanceRateLimiterConfig.secretsLimit,
publicEndpointLimit: instanceRateLimiterConfig.publicEndpointLimit,
authRateLimit: instanceRateLimiterConfig.authRateLimit,
inviteUserRateLimit: instanceRateLimiterConfig.inviteUserRateLimit,
mfaRateLimit: instanceRateLimiterConfig.mfaRateLimit
};
});
});

View File

@@ -184,7 +184,6 @@ import { webhookServiceFactory } from "@app/services/webhook/webhook-service";
import { injectAuditLogInfo } from "../plugins/audit-log";
import { injectIdentity } from "../plugins/auth/inject-identity";
import { injectPermission } from "../plugins/auth/inject-permission";
import { injectRateLimits } from "../plugins/inject-rate-limits";
import { registerSecretScannerGhApp } from "../plugins/secret-scanner";
import { registerV1Routes } from "./v1";
import { registerV2Routes } from "./v2";
@@ -897,15 +896,8 @@ export const registerRoutes = async (
folderDAL,
integrationDAL,
integrationAuthDAL,
secretQueueService,
integrationAuthService,
projectBotService,
secretV2BridgeDAL,
secretImportDAL,
secretDAL,
kmsService
secretQueueService
});
const serviceTokenService = serviceTokenServiceFactory({
projectEnvDAL,
serviceTokenDAL,
@@ -1037,8 +1029,7 @@
snapshotDAL,
identityAccessTokenDAL,
secretSharingDAL,
secretVersionV2DAL: secretVersionV2BridgeDAL,
identityUniversalAuthClientSecretDAL: identityUaClientSecretDAL
secretVersionV2DAL: secretVersionV2BridgeDAL
});
const oidcService = oidcConfigServiceFactory({
@@ -1151,7 +1142,6 @@
await server.register(injectIdentity, { userDAL, serviceTokenDAL });
await server.register(injectPermission);
await server.register(injectRateLimits);
await server.register(injectAuditLogInfo);
server.route({

View File

@@ -63,8 +63,8 @@ export const secretRawSchema = z.object({
version: z.number(),
type: z.string(),
secretKey: z.string(),
secretValue: z.string(),
secretComment: z.string(),
secretValue: z.string().optional(),
secretComment: z.string().optional(),
secretReminderNote: z.string().nullable().optional(),
secretReminderRepeatDays: z.number().nullable().optional(),
skipMultilineEncoding: z.boolean().default(false).nullable().optional(),

View File

@@ -8,7 +8,7 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import { CaStatus, CaType } from "@app/services/certificate-authority/certificate-authority-types";
import { CaRenewalType, CaStatus, CaType } from "@app/services/certificate-authority/certificate-authority-types";
import {
validateAltNamesField,
validateCaDateField
@@ -275,15 +275,118 @@ export const registerCaRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "POST",
url: "/:caId/renew",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Perform CA certificate renewal",
params: z.object({
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.RENEW_CA_CERT.caId)
}),
body: z.object({
type: z.nativeEnum(CaRenewalType).describe(CERTIFICATE_AUTHORITIES.RENEW_CA_CERT.type),
notAfter: validateCaDateField.describe(CERTIFICATE_AUTHORITIES.RENEW_CA_CERT.notAfter)
}),
response: {
200: z.object({
certificate: z.string().trim().describe(CERTIFICATE_AUTHORITIES.RENEW_CA_CERT.certificate),
certificateChain: z.string().trim().describe(CERTIFICATE_AUTHORITIES.RENEW_CA_CERT.certificateChain),
serialNumber: z.string().trim().describe(CERTIFICATE_AUTHORITIES.RENEW_CA_CERT.serialNumber)
})
}
},
handler: async (req) => {
const { certificate, certificateChain, serialNumber, ca } =
await server.services.certificateAuthority.renewCaCert({
caId: req.params.caId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.RENEW_CA,
metadata: {
caId: ca.id,
dn: ca.dn
}
}
});
return {
certificate,
certificateChain,
serialNumber
};
}
});
server.route({
method: "GET",
url: "/:caId/certificate",
url: "/:caId/ca-certificates",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Get cert and cert chain of a CA",
description: "Get list of past and current CA certificates for a CA",
params: z.object({
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.GET_CA_CERTS.caId)
}),
response: {
200: z.array(
z.object({
certificate: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CA_CERTS.certificate),
certificateChain: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CA_CERTS.certificateChain),
serialNumber: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CA_CERTS.serialNumber),
version: z.number().describe(CERTIFICATE_AUTHORITIES.GET_CA_CERTS.version)
})
)
}
},
handler: async (req) => {
const { caCerts, ca } = await server.services.certificateAuthority.getCaCerts({
caId: req.params.caId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.GET_CA_CERTS,
metadata: {
caId: ca.id,
dn: ca.dn
}
}
});
return caCerts;
}
});
server.route({
method: "GET",
url: "/:caId/certificate", // TODO: consider updating endpoint structure considering CA certificates
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Get current CA cert and cert chain of a CA",
params: z.object({
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.GET_CERT.caId)
}),
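Taken together, the router now exposes a renewal write endpoint (POST /:caId/renew) and a CA-certificate history read endpoint (GET /:caId/ca-certificates) alongside the existing /:caId/certificate. A usage sketch, assuming the router is mounted under /api/v1/pki/ca on a local instance and that CaRenewalType serializes to "existing" for same-key-pair renewal (the base URL, mount path, and enum value are all assumptions, not confirmed by this diff):

const TOKEN = process.env.INFISICAL_TOKEN ?? ""; // JWT or identity access token
const BASE = "http://localhost:8080/api/v1/pki/ca"; // assumed mount path
const caId = "<caId>";

// Renew the CA certificate with the same key pair.
const renewRes = await fetch(`${BASE}/${caId}/renew`, {
  method: "POST",
  headers: { Authorization: `Bearer ${TOKEN}`, "Content-Type": "application/json" },
  body: JSON.stringify({ type: "existing", notAfter: "2030-01-01T00:00:00.000Z" })
});
const { certificate, certificateChain, serialNumber } = await renewRes.json();

// List past and current CA certificates, oldest version first.
const certsRes = await fetch(`${BASE}/${caId}/ca-certificates`, {
  headers: { Authorization: `Bearer ${TOKEN}` }
});
const caCerts = await certsRes.json(); // [{ certificate, certificateChain, serialNumber, version }]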

View File

@@ -3,7 +3,7 @@ import { z } from "zod";
import { IdentitiesSchema, IdentityOrgMembershipsSchema, OrgMembershipRole, OrgRolesSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { IDENTITIES } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { creationLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -16,7 +16,7 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
rateLimit: creationLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {

View File

@@ -170,12 +170,6 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
params: z.object({
integrationId: z.string().trim().describe(INTEGRATION.DELETE.integrationId)
}),
querystring: z.object({
shouldDeleteIntegrationSecrets: z
.enum(["true", "false"])
.optional()
.transform((val) => val === "true")
}),
response: {
200: z.object({
integration: IntegrationsSchema
@@ -189,8 +183,7 @@
actorAuthMethod: req.permission.authMethod,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
id: req.params.integrationId,
shouldDeleteIntegrationSecrets: req.query.shouldDeleteIntegrationSecrets
id: req.params.integrationId
});
await server.services.auditLog.createAuditLog({
@@ -212,8 +205,7 @@
targetService: integration.targetService,
targetServiceId: integration.targetServiceId,
path: integration.path,
region: integration.region,
shouldDeleteIntegrationSecrets: req.query.shouldDeleteIntegrationSecrets
region: integration.region
// eslint-disable-next-line
}) as any
}

View File

@@ -1,4 +1,3 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { SecretTagsSchema } from "@app/db/schemas";
@@ -50,8 +49,7 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
// akhilmhdh: for terraform backward compatiability
workspaceTag: SecretTagsSchema.extend({ name: z.string() })
workspaceTag: SecretTagsSchema
})
}
},
@@ -81,8 +79,7 @@
}),
response: {
200: z.object({
// akhilmhdh: for terraform backward compatiability
workspaceTag: SecretTagsSchema.extend({ name: z.string() })
workspaceTag: SecretTagsSchema
})
}
},
@@ -111,14 +108,8 @@
projectId: z.string().trim().describe(SECRET_TAGS.CREATE.projectId)
}),
body: z.object({
slug: z
.string()
.toLowerCase()
.trim()
.describe(SECRET_TAGS.CREATE.slug)
.refine((v) => slugify(v) === v, {
message: "Invalid slug. Slug can only contain alphanumeric characters and hyphens."
}),
name: z.string().trim().describe(SECRET_TAGS.CREATE.name),
slug: z.string().trim().describe(SECRET_TAGS.CREATE.slug),
color: z.string().trim().describe(SECRET_TAGS.CREATE.color)
}),
response: {
@@ -153,14 +144,8 @@
tagId: z.string().trim().describe(SECRET_TAGS.UPDATE.tagId)
}),
body: z.object({
slug: z
.string()
.toLowerCase()
.trim()
.describe(SECRET_TAGS.UPDATE.slug)
.refine((v) => slugify(v) === v, {
message: "Invalid slug. Slug can only contain alphanumeric characters and hyphens."
}),
name: z.string().trim().describe(SECRET_TAGS.UPDATE.name),
slug: z.string().trim().describe(SECRET_TAGS.UPDATE.slug),
color: z.string().trim().describe(SECRET_TAGS.UPDATE.color)
}),
response: {

View File

@@ -9,7 +9,7 @@ import {
UsersSchema
} from "@app/db/schemas";
import { ORGANIZATIONS } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { creationLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
@@ -307,7 +307,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
rateLimit: creationLimit
},
schema: {
body: z.object({

View File

@@ -4,7 +4,7 @@ import { z } from "zod";
import { CertificateAuthoritiesSchema, CertificatesSchema, ProjectKeysSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { PROJECTS } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { creationLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -142,7 +142,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
rateLimit: creationLimit
},
schema: {
description: "Create a new project",

View File

@@ -59,10 +59,9 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
tags: SecretTagsSchema.pick({
id: true,
slug: true,
name: true,
color: true
})
.extend({ name: z.string() })
.array()
}).array()
})
)
})
@@ -117,15 +116,16 @@
}),
response: {
200: z.object({
secret: SecretsSchema.omit({ secretBlindIndex: true }).extend({
tags: SecretTagsSchema.pick({
id: true,
slug: true,
color: true
secret: SecretsSchema.omit({ secretBlindIndex: true }).merge(
z.object({
tags: SecretTagsSchema.pick({
id: true,
slug: true,
name: true,
color: true
}).array()
})
.extend({ name: z.string() })
.array()
})
)
})
}
},
@@ -180,13 +180,7 @@
.enum(["true", "false"])
.default("false")
.transform((value) => value === "true")
.describe(RAW_SECRETS.LIST.includeImports),
tagSlugs: z
.string()
.describe(RAW_SECRETS.LIST.tagSlugs)
.optional()
// split by comma and trim the strings
.transform((el) => (el ? el.split(",").map((i) => i.trim()) : []))
.describe(RAW_SECRETS.LIST.includeImports)
}),
response: {
200: z.object({
@@ -196,9 +190,9 @@
tags: SecretTagsSchema.pick({
id: true,
slug: true,
name: true,
color: true
})
.extend({ name: z.string() })
.array()
.optional()
})
@@ -257,8 +251,7 @@
projectId: workspaceId,
path: secretPath,
includeImports: req.query.include_imports,
recursive: req.query.recursive,
tagSlugs: req.query.tagSlugs
recursive: req.query.recursive
});
await server.services.auditLog.createAuditLog({
@@ -332,9 +325,9 @@
tags: SecretTagsSchema.pick({
id: true,
slug: true,
name: true,
color: true
})
.extend({ name: z.string() })
.array()
.optional()
})
@@ -738,10 +731,9 @@
tags: SecretTagsSchema.pick({
id: true,
slug: true,
name: true,
color: true
})
.extend({ name: z.string() })
.array()
}).array()
})
.array(),
imports: z

View File

@@ -5,7 +5,13 @@ import { BadRequestError } from "@app/lib/errors";
import { getProjectKmsCertificateKeyId } from "@app/services/project/project-fns";
import { CertKeyAlgorithm, CertStatus } from "../certificate/certificate-types";
import { TDNParts, TGetCaCertChainDTO, TGetCaCredentialsDTO, TRebuildCaCrlDTO } from "./certificate-authority-types";
import {
TDNParts,
TGetCaCertChainDTO,
TGetCaCertChainsDTO,
TGetCaCredentialsDTO,
TRebuildCaCrlDTO
} from "./certificate-authority-types";
export const createDistinguishedName = (parts: TDNParts) => {
const dnParts = [];
@@ -89,6 +95,8 @@ export const keyAlgorithmToAlgCfg = (keyAlgorithm: CertKeyAlgorithm) => {
* Return the public and private key of CA with id [caId]
* Note: credentials are returned as crypto.webcrypto.CryptoKey
* suitable for use with @peculiar/x509 module
*
* TODO: Update to get latest CA Secret once support for CA renewal with new key pair is added
*/
export const getCaCredentials = async ({
caId,
@@ -132,26 +140,73 @@
]);
return {
caSecret,
caPrivateKey,
caPublicKey
};
};
/**
* Return the decrypted pem-encoded certificate and certificate chain
* Return the list of decrypted pem-encoded certificates and certificate chains
* for CA with id [caId].
*/
export const getCaCertChain = async ({
export const getCaCertChains = async ({
caId,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
projectDAL,
kmsService
}: TGetCaCertChainDTO) => {
}: TGetCaCertChainsDTO) => {
const ca = await certificateAuthorityDAL.findById(caId);
if (!ca) throw new BadRequestError({ message: "CA not found" });
const caCert = await certificateAuthorityCertDAL.findOne({ caId: ca.id });
const keyId = await getProjectKmsCertificateKeyId({
projectId: ca.projectId,
projectDAL,
kmsService
});
const kmsDecryptor = await kmsService.decryptWithKmsKey({
kmsId: keyId
});
const caCerts = await certificateAuthorityCertDAL.find({ caId: ca.id }, { sort: [["version", "asc"]] });
const decryptedChains = await Promise.all(
caCerts.map(async (caCert) => {
const decryptedCaCert = await kmsDecryptor({
cipherTextBlob: caCert.encryptedCertificate
});
const caCertObj = new x509.X509Certificate(decryptedCaCert);
const decryptedChain = await kmsDecryptor({
cipherTextBlob: caCert.encryptedCertificateChain
});
return {
certificate: caCertObj.toString("pem"),
certificateChain: decryptedChain.toString("utf-8"),
serialNumber: caCertObj.serialNumber,
version: caCert.version
};
})
);
return decryptedChains;
};
/**
* Return the decrypted pem-encoded certificate and certificate chain
* corresponding to CA certificate with id [caCertId].
*/
export const getCaCertChain = async ({
caCertId,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
projectDAL,
kmsService
}: TGetCaCertChainDTO) => {
const caCert = await certificateAuthorityCertDAL.findById(caCertId);
if (!caCert) throw new BadRequestError({ message: "CA certificate not found" });
const ca = await certificateAuthorityDAL.findById(caCert.caId);
const keyId = await getProjectKmsCertificateKeyId({
projectId: ca.projectId,

View File

@@ -20,7 +20,8 @@ import { TCertificateAuthorityCertDALFactory } from "./certificate-authority-cer
import { TCertificateAuthorityDALFactory } from "./certificate-authority-dal";
import {
createDistinguishedName,
getCaCertChain,
getCaCertChain, // TODO: consider rename
getCaCertChains,
getCaCredentials,
keyAlgorithmToAlgCfg,
parseDistinguishedName
@@ -33,10 +34,12 @@ import {
TCreateCaDTO,
TDeleteCaDTO,
TGetCaCertDTO,
TGetCaCertsDTO,
TGetCaCsrDTO,
TGetCaDTO,
TImportCertToCaDTO,
TIssueCertFromCaDTO,
TRenewCaCertDTO,
TSignCertFromCaDTO,
TSignIntermediateDTO,
TUpdateCaDTO
@@ -48,7 +51,10 @@ type TCertificateAuthorityServiceFactoryDep = {
TCertificateAuthorityDALFactory,
"transaction" | "create" | "findById" | "updateById" | "deleteById" | "findOne"
>;
certificateAuthorityCertDAL: Pick<TCertificateAuthorityCertDALFactory, "create" | "findOne" | "transaction">;
certificateAuthorityCertDAL: Pick<
TCertificateAuthorityCertDALFactory,
"create" | "findOne" | "transaction" | "find" | "findById"
>;
certificateAuthoritySecretDAL: Pick<TCertificateAuthoritySecretDALFactory, "create" | "findOne">;
certificateAuthorityCrlDAL: Pick<TCertificateAuthorityCrlDALFactory, "create" | "findOne" | "update">;
certificateAuthorityQueue: TCertificateAuthorityQueueFactory; // TODO: Pick
@@ -165,6 +171,24 @@
kmsId: certificateManagerKmsId
});
// https://nodejs.org/api/crypto.html#static-method-keyobjectfromkey
const skObj = KeyObject.from(keys.privateKey);
const { cipherTextBlob: encryptedPrivateKey } = await kmsEncryptor({
plainText: skObj.export({
type: "pkcs8",
format: "der"
})
});
const caSecret = await certificateAuthoritySecretDAL.create(
{
caId: ca.id,
encryptedPrivateKey
},
tx
);
if (type === CaType.ROOT) {
// note: create self-signed cert only applicable for root CA
const cert = await x509.X509CertificateGenerator.createSelfSigned({
@@ -191,11 +215,21 @@
plainText: Buffer.alloc(0)
});
await certificateAuthorityCertDAL.create(
const caCert = await certificateAuthorityCertDAL.create(
{
caId: ca.id,
encryptedCertificate,
encryptedCertificateChain
encryptedCertificateChain,
version: 1,
caSecretId: caSecret.id
},
tx
);
await certificateAuthorityDAL.updateById(
ca.id,
{
activeCaCertId: caCert.id
},
tx
);
@@ -223,24 +257,6 @@
tx
);
// https://nodejs.org/api/crypto.html#static-method-keyobjectfromkey
const skObj = KeyObject.from(keys.privateKey);
const { cipherTextBlob: encryptedPrivateKey } = await kmsEncryptor({
plainText: skObj.export({
type: "pkcs8",
format: "der"
})
});
await certificateAuthoritySecretDAL.create(
{
caId: ca.id,
encryptedPrivateKey
},
tx
);
return ca;
});
@@ -341,9 +357,7 @@
);
if (ca.type === CaType.ROOT) throw new BadRequestError({ message: "Root CA cannot generate CSR" });
const caCert = await certificateAuthorityCertDAL.findOne({ caId: ca.id });
if (caCert) throw new BadRequestError({ message: "CA already has a certificate installed" });
if (ca.activeCaCertId) throw new BadRequestError({ message: "CA already has a certificate installed" });
const { caPrivateKey, caPublicKey } = await getCaCredentials({
caId,
@@ -381,9 +395,283 @@
};
/**
* Return certificate and certificate chain for CA
* Renew certificate for CA with id [caId]
* Note: Currently implements CA renewal with same key-pair only
*/
const getCaCert = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TGetCaCertDTO) => {
const renewCaCert = async ({ caId, notAfter, actorId, actorAuthMethod, actor, actorOrgId }: TRenewCaCertDTO) => {
const ca = await certificateAuthorityDAL.findById(caId);
if (!ca) throw new BadRequestError({ message: "CA not found" });
if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" });
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
ca.projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
ProjectPermissionSub.CertificateAuthorities
);
if (ca.status === CaStatus.DISABLED) throw new BadRequestError({ message: "CA is disabled" });
// get latest CA certificate
const caCert = await certificateAuthorityCertDAL.findById(ca.activeCaCertId);
const serialNumber = crypto.randomBytes(32).toString("hex");
const certificateManagerKmsId = await getProjectKmsCertificateKeyId({
projectId: ca.projectId,
projectDAL,
kmsService
});
const kmsEncryptor = await kmsService.encryptWithKmsKey({
kmsId: certificateManagerKmsId
});
const { caPrivateKey, caPublicKey, caSecret } = await getCaCredentials({
caId: ca.id,
certificateAuthorityDAL,
certificateAuthoritySecretDAL,
projectDAL,
kmsService
});
const alg = keyAlgorithmToAlgCfg(ca.keyAlgorithm as CertKeyAlgorithm);
const kmsDecryptor = await kmsService.decryptWithKmsKey({
kmsId: certificateManagerKmsId
});
const decryptedCaCert = await kmsDecryptor({
cipherTextBlob: caCert.encryptedCertificate
});
const caCertObj = new x509.X509Certificate(decryptedCaCert);
let certificate = "";
let certificateChain = "";
switch (ca.type) {
case CaType.ROOT: {
if (new Date(notAfter) <= new Date(caCertObj.notAfter)) {
throw new BadRequestError({
message:
"New Root CA certificate must have notAfter date that is greater than the current certificate notAfter date"
});
}
const notBeforeDate = new Date();
const cert = await x509.X509CertificateGenerator.createSelfSigned({
name: ca.dn,
serialNumber,
notBefore: notBeforeDate,
notAfter: new Date(notAfter),
signingAlgorithm: alg,
keys: {
privateKey: caPrivateKey,
publicKey: caPublicKey
},
extensions: [
new x509.BasicConstraintsExtension(
true,
ca.maxPathLength === -1 || !ca.maxPathLength ? undefined : ca.maxPathLength,
true
),
new x509.ExtendedKeyUsageExtension(["1.2.3.4.5.6.7", "2.3.4.5.6.7.8"], true),
// eslint-disable-next-line no-bitwise
new x509.KeyUsagesExtension(x509.KeyUsageFlags.keyCertSign | x509.KeyUsageFlags.cRLSign, true),
await x509.SubjectKeyIdentifierExtension.create(caPublicKey)
]
});
const { cipherTextBlob: encryptedCertificate } = await kmsEncryptor({
plainText: Buffer.from(new Uint8Array(cert.rawData))
});
const { cipherTextBlob: encryptedCertificateChain } = await kmsEncryptor({
plainText: Buffer.alloc(0)
});
await certificateAuthorityDAL.transaction(async (tx) => {
const newCaCert = await certificateAuthorityCertDAL.create(
{
caId: ca.id,
encryptedCertificate,
encryptedCertificateChain,
version: caCert.version + 1,
caSecretId: caSecret.id
},
tx
);
await certificateAuthorityDAL.updateById(
ca.id,
{
activeCaCertId: newCaCert.id,
notBefore: notBeforeDate,
notAfter: new Date(notAfter)
},
tx
);
});
certificate = cert.toString("pem");
break;
}
case CaType.INTERMEDIATE: {
if (!ca.parentCaId) {
// TODO: look into optimal way to support renewal of intermediate CA with external parent CA
throw new BadRequestError({
message: "Failed to renew intermediate CA certificate with external parent CA"
});
}
const parentCa = await certificateAuthorityDAL.findById(ca.parentCaId);
const { caPrivateKey: parentCaPrivateKey } = await getCaCredentials({
caId: parentCa.id,
certificateAuthorityDAL,
certificateAuthoritySecretDAL,
projectDAL,
kmsService
});
// get latest parent CA certificate
if (!parentCa.activeCaCertId)
throw new BadRequestError({ message: "Parent CA does not have a certificate installed" });
const parentCaCert = await certificateAuthorityCertDAL.findById(parentCa.activeCaCertId);
const decryptedParentCaCert = await kmsDecryptor({
cipherTextBlob: parentCaCert.encryptedCertificate
});
const parentCaCertObj = new x509.X509Certificate(decryptedParentCaCert);
if (new Date(notAfter) <= new Date(caCertObj.notAfter)) {
throw new BadRequestError({
message:
"New Intermediate CA certificate must have notAfter date that is greater than the current certificate notAfter date"
});
}
if (new Date(notAfter) > new Date(parentCaCertObj.notAfter)) {
throw new BadRequestError({
message:
"New Intermediate CA certificate must have notAfter date that is equal to or smaller than the notAfter date of the parent CA certificate current certificate notAfter date"
});
}
const csrObj = await x509.Pkcs10CertificateRequestGenerator.create({
name: ca.dn,
keys: {
privateKey: caPrivateKey,
publicKey: caPublicKey
},
signingAlgorithm: alg,
extensions: [
// eslint-disable-next-line no-bitwise
new x509.KeyUsagesExtension(
x509.KeyUsageFlags.keyCertSign |
x509.KeyUsageFlags.cRLSign |
x509.KeyUsageFlags.digitalSignature |
x509.KeyUsageFlags.keyEncipherment
)
],
attributes: [new x509.ChallengePasswordAttribute("password")]
});
const notBeforeDate = new Date();
const intermediateCert = await x509.X509CertificateGenerator.create({
serialNumber,
subject: csrObj.subject,
issuer: parentCaCertObj.subject,
notBefore: notBeforeDate,
notAfter: new Date(notAfter),
signingKey: parentCaPrivateKey,
publicKey: csrObj.publicKey,
signingAlgorithm: alg,
extensions: [
new x509.KeyUsagesExtension(
x509.KeyUsageFlags.keyCertSign |
x509.KeyUsageFlags.cRLSign |
x509.KeyUsageFlags.digitalSignature |
x509.KeyUsageFlags.keyEncipherment,
true
),
new x509.BasicConstraintsExtension(
true,
ca.maxPathLength === -1 || !ca.maxPathLength ? undefined : ca.maxPathLength,
true
),
await x509.AuthorityKeyIdentifierExtension.create(parentCaCertObj, false),
await x509.SubjectKeyIdentifierExtension.create(csrObj.publicKey)
]
});
const { cipherTextBlob: encryptedCertificate } = await kmsEncryptor({
plainText: Buffer.from(new Uint8Array(intermediateCert.rawData))
});
const { caCert: parentCaCertificate, caCertChain: parentCaCertChain } = await getCaCertChain({
caCertId: parentCa.activeCaCertId,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
projectDAL,
kmsService
});
certificateChain = `${parentCaCertificate}\n${parentCaCertChain}`.trim();
const { cipherTextBlob: encryptedCertificateChain } = await kmsEncryptor({
plainText: Buffer.from(certificateChain)
});
await certificateAuthorityDAL.transaction(async (tx) => {
const newCaCert = await certificateAuthorityCertDAL.create(
{
caId: ca.id,
encryptedCertificate,
encryptedCertificateChain,
version: caCert.version + 1,
caSecretId: caSecret.id
},
tx
);
await certificateAuthorityDAL.updateById(
ca.id,
{
activeCaCertId: newCaCert.id,
notBefore: notBeforeDate,
notAfter: new Date(notAfter)
},
tx
);
});
certificate = intermediateCert.toString("pem");
break;
}
default: {
throw new BadRequestError({
message: "Unrecognized CA type"
});
}
}
return {
certificate,
certificateChain,
serialNumber,
ca
};
};
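
In short, renewCaCert enforces two validity-window checks before reissuing over the same key pair; a condensed sketch of just that logic:

```ts
// Sketch of the window checks in renewCaCert (same-key-pair renewal only).
// Root CA: the new notAfter must extend past the current certificate.
// Intermediate CA: the new notAfter must also not outlive the parent CA cert.
function assertRenewalWindow(newNotAfter: Date, currentNotAfter: Date, parentNotAfter?: Date): void {
  if (newNotAfter <= currentNotAfter) {
    throw new Error("notAfter must be later than the current certificate's notAfter");
  }
  if (parentNotAfter && newNotAfter > parentNotAfter) {
    throw new Error("notAfter cannot exceed the parent CA certificate's notAfter");
  }
}
```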
const getCaCerts = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TGetCaCertsDTO) => {
const ca = await certificateAuthorityDAL.findById(caId);
if (!ca) throw new BadRequestError({ message: "CA not found" });
@ -400,7 +688,7 @@ export const certificateAuthorityServiceFactory = ({
ProjectPermissionSub.CertificateAuthorities
);
const { caCert, caCertChain, serialNumber } = await getCaCertChain({
const caCertChains = await getCaCertChains({
caId,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
@ -408,6 +696,41 @@ export const certificateAuthorityServiceFactory = ({
kmsService
});
return {
ca,
caCerts: caCertChains
};
};
/**
* Return current certificate and certificate chain for CA
*/
const getCaCert = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TGetCaCertDTO) => {
const ca = await certificateAuthorityDAL.findById(caId);
if (!ca) throw new BadRequestError({ message: "CA not found" });
if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" });
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
ca.projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
ProjectPermissionSub.CertificateAuthorities
);
const { caCert, caCertChain, serialNumber } = await getCaCertChain({
caCertId: ca.activeCaCertId,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
projectDAL,
kmsService
});
return {
certificate: caCert,
certificateChain: caCertChain,
@ -447,6 +770,13 @@ export const certificateAuthorityServiceFactory = ({
);
if (ca.status === CaStatus.DISABLED) throw new BadRequestError({ message: "CA is disabled" });
if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" });
const caCert = await certificateAuthorityCertDAL.findById(ca.activeCaCertId);
if (ca.notAfter && new Date() > new Date(ca.notAfter)) {
throw new BadRequestError({ message: "CA is expired" });
}
const alg = keyAlgorithmToAlgCfg(ca.keyAlgorithm as CertKeyAlgorithm);
@ -459,7 +789,6 @@ export const certificateAuthorityServiceFactory = ({
kmsId: certificateManagerKmsId
});
const caCert = await certificateAuthorityCertDAL.findOne({ caId: ca.id });
const decryptedCaCert = await kmsDecryptor({
cipherTextBlob: caCert.encryptedCertificate
});
@ -531,7 +860,7 @@ export const certificateAuthorityServiceFactory = ({
});
const { caCert: issuingCaCertificate, caCertChain } = await getCaCertChain({
caId,
caCertId: ca.activeCaCertId,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
projectDAL,
@ -577,8 +906,7 @@ export const certificateAuthorityServiceFactory = ({
ProjectPermissionSub.CertificateAuthorities
);
const caCert = await certificateAuthorityCertDAL.findOne({ caId: ca.id });
if (caCert) throw new BadRequestError({ message: "CA has already imported a certificate" });
if (ca.activeCaCertId) throw new BadRequestError({ message: "CA has already imported a certificate" });
const certObj = new x509.X509Certificate(certificate);
const maxPathLength = certObj.getExtension(x509.BasicConstraintsExtension)?.pathLength;
@ -625,12 +953,32 @@ export const certificateAuthorityServiceFactory = ({
plainText: Buffer.from(certificateChain)
});
// TODO: validate that latest key-pair of CA is used to sign the certificate
// once renewal with new key pair is supported
const { caSecret, caPublicKey } = await getCaCredentials({
caId: ca.id,
certificateAuthorityDAL,
certificateAuthoritySecretDAL,
projectDAL,
kmsService
});
const isCaAndCertPublicKeySame = Buffer.from(await crypto.subtle.exportKey("spki", caPublicKey)).equals(
Buffer.from(certObj.publicKey.rawData)
);
if (!isCaAndCertPublicKeySame) {
throw new BadRequestError({ message: "CA and certificate public key do not match" });
}
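
Until renewal with a new key pair lands (see the TODO above), this check pins imported certificates to the CA's stored key. A standalone sketch of the comparison, assuming Node's WebCrypto plus the @peculiar/x509 X509Certificate class used throughout this file:

```ts
import * as x509 from "@peculiar/x509";

// True when the CA's stored public key and the certificate's embedded
// public key serialize to the same SPKI bytes.
async function publicKeysMatch(caPublicKey: CryptoKey, certObj: x509.X509Certificate): Promise<boolean> {
  const caSpki = Buffer.from(await crypto.subtle.exportKey("spki", caPublicKey));
  return caSpki.equals(Buffer.from(certObj.publicKey.rawData));
}
```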
await certificateAuthorityCertDAL.transaction(async (tx) => {
await certificateAuthorityCertDAL.create(
const newCaCert = await certificateAuthorityCertDAL.create(
{
caId: ca.id,
encryptedCertificate,
encryptedCertificateChain
encryptedCertificateChain,
version: 1,
caSecretId: caSecret.id
},
tx
);
@ -643,7 +991,8 @@ export const certificateAuthorityServiceFactory = ({
notBefore: new Date(certObj.notBefore),
notAfter: new Date(certObj.notAfter),
serialNumber: certObj.serialNumber,
parentCaId: parentCa?.id
parentCaId: parentCa?.id,
activeCaCertId: newCaCert.id
},
tx
);
@ -683,9 +1032,12 @@ export const certificateAuthorityServiceFactory = ({
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.Certificates);
if (ca.status === CaStatus.DISABLED) throw new BadRequestError({ message: "CA is disabled" });
if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" });
const caCert = await certificateAuthorityCertDAL.findById(ca.activeCaCertId);
const caCert = await certificateAuthorityCertDAL.findOne({ caId: ca.id });
if (!caCert) throw new BadRequestError({ message: "CA does not have a certificate installed" });
if (ca.notAfter && new Date() > new Date(ca.notAfter)) {
throw new BadRequestError({ message: "CA is expired" });
}
const certificateManagerKmsId = await getProjectKmsCertificateKeyId({
projectId: ca.projectId,
@ -814,6 +1166,7 @@ export const certificateAuthorityServiceFactory = ({
const cert = await certificateDAL.create(
{
caId: ca.id,
caCertId: caCert.id,
status: CertStatus.ACTIVE,
friendlyName: friendlyName || commonName,
commonName,
@ -837,7 +1190,7 @@ export const certificateAuthorityServiceFactory = ({
});
const { caCert: issuingCaCertificate, caCertChain } = await getCaCertChain({
caId: ca.id,
caCertId: caCert.id,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
projectDAL,
@ -886,9 +1239,13 @@ export const certificateAuthorityServiceFactory = ({
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.Certificates);
if (ca.status === CaStatus.DISABLED) throw new BadRequestError({ message: "CA is disabled" });
if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" });
const caCert = await certificateAuthorityCertDAL.findOne({ caId: ca.id });
if (!caCert) throw new BadRequestError({ message: "CA does not have a certificate installed" });
const caCert = await certificateAuthorityCertDAL.findById(ca.activeCaCertId);
if (ca.notAfter && new Date() > new Date(ca.notAfter)) {
throw new BadRequestError({ message: "CA is expired" });
}
const certificateManagerKmsId = await getProjectKmsCertificateKeyId({
projectId: ca.projectId,
@ -1013,6 +1370,7 @@ export const certificateAuthorityServiceFactory = ({
const cert = await certificateDAL.create(
{
caId: ca.id,
caCertId: caCert.id,
status: CertStatus.ACTIVE,
friendlyName: friendlyName || csrObj.subject,
commonName: cn,
@ -1036,7 +1394,7 @@ export const certificateAuthorityServiceFactory = ({
});
const { caCert: issuingCaCertificate, caCertChain } = await getCaCertChain({
caId: ca.id,
caCertId: ca.activeCaCertId,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
projectDAL,
@ -1058,6 +1416,8 @@ export const certificateAuthorityServiceFactory = ({
updateCaById,
deleteCaById,
getCaCsr,
renewCaCert,
getCaCerts,
getCaCert,
signIntermediate,
importCertToCa,

View File

@ -20,6 +20,10 @@ export enum CaStatus {
PENDING_CERTIFICATE = "pending-certificate"
}
export enum CaRenewalType {
EXISTING = "existing"
}
export type TCreateCaDTO = {
projectSlug: string;
type: CaType;
@ -53,6 +57,16 @@ export type TGetCaCsrDTO = {
caId: string;
} & Omit<TProjectPermission, "projectId">;
export type TRenewCaCertDTO = {
caId: string;
notAfter: string;
type: CaRenewalType;
} & Omit<TProjectPermission, "projectId">;
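
For reference, a renewal request shaped by TRenewCaCertDTO reduces to three caller-supplied fields; the values below are hypothetical:

```ts
const renewRequest: Pick<TRenewCaCertDTO, "caId" | "notAfter" | "type"> = {
  caId: "5f9c...", // hypothetical CA id
  notAfter: "2030-01-01", // must extend past the current certificate's notAfter
  type: CaRenewalType.EXISTING // only same-key-pair renewal is supported today
};
```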
export type TGetCaCertsDTO = {
caId: string;
} & Omit<TProjectPermission, "projectId">;
export type TGetCaCertDTO = {
caId: string;
} & Omit<TProjectPermission, "projectId">;
@ -109,10 +123,18 @@ export type TGetCaCredentialsDTO = {
kmsService: Pick<TKmsServiceFactory, "decryptWithKmsKey" | "generateKmsKey">;
};
export type TGetCaCertChainDTO = {
export type TGetCaCertChainsDTO = {
caId: string;
certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findById">;
certificateAuthorityCertDAL: Pick<TCertificateAuthorityCertDALFactory, "findOne">;
certificateAuthorityCertDAL: Pick<TCertificateAuthorityCertDALFactory, "find">;
projectDAL: Pick<TProjectDALFactory, "findOne" | "updateById" | "transaction">;
kmsService: Pick<TKmsServiceFactory, "decryptWithKmsKey" | "generateKmsKey">;
};
export type TGetCaCertChainDTO = {
caCertId: string;
certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findById">;
certificateAuthorityCertDAL: Pick<TCertificateAuthorityCertDALFactory, "findById">;
projectDAL: Pick<TProjectDALFactory, "findOne" | "updateById" | "transaction">;
kmsService: Pick<TKmsServiceFactory, "decryptWithKmsKey" | "generateKmsKey">;
};
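
The DTO split mirrors the runtime split: getCaCertChains walks every certificate row for a CA (find by caId), while getCaCertChain resolves the single row a certificate was actually issued from (findById by caCertId). Hypothetical call shapes against the Pick'd DAL surfaces above; the declares are assumptions for self-containment:

```ts
// Assumed surfaces, matching the Picks above.
declare const certificateAuthorityCertDAL: {
  find(filter: { caId: string }): Promise<{ id: string }[]>;
  findById(id: string): Promise<{ id: string } | undefined>;
};
declare const caId: string;
declare const caCertId: string;

// Every historical certificate row for a CA (one chain per row):
const allCaCertRows = await certificateAuthorityCertDAL.find({ caId });
// The exact CA certificate a given leaf cert hangs off:
const caCertRow = await certificateAuthorityCertDAL.findById(caCertId);
```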

View File

@ -21,7 +21,7 @@ type TCertificateServiceFactoryDep = {
certificateDAL: Pick<TCertificateDALFactory, "findOne" | "deleteById" | "update" | "find">;
certificateBodyDAL: Pick<TCertificateBodyDALFactory, "findOne">;
certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findById">;
certificateAuthorityCertDAL: Pick<TCertificateAuthorityCertDALFactory, "findOne">;
certificateAuthorityCertDAL: Pick<TCertificateAuthorityCertDALFactory, "findById">;
certificateAuthorityCrlDAL: Pick<TCertificateAuthorityCrlDALFactory, "update">;
certificateAuthoritySecretDAL: Pick<TCertificateAuthoritySecretDALFactory, "findOne">;
projectDAL: Pick<TProjectDALFactory, "findOne" | "updateById" | "findById" | "transaction">;
@ -180,7 +180,7 @@ export const certificateServiceFactory = ({
const certObj = new x509.X509Certificate(decryptedCert);
const { caCert, caCertChain } = await getCaCertChain({
caId: ca.id,
caCertId: cert.caCertId,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
projectDAL,

View File

@ -4,7 +4,6 @@ import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
export type TIdentityUaClientSecretDALFactory = ReturnType<typeof identityUaClientSecretDALFactory>;
@ -24,55 +23,5 @@ export const identityUaClientSecretDALFactory = (db: TDbClient) => {
}
};
const removeExpiredClientSecrets = async (tx?: Knex) => {
const BATCH_SIZE = 10000;
const MAX_RETRY_ON_FAILURE = 3;
let deletedClientSecret: { id: string }[] = [];
let numberOfRetryOnFailure = 0;
do {
try {
const findExpiredClientSecretQuery = (tx || db)(TableName.IdentityUaClientSecret)
.where({
isClientSecretRevoked: true
})
.orWhere((qb) => {
void qb
.where("clientSecretNumUses", ">", 0)
.andWhere(
"clientSecretNumUses",
">=",
db.ref("clientSecretNumUsesLimit").withSchema(TableName.IdentityUaClientSecret)
);
})
.orWhere((qb) => {
void qb
.where("clientSecretTTL", ">", 0)
.andWhereRaw(
`"${TableName.IdentityUaClientSecret}"."createdAt" + make_interval(secs => "${TableName.IdentityUaClientSecret}"."clientSecretTTL") < NOW()`
);
})
.select("id")
.limit(BATCH_SIZE);
// eslint-disable-next-line no-await-in-loop
deletedClientSecret = await (tx || db)(TableName.IdentityUaClientSecret)
.whereIn("id", findExpiredClientSecretQuery)
.del()
.returning("id");
numberOfRetryOnFailure = 0; // reset
} catch (error) {
numberOfRetryOnFailure += 1;
logger.error(error, "Failed to delete client secret on pruning");
} finally {
// eslint-disable-next-line no-await-in-loop
await new Promise((resolve) => {
setTimeout(resolve, 10); // time to breathe for db
});
}
} while (deletedClientSecret.length > 0 || numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
};
return { ...uaClientSecretOrm, incrementUsage, removeExpiredClientSecrets };
return { ...uaClientSecretOrm, incrementUsage };
};
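
A likely reason the pruner above was removed outright rather than rescheduled: on a clean pass the do/while never exits, because a successful batch resets numberOfRetryOnFailure to 0, and 0 < MAX_RETRY_ON_FAILURE keeps the `||` condition true even when nothing was deleted. A corrected skeleton (the helper name is hypothetical) would terminate once a batch comes back empty with no retry pending:

```ts
const BATCH_SIZE = 10000;
const MAX_RETRY_ON_FAILURE = 3;

// Hypothetical helper standing in for the find-then-delete query pair above.
declare function deleteExpiredClientSecretBatch(limit: number): Promise<{ id: string }[]>;

let deleted: { id: string }[] = [];
let failures = 0;
do {
  try {
    deleted = await deleteExpiredClientSecretBatch(BATCH_SIZE);
    failures = 0; // reset on success
  } catch {
    failures += 1;
    deleted = []; // drop the stale batch so it cannot keep the loop alive
  }
} while (deleted.length > 0 || (failures > 0 && failures < MAX_RETRY_ON_FAILURE));
```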

View File

@ -1,357 +0,0 @@
import { retry } from "@octokit/plugin-retry";
import { Octokit } from "@octokit/rest";
import { TIntegrationAuths, TIntegrations } from "@app/db/schemas";
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { IntegrationMetadataSchema } from "../integration/integration-schema";
import { TKmsServiceFactory } from "../kms/kms-service";
import { KmsDataKey } from "../kms/kms-types";
import { TProjectBotServiceFactory } from "../project-bot/project-bot-service";
import { TSecretDALFactory } from "../secret/secret-dal";
import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
import { TSecretImportDALFactory } from "../secret-import/secret-import-dal";
import { fnSecretsV2FromImports } from "../secret-import/secret-import-fns";
import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal";
import { TIntegrationAuthServiceFactory } from "./integration-auth-service";
import { Integrations } from "./integration-list";
const MAX_SYNC_SECRET_DEPTH = 5;
/**
* Return the secrets in a given [folderId] including secrets from
* nested imported folders recursively.
*/
const getIntegrationSecretsV2 = async (
dto: {
projectId: string;
environment: string;
folderId: string;
depth: number;
decryptor: (value: Buffer | null | undefined) => string;
},
secretV2BridgeDAL: Pick<TSecretV2BridgeDALFactory, "find" | "findByFolderId">,
folderDAL: Pick<TSecretFolderDALFactory, "findByManySecretPath">,
secretImportDAL: Pick<TSecretImportDALFactory, "find" | "findByFolderIds">
) => {
const content: Record<string, boolean> = {};
if (dto.depth > MAX_SYNC_SECRET_DEPTH) {
logger.info(
`getIntegrationSecrets: secret depth exceeded for [projectId=${dto.projectId}] [folderId=${dto.folderId}] [depth=${dto.depth}]`
);
return content;
}
// process secrets in current folder
const secrets = await secretV2BridgeDAL.findByFolderId(dto.folderId);
secrets.forEach((secret) => {
const secretKey = secret.key;
content[secretKey] = true;
});
// check if current folder has any imports from other folders
const secretImports = await secretImportDAL.find({ folderId: dto.folderId, isReplication: false });
// if no imports then return secrets in the current folder
if (!secretImports.length) return content;
const importedSecrets = await fnSecretsV2FromImports({
decryptor: dto.decryptor,
folderDAL,
secretDAL: secretV2BridgeDAL,
secretImportDAL,
allowedImports: secretImports
});
for (let i = importedSecrets.length - 1; i >= 0; i -= 1) {
for (let j = 0; j < importedSecrets[i].secrets.length; j += 1) {
const importedSecret = importedSecrets[i].secrets[j];
if (!content[importedSecret.key]) {
content[importedSecret.key] = true;
}
}
}
return content;
};
/**
* Return the secrets in a given [folderId] including secrets from
* nested imported folders recursively.
*/
const getIntegrationSecretsV1 = async (
dto: {
projectId: string;
environment: string;
folderId: string;
key: string;
depth: number;
},
secretDAL: Pick<TSecretDALFactory, "findByFolderId">,
folderDAL: Pick<TSecretFolderDALFactory, "findByManySecretPath">,
secretImportDAL: Pick<TSecretImportDALFactory, "find" | "findByFolderIds">
) => {
let content: Record<string, boolean> = {};
if (dto.depth > MAX_SYNC_SECRET_DEPTH) {
logger.info(
`getIntegrationSecrets: secret depth exceeded for [projectId=${dto.projectId}] [folderId=${dto.folderId}] [depth=${dto.depth}]`
);
return content;
}
// process secrets in current folder
const secrets = await secretDAL.findByFolderId(dto.folderId);
secrets.forEach((secret) => {
const secretKey = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretKeyCiphertext,
iv: secret.secretKeyIV,
tag: secret.secretKeyTag,
key: dto.key
});
content[secretKey] = true;
});
// check if current folder has any imports from other folders
const secretImport = await secretImportDAL.find({ folderId: dto.folderId, isReplication: false });
// if no imports then return secrets in the current folder
if (!secretImport) return content;
const importedFolders = await folderDAL.findByManySecretPath(
secretImport.map(({ importEnv, importPath }) => ({
envId: importEnv.id,
secretPath: importPath
}))
);
for await (const folder of importedFolders) {
if (folder) {
// get secrets contained in each imported folder by recursively calling
// this function against the imported folder
const importedSecrets = await getIntegrationSecretsV1(
{
environment: dto.environment,
projectId: dto.projectId,
folderId: folder.id,
key: dto.key,
depth: dto.depth + 1
},
secretDAL,
folderDAL,
secretImportDAL
);
// add the imported secrets to the current folder secrets
content = { ...importedSecrets, ...content };
}
}
return content;
};
export const deleteGithubSecrets = async ({
integration,
secrets,
accessToken
}: {
integration: Omit<TIntegrations, "envId">;
secrets: Record<string, boolean>;
accessToken: string;
}) => {
interface GitHubSecret {
name: string;
created_at: string;
updated_at: string;
visibility?: "all" | "private" | "selected";
selected_repositories_url?: string | undefined;
}
const OctokitWithRetry = Octokit.plugin(retry);
const octokit = new OctokitWithRetry({
auth: accessToken
});
enum GithubScope {
Repo = "github-repo",
Org = "github-org",
Env = "github-env"
}
let encryptedGithubSecrets: GitHubSecret[];
switch (integration.scope) {
case GithubScope.Org: {
encryptedGithubSecrets = (
await octokit.request("GET /orgs/{org}/actions/secrets", {
org: integration.owner as string
})
).data.secrets;
break;
}
case GithubScope.Env: {
encryptedGithubSecrets = (
await octokit.request("GET /repositories/{repository_id}/environments/{environment_name}/secrets", {
repository_id: Number(integration.appId),
environment_name: integration.targetEnvironmentId as string
})
).data.secrets;
break;
}
default: {
encryptedGithubSecrets = (
await octokit.request("GET /repos/{owner}/{repo}/actions/secrets", {
owner: integration.owner as string,
repo: integration.app as string
})
).data.secrets;
break;
}
}
for await (const encryptedSecret of encryptedGithubSecrets) {
if (encryptedSecret.name in secrets) {
switch (integration.scope) {
case GithubScope.Org: {
await octokit.request("DELETE /orgs/{org}/actions/secrets/{secret_name}", {
org: integration.owner as string,
secret_name: encryptedSecret.name
});
break;
}
case GithubScope.Env: {
await octokit.request(
"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}",
{
repository_id: Number(integration.appId),
environment_name: integration.targetEnvironmentId as string,
secret_name: encryptedSecret.name
}
);
break;
}
default: {
await octokit.request("DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", {
owner: integration.owner as string,
repo: integration.app as string,
secret_name: encryptedSecret.name
});
break;
}
}
// small delay to prevent hitting API rate limits
await new Promise((resolve) => {
setTimeout(resolve, 50);
});
}
}
};
export const deleteIntegrationSecrets = async ({
integration,
integrationAuth,
integrationAuthService,
projectBotService,
secretV2BridgeDAL,
folderDAL,
secretDAL,
secretImportDAL,
kmsService
}: {
integration: Omit<TIntegrations, "envId"> & {
projectId: string;
environment: {
id: string;
name: string;
slug: string;
};
secretPath: string;
};
integrationAuth: TIntegrationAuths;
integrationAuthService: Pick<TIntegrationAuthServiceFactory, "getIntegrationAccessToken" | "getIntegrationAuth">;
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
secretV2BridgeDAL: Pick<TSecretV2BridgeDALFactory, "find" | "findByFolderId">;
folderDAL: Pick<TSecretFolderDALFactory, "findByManySecretPath" | "findBySecretPath">;
secretImportDAL: Pick<TSecretImportDALFactory, "find" | "findByFolderIds">;
secretDAL: Pick<TSecretDALFactory, "findByFolderId">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
}) => {
const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integration.projectId);
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: integration.projectId
});
const folder = await folderDAL.findBySecretPath(
integration.projectId,
integration.environment.slug,
integration.secretPath
);
if (!folder) {
throw new NotFoundError({
message: "Folder not found."
});
}
const { accessToken } = await integrationAuthService.getIntegrationAccessToken(
integrationAuth,
shouldUseSecretV2Bridge,
botKey
);
const secrets = shouldUseSecretV2Bridge
? await getIntegrationSecretsV2(
{
environment: integration.environment.id,
projectId: integration.projectId,
folderId: folder.id,
depth: 1,
decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : "")
},
secretV2BridgeDAL,
folderDAL,
secretImportDAL
)
: await getIntegrationSecretsV1(
{
environment: integration.environment.id,
projectId: integration.projectId,
folderId: folder.id,
key: botKey as string,
depth: 1
},
secretDAL,
folderDAL,
secretImportDAL
);
const suffixedSecrets: typeof secrets = {};
const metadata = IntegrationMetadataSchema.parse(integration.metadata);
if (metadata) {
Object.keys(secrets).forEach((key) => {
const prefix = metadata?.secretPrefix || "";
const suffix = metadata?.secretSuffix || "";
const newKey = prefix + key + suffix;
suffixedSecrets[newKey] = secrets[key];
});
}
switch (integration.integration) {
case Integrations.GITHUB: {
await deleteGithubSecrets({
integration,
accessToken,
secrets: Object.keys(suffixedSecrets).length !== 0 ? suffixedSecrets : secrets
});
break;
}
default:
throw new BadRequestError({
message: "Invalid integration"
});
}
};

View File

@ -538,20 +538,19 @@ const syncSecretsAWSParameterStore = async ({
integration,
secrets,
accessId,
accessToken,
projectId
accessToken
}: {
integration: TIntegrations & { secretPath: string; environment: { slug: string } };
integration: TIntegrations;
secrets: Record<string, { value: string; comment?: string }>;
accessId: string | null;
accessToken: string;
projectId?: string;
}) => {
let response: { isSynced: boolean; syncMessage: string } | null = null;
if (!accessId) {
throw new Error("AWS access ID is required");
}
const config = new AWS.Config({
region: integration.region as string,
credentials: {
@ -568,9 +567,7 @@ const syncSecretsAWSParameterStore = async ({
const metadata = z.record(z.any()).parse(integration.metadata || {});
const awsParameterStoreSecretsObj: Record<string, AWS.SSM.Parameter> = {};
logger.info(
`getIntegrationSecrets: integration sync triggered for ssm with [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [shouldDisableDelete=${metadata.shouldDisableDelete}]`
);
// now fetch all aws parameter store secrets
let hasNext = true;
let nextToken: string | undefined;
@ -597,18 +594,6 @@ const syncSecretsAWSParameterStore = async ({
nextToken = parameters.NextToken;
}
logger.info(
`getIntegrationSecrets: all fetched keys from AWS SSM [projectId=${projectId}] [environment=${
integration.environment.slug
}] [secretPath=${integration.secretPath}] [awsParameterStoreSecretsObj=${Object.keys(
awsParameterStoreSecretsObj
).join(",")}]`
);
logger.info(
`getIntegrationSecrets: all secrets from Infisical to send to AWS SSM [projectId=${projectId}] [environment=${
integration.environment.slug
}] [secretPath=${integration.secretPath}] [secrets=${Object.keys(secrets).join(",")}]`
);
// Identify secrets to create
// don't use Promise.all() and promise map here
// it will cause rate limit
@ -618,56 +603,24 @@ const syncSecretsAWSParameterStore = async ({
// case: secret does not exist in AWS parameter store
// -> create secret
if (secrets[key].value) {
logger.info(
`getIntegrationSecrets: create secret in AWS SSM for [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [key=${key}]`
);
await ssm
.putParameter({
Name: `${integration.path}${key}`,
Type: "SecureString",
Value: secrets[key].value,
...(metadata.kmsKeyId && { KeyId: metadata.kmsKeyId }),
Overwrite: true
// Overwrite: true,
Tags: metadata.secretAWSTag
? metadata.secretAWSTag.map((tag: { key: string; value: string }) => ({
Key: tag.key,
Value: tag.value
}))
: []
})
.promise();
if (metadata.secretAWSTag?.length) {
try {
await ssm
.addTagsToResource({
ResourceType: "Parameter",
ResourceId: `${integration.path}${key}`,
Tags: metadata.secretAWSTag
? metadata.secretAWSTag.map((tag: { key: string; value: string }) => ({
Key: tag.key,
Value: tag.value
}))
: []
})
.promise();
} catch (err) {
logger.error(
err,
`getIntegrationSecrets: create secret in AWS SSM for failed [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [key=${key}]`
);
// eslint-disable-next-line @typescript-eslint/no-explicit-any
if ((err as any).code === "AccessDeniedException") {
logger.error(
`AWS Parameter Store Error [integration=${integration.id}]: double check AWS account permissions (refer to the Infisical docs)`
);
}
response = {
isSynced: false,
syncMessage: (err as AWSError)?.message || "Error syncing with AWS Parameter Store"
};
}
}
}
// case: secret exists in AWS parameter store
} else {
logger.info(
`getIntegrationSecrets: update secret in AWS SSM for [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [key=${key}]`
);
// -> update secret
if (awsParameterStoreSecretsObj[key].Value !== secrets[key].value) {
await ssm
@ -695,10 +648,6 @@ const syncSecretsAWSParameterStore = async ({
})
.promise();
} catch (err) {
logger.error(
err,
`getIntegrationSecrets: update secret in AWS SSM for failed [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [key=${key}]`
);
// eslint-disable-next-line @typescript-eslint/no-explicit-any
if ((err as any).code === "AccessDeniedException") {
logger.error(
@ -721,18 +670,9 @@ const syncSecretsAWSParameterStore = async ({
}
if (!metadata.shouldDisableDelete) {
logger.info(
`getIntegrationSecrets: inside of shouldDisableDelete AWS SSM [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [step=1]`
);
for (const key in awsParameterStoreSecretsObj) {
if (Object.hasOwn(awsParameterStoreSecretsObj, key)) {
logger.info(
`getIntegrationSecrets: inside of shouldDisableDelete AWS SSM [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [key=${key}] [step=2]`
);
if (!(key in secrets)) {
logger.info(
`getIntegrationSecrets: inside of shouldDisableDelete AWS SSM [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [key=${key}] [step=3]`
);
// case: secret exists in AWS parameter store but not in Infisical
// -> delete secret
await ssm
@ -740,9 +680,6 @@ const syncSecretsAWSParameterStore = async ({
Name: awsParameterStoreSecretsObj[key].Name as string
})
.promise();
logger.info(
`getIntegrationSecrets: inside of shouldDisableDelete AWS SSM [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [key=${key}] [step=4]`
);
}
await new Promise((resolve) => {
setTimeout(resolve, 50);
@ -3719,8 +3656,7 @@ export const syncIntegrationSecrets = async ({
integration,
secrets,
accessId,
accessToken,
projectId
accessToken
});
break;
case Integrations.AWS_SECRET_MANAGER:
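
Beyond the logging cleanup, the substantive change in this file is the tagging flow: AWS SSM rejects PutParameter requests that combine Overwrite: true with Tags, so tags now go through a separate AddTagsToResource call. A minimal sketch of the two-step flow using aws-sdk v2 as this file does (region and names are placeholders):

```ts
import AWS from "aws-sdk";

const ssm = new AWS.SSM({ region: "us-east-1" }); // placeholder region

async function upsertTaggedParameter(
  name: string,
  value: string,
  tags: { Key: string; Value: string }[]
): Promise<void> {
  // Overwrite and Tags are mutually exclusive on PutParameter,
  // so write the value first...
  await ssm
    .putParameter({ Name: name, Type: "SecureString", Value: value, Overwrite: true })
    .promise();
  // ...then attach tags in a second call.
  if (tags.length) {
    await ssm
      .addTagsToResource({ ResourceType: "Parameter", ResourceId: name, Tags: tags })
      .promise();
  }
}
```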

View File

@ -6,15 +6,8 @@ import { BadRequestError } from "@app/lib/errors";
import { TProjectPermission } from "@app/lib/types";
import { TIntegrationAuthDALFactory } from "../integration-auth/integration-auth-dal";
import { TIntegrationAuthServiceFactory } from "../integration-auth/integration-auth-service";
import { deleteIntegrationSecrets } from "../integration-auth/integration-delete-secret";
import { TKmsServiceFactory } from "../kms/kms-service";
import { TProjectBotServiceFactory } from "../project-bot/project-bot-service";
import { TSecretDALFactory } from "../secret/secret-dal";
import { TSecretQueueFactory } from "../secret/secret-queue";
import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
import { TSecretImportDALFactory } from "../secret-import/secret-import-dal";
import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal";
import { TIntegrationDALFactory } from "./integration-dal";
import {
TCreateIntegrationDTO,
@ -26,15 +19,9 @@ import {
type TIntegrationServiceFactoryDep = {
integrationDAL: TIntegrationDALFactory;
integrationAuthDAL: TIntegrationAuthDALFactory;
integrationAuthService: TIntegrationAuthServiceFactory;
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findByManySecretPath">;
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
projectBotService: TProjectBotServiceFactory;
secretQueueService: Pick<TSecretQueueFactory, "syncIntegrations">;
secretV2BridgeDAL: Pick<TSecretV2BridgeDALFactory, "find" | "findByFolderId">;
secretImportDAL: Pick<TSecretImportDALFactory, "find" | "findByFolderIds">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
secretDAL: Pick<TSecretDALFactory, "findByFolderId">;
};
export type TIntegrationServiceFactory = ReturnType<typeof integrationServiceFactory>;
@ -44,13 +31,7 @@ export const integrationServiceFactory = ({
integrationAuthDAL,
folderDAL,
permissionService,
secretQueueService,
integrationAuthService,
projectBotService,
secretV2BridgeDAL,
secretImportDAL,
kmsService,
secretDAL
secretQueueService
}: TIntegrationServiceFactoryDep) => {
const createIntegration = async ({
app,
@ -180,14 +161,7 @@ export const integrationServiceFactory = ({
return updatedIntegration;
};
const deleteIntegration = async ({
actorId,
id,
actor,
actorAuthMethod,
actorOrgId,
shouldDeleteIntegrationSecrets
}: TDeleteIntegrationDTO) => {
const deleteIntegration = async ({ actorId, id, actor, actorAuthMethod, actorOrgId }: TDeleteIntegrationDTO) => {
const integration = await integrationDAL.findById(id);
if (!integration) throw new BadRequestError({ message: "Integration not found" });
@ -200,22 +174,6 @@ export const integrationServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.Integrations);
const integrationAuth = await integrationAuthDAL.findById(integration.integrationAuthId);
if (shouldDeleteIntegrationSecrets) {
await deleteIntegrationSecrets({
integration,
integrationAuth,
projectBotService,
integrationAuthService,
secretV2BridgeDAL,
folderDAL,
secretImportDAL,
secretDAL,
kmsService
});
}
const deletedIntegration = await integrationDAL.transaction(async (tx) => {
// delete integration
const deletedIntegrationResult = await integrationDAL.deleteById(id, tx);

View File

@ -63,7 +63,6 @@ export type TUpdateIntegrationDTO = {
export type TDeleteIntegrationDTO = {
id: string;
shouldDeleteIntegrationSecrets?: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TSyncIntegrationDTO = {

View File

@ -46,7 +46,6 @@ export const projectBotDALFactory = (db: TDbClient) => {
const doc = await db
.replicaNode()(TableName.ProjectMembership)
.where(`${TableName.ProjectMembership}.projectId` as "projectId", projectId)
.where(`${TableName.ProjectKeys}.projectId` as "projectId", projectId)
.where(`${TableName.Users}.isGhost` as "isGhost", false)
.join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`)
.join(TableName.ProjectKeys, `${TableName.ProjectMembership}.userId`, `${TableName.ProjectKeys}.receiverId`)

View File

@ -4,7 +4,6 @@ import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal";
import { TIdentityUaClientSecretDALFactory } from "../identity-ua/identity-ua-client-secret-dal";
import { TSecretVersionDALFactory } from "../secret/secret-version-dal";
import { TSecretFolderVersionDALFactory } from "../secret-folder/secret-folder-version-dal";
import { TSecretSharingDALFactory } from "../secret-sharing/secret-sharing-dal";
@ -13,7 +12,6 @@ import { TSecretVersionV2DALFactory } from "../secret-v2-bridge/secret-version-d
type TDailyResourceCleanUpQueueServiceFactoryDep = {
auditLogDAL: Pick<TAuditLogDALFactory, "pruneAuditLog">;
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "removeExpiredTokens">;
identityUniversalAuthClientSecretDAL: Pick<TIdentityUaClientSecretDALFactory, "removeExpiredClientSecrets">;
secretVersionDAL: Pick<TSecretVersionDALFactory, "pruneExcessVersions">;
secretVersionV2DAL: Pick<TSecretVersionV2DALFactory, "pruneExcessVersions">;
secretFolderVersionDAL: Pick<TSecretFolderVersionDALFactory, "pruneExcessVersions">;
@ -32,14 +30,12 @@ export const dailyResourceCleanUpQueueServiceFactory = ({
secretFolderVersionDAL,
identityAccessTokenDAL,
secretSharingDAL,
secretVersionV2DAL,
identityUniversalAuthClientSecretDAL
secretVersionV2DAL
}: TDailyResourceCleanUpQueueServiceFactoryDep) => {
queueService.start(QueueName.DailyResourceCleanUp, async () => {
logger.info(`${QueueName.DailyResourceCleanUp}: queue task started`);
await auditLogDAL.pruneAuditLog();
await identityAccessTokenDAL.removeExpiredTokens();
await identityUniversalAuthClientSecretDAL.removeExpiredClientSecrets();
await secretSharingDAL.pruneExpiredSharedSecrets();
await snapshotDAL.pruneExcessSnapshots();
await secretVersionDAL.pruneExcessVersions();

View File

@ -36,8 +36,8 @@ type TSecretImportSecretsV2 = {
secretKey: string;
// akhilmhdh: yes, I know you can use ?.
// But for some reason TS treats ? and an explicit undefined as different; just TS things
secretValue: string;
secretComment: string;
secretValue: string | undefined;
secretComment: string | undefined;
})[];
};
@ -157,7 +157,7 @@ export const fnSecretsV2FromImports = async ({
secretImportDAL: Pick<TSecretImportDALFactory, "findByFolderIds">;
depth?: number;
cyclicDetector?: Set<string>;
decryptor: (value?: Buffer | null) => string;
decryptor: (value?: Buffer | null) => string | undefined;
expandSecretReferences?: (
secrets: Record<string, { value?: string; comment?: string; skipMultilineEncoding?: boolean | null }>
) => Promise<Record<string, { value?: string; comment?: string; skipMultilineEncoding?: boolean | null }>>;
@ -231,7 +231,6 @@ export const fnSecretsV2FromImports = async ({
_id: item.id // The old Python SDK depends on the _id field being returned. We return this to keep the older Python SDK versions backwards compatible with the new Postgres backend.
}))
.concat(folderDeeperImportSecrets);
return {
secretPath: importPath,
environment: importEnv.slug,
@ -255,7 +254,7 @@ export const fnSecretsV2FromImports = async ({
};
return acc;
},
{} as Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean | null }>
{} as Record<string, { value?: string; comment?: string; skipMultilineEncoding?: boolean | null }>
);
// eslint-disable-next-line
await expandSecretReferences(secretsGroupByKey);

View File

@ -507,7 +507,7 @@ export const secretImportServiceFactory = ({
folderDAL,
secretDAL: secretV2BridgeDAL,
secretImportDAL,
decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : "")
decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : undefined)
});
return importedSecrets;
}

View File

@ -51,7 +51,7 @@ export const secretTagDALFactory = (db: TDbClient) => {
...secretTagOrm,
saveTagsToSecret: secretJnTagOrm.insertMany,
deleteTagsToSecret: secretJnTagOrm.delete,
saveTagsToSecretV2: secretV2JnTagOrm.batchInsert,
saveTagsToSecretV2: secretV2JnTagOrm.insertMany,
deleteTagsToSecretV2: secretV2JnTagOrm.delete,
findSecretTagsByProjectId,
deleteTagsManySecret,

View File

@ -22,7 +22,16 @@ type TSecretTagServiceFactoryDep = {
export type TSecretTagServiceFactory = ReturnType<typeof secretTagServiceFactory>;
export const secretTagServiceFactory = ({ secretTagDAL, permissionService }: TSecretTagServiceFactoryDep) => {
const createTag = async ({ slug, actor, color, actorId, actorOrgId, actorAuthMethod, projectId }: TCreateTagDTO) => {
const createTag = async ({
name,
slug,
actor,
color,
actorId,
actorOrgId,
actorAuthMethod,
projectId
}: TCreateTagDTO) => {
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
@ -37,6 +46,7 @@ export const secretTagServiceFactory = ({ secretTagDAL, permissionService }: TSe
const newTag = await secretTagDAL.create({
projectId,
name,
slug,
color,
createdBy: actorId,
@ -45,7 +55,7 @@ export const secretTagServiceFactory = ({ secretTagDAL, permissionService }: TSe
return newTag;
};
const updateTag = async ({ actorId, actor, actorOrgId, actorAuthMethod, id, color, slug }: TUpdateTagDTO) => {
const updateTag = async ({ actorId, actor, actorOrgId, actorAuthMethod, id, name, color, slug }: TUpdateTagDTO) => {
const tag = await secretTagDAL.findById(id);
if (!tag) throw new BadRequestError({ message: "Tag doesn't exist" });
@ -63,7 +73,7 @@ export const secretTagServiceFactory = ({ secretTagDAL, permissionService }: TSe
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Tags);
const updatedTag = await secretTagDAL.updateById(tag.id, { color, slug });
const updatedTag = await secretTagDAL.updateById(tag.id, { name, color, slug });
return updatedTag;
};
@ -97,7 +107,7 @@ export const secretTagServiceFactory = ({ secretTagDAL, permissionService }: TSe
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Tags);
return { ...tag, name: tag.slug };
return tag;
};
const getTagBySlug = async ({ actorId, actor, actorOrgId, actorAuthMethod, slug, projectId }: TGetTagBySlugDTO) => {
@ -113,7 +123,7 @@ export const secretTagServiceFactory = ({ secretTagDAL, permissionService }: TSe
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Tags);
return { ...tag, name: tag.slug };
return tag;
};
const getProjectTags = async ({ actor, actorId, actorOrgId, actorAuthMethod, projectId }: TListProjectTagsDTO) => {

View File

@ -1,12 +1,14 @@
import { TProjectPermission } from "@app/lib/types";
export type TCreateTagDTO = {
name: string;
color: string;
slug: string;
} & TProjectPermission;
export type TUpdateTagDTO = {
id: string;
name?: string;
slug?: string;
color?: string;
} & Omit<TProjectPermission, "projectId">;
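
With name restored as its own column in the tag DTOs above, slug stays the stable machine identifier while name is free-form display text. A hypothetical create payload showing the distinction:

```ts
const createTagBody: Pick<TCreateTagDTO, "name" | "slug" | "color"> = {
  name: "Billing Team", // display name, free-form
  slug: "billing-team", // stable identifier used for filtering and references
  color: "#f97316"
};
```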

View File

@ -136,6 +136,7 @@ export const secretV2BridgeDALFactory = (db: TDbClient) => {
.select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
.select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"))
.select(db.ref("name").withSchema(TableName.SecretTag).as("tagName"))
.orderBy("id", "asc");
const data = sqlNestRelationships({
@ -146,11 +147,11 @@ export const secretV2BridgeDALFactory = (db: TDbClient) => {
{
key: "tagId",
label: "tags" as const,
mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({
mapper: ({ tagId: id, tagColor: color, tagSlug: slug, tagName: name }) => ({
id,
color,
slug,
name: slug
name
})
}
]
@ -168,13 +169,14 @@ export const secretV2BridgeDALFactory = (db: TDbClient) => {
.where({ [`${TableName.SecretV2}Id` as const]: secretId })
.select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
.select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"));
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"))
.select(db.ref("name").withSchema(TableName.SecretTag).as("tagName"));
return tags.map((el) => ({
id: el.tagId,
color: el.tagColor,
slug: el.tagSlug,
name: el.tagSlug
name: el.tagName
}));
} catch (error) {
throw new DatabaseError({ error, name: "get secret tags" });
@ -208,6 +210,7 @@ export const secretV2BridgeDALFactory = (db: TDbClient) => {
.select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
.select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"))
.select(db.ref("name").withSchema(TableName.SecretTag).as("tagName"))
.orderBy("id", "asc");
const data = sqlNestRelationships({
@ -218,11 +221,11 @@ export const secretV2BridgeDALFactory = (db: TDbClient) => {
{
key: "tagId",
label: "tags" as const,
mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({
mapper: ({ tagId: id, tagColor: color, tagSlug: slug, tagName: name }) => ({
id,
color,
slug,
name: slug
name
})
}
]
@ -287,7 +290,7 @@ export const secretV2BridgeDALFactory = (db: TDbClient) => {
}))
);
if (!newSecretReferences.length) return;
const secretReferences = await (tx || db).batchInsert(TableName.SecretReferenceV2, newSecretReferences);
const secretReferences = await (tx || db)(TableName.SecretReferenceV2).insert(newSecretReferences);
return secretReferences;
} catch (error) {
throw new DatabaseError({ error, name: "UpsertSecretReference" });
@ -347,7 +350,8 @@ export const secretV2BridgeDALFactory = (db: TDbClient) => {
.select(selectAllTableCols(TableName.SecretV2))
.select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
.select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"));
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"))
.select(db.ref("name").withSchema(TableName.SecretTag).as("tagName"));
const docs = sqlNestRelationships({
data: rawDocs,
key: "id",
@ -356,11 +360,11 @@ export const secretV2BridgeDALFactory = (db: TDbClient) => {
{
key: "tagId",
label: "tags" as const,
mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({
mapper: ({ tagId: id, tagColor: color, tagSlug: slug, tagName: name }) => ({
id,
color,
slug,
name: slug
name
})
}
]
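
All three query sites in this file now select the tag's own name column rather than aliasing slug. The row-to-object nesting these mappers perform looks roughly like this, with simplified, hypothetical row data:

```ts
// SQL returns the secret repeated once per joined tag...
const rows = [
  { id: "sec_1", key: "DB_URL", tagId: "t1", tagColor: "red", tagSlug: "prod", tagName: "Production" },
  { id: "sec_1", key: "DB_URL", tagId: "t2", tagColor: "blue", tagSlug: "db", tagName: "Database" }
];

// ...and the childrenMapper collapses them into one secret with a tags array,
// now carrying the real name column instead of a copied slug.
const nested = {
  id: rows[0].id,
  key: rows[0].key,
  tags: rows.map(({ tagId: id, tagColor: color, tagSlug: slug, tagName: name }) => ({ id, color, slug, name }))
};
```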

View File

@ -528,8 +528,8 @@ export const reshapeBridgeSecret = (
environment: string,
secretPath: string,
secret: Omit<TSecretsV2, "encryptedValue" | "encryptedComment"> & {
value: string;
comment: string;
value?: string;
comment?: string;
tags?: {
id: string;
slug: string;
@ -542,8 +542,8 @@ export const reshapeBridgeSecret = (
secretPath,
workspace: workspaceId,
environment,
secretValue: secret.value || "",
secretComment: secret.comment || "",
secretValue: secret.value,
secretComment: secret.comment,
version: secret.version,
type: secret.type,
_id: secret.id,
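
This string to string | undefined widening is the thread running through the remaining hunks: a secret with no stored ciphertext now surfaces as undefined rather than being coerced to an empty string, so callers can tell "no value" apart from "empty value". The decrypt step under that convention, as the later hunks apply it (the declare stands in for the decryptor obtained from kmsService.createCipherPairWithDataKey):

```ts
// Assumed: the data-key decryptor from the surrounding service code.
declare const secretManagerDecryptor: (arg: { cipherTextBlob: Buffer }) => Buffer;

// undefined means "nothing stored"; "" remains a legitimate stored value.
const toPlaintext = (blob?: Buffer | null): string | undefined =>
  blob ? secretManagerDecryptor({ cipherTextBlob: blob }).toString() : undefined;
```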

View File

@ -196,7 +196,7 @@ export const secretV2BridgeServiceFactory = ({
return reshapeBridgeSecret(projectId, environment, secretPath, {
...secret[0],
value: inputSecret.secretValue,
comment: inputSecret.secretComment || ""
comment: inputSecret.secretComment
});
};
@ -339,8 +339,8 @@ export const secretV2BridgeServiceFactory = ({
});
return reshapeBridgeSecret(projectId, environment, secretPath, {
...updatedSecret[0],
value: inputSecret.secretValue || "",
comment: inputSecret.secretComment || ""
value: inputSecret.secretValue,
comment: inputSecret.secretComment
});
};
@ -378,18 +378,6 @@ export const secretV2BridgeServiceFactory = ({
throw new BadRequestError({ message: "Must be user to delete personal secret" });
}
const secretToDelete = await secretDAL.findOne({
key: inputSecret.secretName,
folderId,
...(inputSecret.type === SecretType.Shared
? {}
: {
type: SecretType.Personal,
userId: actorId
})
});
if (!secretToDelete) throw new NotFoundError({ message: "Secret not found" });
const deletedSecret = await secretDAL.transaction(async (tx) =>
fnSecretBulkDelete({
projectId,
@ -424,10 +412,10 @@ export const secretV2BridgeServiceFactory = ({
...deletedSecret[0],
value: deletedSecret[0].encryptedValue
? secretManagerDecryptor({ cipherTextBlob: deletedSecret[0].encryptedValue }).toString()
: "",
: undefined,
comment: deletedSecret[0].encryptedComment
? secretManagerDecryptor({ cipherTextBlob: deletedSecret[0].encryptedComment }).toString()
: ""
: undefined
});
};
@ -441,7 +429,6 @@ export const secretV2BridgeServiceFactory = ({
actorAuthMethod,
includeImports,
recursive,
tagSlugs = [],
expandSecretReferences: shouldExpandSecretReferences
}: TGetSecretsDTO) => {
const { permission } = await permissionService.getProjectPermission(
@ -509,9 +496,6 @@ export const secretV2BridgeServiceFactory = ({
: ""
})
);
const filteredSecrets = tagSlugs.length
? decryptedSecrets.filter((secret) => Boolean(secret.tags?.find((el) => tagSlugs.includes(el.slug))))
: decryptedSecrets;
const expandSecretReferences = expandSecretReferencesFactory({
projectId,
folderDAL,
@ -520,7 +504,7 @@ export const secretV2BridgeServiceFactory = ({
});
if (shouldExpandSecretReferences) {
const secretsGroupByPath = groupBy(filteredSecrets, (i) => i.secretPath);
const secretsGroupByPath = groupBy(decryptedSecrets, (i) => i.secretPath);
for (const secretPathKey in secretsGroupByPath) {
if (Object.hasOwn(secretsGroupByPath, secretPathKey)) {
const secretsGroupByKey = secretsGroupByPath[secretPathKey].reduce(
@ -538,7 +522,7 @@ export const secretV2BridgeServiceFactory = ({
await expandSecretReferences(secretsGroupByKey);
secretsGroupByPath[secretPathKey].forEach((decryptedSecret) => {
// eslint-disable-next-line no-param-reassign
decryptedSecret.secretValue = secretsGroupByKey[decryptedSecret.secretKey].value || "";
decryptedSecret.secretValue = secretsGroupByKey[decryptedSecret.secretKey].value;
});
}
}
@ -546,7 +530,7 @@ export const secretV2BridgeServiceFactory = ({
if (!includeImports) {
return {
secrets: filteredSecrets
secrets: decryptedSecrets
};
}
@ -570,11 +554,11 @@ export const secretV2BridgeServiceFactory = ({
folderDAL,
secretImportDAL,
expandSecretReferences,
decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : "")
decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : undefined)
});
return {
secrets: filteredSecrets,
secrets: decryptedSecrets,
imports: importedSecrets
};
};
@ -670,7 +654,7 @@ export const secretV2BridgeServiceFactory = ({
secretDAL,
folderDAL,
secretImportDAL,
decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : ""),
decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : undefined),
expandSecretReferences: shouldExpandSecretReferences ? expandSecretReferences : undefined
});
@ -678,11 +662,12 @@ export const secretV2BridgeServiceFactory = ({
for (let j = 0; j < importedSecrets[i].secrets.length; j += 1) {
const importedSecret = importedSecrets[i].secrets[j];
if (secretName === importedSecret.key) {
return reshapeBridgeSecret(projectId, importedSecrets[i].environment, importedSecrets[i].secretPath, {
...importedSecret,
value: importedSecret.secretValue || "",
comment: importedSecret.secretComment || ""
});
return reshapeBridgeSecret(
projectId,
importedSecrets[i].environment,
importedSecrets[i].secretPath,
importedSecret
);
}
}
}
@ -691,7 +676,7 @@ export const secretV2BridgeServiceFactory = ({
let secretValue = secret.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: secret.encryptedValue }).toString()
: "";
: undefined;
if (shouldExpandSecretReferences && secretValue) {
const secretReferenceExpandedRecord = {
[secret.key]: { value: secretValue }
@ -706,7 +691,7 @@ export const secretV2BridgeServiceFactory = ({
value: secretValue,
comment: secret.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: secret.encryptedComment }).toString()
: ""
: undefined
});
};
@ -796,8 +781,10 @@ export const secretV2BridgeServiceFactory = ({
return newSecrets.map((el) =>
reshapeBridgeSecret(projectId, environment, secretPath, {
...el,
value: el.encryptedValue ? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString() : "",
comment: el.encryptedComment ? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString() : ""
value: el.encryptedValue ? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString() : undefined,
comment: el.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString()
: undefined
})
);
};
@ -915,8 +902,10 @@ export const secretV2BridgeServiceFactory = ({
return secrets.map((el) =>
reshapeBridgeSecret(projectId, environment, secretPath, {
...el,
value: el.encryptedValue ? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString() : "",
comment: el.encryptedComment ? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString() : ""
value: el.encryptedValue ? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString() : undefined,
comment: el.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString()
: undefined
})
);
};
@ -992,8 +981,10 @@ export const secretV2BridgeServiceFactory = ({
return secretsDeleted.map((el) =>
reshapeBridgeSecret(projectId, environment, secretPath, {
...el,
value: el.encryptedValue ? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString() : "",
comment: el.encryptedComment ? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString() : ""
value: el.encryptedValue ? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString() : undefined,
comment: el.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString()
: undefined
})
);
};
@ -1029,8 +1020,10 @@ export const secretV2BridgeServiceFactory = ({
return secretVersions.map((el) =>
reshapeBridgeSecret(folder.projectId, folder.environment.envSlug, "/", {
...el,
value: el.encryptedValue ? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString() : "",
comment: el.encryptedComment ? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString() : ""
value: el.encryptedValue ? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString() : undefined,
comment: el.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString()
: undefined
})
);
};

View File

@ -20,7 +20,6 @@ export type TGetSecretsDTO = {
environment: string;
includeImports?: boolean;
recursive?: boolean;
tagSlugs?: string[];
} & TProjectPermission;
export type TGetASecretDTO = {

View File

@ -123,6 +123,7 @@ export const secretDALFactory = (db: TDbClient) => {
.select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
.select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"))
.select(db.ref("name").withSchema(TableName.SecretTag).as("tagName"))
.orderBy("id", "asc");
const data = sqlNestRelationships({
data: secs,
@ -132,11 +133,11 @@ export const secretDALFactory = (db: TDbClient) => {
{
key: "tagId",
label: "tags" as const,
mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({
mapper: ({ tagId: id, tagColor: color, tagSlug: slug, tagName: name }) => ({
id,
color,
slug,
name: slug
name
})
}
]
@ -154,13 +155,14 @@ export const secretDALFactory = (db: TDbClient) => {
.where({ [`${TableName.Secret}Id` as const]: secretId })
.select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
.select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"));
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"))
.select(db.ref("name").withSchema(TableName.SecretTag).as("tagName"));
return tags.map((el) => ({
id: el.tagId,
color: el.tagColor,
slug: el.tagSlug,
name: el.tagSlug
name: el.tagName
}));
} catch (error) {
throw new DatabaseError({ error, name: "get secret tags" });
@ -186,6 +188,7 @@ export const secretDALFactory = (db: TDbClient) => {
.select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
.select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"))
.select(db.ref("name").withSchema(TableName.SecretTag).as("tagName"))
.orderBy("id", "asc");
const data = sqlNestRelationships({
data: secs,
@ -195,11 +198,11 @@ export const secretDALFactory = (db: TDbClient) => {
{
key: "tagId",
label: "tags" as const,
mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({
mapper: ({ tagId: id, tagColor: color, tagSlug: slug, tagName: name }) => ({
id,
color,
slug,
name: slug
name
})
}
]
@ -315,7 +318,8 @@ export const secretDALFactory = (db: TDbClient) => {
.select(selectAllTableCols(TableName.Secret))
.select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
.select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"));
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"))
.select(db.ref("name").withSchema(TableName.SecretTag).as("tagName"));
const docs = sqlNestRelationships({
data: rawDocs,
key: "id",
@ -324,11 +328,11 @@ export const secretDALFactory = (db: TDbClient) => {
{
key: "tagId",
label: "tags" as const,
mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({
mapper: ({ tagId: id, tagColor: color, tagSlug: slug, tagName: name }) => ({
id,
color,
slug,
name: slug
name
})
}
]

View File

@ -370,6 +370,7 @@ export const decryptSecretRaw = (
id: string;
slug: string;
color?: string | null;
name: string;
}[];
},
key: string
@ -411,7 +412,7 @@ export const decryptSecretRaw = (
_id: secret.id,
id: secret.id,
user: secret.userId,
tags: secret.tags?.map((el) => ({ ...el, name: el.slug })),
tags: secret.tags,
skipMultilineEncoding: secret.skipMultilineEncoding,
secretReminderRepeatDays: secret.secretReminderRepeatDays,
secretReminderNote: secret.secretReminderNote,

View File

@ -73,12 +73,12 @@ type TSecretQueueFactoryDep = {
secretVersionTagDAL: TSecretVersionTagDALFactory;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
secretV2BridgeDAL: TSecretV2BridgeDALFactory;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "batchInsert" | "insertMany" | "findLatestVersionMany">;
secretVersionTagV2BridgeDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany" | "batchInsert">;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
secretVersionTagV2BridgeDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
secretRotationDAL: Pick<TSecretRotationDALFactory, "secretOutputV2InsertMany" | "find">;
secretApprovalRequestDAL: Pick<TSecretApprovalRequestDALFactory, "deleteByProjectId">;
snapshotDAL: Pick<TSnapshotDALFactory, "findNSecretV1SnapshotByFolderId" | "deleteSnapshotsAboveLimit">;
snapshotSecretV2BridgeDAL: Pick<TSnapshotSecretV2DALFactory, "insertMany" | "batchInsert">;
snapshotSecretV2BridgeDAL: Pick<TSnapshotSecretV2DALFactory, "insertMany">;
};
export type TGetSecrets = {
@ -728,10 +728,7 @@ export const secretQueueFactory = ({
isSynced: response?.isSynced ?? true
});
} catch (err) {
logger.error(
err,
`Secret integration sync error [projectId=${job.data.projectId}] [environment=${job.data.environment}] [secretPath=${job.data.secretPath}]`
);
logger.info("Secret integration sync error: %o", err);
const message =
(err instanceof AxiosError ? JSON.stringify(err?.response?.data) : (err as Error)?.message) ||
@ -831,7 +828,7 @@ export const secretQueueFactory = ({
secretId: string;
references: { environment: string; secretPath: string; secretKey: string }[];
}[] = [];
await secretV2BridgeDAL.batchInsert(
await secretV2BridgeDAL.insertMany(
projectV1Secrets.map((el) => {
const key = decryptSymmetric128BitHexKeyUTF8({
ciphertext: el.secretKeyCiphertext,
@ -1007,14 +1004,14 @@ export const secretQueueFactory = ({
const projectV3SecretVersions = Object.values(projectV3SecretVersionsGroupById);
if (projectV3SecretVersions.length) {
await secretVersionV2BridgeDAL.batchInsert(projectV3SecretVersions, tx);
await secretVersionV2BridgeDAL.insertMany(projectV3SecretVersions, tx);
}
if (projectV3SecretVersionTags.length) {
await secretVersionTagV2BridgeDAL.batchInsert(projectV3SecretVersionTags, tx);
await secretVersionTagV2BridgeDAL.insertMany(projectV3SecretVersionTags, tx);
}
if (projectV3SnapshotSecrets.length) {
await snapshotSecretV2BridgeDAL.batchInsert(projectV3SnapshotSecrets, tx);
await snapshotSecretV2BridgeDAL.insertMany(projectV3SnapshotSecrets, tx);
}
await snapshotDAL.deleteSnapshotsAboveLimit(folderId, SNAPSHOT_BATCH_SIZE, tx);
}

View File

@ -964,8 +964,7 @@ export const secretServiceFactory = ({
environment,
includeImports,
expandSecretReferences,
recursive,
tagSlugs = []
recursive
}: TGetSecretsRawDTO) => {
const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId);
if (shouldUseSecretV2Bridge) {
@ -979,8 +978,7 @@ export const secretServiceFactory = ({
path,
recursive,
actorAuthMethod,
includeImports,
tagSlugs
includeImports
});
return { secrets, imports };
}
@ -1000,9 +998,6 @@ export const secretServiceFactory = ({
});
const decryptedSecrets = secrets.map((el) => decryptSecretRaw(el, botKey));
const filteredSecrets = tagSlugs.length
? decryptedSecrets.filter((secret) => Boolean(secret.tags?.find((el) => tagSlugs.includes(el.slug))))
: decryptedSecrets;
const processedImports = (imports || [])?.map(({ secrets: importedSecrets, ...el }) => {
const decryptedImportSecrets = importedSecrets.map((sec) =>
decryptSecretRaw(
@ -1111,14 +1106,14 @@ export const secretServiceFactory = ({
};
// expand secrets
await batchSecretsExpand(filteredSecrets);
await batchSecretsExpand(decryptedSecrets);
// expand imports by batch
await Promise.all(processedImports.map((processedImport) => batchSecretsExpand(processedImport.secrets)));
}
return {
secrets: filteredSecrets,
secrets: decryptedSecrets,
imports: processedImports
};
};
@ -1154,7 +1149,6 @@ export const secretServiceFactory = ({
type,
secretName
});
return secret;
}
@ -2087,7 +2081,7 @@ export const secretServiceFactory = ({
return {
...updatedSecret[0],
tags: [...existingSecretTags, ...tags].map((t) => ({ id: t.id, slug: t.slug, name: t.slug, color: t.color }))
tags: [...existingSecretTags, ...tags].map((t) => ({ id: t.id, slug: t.slug, name: t.name, color: t.color }))
};
};

View File

@ -149,7 +149,6 @@ export type TGetSecretsRawDTO = {
environment: string;
includeImports?: boolean;
recursive?: boolean;
tagSlugs?: string[];
} & TProjectPermission;
export type TGetASecretRawDTO = {

View File

@ -404,10 +404,6 @@ func CallGetRawSecretsV3(httpClient *resty.Client, request GetRawSecretsV3Reques
SetQueryParam("environment", request.Environment).
SetQueryParam("secretPath", request.SecretPath)
if request.TagSlugs != "" {
req.SetQueryParam("tagSlugs", request.TagSlugs)
}
if request.IncludeImport {
req.SetQueryParam("include_imports", "true")
}

View File

@ -574,7 +574,6 @@ type GetRawSecretsV3Request struct {
SecretPath string `json:"secretPath"`
IncludeImport bool `json:"include_imports"`
Recursive bool `json:"recursive"`
TagSlugs string `json:"tagSlugs,omitempty"`
}
type GetRawSecretsV3Response struct {

View File

@ -312,7 +312,7 @@ func ParseAgentConfig(configFile []byte) (*Config, error) {
func secretTemplateFunction(accessToken string, existingEtag string, currentEtag *string) func(string, string, string) ([]models.SingleEnvironmentVariable, error) {
return func(projectID, envSlug, secretPath string) ([]models.SingleEnvironmentVariable, error) {
res, err := util.GetPlainTextSecretsV3(accessToken, projectID, envSlug, secretPath, false, false, "")
res, err := util.GetPlainTextSecretsV3(accessToken, projectID, envSlug, secretPath, false, false)
if err != nil {
return nil, err
}

View File

@ -14,7 +14,6 @@ import (
"github.com/Infisical/infisical-merge/packages/util"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
"gopkg.in/yaml.v2"
)
const (
@ -189,7 +188,7 @@ func formatEnvs(envs []models.SingleEnvironmentVariable, format string) (string,
case FormatCSV:
return formatAsCSV(envs), nil
case FormatYaml:
return formatAsYaml(envs)
return formatAsYaml(envs), nil
default:
return "", fmt.Errorf("invalid format type: %s. Available format types are [%s]", format, []string{FormatDotenv, FormatJson, FormatCSV, FormatYaml, FormatDotEnvExport})
}
@ -225,18 +224,12 @@ func formatAsDotEnvExport(envs []models.SingleEnvironmentVariable) string {
return dotenv
}
func formatAsYaml(envs []models.SingleEnvironmentVariable) (string, error) {
m := make(map[string]string)
func formatAsYaml(envs []models.SingleEnvironmentVariable) string {
var dotenv string
for _, env := range envs {
m[env.Key] = env.Value
dotenv += fmt.Sprintf("%s: %s\n", env.Key, env.Value)
}
yamlBytes, err := yaml.Marshal(m)
if err != nil {
return "", fmt.Errorf("failed to format environment variables as YAML: %w", err)
}
return string(yamlBytes), nil
return dotenv
}
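One caveat with the restored formatter: raw concatenation no longer escapes newlines or other YAML metacharacters the way `yaml.Marshal` did (see the multiline case in the deleted test below). A minimal standalone sketch of the difference, assuming `gopkg.in/yaml.v2` is still available:

```go
// Standalone contrast between naive concatenation and yaml.Marshal.
package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	key, value := "KEY", "line1\nline2"

	// Naive concatenation (what formatAsYaml now does): the raw newline
	// splits the value across two lines, producing invalid YAML.
	fmt.Printf("%s: %s\n", key, value)

	// yaml.Marshal (what the removed implementation did): emits a valid
	// block scalar such as "KEY: |-\n  line1\n  line2".
	out, err := yaml.Marshal(map[string]string{key: value})
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
}
```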
// Format environment variables as a JSON file

View File

@ -1,79 +0,0 @@
package cmd
import (
"testing"
"github.com/Infisical/infisical-merge/packages/models"
"github.com/stretchr/testify/assert"
"gopkg.in/yaml.v2"
)
func TestFormatAsYaml(t *testing.T) {
tests := []struct {
name string
input []models.SingleEnvironmentVariable
expected string
}{
{
name: "Empty input",
input: []models.SingleEnvironmentVariable{},
expected: "{}\n",
},
{
name: "Single environment variable",
input: []models.SingleEnvironmentVariable{
{Key: "KEY1", Value: "VALUE1"},
},
expected: "KEY1: VALUE1\n",
},
{
name: "Multiple environment variables",
input: []models.SingleEnvironmentVariable{
{Key: "KEY1", Value: "VALUE1"},
{Key: "KEY2", Value: "VALUE2"},
{Key: "KEY3", Value: "VALUE3"},
},
expected: "KEY1: VALUE1\nKEY2: VALUE2\nKEY3: VALUE3\n",
},
{
name: "Overwriting duplicate keys",
input: []models.SingleEnvironmentVariable{
{Key: "KEY1", Value: "VALUE1"},
{Key: "KEY1", Value: "VALUE2"},
},
expected: "KEY1: VALUE2\n",
},
{
name: "Special characters in values",
input: []models.SingleEnvironmentVariable{
{Key: "KEY1", Value: "Value with spaces"},
{Key: "KEY2", Value: "Value:with:colons"},
{Key: "KEY3", Value: "Value\nwith\nnewlines"},
},
expected: "KEY1: Value with spaces\nKEY2: Value:with:colons\nKEY3: |-\n Value\n with\n newlines\n",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result, err := formatAsYaml(tt.input)
assert.NoError(t, err)
// Compare the result with the expected output
assert.Equal(t, tt.expected, result)
// Additionally, parse the result back into a map to ensure it's valid YAML
var resultMap map[string]string
err = yaml.Unmarshal([]byte(result), &resultMap)
assert.NoError(t, err)
// Create an expected map from the input
expectedMap := make(map[string]string)
for _, env := range tt.input {
expectedMap[env.Key] = env.Value
}
assert.Equal(t, expectedMap, resultMap)
})
}
}

View File

@ -155,24 +155,22 @@ var secretsSetCmd = &cobra.Command{
DisableFlagsInUseLine: true,
Args: cobra.MinimumNArgs(1),
Run: func(cmd *cobra.Command, args []string) {
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
if (token == nil) {
util.RequireLocalWorkspaceFile()
}
util.RequireLocalWorkspaceFile()
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
projectId, err := cmd.Flags().GetString("projectId")
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
@ -376,11 +374,6 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
util.HandleError(err, "Unable to parse flag")
}
secretOverriding, err := cmd.Flags().GetBool("secret-overriding")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
request := models.GetAllSecretsParameters{
Environment: environmentName,
WorkspaceId: projectId,
@ -401,12 +394,6 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
util.HandleError(err, "To fetch all secrets")
}
if secretOverriding {
secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_PERSONAL)
} else {
secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_SHARED)
}
if shouldExpand {
authParams := models.ExpandSecretsAuthentication{}
if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
@ -426,13 +413,11 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
if value, ok := secretsMap[secretKeyFromArg]; ok {
requestedSecrets = append(requestedSecrets, value)
} else {
if !(plainOutput || showOnlyValue) {
requestedSecrets = append(requestedSecrets, models.SingleEnvironmentVariable{
Key: secretKeyFromArg,
Type: "*not found*",
Value: "*not found*",
})
}
requestedSecrets = append(requestedSecrets, models.SingleEnvironmentVariable{
Key: secretKeyFromArg,
Type: "*not found*",
Value: "*not found*",
})
}
}
@ -703,7 +688,6 @@ func init() {
secretsGetCmd.Flags().Bool("include-imports", true, "Imported linked secrets ")
secretsGetCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets, and process your referenced secrets")
secretsGetCmd.Flags().Bool("recursive", false, "Fetch secrets from all sub-folders")
secretsGetCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
secretsCmd.AddCommand(secretsGetCmd)
secretsCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
secretsCmd.AddCommand(secretsSetCmd)

View File

@ -31,54 +31,39 @@ var AvailableVaults = []VaultBackendType{
}
var vaultSetCmd = &cobra.Command{
Example: `infisical vault set file`,
Use: "set [file|auto]",
Example: `infisical vault set file --passphrase <your-passphrase>`,
Use: "set [file|auto] [flags]",
Short: "Used to configure the vault backends",
DisableFlagsInUseLine: true,
Args: cobra.MinimumNArgs(1),
Run: func(cmd *cobra.Command, args []string) {
wantedVaultTypeName := args[0]
currentVaultBackend, err := util.GetCurrentVaultBackend()
vaultType := args[0]
passphrase, err := cmd.Flags().GetString("passphrase")
if err != nil {
log.Error().Msgf("Unable to set vault to [%s] because of [err=%s]", wantedVaultTypeName, err)
util.HandleError(err, "Unable to get passphrase flag")
}
if vaultType == util.VAULT_BACKEND_FILE_MODE && passphrase != "" {
setFileVaultPassphrase(passphrase)
return
}
if wantedVaultTypeName == string(currentVaultBackend) {
log.Error().Msgf("You are already on vault backend [%s]", currentVaultBackend)
return
}
if wantedVaultTypeName == util.VAULT_BACKEND_AUTO_MODE || wantedVaultTypeName == util.VAULT_BACKEND_FILE_MODE {
configFile, err := util.GetConfigFile()
if err != nil {
log.Error().Msgf("Unable to set vault to [%s] because of [err=%s]", wantedVaultTypeName, err)
return
}
configFile.VaultBackendType = wantedVaultTypeName
configFile.LoggedInUserEmail = ""
configFile.VaultBackendPassphrase = base64.StdEncoding.EncodeToString([]byte(util.GenerateRandomString(10)))
err = util.WriteConfigFile(&configFile)
if err != nil {
log.Error().Msgf("Unable to set vault to [%s] because an error occurred when saving the config file [err=%s]", wantedVaultTypeName, err)
return
}
fmt.Printf("\nSuccessfully, switched vault backend from [%s] to [%s]. Please login in again to store your login details in the new vault with [infisical login]\n", currentVaultBackend, wantedVaultTypeName)
Telemetry.CaptureEvent("cli-command:vault set", posthog.NewProperties().Set("currentVault", currentVaultBackend).Set("wantedVault", wantedVaultTypeName).Set("version", util.CLI_VERSION))
} else {
var availableVaultsNames []string
for _, vault := range AvailableVaults {
availableVaultsNames = append(availableVaultsNames, vault.Name)
}
log.Error().Msgf("The requested vault type [%s] is not available on this system. Only the following vault backends are available for you system: %s", wantedVaultTypeName, strings.Join(availableVaultsNames, ", "))
}
util.PrintWarning("This command has been deprecated. Please use 'infisical vault use [file|auto]' to select which vault to use.\n")
selectVaultTypeCmd(cmd, args)
},
}
var vaultUseCmd = &cobra.Command{
Example: `infisical vault use [file|auto]`,
Use: "use [file|auto]",
Short: "Used to select the type of vault backend to store sensitive data securely at rest",
DisableFlagsInUseLine: true,
Args: cobra.MinimumNArgs(1),
Run: selectVaultTypeCmd,
}
// runCmd represents the run command
var vaultCmd = &cobra.Command{
Use: "vault",
@ -90,6 +75,26 @@ var vaultCmd = &cobra.Command{
},
}
func setFileVaultPassphrase(passphrase string) {
configFile, err := util.GetConfigFile()
if err != nil {
log.Error().Msgf("Unable to set passphrase for file vault because of [err=%s]", err)
return
}
// encode with base64
encodedPassphrase := base64.StdEncoding.EncodeToString([]byte(passphrase))
configFile.VaultBackendPassphrase = encodedPassphrase
err = util.WriteConfigFile(&configFile)
if err != nil {
log.Error().Msgf("Unable to set passphrase for file vault because of [err=%s]", err)
return
}
util.PrintSuccessMessage("\nSuccessfully set passphrase for file vault.\n")
}
func printAvailableVaultBackends() {
fmt.Printf("Vaults are used to securely store your login details locally. Available vaults:")
for _, vaultType := range AvailableVaults {
@ -106,8 +111,53 @@ func printAvailableVaultBackends() {
fmt.Printf("\n\nYou are currently using [%s] vault to store your login credentials\n", string(currentVaultBackend))
}
func selectVaultTypeCmd(cmd *cobra.Command, args []string) {
wantedVaultTypeName := args[0]
currentVaultBackend, err := util.GetCurrentVaultBackend()
if err != nil {
log.Error().Msgf("Unable to set vault to [%s] because of [err=%s]", wantedVaultTypeName, err)
return
}
if wantedVaultTypeName == string(currentVaultBackend) {
log.Error().Msgf("You are already on vault backend [%s]", currentVaultBackend)
return
}
if wantedVaultTypeName == util.VAULT_BACKEND_AUTO_MODE || wantedVaultTypeName == util.VAULT_BACKEND_FILE_MODE {
configFile, err := util.GetConfigFile()
if err != nil {
log.Error().Msgf("Unable to set vault to [%s] because of [err=%s]", wantedVaultTypeName, err)
return
}
configFile.VaultBackendType = wantedVaultTypeName // save selected vault
configFile.LoggedInUserEmail = "" // reset the logged in user to prompt them to re login
err = util.WriteConfigFile(&configFile)
if err != nil {
log.Error().Msgf("Unable to set vault to [%s] because an error occurred when saving the config file [err=%s]", wantedVaultTypeName, err)
return
}
fmt.Printf("\nSuccessfully, switched vault backend from [%s] to [%s]. Please login in again to store your login details in the new vault with [infisical login]\n", currentVaultBackend, wantedVaultTypeName)
Telemetry.CaptureEvent("cli-command:vault set", posthog.NewProperties().Set("currentVault", currentVaultBackend).Set("wantedVault", wantedVaultTypeName).Set("version", util.CLI_VERSION))
} else {
var availableVaultsNames []string
for _, vault := range AvailableVaults {
availableVaultsNames = append(availableVaultsNames, vault.Name)
}
log.Error().Msgf("The requested vault type [%s] is not available on this system. Only the following vault backends are available for you system: %s", wantedVaultTypeName, strings.Join(availableVaultsNames, ", "))
}
}
func init() {
vaultSetCmd.Flags().StringP("passphrase", "p", "", "Set the passphrase for the file vault")
vaultCmd.AddCommand(vaultSetCmd)
vaultCmd.AddCommand(vaultUseCmd)
rootCmd.AddCommand(vaultCmd)
}

View File

@ -38,8 +38,7 @@ const (
SERVICE_TOKEN_IDENTIFIER = "service-token"
UNIVERSAL_AUTH_TOKEN_IDENTIFIER = "universal-auth-token"
INFISICAL_BACKUP_SECRET = "infisical-backup-secrets" // akhilmhdh: @deprecated remove in version v0.30
INFISICAL_BACKUP_SECRET_ENCRYPTION_KEY = "infisical-backup-secret-encryption-key"
INFISICAL_BACKUP_SECRET = "infisical-backup-secrets"
)
var (

View File

@ -71,7 +71,7 @@ func GetCurrentLoggedInUserDetails() (LoggedInUserDetails, error) {
if strings.Contains(err.Error(), "credentials not found in system keyring") {
return LoggedInUserDetails{}, errors.New("we couldn't find your logged in details, try running [infisical login] then try again")
} else {
return LoggedInUserDetails{}, fmt.Errorf("failed to fetch credentials from keyring because [err=%s]", err)
return LoggedInUserDetails{}, fmt.Errorf("failed to fetch creditnals from keyring because [err=%s]", err)
}
}

View File

@ -5,7 +5,6 @@ import (
"crypto/sha256"
"encoding/base64"
"fmt"
"math/rand"
"os"
"os/exec"
"path"
@ -26,8 +25,6 @@ type DecodedSymmetricEncryptionDetails = struct {
Key []byte
}
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
func GetBase64DecodedSymmetricEncryptionDetails(key string, cipher string, IV string, tag string) (DecodedSymmetricEncryptionDetails, error) {
cipherx, err := base64.StdEncoding.DecodeString(cipher)
if err != nil {
@ -290,11 +287,3 @@ func GetCmdFlagOrEnv(cmd *cobra.Command, flag, envName string) (string, error) {
}
return value, nil
}
func GenerateRandomString(length int) string {
b := make([]byte, length)
for i := range b {
b[i] = charset[rand.Intn(len(charset))]
}
return string(b)
}

View File

@ -2,9 +2,8 @@ package util
import (
"encoding/base64"
"fmt"
"github.com/rs/zerolog/log"
"github.com/manifoldco/promptui"
"github.com/zalando/go-keyring"
)
@ -27,13 +26,20 @@ func SetValueInKeyring(key, value string) error {
err = keyring.Set(currentVaultBackend, MAIN_KEYRING_SERVICE, key, value)
if err != nil {
log.Debug().Msg(fmt.Sprintf("Error while setting default keyring: %v", err))
configFile, _ := GetConfigFile()
if configFile.VaultBackendPassphrase == "" {
encodedPassphrase := base64.StdEncoding.EncodeToString([]byte(GenerateRandomString(10))) // generate random passphrase
PrintWarning("System keyring could not be used, falling back to `file` vault for sensitive data storage.")
passphrasePrompt := promptui.Prompt{
Label: "Enter the passphrase to use for keyring encryption",
}
passphrase, err := passphrasePrompt.Run()
if err != nil {
return err
}
encodedPassphrase := base64.StdEncoding.EncodeToString([]byte(passphrase))
configFile.VaultBackendPassphrase = encodedPassphrase
configFile.VaultBackendType = VAULT_BACKEND_FILE_MODE
err = WriteConfigFile(&configFile)
if err != nil {
return err
@ -44,7 +50,6 @@ func SetValueInKeyring(key, value string) error {
}
err = keyring.Set(VAULT_BACKEND_FILE_MODE, MAIN_KEYRING_SERVICE, key, value)
log.Debug().Msg(fmt.Sprintf("Error while setting file keyring: %v", err))
}
return err
@ -55,7 +60,13 @@ func GetValueInKeyring(key string) (string, error) {
if err != nil {
PrintErrorAndExit(1, err, "Unable to get current vault. Tip: run [infisical reset] then try again")
}
return keyring.Get(currentVaultBackend, MAIN_KEYRING_SERVICE, key)
value, err := keyring.Get(currentVaultBackend, MAIN_KEYRING_SERVICE, key)
if err != nil {
value, err = keyring.Get(VAULT_BACKEND_FILE_MODE, MAIN_KEYRING_SERVICE, key)
}
return value, err
}
@ -65,5 +76,11 @@ func DeleteValueInKeyring(key string) error {
return err
}
return keyring.Delete(currentVaultBackend, MAIN_KEYRING_SERVICE, key)
err = keyring.Delete(currentVaultBackend, MAIN_KEYRING_SERVICE, key)
if err != nil {
err = keyring.Delete(VAULT_BACKEND_FILE_MODE, MAIN_KEYRING_SERVICE, key)
}
return err
}

View File

@ -1,15 +1,14 @@
package util
import (
"crypto/rand"
"encoding/base64"
"encoding/hex"
"encoding/json"
"errors"
"fmt"
"os"
"path"
"regexp"
"slices"
"strings"
"unicode"
@ -21,7 +20,7 @@ import (
"github.com/zalando/go-keyring"
)
func GetPlainTextSecretsViaServiceToken(fullServiceToken string, environment string, secretPath string, includeImports bool, recursive bool, tagSlugs string) ([]models.SingleEnvironmentVariable, error) {
func GetPlainTextSecretsViaServiceToken(fullServiceToken string, environment string, secretPath string, includeImports bool, recursive bool) ([]models.SingleEnvironmentVariable, error) {
serviceTokenParts := strings.SplitN(fullServiceToken, ".", 4)
if len(serviceTokenParts) < 4 {
return nil, fmt.Errorf("invalid service token entered. Please double check your service token and try again")
@ -54,7 +53,6 @@ func GetPlainTextSecretsViaServiceToken(fullServiceToken string, environment str
SecretPath: secretPath,
IncludeImport: includeImports,
Recursive: recursive,
TagSlugs: tagSlugs,
})
if err != nil {
@ -78,7 +76,7 @@ func GetPlainTextSecretsViaServiceToken(fullServiceToken string, environment str
}
func GetPlainTextSecretsV3(accessToken string, workspaceId string, environmentName string, secretsPath string, includeImports bool, recursive bool, tagSlugs string) (models.PlaintextSecretResult, error) {
func GetPlainTextSecretsV3(accessToken string, workspaceId string, environmentName string, secretsPath string, includeImports bool, recursive bool) (models.PlaintextSecretResult, error) {
httpClient := resty.New()
httpClient.SetAuthToken(accessToken).
SetHeader("Accept", "application/json")
@ -88,7 +86,7 @@ func GetPlainTextSecretsV3(accessToken string, workspaceId string, environmentNa
Environment: environmentName,
IncludeImport: includeImports,
Recursive: recursive,
TagSlugs: tagSlugs,
// TagSlugs: tagSlugs,
}
if secretsPath != "" {
@ -283,36 +281,29 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectCo
}
res, err := GetPlainTextSecretsV3(loggedInUserDetails.UserCredentials.JTWToken, infisicalDotJson.WorkspaceId,
params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive, params.TagSlugs)
params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive)
log.Debug().Msgf("GetAllEnvironmentVariables: Trying to fetch secrets JTW token [err=%s]", err)
if err == nil {
backupEncryptionKey, err := GetBackupEncryptionKey()
if err != nil {
return nil, err
}
WriteBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, params.SecretsPath, backupEncryptionKey, res.Secrets)
WriteBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, params.SecretsPath, res.Secrets)
}
secretsToReturn = res.Secrets
errorToReturn = err
// only attempt to serve cached secrets if no internet connection and if at least one secret cached
if !isConnected {
backupEncryptionKey, _ := GetBackupEncryptionKey()
if backupEncryptionKey != nil {
backedUpSecrets, err := ReadBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, params.SecretsPath, backupEncryptionKey)
if len(backedUpSecrets) > 0 {
PrintWarning("Unable to fetch the latest secret(s) due to connection error, serving secrets from last successful fetch. For more info, run with --debug")
secretsToReturn = backedUpSecrets
errorToReturn = err
}
backedSecrets, err := ReadBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, params.SecretsPath)
if len(backedSecrets) > 0 {
PrintWarning("Unable to fetch latest secret(s) due to connection error, serving secrets from last successful fetch. For more info, run with --debug")
secretsToReturn = backedSecrets
errorToReturn = err
}
}
} else {
if params.InfisicalToken != "" {
log.Debug().Msg("Trying to fetch secrets using service token")
secretsToReturn, errorToReturn = GetPlainTextSecretsViaServiceToken(params.InfisicalToken, params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive, params.TagSlugs)
secretsToReturn, errorToReturn = GetPlainTextSecretsViaServiceToken(params.InfisicalToken, params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive)
} else if params.UniversalAuthAccessToken != "" {
if params.WorkspaceId == "" {
@ -320,7 +311,7 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectCo
}
log.Debug().Msg("Trying to fetch secrets using universal auth")
res, err := GetPlainTextSecretsV3(params.UniversalAuthAccessToken, params.WorkspaceId, params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive, params.TagSlugs)
res, err := GetPlainTextSecretsV3(params.UniversalAuthAccessToken, params.WorkspaceId, params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive)
errorToReturn = err
secretsToReturn = res.Secrets
@ -485,99 +476,71 @@ func OverrideSecrets(secrets []models.SingleEnvironmentVariable, secretType stri
return secretsToReturn
}
func GetBackupEncryptionKey() ([]byte, error) {
encryptionKey, err := GetValueInKeyring(INFISICAL_BACKUP_SECRET_ENCRYPTION_KEY)
func WriteBackupSecrets(workspace string, environment string, secretsPath string, secrets []models.SingleEnvironmentVariable) error {
var backedUpSecrets []models.BackupSecretKeyRing
secretValueInKeyRing, err := GetValueInKeyring(INFISICAL_BACKUP_SECRET)
if err != nil {
if err == keyring.ErrUnsupportedPlatform {
return nil, errors.New("your OS does not support keyring. Consider using a service token https://infisical.com/docs/documentation/platform/token")
} else if err == keyring.ErrNotFound {
// generate a new key
randomizedKey := make([]byte, 16)
rand.Read(randomizedKey)
encryptionKey = hex.EncodeToString(randomizedKey)
if err := SetValueInKeyring(INFISICAL_BACKUP_SECRET_ENCRYPTION_KEY, encryptionKey); err != nil {
return nil, err
}
return []byte(encryptionKey), nil
} else {
return nil, fmt.Errorf("something went wrong, failed to retrieve value from system keyring [error=%v]", err)
return errors.New("your OS does not support keyring. Consider using a service token https://infisical.com/docs/documentation/platform/token")
} else if err != keyring.ErrNotFound {
return fmt.Errorf("something went wrong, failed to retrieve value from system keyring [error=%v]", err)
}
}
return []byte(encryptionKey), nil
}
_ = json.Unmarshal([]byte(secretValueInKeyRing), &backedUpSecrets)
func WriteBackupSecrets(workspace string, environment string, secretsPath string, encryptionKey []byte, secrets []models.SingleEnvironmentVariable) error {
formattedPath := strings.ReplaceAll(secretsPath, "/", "-")
fileName := fmt.Sprintf("project_secrets_%s_%s_%s.json", workspace, environment, formattedPath)
secrets_backup_folder_name := "secrets-backup"
backedUpSecrets = slices.DeleteFunc(backedUpSecrets, func(e models.BackupSecretKeyRing) bool {
return e.SecretPath == secretsPath && e.ProjectID == workspace && e.Environment == environment
})
newBackupSecret := models.BackupSecretKeyRing{
ProjectID: workspace,
Environment: environment,
SecretPath: secretsPath,
Secrets: secrets,
}
backedUpSecrets = append(backedUpSecrets, newBackupSecret)
_, fullConfigFileDirPath, err := GetFullConfigFilePath()
listOfSecretsMarshalled, err := json.Marshal(backedUpSecrets)
if err != nil {
return fmt.Errorf("WriteBackupSecrets: unable to get full config folder path [err=%s]", err)
return err
}
// create secrets backup directory
fullPathToSecretsBackupFolder := fmt.Sprintf("%s/%s", fullConfigFileDirPath, secrets_backup_folder_name)
if _, err := os.Stat(fullPathToSecretsBackupFolder); errors.Is(err, os.ErrNotExist) {
err := os.Mkdir(fullPathToSecretsBackupFolder, os.ModePerm)
if err != nil {
return err
}
}
marshaledSecrets, _ := json.Marshal(secrets)
result, err := crypto.EncryptSymmetric(marshaledSecrets, encryptionKey)
err = SetValueInKeyring(INFISICAL_BACKUP_SECRET, string(listOfSecretsMarshalled))
if err != nil {
return fmt.Errorf("WriteBackupSecrets: Unable to encrypt local secret backup to file [err=%s]", err)
}
listOfSecretsMarshalled, _ := json.Marshal(result)
err = os.WriteFile(fmt.Sprintf("%s/%s", fullPathToSecretsBackupFolder, fileName), listOfSecretsMarshalled, 0600)
if err != nil {
return fmt.Errorf("WriteBackupSecrets: Unable to write backup secrets to file [err=%s]", err)
return fmt.Errorf("StoreUserCredsInKeyRing: unable to store user credentials because [err=%s]", err)
}
return nil
}
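The upsert above relies on Go 1.21's `slices.DeleteFunc` to drop any stale backup entry for the same project, environment, and path before appending the fresh one. A standalone sketch of that pattern (fields trimmed from `models.BackupSecretKeyRing`):

```go
// Standalone sketch of the backup upsert pattern above: drop any stale
// entry for the same project/environment/path, then append the new one.
package main

import (
	"fmt"
	"slices"
)

type BackupSecretKeyRing struct {
	ProjectID   string
	Environment string
	SecretPath  string
}

func main() {
	backups := []BackupSecretKeyRing{
		{ProjectID: "p1", Environment: "dev", SecretPath: "/"},
		{ProjectID: "p1", Environment: "prod", SecretPath: "/"},
	}

	next := BackupSecretKeyRing{ProjectID: "p1", Environment: "dev", SecretPath: "/"}
	backups = slices.DeleteFunc(backups, func(e BackupSecretKeyRing) bool {
		return e.SecretPath == next.SecretPath && e.ProjectID == next.ProjectID && e.Environment == next.Environment
	})
	backups = append(backups, next)

	fmt.Println(len(backups)) // 2: the dev entry was replaced, not duplicated
}
```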
func ReadBackupSecrets(workspace string, environment string, secretsPath string, encryptionKey []byte) ([]models.SingleEnvironmentVariable, error) {
formattedPath := strings.ReplaceAll(secretsPath, "/", "-")
fileName := fmt.Sprintf("project_secrets_%s_%s_%s.json", workspace, environment, formattedPath)
secrets_backup_folder_name := "secrets-backup"
_, fullConfigFileDirPath, err := GetFullConfigFilePath()
func ReadBackupSecrets(workspace string, environment string, secretsPath string) ([]models.SingleEnvironmentVariable, error) {
secretValueInKeyRing, err := GetValueInKeyring(INFISICAL_BACKUP_SECRET)
if err != nil {
return nil, fmt.Errorf("ReadBackupSecrets: unable to write config file because an error occurred when getting config file path [err=%s]", err)
if err == keyring.ErrUnsupportedPlatform {
return nil, errors.New("your OS does not support keyring. Consider using a service token https://infisical.com/docs/documentation/platform/token")
} else if err == keyring.ErrNotFound {
return nil, errors.New("credentials not found in system keyring")
} else {
return nil, fmt.Errorf("something went wrong, failed to retrieve value from system keyring [error=%v]", err)
}
}
fullPathToSecretsBackupFolder := fmt.Sprintf("%s/%s", fullConfigFileDirPath, secrets_backup_folder_name)
if _, err := os.Stat(fullPathToSecretsBackupFolder); errors.Is(err, os.ErrNotExist) {
return nil, nil
}
encryptedBackupSecretsFilePath := fmt.Sprintf("%s/%s", fullPathToSecretsBackupFolder, fileName)
encryptedBackupSecretsAsBytes, err := os.ReadFile(encryptedBackupSecretsFilePath)
var backedUpSecrets []models.BackupSecretKeyRing
err = json.Unmarshal([]byte(secretValueInKeyRing), &backedUpSecrets)
if err != nil {
return nil, err
return nil, fmt.Errorf("getUserCredsFromKeyRing: Something went wrong when unmarshalling user creds [err=%s]", err)
}
var encryptedBackUpSecrets models.SymmetricEncryptionResult
err = json.Unmarshal(encryptedBackupSecretsAsBytes, &encryptedBackUpSecrets)
if err != nil {
return nil, fmt.Errorf("ReadBackupSecrets: unable to parse encrypted backup secrets. The secrets backup may be malformed [err=%s]", err)
for _, backupSecret := range backedUpSecrets {
if backupSecret.Environment == environment && backupSecret.ProjectID == workspace && backupSecret.SecretPath == secretsPath {
return backupSecret.Secrets, nil
}
}
result, err := crypto.DecryptSymmetric(encryptionKey, encryptedBackUpSecrets.CipherText, encryptedBackUpSecrets.AuthTag, encryptedBackUpSecrets.Nonce)
if err != nil {
return nil, fmt.Errorf("ReadBackupSecrets: unable to decrypt encrypted backup secrets [err=%s]", err)
}
var plainTextSecrets []models.SingleEnvironmentVariable
_ = json.Unmarshal(result, &plainTextSecrets)
return plainTextSecrets, nil
return nil, nil
}
func DeleteBackupSecrets() error {
// keeping this logic for now. As more users migrate, the keyring will be used instead and this folder can be removed entirely.
secrets_backup_folder_name := "secrets-backup"
_, fullConfigFileDirPath, err := GetFullConfigFilePath()
@ -586,8 +549,8 @@ func DeleteBackupSecrets() error {
}
fullPathToSecretsBackupFolder := fmt.Sprintf("%s/%s", fullConfigFileDirPath, secrets_backup_folder_name)
DeleteValueInKeyring(INFISICAL_BACKUP_SECRET)
DeleteValueInKeyring(INFISICAL_BACKUP_SECRET_ENCRYPTION_KEY)
return os.RemoveAll(fullPathToSecretsBackupFolder)
}

View File

@ -1,4 +1,4 @@
Warning: Unable to fetch the latest secret(s) due to connection error, serving secrets from last successful fetch. For more info, run with --debug
Warning: Unable to fetch latest secret(s) due to connection error, serving secrets from last successful fetch. For more info, run with --debug
┌───────────────┬──────────────┬─────────────┐
│ SECRET NAME │ SECRET VALUE │ SECRET TYPE │
├───────────────┼──────────────┼─────────────┤

View File

@ -7,6 +7,7 @@ import (
"github.com/bradleyjkemp/cupaloy/v2"
)
func TestServiceToken_SecretsGetWithImportsAndRecursiveCmd(t *testing.T) {
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent")
@ -93,7 +94,7 @@ func TestUserAuth_SecretsGetAll(t *testing.T) {
}
// explicitly called here because it should happen directly after successful secretsGetAll
// testUserAuth_SecretsGetAllWithoutConnection(t)
testUserAuth_SecretsGetAllWithoutConnection(t)
}
func testUserAuth_SecretsGetAllWithoutConnection(t *testing.T) {
@ -106,7 +107,7 @@ func testUserAuth_SecretsGetAllWithoutConnection(t *testing.T) {
// set it to a URL that will always be unreachable
newConfigFile.LoggedInUserDomain = "http://localhost:4999"
util.WriteConfigFile(&newConfigFile)
// restore config file
defer util.WriteConfigFile(&originalConfigFile)
@ -120,4 +121,4 @@ func testUserAuth_SecretsGetAllWithoutConnection(t *testing.T) {
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}
}

View File

@ -1,16 +1,14 @@
---
title: "Spending Money"
title: "Spenging Money"
sidebarTitle: "Spending Money"
description: "The guide to spending money at Infisical."
---
Fairly frequently, you might run into situations when you need to spend company money.
<Note>
Please spend money in a way that you think is in the best interest of the company.
</Note>
**Please spend money in a way that you think is in the best interest of the company.**
# Trivial expenses
## Trivial expenses
We don't want you to be slowed down because you're waiting for an approval to purchase some SaaS. For trivial expenses, **just do it**.
@ -24,35 +22,6 @@ Make sure you keep copies for all receipts. If you expense something on a compan
You should default to using your company card in all cases - it has no transaction fees. If using your personal card is unavoidable, please reach out to Maidul to get it reimbursed manually.
# Equipment
Infisical is a remote first company so we understand the importance of having a comfortable work setup. To support this, we provide allowances for essential office equipment.
### Desk & Chair
Most people already have a comfortable desk and chair, but if you need an upgrade, we offer the following allowances.
While we're not yet able to provide the latest and greatest, we strive to be reasonable given the stage of our company.
**Desk**: $150 USD
**Chair**: $150 USD
### Laptop
Each team member will receive a company-issued Macbook Pro before they start their first day.
### Notes
1. All equipment purchased using company allowances remains the property of Infisical.
2. Keep all receipts for equipment purchases and submit them for reimbursement.
3. If you leave Infisical, you may be required to return company-owned equipment.
Please note that we're unable to offer a split payment option where Infisical pays half and you pay half for equipment exceeding the allowance.
This is because we don't yet have a formal HR department to handle such logistics.
For any equipment related questions, please reach out to Maidul.
## Brex
We use Brex as our primary credit card provider. Don't have a company card yet? Reach out to Maidul.

View File

@ -0,0 +1,4 @@
---
title: "List CA certificates"
openapi: "GET /api/v1/pki/ca/{caId}/ca-certificates"
---

View File

@ -0,0 +1,4 @@
---
title: "Renew"
openapi: "POST /api/v1/pki/ca/{caId}/renew"
---

View File

@ -1,4 +0,0 @@
---
title: "Create Lease"
openapi: "POST /api/v1/dynamic-secrets/leases"
---

View File

@ -1,4 +0,0 @@
---
title: "Create"
openapi: "POST /api/v1/dynamic-secrets"
---

View File

@ -1,4 +0,0 @@
---
title: "Delete Lease"
openapi: "DELETE /api/v1/dynamic-secrets/leases/{leaseId}"
---

View File

@ -1,4 +0,0 @@
---
title: "Delete"
openapi: "DELETE /api/v1/dynamic-secrets/{name}"
---

View File

@ -1,4 +0,0 @@
---
title: "Get Lease"
openapi: "GET /api/v1/dynamic-secrets/leases/{leaseId}"
---

View File

@ -1,4 +0,0 @@
---
title: "Get"
openapi: "GET /api/v1/dynamic-secrets/{name}"
---

View File

@ -1,4 +0,0 @@
---
title: "List Leases"
openapi: "GET /api/v1/dynamic-secrets/{name}/leases"
---

View File

@ -1,4 +0,0 @@
---
title: "List"
openapi: "GET /api/v1/dynamic-secrets"
---

View File

@ -1,4 +0,0 @@
---
title: "Renew Lease"
openapi: "POST /api/v1/dynamic-secrets/leases/{leaseId}/renew"
---

View File

@ -1,4 +0,0 @@
---
title: "Update"
openapi: "PATCH /api/v1/dynamic-secrets/{name}"
---

View File

@ -30,5 +30,8 @@ description: "Change the vault type in Infisical"
## Description
To safeguard your login details when using the CLI, Infisical attempts to store them in a system keyring. If a system keyring cannot be found on your machine, the data is stored in a config file.
To safeguard your login details when using the CLI, Infisical places them in a system vault or an encrypted text file, protected by a passphrase that only the user knows.
<Tip>To avoid constantly entering your passphrase when using the `file` vault type, use the `infisical vault set file --passphrase <your-passphrase>` CLI command to specify your password once.</Tip>

View File

@ -36,7 +36,7 @@ A typical workflow for setting up a Private CA hierarchy consists of the followi
intermediate certificate back to the intermediate CA as part of Step 2.
</Note>
## Guide
## Guide to Creating a CA Hierarchy
In the following steps, we explore how to create a simple Private CA hierarchy
consisting of a root CA and an intermediate CA.
@ -240,6 +240,51 @@ consisting of a root CA and an intermediate CA.
</Tab>
</Tabs>
## Guide to CA Renewal
In the following steps, we explore how to renew a CA certificate via the same key pair.
<Tabs>
<Tab title="Infisical UI">
Head to the CA Page of the CA you wish to renew and press **Renew CA** on the left side.
![pki ca renewal page](/images/platform/pki/ca-renewal-page.png)
Input a new **Valid Until** date to be used for the renewed CA certificate and press **Renew** to renew the CA.
![pki ca renewal modal](/images/platform/pki/ca-renewal-modal.png)
<Note>
The new **Valid Until** date must be within the validity period of the
parent CA.
</Note>
</Tab>
<Tab title="API">
To renew a CA certificate, make an API request to the [Renew CA](/api-reference/endpoints/certificate-authorities/renew) API endpoint, specifying the new `notAfter` date for the CA.
### Sample request
```bash Request
curl --location --request POST 'https://app.infisical.com/api/v1/pki/ca/<ca-id>/renew' \
--header 'Authorization: Bearer <access-token>' \
--header 'Content-Type: application/json' \
--data-raw '{
"type": "existing",
"notAfter": "2029-06-12"
}'
```
### Sample response
```bash Response
{
certificate: "...",
certificateChain: "...",
serialNumber: "..."
}
```
</Tab>
</Tabs>
## FAQ
<AccordionGroup>
@ -247,4 +292,8 @@ consisting of a root CA and an intermediate CA.
Infisical supports `RSA 2048`, `RSA 4096`, `ECDSA P-256`, `ECDSA P-384` key
algorithms specified at the time of creating a CA.
</Accordion>
<Accordion title="Does Infisical support CA renewal via new key pair">
At the moment, Infisical only supports CA renewal via same key pair. We
anticipate supporting CA renewal via new key pair in the coming month.
</Accordion>
</AccordionGroup>

View File

@ -4,10 +4,10 @@ description: "Learn how to configure Google SAML for Infisical SSO."
---
<Info>
Google SAML SSO feature is a paid feature. If you're using Infisical Cloud,
then it is available under the **Pro Tier**. If you're self-hosting Infisical,
then you should contact sales@infisical.com to purchase an enterprise license
to use it.
Google SAML SSO feature is a paid feature.
If you're using Infisical Cloud, then it is available under the **Pro Tier**. If you're self-hosting Infisical,
then you should contact sales@infisical.com to purchase an enterprise license to use it.
</Info>
<Steps>
@ -15,9 +15,8 @@ description: "Learn how to configure Google SAML for Infisical SSO."
In Infisical, head to your Organization Settings > Authentication > SAML SSO Configuration and select **Set up SAML SSO**.
Next, note the **ACS URL** and **SP Entity ID** to use when configuring the Google SAML application.
![Google SAML initial configuration](../../../images/sso/google-saml/init-config.png)
</Step>
<Step title="Create a SAML application in Google">
2.1. In your [Google Admin console](https://support.google.com/a/answer/182076), head to Menu > Apps > Web and mobile apps and
@ -33,7 +32,7 @@ description: "Learn how to configure Google SAML for Infisical SSO."
![Google SAML custom app details](../../../images/sso/google-saml/custom-saml-app-config.png)
2.4. Back in Infisical, set **SSO URL** and **Certificate** to the corresponding items from step 2.3.
2.4. Back in Infisical, set **SSO URL**, **IdP Entity ID**, and **Certificate** to the corresponding items from step 2.3.
![Google SAML Infisical config](../../../images/sso/google-saml/infisical-config.png)
@ -42,7 +41,7 @@ description: "Learn how to configure Google SAML for Infisical SSO."
Also, check the **Signed response** checkbox.
![Google SAML app config 2](../../../images/sso/google-saml/custom-saml-app-config-2.png)
2.6. In the **Attribute mapping** tab, configure the following map:
- **First name** -> **firstName**
@ -50,7 +49,7 @@ description: "Learn how to configure Google SAML for Infisical SSO."
- **Primary email** -> **email**
![Google SAML attribute mapping](../../../images/sso/google-saml/attribute-mapping.png)
Click **Finish**.
</Step>
<Step title="Assign users in Google Workspace to the application">
@ -58,11 +57,11 @@ description: "Learn how to configure Google SAML for Infisical SSO."
and press on **User access**.
![Google SAML user access](../../../images/sso/google-saml/user-access.png)
To assign everyone in your organization to the application, click **On for everyone** or **Off for everyone** and then click **Save**.
You can also assign an organizational unit or set of users to an application; you can learn more about that [here](https://support.google.com/a/answer/6087519?hl=en#add_custom_saml&turn_on&verify_sso&&zippy=%2Cstep-add-the-custom-saml-app%2Cstep-turn-on-your-saml-app%2Cstep-verify-that-sso-is-working-with-your-custom-app).
![Google SAML user access assignment](../../../images/sso/google-saml/user-access-assign.png)
</Step>
<Step title="Enable SAML SSO in Infisical">
@ -76,24 +75,21 @@ description: "Learn how to configure Google SAML for Infisical SSO."
To enforce SAML SSO, you're required to test out the SAML connection by successfully authenticating at least one Google user with Infisical;
Once you've completed this requirement, you can toggle the **Enforce SAML SSO** button to enforce SAML SSO.
<Warning>
We recommend ensuring that your account is provisioned the application in Google
prior to enforcing SAML SSO to prevent any unintended issues.
</Warning>
</Step>
</Steps>
<Note>
If you're configuring SAML SSO on a self-hosted instance of Infisical, make
sure to set the `AUTH_SECRET` and `SITE_URL` environment variable for it to
work: - `AUTH_SECRET`: A secret key used for signing and verifying JWT. This
can be a random 32-byte base64 string generated with `openssl rand -base64
32`. - `SITE_URL`: The URL of your self-hosted instance of Infisical - should
be an absolute URL including the protocol (e.g. https://app.infisical.com)
If you're configuring SAML SSO on a self-hosted instance of Infisical, make sure to
set the `AUTH_SECRET` and `SITE_URL` environment variable for it to work:
- `AUTH_SECRET`: A secret key used for signing and verifying JWT. This can be a random 32-byte base64 string generated with `openssl rand -base64 32`.
- `SITE_URL`: The URL of your self-hosted instance of Infisical - should be an absolute URL including the protocol (e.g. https://app.infisical.com)
</Note>
References:
- Google's guide to [set up your own custom SAML app](https://support.google.com/a/answer/6087519?hl=en#add_custom_saml&turn_on&verify_sso&&zippy=%2Cstep-add-the-custom-saml-app%2Cstep-turn-on-your-saml-app%2Cstep-verify-that-sso-is-working-with-your-custom-app).
- Google's guide to [set up your own custom SAML app](https://support.google.com/a/answer/6087519?hl=en#add_custom_saml&turn_on&verify_sso&&zippy=%2Cstep-add-the-custom-saml-app%2Cstep-turn-on-your-saml-app%2Cstep-verify-that-sso-is-working-with-your-custom-app).

Binary file not shown.

After

Width:  |  Height:  |  Size: 408 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 584 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 219 KiB

After

Width:  |  Height:  |  Size: 605 KiB

View File

@ -1,5 +1,5 @@
---
title: "Kubernetes Operator"
title: "Kubernetes"
description: "How to use Infisical to inject secrets into Kubernetes clusters."
---
@ -9,10 +9,6 @@ The Infisical Secrets Operator is a Kubernetes controller that retrieves secrets
It uses an `InfisicalSecret` resource to specify authentication and storage methods.
The operator continuously updates secrets and can also reload dependent deployments automatically.
<Note>
If you are already using the External Secrets operator, you can view the integration documentation for it [here](https://external-secrets.io/latest/provider/infisical/).
</Note>
## Install Operator
The operator can be installed via [Helm](https://helm.sh) or [kubectl](https://github.com/kubernetes/kubectl)

Some files were not shown because too many files have changed in this diff.