Mirror of https://github.com/Infisical/infisical.git
Synced 2025-07-31 10:38:12 +00:00

Compare commits: 90 commits (daniel/mig ... infisical/)

21403f6fe5
2f9e542b31
089d6812fd
71c9c0fa1e
2b977eeb33
a692148597
64bfa4f334
e3eb14bfd9
24b50651c9
1cd459fda7
38917327d9
d7b494c6f8
93208afb36
1a084d8fcf
dd4f133c6c
c41d27e1ae
1866ed8d23
7b3b232dde
9d618b4ae9
5330ab2171
662e588c22
90057d80ff
1eda7aaaac
00dcadbc08
7a7289ebd0
e5d4677fd6
bce3f3d676
300372fa98
47a4f8bae9
863719f296
7317dc1cf5
75df898e78
0de6add3f7
0c008b6393
0c3894496c
35fbd5d49d
d03b453e3d
96e331b678
d4d468660d
75a4965928
660c09ded4
b5287d91c0
6a17763237
f2bd3daea2
7f70f96936
73e0a54518
0d295a2824
9a62efea4f
506c30bcdb
735ad4ff65
41e36dfcef
421d8578b7
6685f8aa0a
d6c37c1065
54f3f94185
907537f7c0
61263b9384
d71c85e052
b6d8be2105
0693f81d0a
61d516ef35
31fc64fb4c
8bf7e4c4d1
2027d4b44e
d401c9074e
afe35dbbb5
6ff1602fd5
6603364749
7c84adc1c2
fa8d6735a1
a6137f267d
3be3d807d2
9f7ea3c4e5
e67218f170
269c40c67c
089a7e880b
64ec741f1a
c98233ddaf
d4cfd0b6ed
ba1fd8a3f7
e8f09d2c7b
ada63b9e7d
3f6a0c77f1
9e4b66e215
8a14914bc3
fc3a409164
ffc58b0313
9a7e05369c
33b49f4466
60895537a7
@@ -74,6 +74,14 @@ CAPTCHA_SECRET=
 
 NEXT_PUBLIC_CAPTCHA_SITE_KEY=
 
+OTEL_TELEMETRY_COLLECTION_ENABLED=
+OTEL_EXPORT_TYPE=
+OTEL_EXPORT_OTLP_ENDPOINT=
+OTEL_OTLP_PUSH_INTERVAL=
+
+OTEL_COLLECTOR_BASIC_AUTH_USERNAME=
+OTEL_COLLECTOR_BASIC_AUTH_PASSWORD=
+
 PLAIN_API_KEY=
 PLAIN_WISH_LABEL_IDS=
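Note: illustrative values for the new OTEL_* variables when exporting to an OTLP collector over HTTP. The endpoint and credentials below are examples, not values from this diff; 4318 is the conventional OTLP/HTTP port, and 30000 ms matches the schema default added in backend/src/lib/config/env.ts later in this compare:

OTEL_TELEMETRY_COLLECTION_ENABLED=true
OTEL_EXPORT_TYPE=otlp
OTEL_EXPORT_OTLP_ENDPOINT=http://localhost:4318
OTEL_OTLP_PUSH_INTERVAL=30000
OTEL_COLLECTOR_BASIC_AUTH_USERNAME=otel-user
OTEL_COLLECTOR_BASIC_AUTH_PASSWORD=otel-password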
@@ -1,6 +1,12 @@
 #!/usr/bin/env sh
 . "$(dirname -- "$0")/_/husky.sh"
 
+# Check if infisical is installed
+if ! command -v infisical >/dev/null 2>&1; then
+  echo "\nError: Infisical CLI is not installed. Please install the Infisical CLI before committing.\n You can refer to the documentation at https://infisical.com/docs/cli/overview\n\n"
+  exit 1
+fi
+
 npx lint-staged
+
+infisical scan git-changes --staged -v
Makefile (4 changes)

@@ -10,6 +10,9 @@ up-dev:
 
 up-dev-ldap:
 	docker compose -f docker-compose.dev.yml --profile ldap up --build
 
+up-dev-metrics:
+	docker compose -f docker-compose.dev.yml --profile metrics up --build
+
 up-prod:
 	docker-compose -f docker-compose.prod.yml up --build
 
@@ -27,4 +30,3 @@ reviewable-api:
 	npm run type:check
 
 reviewable: reviewable-ui reviewable-api
@@ -5,6 +5,9 @@ export const mockSmtpServer = (): TSmtpService => {
   return {
     sendMail: async (data) => {
       storage.push(data);
     },
+    verify: async () => {
+      return true;
+    }
   };
 };
backend/package-lock.json (generated, 1616 changes)
File diff suppressed because it is too large
@@ -132,6 +132,7 @@
     "@fastify/multipart": "8.3.0",
     "@fastify/passport": "^2.4.0",
     "@fastify/rate-limit": "^9.0.0",
+    "@fastify/request-context": "^5.1.0",
     "@fastify/session": "^10.7.0",
     "@fastify/swagger": "^8.14.0",
     "@fastify/swagger-ui": "^2.1.0",
@@ -140,6 +141,14 @@
     "@octokit/plugin-retry": "^5.0.5",
     "@octokit/rest": "^20.0.2",
     "@octokit/webhooks-types": "^7.3.1",
+    "@opentelemetry/api": "^1.9.0",
+    "@opentelemetry/auto-instrumentations-node": "^0.53.0",
+    "@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
+    "@opentelemetry/exporter-prometheus": "^0.55.0",
+    "@opentelemetry/instrumentation": "^0.55.0",
+    "@opentelemetry/resources": "^1.28.0",
+    "@opentelemetry/sdk-metrics": "^1.28.0",
+    "@opentelemetry/semantic-conventions": "^1.27.0",
     "@peculiar/asn1-schema": "^2.3.8",
     "@peculiar/x509": "^1.12.1",
     "@serdnam/pino-cloudwatch-transport": "^1.0.4",
@@ -8,61 +8,80 @@ const prompt = promptSync({
   sigint: true
 });
 
+const sanitizeInputParam = (value: string) => {
+  // Escape double quotes and wrap the entire value in double quotes
+  if (value) {
+    return `"${value.replace(/"/g, '\\"')}"`;
+  }
+  return '""';
+};
+
 const exportDb = () => {
-  const exportHost = prompt("Enter your Postgres Host to migrate from: ");
-  const exportPort = prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432";
-  const exportUser = prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical";
-  const exportPassword = prompt("Enter your Postgres Password to migrate from: ");
-  const exportDatabase = prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical";
+  const exportHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate from: "));
+  const exportPort = sanitizeInputParam(
+    prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432"
+  );
+  const exportUser = sanitizeInputParam(
+    prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical"
+  );
+  const exportPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate from: "));
+  const exportDatabase = sanitizeInputParam(
+    prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical"
+  );
 
   // we do not include the audit_log and secret_sharing entries
   execSync(
-    `PGDATABASE="${exportDatabase}" PGPASSWORD="${exportPassword}" PGHOST="${exportHost}" PGPORT=${exportPort} PGUSER=${exportUser} pg_dump infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
+    `PGDATABASE=${exportDatabase} PGPASSWORD=${exportPassword} PGHOST=${exportHost} PGPORT=${exportPort} PGUSER=${exportUser} pg_dump -Fc infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
       __dirname,
-      "../src/db/dump.sql"
+      "../src/db/backup.dump"
     )}`,
     { stdio: "inherit" }
   );
 };
 
 const importDbForOrg = () => {
-  const importHost = prompt("Enter your Postgres Host to migrate to: ");
-  const importPort = prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432";
-  const importUser = prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical";
-  const importPassword = prompt("Enter your Postgres Password to migrate to: ");
-  const importDatabase = prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical";
-  const orgId = prompt("Enter the organization ID to migrate: ");
+  const importHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate to: "));
+  const importPort = sanitizeInputParam(prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432");
+  const importUser = sanitizeInputParam(
+    prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical"
+  );
+  const importPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate to: "));
+  const importDatabase = sanitizeInputParam(
+    prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical"
+  );
+  const orgId = sanitizeInputParam(prompt("Enter the organization ID to migrate: "));
 
-  if (!existsSync(path.join(__dirname, "../src/db/dump.sql"))) {
+  if (!existsSync(path.join(__dirname, "../src/db/backup.dump"))) {
     console.log("File not found, please export the database first.");
     return;
   }
 
   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -f ${path.join(
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} pg_restore -d ${importDatabase} --verbose ${path.join(
       __dirname,
-      "../src/db/dump.sql"
-    )}`
+      "../src/db/backup.dump"
+    )}`,
+    { maxBuffer: 1024 * 1024 * 4096 }
   );
 
   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
   );
 
   // delete global/instance-level resources not relevant to the organization to migrate
   // users
   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
   );
 
   // identities
   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
   );
 
   // reset slack configuration in superAdmin
   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
   );
 
   console.log("Organization migrated successfully.");
backend/src/@types/fastify-request-context.d.ts (vendored, new file, 7 lines)

@@ -0,0 +1,7 @@
+import "@fastify/request-context";
+
+declare module "@fastify/request-context" {
+  interface RequestContextData {
+    requestId: string;
+  }
+}
backend/src/@types/fastify-zod.d.ts (vendored, 4 changes)

@@ -1,6 +1,6 @@
 import { FastifyInstance, RawReplyDefaultExpression, RawRequestDefaultExpression, RawServerDefault } from "fastify";
-import { Logger } from "pino";
 
+import { CustomLogger } from "@app/lib/logger/logger";
 import { ZodTypeProvider } from "@app/server/plugins/fastify-zod";
 
 declare global {
@@ -8,7 +8,7 @@ declare global {
     RawServerDefault,
     RawRequestDefaultExpression<RawServerDefault>,
     RawReplyDefaultExpression<RawServerDefault>,
-    Readonly<Logger>,
+    Readonly<CustomLogger>,
     ZodTypeProvider
   >;
@@ -2,7 +2,7 @@ import { Knex } from "knex";
 
 import { TableName } from "../schemas";
 
-const BATCH_SIZE = 30_000;
+const BATCH_SIZE = 10_000;
 
 export async function up(knex: Knex): Promise<void> {
   const hasAuthMethodColumnAccessToken = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");
@@ -12,7 +12,18 @@ export async function up(knex: Knex): Promise<void> {
       t.string("authMethod").nullable();
     });
 
-    let nullableAccessTokens = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE);
+    // first we remove identities without auth method that is unused
+    // ! We delete all access tokens where the identity has no auth method set!
+    // ! Which means un-configured identities that for some reason have access tokens, will have their access tokens deleted.
+    await knex(TableName.IdentityAccessToken)
+      .leftJoin(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityAccessToken}.identityId`)
+      .whereNull(`${TableName.Identity}.authMethod`)
+      .delete();
+
+    let nullableAccessTokens = await knex(TableName.IdentityAccessToken)
+      .whereNull("authMethod")
+      .limit(BATCH_SIZE)
+      .select("id");
     let totalUpdated = 0;
 
     do {
@@ -33,24 +44,15 @@ export async function up(knex: Knex): Promise<void> {
      });
 
       // eslint-disable-next-line no-await-in-loop
-      nullableAccessTokens = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE);
+      nullableAccessTokens = await knex(TableName.IdentityAccessToken)
+        .whereNull("authMethod")
+        .limit(BATCH_SIZE)
+        .select("id");
 
       totalUpdated += batchIds.length;
       console.log(`Updated ${batchIds.length} access tokens in batch <> Total updated: ${totalUpdated}`);
     } while (nullableAccessTokens.length > 0);
 
-    // ! We delete all access tokens where the identity has no auth method set!
-    // ! Which means un-configured identities that for some reason have access tokens, will have their access tokens deleted.
-    await knex(TableName.IdentityAccessToken)
-      .whereNotExists((queryBuilder) => {
-        void queryBuilder
-          .select("id")
-          .from(TableName.Identity)
-          .whereRaw(`${TableName.IdentityAccessToken}."identityId" = ${TableName.Identity}.id`)
-          .whereNotNull("authMethod");
-      })
-      .delete();
-
     // Finally we set the authMethod to notNullable after populating the column.
     // This will fail if the data is not populated correctly, so it's safe.
     await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
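Note: the reworked migration also narrows each batch query with .select("id") so only primary keys are held in memory, and deletes orphaned tokens up front instead of after the backfill. The batching pattern itself, stripped to its core (a sketch; the actual UPDATE body is elided in this compare view, so setAuthMethodForBatch below is a hypothetical stand-in):

// Repeatedly claim up to BATCH_SIZE rows still missing the backfill,
// update them, and stop once a pass returns no rows.
let batch = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE).select("id");

while (batch.length > 0) {
  const batchIds = batch.map((row) => row.id);
  // eslint-disable-next-line no-await-in-loop
  await setAuthMethodForBatch(knex, batchIds); // hypothetical helper
  // eslint-disable-next-line no-await-in-loop
  batch = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE).select("id");
}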
@@ -0,0 +1,21 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
+    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
+      t.dropForeign("orgId");
+      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
+    });
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
+    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
+      t.dropForeign("orgId");
+      t.foreign("orgId").references("id").inTable(TableName.Organization);
+    });
+  }
+}
@@ -0,0 +1,20 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
+    await knex(TableName.IdentityMetadata).whereNull("value").delete();
+    await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
+      t.string("value", 1020).notNullable().alter();
+    });
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
+    await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
+      t.string("value", 1020).alter();
+    });
+  }
+}
@@ -13,6 +13,7 @@ import { RabbitMqProvider } from "./rabbit-mq";
 import { RedisDatabaseProvider } from "./redis";
 import { SapHanaProvider } from "./sap-hana";
 import { SqlDatabaseProvider } from "./sql-database";
+import { TotpProvider } from "./totp";
 
 export const buildDynamicSecretProviders = () => ({
   [DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
@@ -27,5 +28,6 @@ export const buildDynamicSecretProviders = () => ({
   [DynamicSecretProviders.AzureEntraID]: AzureEntraIDProvider(),
   [DynamicSecretProviders.Ldap]: LdapProvider(),
   [DynamicSecretProviders.SapHana]: SapHanaProvider(),
-  [DynamicSecretProviders.Snowflake]: SnowflakeProvider()
+  [DynamicSecretProviders.Snowflake]: SnowflakeProvider(),
+  [DynamicSecretProviders.Totp]: TotpProvider()
 });
@@ -17,6 +17,17 @@ export enum LdapCredentialType {
   Static = "static"
 }
 
+export enum TotpConfigType {
+  URL = "url",
+  MANUAL = "manual"
+}
+
+export enum TotpAlgorithm {
+  SHA1 = "sha1",
+  SHA256 = "sha256",
+  SHA512 = "sha512"
+}
+
 export const DynamicSecretRedisDBSchema = z.object({
   host: z.string().trim().toLowerCase(),
   port: z.number(),
@@ -221,6 +232,34 @@ export const LdapSchema = z.union([
   })
 ]);
 
+export const DynamicSecretTotpSchema = z.discriminatedUnion("configType", [
+  z.object({
+    configType: z.literal(TotpConfigType.URL),
+    url: z
+      .string()
+      .url()
+      .trim()
+      .min(1)
+      .refine((val) => {
+        const urlObj = new URL(val);
+        const secret = urlObj.searchParams.get("secret");
+
+        return Boolean(secret);
+      }, "OTP URL must contain secret field")
+  }),
+  z.object({
+    configType: z.literal(TotpConfigType.MANUAL),
+    secret: z
+      .string()
+      .trim()
+      .min(1)
+      .transform((val) => val.replace(/\s+/g, "")),
+    period: z.number().optional(),
+    algorithm: z.nativeEnum(TotpAlgorithm).optional(),
+    digits: z.number().optional()
+  })
+]);
+
 export enum DynamicSecretProviders {
   SqlDatabase = "sql-database",
   Cassandra = "cassandra",
@@ -234,7 +273,8 @@ export enum DynamicSecretProviders {
   AzureEntraID = "azure-entra-id",
   Ldap = "ldap",
   SapHana = "sap-hana",
-  Snowflake = "snowflake"
+  Snowflake = "snowflake",
+  Totp = "totp"
 }
 
 export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
@@ -250,7 +290,8 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
   z.object({ type: z.literal(DynamicSecretProviders.RabbitMq), inputs: DynamicSecretRabbitMqSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.AzureEntraID), inputs: AzureEntraIDSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema }),
-  z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema })
+  z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema }),
+  z.object({ type: z.literal(DynamicSecretProviders.Totp), inputs: DynamicSecretTotpSchema })
 ]);
 
 export type TDynamicProviderFns = {
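Note: two inputs that would pass the new DynamicSecretTotpSchema, one per configType (a sketch; the otpauth URL and secret values are illustrative):

import { DynamicSecretTotpSchema, TotpAlgorithm, TotpConfigType } from "./models";

// URL mode: the otpauth:// URL must carry a `secret` query parameter.
const urlInput = DynamicSecretTotpSchema.parse({
  configType: TotpConfigType.URL,
  url: "otpauth://totp/Example:alice?secret=JBSWY3DPEHPK3PXP&period=30&digits=6"
});

// Manual mode: whitespace in the secret is stripped by the transform.
const manualInput = DynamicSecretTotpSchema.parse({
  configType: TotpConfigType.MANUAL,
  secret: "JBSW Y3DP EHPK 3PXP",
  period: 30,
  algorithm: TotpAlgorithm.SHA1,
  digits: 6
});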
backend/src/ee/services/dynamic-secret/providers/totp.ts (new file, 92 lines)

@@ -0,0 +1,92 @@
+import { authenticator } from "otplib";
+import { HashAlgorithms } from "otplib/core";
+
+import { BadRequestError } from "@app/lib/errors";
+import { alphaNumericNanoId } from "@app/lib/nanoid";
+
+import { DynamicSecretTotpSchema, TDynamicProviderFns, TotpConfigType } from "./models";
+
+export const TotpProvider = (): TDynamicProviderFns => {
+  const validateProviderInputs = async (inputs: unknown) => {
+    const providerInputs = await DynamicSecretTotpSchema.parseAsync(inputs);
+
+    return providerInputs;
+  };
+
+  const validateConnection = async () => {
+    return true;
+  };
+
+  const create = async (inputs: unknown) => {
+    const providerInputs = await validateProviderInputs(inputs);
+
+    const entityId = alphaNumericNanoId(32);
+    const authenticatorInstance = authenticator.clone();
+
+    let secret: string;
+    let period: number | null | undefined;
+    let digits: number | null | undefined;
+    let algorithm: HashAlgorithms | null | undefined;
+
+    if (providerInputs.configType === TotpConfigType.URL) {
+      const urlObj = new URL(providerInputs.url);
+      secret = urlObj.searchParams.get("secret") as string;
+      const periodFromUrl = urlObj.searchParams.get("period");
+      const digitsFromUrl = urlObj.searchParams.get("digits");
+      const algorithmFromUrl = urlObj.searchParams.get("algorithm");
+
+      if (periodFromUrl) {
+        period = +periodFromUrl;
+      }
+
+      if (digitsFromUrl) {
+        digits = +digitsFromUrl;
+      }
+
+      if (algorithmFromUrl) {
+        algorithm = algorithmFromUrl.toLowerCase() as HashAlgorithms;
+      }
+    } else {
+      secret = providerInputs.secret;
+      period = providerInputs.period;
+      digits = providerInputs.digits;
+      algorithm = providerInputs.algorithm as unknown as HashAlgorithms;
+    }
+
+    if (digits) {
+      authenticatorInstance.options = { digits };
+    }
+
+    if (algorithm) {
+      authenticatorInstance.options = { algorithm };
+    }
+
+    if (period) {
+      authenticatorInstance.options = { step: period };
+    }
+
+    return {
+      entityId,
+      data: { TOTP: authenticatorInstance.generate(secret), TIME_REMAINING: authenticatorInstance.timeRemaining() }
+    };
+  };
+
+  const revoke = async (_inputs: unknown, entityId: string) => {
+    return { entityId };
+  };
+
+  // eslint-disable-next-line @typescript-eslint/no-unused-vars
+  const renew = async (_inputs: unknown, _entityId: string) => {
+    throw new BadRequestError({
+      message: "Lease renewal is not supported for TOTPs"
+    });
+  };
+
+  return {
+    validateProviderInputs,
+    validateConnection,
+    create,
+    revoke,
+    renew
+  };
+};
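Note: a sketch of what a lease from this provider looks like when created with a manual secret (the secret and outputs are illustrative; run inside an async context):

const provider = TotpProvider();

const lease = await provider.create({
  configType: "manual",
  secret: "JBSWY3DPEHPK3PXP" // illustrative base32 secret
});

// lease.entityId => 32-character alphanumeric lease id
// lease.data     => { TOTP: "492039", TIME_REMAINING: 17 }  (values depend on the clock)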
@@ -27,7 +27,7 @@ export const initializeHsmModule = () => {
 
     logger.info("PKCS#11 module initialized");
   } catch (err) {
-    logger.error("Failed to initialize PKCS#11 module:", err);
+    logger.error(err, "Failed to initialize PKCS#11 module");
     throw err;
   }
 };
@@ -39,7 +39,7 @@ export const initializeHsmModule = () => {
     isInitialized = false;
     logger.info("PKCS#11 module finalized");
   } catch (err) {
-    logger.error("Failed to finalize PKCS#11 module:", err);
+    logger.error(err, "Failed to finalize PKCS#11 module");
     throw err;
   }
 }
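Note: this and the similar call-site changes below follow pino's signature, where the merging object comes first and the message second. A minimal sketch of why the order matters:

import pino from "pino";

const logger = pino();
const err = new Error("boom");

// Message-first: `err` is only string-interpolated into the line; an Error
// JSON-stringifies to "{}" (its properties are non-enumerable), so the
// message and stack are effectively lost.
logger.error("Failed to initialize PKCS#11 module:", err);

// Object-first (pino's intended signature): the Error is run through the
// `err` serializer, keeping its message and stack as structured fields.
logger.error(err, "Failed to initialize PKCS#11 module");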
@@ -36,8 +36,7 @@ export const testLDAPConfig = async (ldapConfig: TLDAPConfig): Promise<boolean>
   });
 
   ldapClient.on("error", (err) => {
-    logger.error("LDAP client error:", err);
-    logger.error(err);
+    logger.error(err, "LDAP client error");
     resolve(false);
   });
 
@@ -161,8 +161,8 @@ export const licenseServiceFactory = ({
     }
   } catch (error) {
     logger.error(
-      `getPlan: encountered an error when fetching plan [orgId=${orgId}] [projectId=${projectId}] [error]`,
-      error
+      error,
+      `getPlan: encountered an error when fetching plan [orgId=${orgId}] [projectId=${projectId}] [error]`
     );
     await keyStore.setItemWithExpiry(
       FEATURE_CACHE_KEY(orgId),
@@ -17,7 +17,7 @@ import {
   infisicalSymmetricDecrypt,
   infisicalSymmetricEncypt
 } from "@app/lib/crypto/encryption";
-import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
+import { BadRequestError, ForbiddenRequestError, NotFoundError, OidcAuthError } from "@app/lib/errors";
 import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
 import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
 import { TokenType } from "@app/services/auth-token/auth-token-types";
@@ -56,7 +56,7 @@ type TOidcConfigServiceFactoryDep = {
   orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
   licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
   tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
-  smtpService: Pick<TSmtpService, "sendMail">;
+  smtpService: Pick<TSmtpService, "sendMail" | "verify">;
   permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
   oidcConfigDAL: Pick<TOidcConfigDALFactory, "findOne" | "update" | "create">;
 };
@@ -223,6 +223,7 @@ export const oidcConfigServiceFactory = ({
   let newUser: TUsers | undefined;
 
   if (serverCfg.trustOidcEmails) {
+    // we prioritize getting the most complete user to create the new alias under
     newUser = await userDAL.findOne(
       {
         email,
@@ -230,6 +231,23 @@ export const oidcConfigServiceFactory = ({
       },
       tx
     );
+
+    if (!newUser) {
+      // this fetches user entries created via invites
+      newUser = await userDAL.findOne(
+        {
+          username: email
+        },
+        tx
+      );
+
+      if (newUser && !newUser.isEmailVerified) {
+        // we automatically mark it as email-verified because we've configured trust for OIDC emails
+        newUser = await userDAL.updateById(newUser.id, {
+          isEmailVerified: true
+        });
+      }
+    }
   }
 
   if (!newUser) {
@@ -332,14 +350,20 @@ export const oidcConfigServiceFactory = ({
       userId: user.id
     });
 
-    await smtpService.sendMail({
-      template: SmtpTemplates.EmailVerification,
-      subjectLine: "Infisical confirmation code",
-      recipients: [user.email],
-      substitutions: {
-        code: token
-      }
-    });
+    await smtpService
+      .sendMail({
+        template: SmtpTemplates.EmailVerification,
+        subjectLine: "Infisical confirmation code",
+        recipients: [user.email],
+        substitutions: {
+          code: token
+        }
+      })
+      .catch((err: Error) => {
+        throw new OidcAuthError({
+          message: `Error sending email confirmation code for user registration - contact the Infisical instance admin. ${err.message}`
+        });
+      });
   }
 
   return { isUserCompleted, providerAuthToken };
@@ -395,6 +419,18 @@ export const oidcConfigServiceFactory = ({
       message: `Organization bot for organization with ID '${org.id}' not found`,
       name: "OrgBotNotFound"
     });
+
+  const serverCfg = await getServerCfg();
+  if (isActive && !serverCfg.trustOidcEmails) {
+    const isSmtpConnected = await smtpService.verify();
+    if (!isSmtpConnected) {
+      throw new BadRequestError({
+        message:
+          "Cannot enable OIDC when there are issues with the instance's SMTP configuration. Bypass this by turning on trust for OIDC emails in the server admin console."
+      });
+    }
+  }
 
   const key = infisicalSymmetricDecrypt({
     ciphertext: orgBot.encryptedSymmetricKey,
     iv: orgBot.symmetricKeyIV,
@@ -127,14 +127,15 @@ export const permissionDALFactory = (db: TDbClient) => {
 
   const getProjectPermission = async (userId: string, projectId: string) => {
     try {
+      const subQueryUserGroups = db(TableName.UserGroupMembership).where("userId", userId).select("groupId");
       const docs = await db
         .replicaNode()(TableName.Users)
         .where(`${TableName.Users}.id`, userId)
         .leftJoin(TableName.UserGroupMembership, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)
         .leftJoin(TableName.GroupProjectMembership, (queryBuilder) => {
           void queryBuilder
             .on(`${TableName.GroupProjectMembership}.projectId`, db.raw("?", [projectId]))
-            .andOn(`${TableName.GroupProjectMembership}.groupId`, `${TableName.UserGroupMembership}.groupId`);
+            .andOn(`${TableName.GroupProjectMembership}.groupId`, `${TableName.UserGroupMembership}.groupId`)
+            // @ts-expect-error akhilmhdh: this is valid knexjs query. Its just ts type argument is missing it
+            .andOnIn(`${TableName.GroupProjectMembership}.groupId`, subQueryUserGroups);
         })
         .leftJoin(
           TableName.GroupProjectMembershipRole,
@@ -1,14 +1,7 @@
 import picomatch from "picomatch";
 import { z } from "zod";
 
-export enum PermissionConditionOperators {
-  $IN = "$in",
-  $ALL = "$all",
-  $REGEX = "$regex",
-  $EQ = "$eq",
-  $NEQ = "$ne",
-  $GLOB = "$glob"
-}
+import { PermissionConditionOperators } from "@app/lib/casl";
 
 export const PermissionConditionSchema = {
   [PermissionConditionOperators.$IN]: z.string().trim().min(1).array(),
@@ -1,10 +1,10 @@
 import { AbilityBuilder, createMongoAbility, ForcedSubject, MongoAbility } from "@casl/ability";
 import { z } from "zod";
 
-import { conditionsMatcher } from "@app/lib/casl";
+import { conditionsMatcher, PermissionConditionOperators } from "@app/lib/casl";
 import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
 
-import { PermissionConditionOperators, PermissionConditionSchema } from "./permission-types";
+import { PermissionConditionSchema } from "./permission-types";
 
 export enum ProjectPermissionActions {
   Read = "read",
@@ -46,7 +46,7 @@ export const rateLimitServiceFactory = ({ rateLimitDAL, licenseService }: TRateL
     }
     return rateLimit;
   } catch (err) {
-    logger.error("Error fetching rate limits %o", err);
+    logger.error(err, "Error fetching rate limits");
     return undefined;
   }
 };
@@ -69,12 +69,12 @@ export const rateLimitServiceFactory = ({ rateLimitDAL, licenseService }: TRateL
       mfaRateLimit: rateLimit.mfaRateLimit
     };
 
-    logger.info(`syncRateLimitConfiguration: rate limit configuration: %o`, newRateLimitMaxConfiguration);
+    logger.info(newRateLimitMaxConfiguration, "syncRateLimitConfiguration: rate limit configuration");
     Object.freeze(newRateLimitMaxConfiguration);
     rateLimitMaxConfiguration = newRateLimitMaxConfiguration;
   }
 } catch (error) {
-  logger.error(`Error syncing rate limit configurations: %o`, error);
+  logger.error(error, "Error syncing rate limit configurations");
 }
};
@@ -238,11 +238,11 @@ export const secretScanningQueueFactory = ({
   });
 
   queueService.listen(QueueName.SecretPushEventScan, "failed", (job, err) => {
-    logger.error("Failed to secret scan on push", job?.data, err);
+    logger.error(err, "Failed to secret scan on push", job?.data);
   });
 
   queueService.listen(QueueName.SecretFullRepoScan, "failed", (job, err) => {
-    logger.error("Failed to do full repo secret scan", job?.data, err);
+    logger.error(err, "Failed to do full repo secret scan", job?.data);
   });
 
   return { startFullRepoScan, startPushEventScan };
@@ -54,3 +54,12 @@ export const isAtLeastAsPrivileged = (permissions1: MongoAbility, permissions2:
 
   return set1.size >= set2.size;
 };
+
+export enum PermissionConditionOperators {
+  $IN = "$in",
+  $ALL = "$all",
+  $REGEX = "$regex",
+  $EQ = "$eq",
+  $NEQ = "$ne",
+  $GLOB = "$glob"
+}
@@ -1,7 +1,7 @@
-import { Logger } from "pino";
 import { z } from "zod";
 
 import { removeTrailingSlash } from "../fn";
+import { CustomLogger } from "../logger/logger";
 import { zpStr } from "../zod";
 
 export const GITLAB_URL = "https://gitlab.com";
@@ -157,6 +157,15 @@ const envSchema = z
   INFISICAL_CLOUD: zodStrBool.default("false"),
   MAINTENANCE_MODE: zodStrBool.default("false"),
   CAPTCHA_SECRET: zpStr(z.string().optional()),
+
+  // TELEMETRY
+  OTEL_TELEMETRY_COLLECTION_ENABLED: zodStrBool.default("false"),
+  OTEL_EXPORT_OTLP_ENDPOINT: zpStr(z.string().optional()),
+  OTEL_OTLP_PUSH_INTERVAL: z.coerce.number().default(30000),
+  OTEL_COLLECTOR_BASIC_AUTH_USERNAME: zpStr(z.string().optional()),
+  OTEL_COLLECTOR_BASIC_AUTH_PASSWORD: zpStr(z.string().optional()),
+  OTEL_EXPORT_TYPE: z.enum(["prometheus", "otlp"]).optional(),
+
   PLAIN_API_KEY: zpStr(z.string().optional()),
   PLAIN_WISH_LABEL_IDS: zpStr(z.string().optional()),
   DISABLE_AUDIT_LOG_GENERATION: zodStrBool.default("false"),
@@ -203,11 +212,11 @@ let envCfg: Readonly<z.infer<typeof envSchema>>;
 
 export const getConfig = () => envCfg;
 // cannot import singleton logger directly as it needs config to load various transport
-export const initEnvConfig = (logger: Logger) => {
+export const initEnvConfig = (logger?: CustomLogger) => {
   const parsedEnv = envSchema.safeParse(process.env);
   if (!parsedEnv.success) {
-    logger.error("Invalid environment variables. Check the error below");
-    logger.error(parsedEnv.error.issues);
+    (logger ?? console).error("Invalid environment variables. Check the error below");
+    (logger ?? console).error(parsedEnv.error.issues);
     process.exit(-1);
   }
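Note: because the logger parameter is now optional, call sites that run before the pino logger exists (for example the telemetry bootstrap later in this compare) can still surface validation errors. The two call shapes, sketched:

const cfg = initEnvConfig(logger); // normal boot path: errors go through the CustomLogger
const early = initEnvConfig();     // pre-logger path: errors fall back to console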
@@ -133,3 +133,15 @@ export class ScimRequestError extends Error {
     this.status = status;
   }
 }
+
+export class OidcAuthError extends Error {
+  name: string;
+
+  error: unknown;
+
+  constructor({ name, error, message }: { message?: string; name?: string; error?: unknown }) {
+    super(message || "Something went wrong");
+    this.name = name || "OidcAuthError";
+    this.error = error;
+  }
+}
@@ -1,6 +1,8 @@
 /* eslint-disable @typescript-eslint/no-unsafe-argument */
 /* eslint-disable @typescript-eslint/no-unsafe-assignment */
 // logger follows a singleton pattern
 // easier to use it that's all.
+import { requestContext } from "@fastify/request-context";
 import pino, { Logger } from "pino";
 import { z } from "zod";
 
@@ -13,14 +15,37 @@ const logLevelToSeverityLookup: Record<string, string> = {
   "60": "CRITICAL"
 };
 
-// eslint-disable-next-line import/no-mutable-exports
-export let logger: Readonly<Logger>;
+// akhilmhdh:
+// The logger is not placed in the main app config to avoid a circular dependency.
+// The config requires the logger to display errors when an invalid environment is supplied.
+// On the other hand, the logger needs the config to obtain credentials for AWS or other transports.
+// By keeping the logger separate, it becomes an independent package.
+
+// We define our own custom logger interface to enforce structure to the logging methods.
+
+export interface CustomLogger extends Omit<Logger, "info" | "error" | "warn" | "debug"> {
+  info: {
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    (obj: unknown, msg?: string, ...args: any[]): void;
+  };
+
+  error: {
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    (obj: unknown, msg?: string, ...args: any[]): void;
+  };
+  warn: {
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    (obj: unknown, msg?: string, ...args: any[]): void;
+  };
+  debug: {
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    (obj: unknown, msg?: string, ...args: any[]): void;
+  };
+}
+
+// eslint-disable-next-line import/no-mutable-exports
+export let logger: Readonly<CustomLogger>;
 
 const loggerConfig = z.object({
   AWS_CLOUDWATCH_LOG_GROUP_NAME: z.string().default("infisical-log-stream"),
   AWS_CLOUDWATCH_LOG_REGION: z.string().default("us-east-1"),
@@ -62,6 +87,17 @@ const redactedKeys = [
   "config"
 ];
 
+const UNKNOWN_REQUEST_ID = "UNKNOWN_REQUEST_ID";
+
+const extractRequestId = () => {
+  try {
+    return requestContext.get("requestId") || UNKNOWN_REQUEST_ID;
+  } catch (err) {
+    console.log("failed to get request context", err);
+    return UNKNOWN_REQUEST_ID;
+  }
+};
+
 export const initLogger = async () => {
   const cfg = loggerConfig.parse(process.env);
   const targets: pino.TransportMultiOptions["targets"][number][] = [
@@ -94,6 +130,30 @@ export const initLogger = async () => {
     targets
   });
 
+  const wrapLogger = (originalLogger: Logger): CustomLogger => {
+    // eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
+    originalLogger.info = (obj: unknown, msg?: string, ...args: any[]) => {
+      return originalLogger.child({ requestId: extractRequestId() }).info(obj, msg, ...args);
+    };
+
+    // eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
+    originalLogger.error = (obj: unknown, msg?: string, ...args: any[]) => {
+      return originalLogger.child({ requestId: extractRequestId() }).error(obj, msg, ...args);
+    };
+
+    // eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
+    originalLogger.warn = (obj: unknown, msg?: string, ...args: any[]) => {
+      return originalLogger.child({ requestId: extractRequestId() }).warn(obj, msg, ...args);
+    };
+
+    // eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
+    originalLogger.debug = (obj: unknown, msg?: string, ...args: any[]) => {
+      return originalLogger.child({ requestId: extractRequestId() }).debug(obj, msg, ...args);
+    };
+
+    return originalLogger;
+  };
+
   logger = pino(
     {
       mixin(_context, level) {
@@ -113,5 +173,6 @@ export const initLogger = async () => {
     // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
     transport
   );
-  return logger;
+
+  return wrapLogger(logger);
 };
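Note: the effect of wrapLogger is that every log line goes through a child logger carrying the current request's id. A sketch of the resulting output shape (the id is illustrative; genReqId in the server setup below produces ids of the form req-<nanoid>):

logger.info({ userId: "abc" }, "Fetched user");
// => {"level":30,"requestId":"req-x7GpR2kQw1Lm4d","userId":"abc","msg":"Fetched user",...}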
backend/src/lib/telemetry/instrumentation.ts (new file, 91 lines)

@@ -0,0 +1,91 @@
+import opentelemetry, { diag, DiagConsoleLogger, DiagLogLevel } from "@opentelemetry/api";
+import { getNodeAutoInstrumentations } from "@opentelemetry/auto-instrumentations-node";
+import { OTLPMetricExporter } from "@opentelemetry/exporter-metrics-otlp-proto";
+import { PrometheusExporter } from "@opentelemetry/exporter-prometheus";
+import { registerInstrumentations } from "@opentelemetry/instrumentation";
+import { Resource } from "@opentelemetry/resources";
+import { AggregationTemporality, MeterProvider, PeriodicExportingMetricReader } from "@opentelemetry/sdk-metrics";
+import { ATTR_SERVICE_NAME, ATTR_SERVICE_VERSION } from "@opentelemetry/semantic-conventions";
+import dotenv from "dotenv";
+
+import { initEnvConfig } from "../config/env";
+
+dotenv.config();
+
+const initTelemetryInstrumentation = ({
+  exportType,
+  otlpURL,
+  otlpUser,
+  otlpPassword,
+  otlpPushInterval
+}: {
+  exportType?: string;
+  otlpURL?: string;
+  otlpUser?: string;
+  otlpPassword?: string;
+  otlpPushInterval?: number;
+}) => {
+  diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG);
+
+  const resource = Resource.default().merge(
+    new Resource({
+      [ATTR_SERVICE_NAME]: "infisical-core",
+      [ATTR_SERVICE_VERSION]: "0.1.0"
+    })
+  );
+
+  const metricReaders = [];
+  switch (exportType) {
+    case "prometheus": {
+      const promExporter = new PrometheusExporter();
+      metricReaders.push(promExporter);
+      break;
+    }
+    case "otlp": {
+      const otlpExporter = new OTLPMetricExporter({
+        url: `${otlpURL}/v1/metrics`,
+        headers: {
+          Authorization: `Basic ${btoa(`${otlpUser}:${otlpPassword}`)}`
+        },
+        temporalityPreference: AggregationTemporality.DELTA
+      });
+      metricReaders.push(
+        new PeriodicExportingMetricReader({
+          exporter: otlpExporter,
+          exportIntervalMillis: otlpPushInterval
+        })
+      );
+      break;
+    }
+    default:
+      throw new Error("Invalid OTEL export type");
+  }
+
+  const meterProvider = new MeterProvider({
+    resource,
+    readers: metricReaders
+  });
+
+  opentelemetry.metrics.setGlobalMeterProvider(meterProvider);
+
+  registerInstrumentations({
+    instrumentations: [getNodeAutoInstrumentations()]
+  });
+};
+
+const setupTelemetry = () => {
+  const appCfg = initEnvConfig();
+
+  if (appCfg.OTEL_TELEMETRY_COLLECTION_ENABLED) {
+    console.log("Initializing telemetry instrumentation");
+    initTelemetryInstrumentation({
+      otlpURL: appCfg.OTEL_EXPORT_OTLP_ENDPOINT,
+      otlpUser: appCfg.OTEL_COLLECTOR_BASIC_AUTH_USERNAME,
+      otlpPassword: appCfg.OTEL_COLLECTOR_BASIC_AUTH_PASSWORD,
+      otlpPushInterval: appCfg.OTEL_OTLP_PUSH_INTERVAL,
+      exportType: appCfg.OTEL_EXPORT_TYPE
+    });
+  }
+};
+
+void setupTelemetry();
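Note: once setGlobalMeterProvider has run, any module can obtain a meter from @opentelemetry/api without importing this file, which is how the api-metrics plugin later in this compare records latencies. A minimal sketch (the meter and counter names are illustrative):

import opentelemetry from "@opentelemetry/api";

// Instruments created from the global provider are exported by the
// Prometheus or OTLP reader configured above.
const meter = opentelemetry.metrics.getMeter("example");
const loginCounter = meter.createCounter("logins_total"); // hypothetical metric name
loginCounter.add(1, { method: "oidc" });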
@@ -1,3 +1,5 @@
+import "./lib/telemetry/instrumentation";
+
 import dotenv from "dotenv";
 import path from "path";
 
@@ -18,6 +20,7 @@ dotenv.config();
 const run = async () => {
   const logger = await initLogger();
   const appCfg = initEnvConfig(logger);
+
   const db = initDbConnection({
     dbConnectionUri: appCfg.DB_CONNECTION_URI,
     dbRootCert: appCfg.DB_ROOT_CERT,
@@ -10,18 +10,21 @@ import fastifyFormBody from "@fastify/formbody";
 import helmet from "@fastify/helmet";
 import type { FastifyRateLimitOptions } from "@fastify/rate-limit";
 import ratelimiter from "@fastify/rate-limit";
+import { fastifyRequestContext } from "@fastify/request-context";
 import fastify from "fastify";
 import { Knex } from "knex";
-import { Logger } from "pino";
 
 import { HsmModule } from "@app/ee/services/hsm/hsm-types";
 import { TKeyStoreFactory } from "@app/keystore/keystore";
 import { getConfig, IS_PACKAGED } from "@app/lib/config/env";
+import { CustomLogger } from "@app/lib/logger/logger";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { TQueueServiceFactory } from "@app/queue";
 import { TSmtpService } from "@app/services/smtp/smtp-service";
 
 import { globalRateLimiterCfg } from "./config/rateLimiter";
 import { addErrorsToResponseSchemas } from "./plugins/add-errors-to-response-schemas";
+import { apiMetrics } from "./plugins/api-metrics";
 import { fastifyErrHandler } from "./plugins/error-handler";
 import { registerExternalNextjs } from "./plugins/external-nextjs";
 import { serializerCompiler, validatorCompiler, ZodTypeProvider } from "./plugins/fastify-zod";
@@ -34,7 +37,7 @@ type TMain = {
   auditLogDb?: Knex;
   db: Knex;
   smtp: TSmtpService;
-  logger?: Logger;
+  logger?: CustomLogger;
   queue: TQueueServiceFactory;
   keyStore: TKeyStoreFactory;
   hsmModule: HsmModule;
@@ -46,7 +49,9 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key
 
   const server = fastify({
     logger: appCfg.NODE_ENV === "test" ? false : logger,
+    genReqId: () => `req-${alphaNumericNanoId(14)}`,
     trustProxy: true,
+
     connectionTimeout: appCfg.isHsmConfigured ? 90_000 : 30_000,
     ignoreTrailingSlash: true,
     pluginTimeout: 40_000
@@ -86,6 +91,10 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key
   // pull ip based on various proxy headers
   await server.register(fastifyIp);
 
+  if (appCfg.OTEL_TELEMETRY_COLLECTION_ENABLED) {
+    await server.register(apiMetrics);
+  }
+
   await server.register(fastifySwagger);
   await server.register(fastifyFormBody);
   await server.register(fastifyErrHandler);
@@ -99,6 +108,13 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key
 
   await server.register(maintenanceMode);
 
+  await server.register(fastifyRequestContext, {
+    defaultStoreValues: (request) => ({
+      requestId: request.id,
+      log: request.log.child({ requestId: request.id })
+    })
+  });
+
   await server.register(registerRoutes, { smtp, queue, db, auditLogDb, keyStore, hsmModule });
 
   if (appCfg.isProductionMode) {
@@ -46,10 +46,10 @@ export const bootstrapCheck = async ({ db }: BootstrapOpt) => {
   await createTransport(smtpCfg)
     .verify()
     .then(async () => {
-      console.info("SMTP successfully connected");
+      console.info(`SMTP - Verified connection to ${appCfg.SMTP_HOST}:${appCfg.SMTP_PORT}`);
     })
-    .catch((err) => {
-      console.error(`SMTP - Failed to connect to ${appCfg.SMTP_HOST}:${appCfg.SMTP_PORT}`);
+    .catch((err: Error) => {
+      console.error(`SMTP - Failed to connect to ${appCfg.SMTP_HOST}:${appCfg.SMTP_PORT} - ${err.message}`);
       logger.error(err);
     });
 
backend/src/server/plugins/api-metrics.ts (new file, 21 lines)

@@ -0,0 +1,21 @@
+import opentelemetry from "@opentelemetry/api";
+import fp from "fastify-plugin";
+
+export const apiMetrics = fp(async (fastify) => {
+  const apiMeter = opentelemetry.metrics.getMeter("API");
+  const latencyHistogram = apiMeter.createHistogram("API_latency", {
+    unit: "ms"
+  });
+
+  fastify.addHook("onResponse", async (request, reply) => {
+    const { method } = request;
+    const route = request.routerPath;
+    const { statusCode } = reply;
+
+    latencyHistogram.record(reply.elapsedTime, {
+      route,
+      method,
+      statusCode
+    });
+  });
+});
@@ -1,4 +1,4 @@
-import { ForbiddenError } from "@casl/ability";
+import { ForbiddenError, PureAbility } from "@casl/ability";
 import fastifyPlugin from "fastify-plugin";
 import jwt from "jsonwebtoken";
 import { ZodError } from "zod";
@@ -10,6 +10,7 @@ import {
   GatewayTimeoutError,
   InternalServerError,
   NotFoundError,
+  OidcAuthError,
   RateLimitError,
   ScimRequestError,
   UnauthorizedError
@@ -38,74 +39,102 @@ export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider
   if (error instanceof BadRequestError) {
     void res
       .status(HttpStatusCodes.BadRequest)
-      .send({ statusCode: HttpStatusCodes.BadRequest, message: error.message, error: error.name });
+      .send({ requestId: req.id, statusCode: HttpStatusCodes.BadRequest, message: error.message, error: error.name });
   } else if (error instanceof NotFoundError) {
     void res
       .status(HttpStatusCodes.NotFound)
-      .send({ statusCode: HttpStatusCodes.NotFound, message: error.message, error: error.name });
+      .send({ requestId: req.id, statusCode: HttpStatusCodes.NotFound, message: error.message, error: error.name });
   } else if (error instanceof UnauthorizedError) {
-    void res
-      .status(HttpStatusCodes.Unauthorized)
-      .send({ statusCode: HttpStatusCodes.Unauthorized, message: error.message, error: error.name });
+    void res.status(HttpStatusCodes.Unauthorized).send({
+      requestId: req.id,
+      statusCode: HttpStatusCodes.Unauthorized,
+      message: error.message,
+      error: error.name
+    });
   } else if (error instanceof DatabaseError || error instanceof InternalServerError) {
-    void res
-      .status(HttpStatusCodes.InternalServerError)
-      .send({ statusCode: HttpStatusCodes.InternalServerError, message: "Something went wrong", error: error.name });
+    void res.status(HttpStatusCodes.InternalServerError).send({
+      requestId: req.id,
+      statusCode: HttpStatusCodes.InternalServerError,
+      message: "Something went wrong",
+      error: error.name
+    });
   } else if (error instanceof GatewayTimeoutError) {
-    void res
-      .status(HttpStatusCodes.GatewayTimeout)
-      .send({ statusCode: HttpStatusCodes.GatewayTimeout, message: error.message, error: error.name });
+    void res.status(HttpStatusCodes.GatewayTimeout).send({
+      requestId: req.id,
+      statusCode: HttpStatusCodes.GatewayTimeout,
+      message: error.message,
+      error: error.name
+    });
  } else if (error instanceof ZodError) {
-    void res
-      .status(HttpStatusCodes.Unauthorized)
-      .send({ statusCode: HttpStatusCodes.Unauthorized, error: "ValidationFailure", message: error.issues });
+    void res.status(HttpStatusCodes.Unauthorized).send({
+      requestId: req.id,
+      statusCode: HttpStatusCodes.Unauthorized,
+      error: "ValidationFailure",
+      message: error.issues
+    });
   } else if (error instanceof ForbiddenError) {
     void res.status(HttpStatusCodes.Forbidden).send({
+      requestId: req.id,
       statusCode: HttpStatusCodes.Forbidden,
       error: "PermissionDenied",
-      message: `You are not allowed to ${error.action} on ${error.subjectType} - ${JSON.stringify(error.subject)}`
+      message: `You are not allowed to ${error.action} on ${error.subjectType}`,
+      details: (error.ability as PureAbility).rulesFor(error.action as string, error.subjectType).map((el) => ({
+        action: el.action,
+        inverted: el.inverted,
+        subject: el.subject,
+        conditions: el.conditions
+      }))
     });
   } else if (error instanceof ForbiddenRequestError) {
     void res.status(HttpStatusCodes.Forbidden).send({
+      requestId: req.id,
       statusCode: HttpStatusCodes.Forbidden,
       message: error.message,
       error: error.name
     });
   } else if (error instanceof RateLimitError) {
     void res.status(HttpStatusCodes.TooManyRequests).send({
+      requestId: req.id,
      statusCode: HttpStatusCodes.TooManyRequests,
       message: error.message,
       error: error.name
     });
   } else if (error instanceof ScimRequestError) {
     void res.status(error.status).send({
+      requestId: req.id,
       schemas: error.schemas,
       status: error.status,
       detail: error.detail
     });
     // Handle JWT errors and make them more human-readable for the end-user.
+  } else if (error instanceof OidcAuthError) {
+    void res.status(HttpStatusCodes.InternalServerError).send({
+      requestId: req.id,
+      statusCode: HttpStatusCodes.InternalServerError,
+      message: error.message,
+      error: error.name
+    });
   } else if (error instanceof jwt.JsonWebTokenError) {
-    const message = (() => {
-      if (error.message === JWTErrors.JwtExpired) {
-        return "Your token has expired. Please re-authenticate.";
-      }
-      if (error.message === JWTErrors.JwtMalformed) {
-        return "The provided access token is malformed. Please use a valid token or generate a new one and try again.";
-      }
-      if (error.message === JWTErrors.InvalidAlgorithm) {
-        return "The access token is signed with an invalid algorithm. Please provide a valid token and try again.";
-      }
+    let errorMessage = error.message;
 
-      return error.message;
-    })();
+    if (error.message === JWTErrors.JwtExpired) {
+      errorMessage = "Your token has expired. Please re-authenticate.";
+    } else if (error.message === JWTErrors.JwtMalformed) {
+      errorMessage =
+        "The provided access token is malformed. Please use a valid token or generate a new one and try again.";
+    } else if (error.message === JWTErrors.InvalidAlgorithm) {
+      errorMessage =
+        "The access token is signed with an invalid algorithm. Please provide a valid token and try again.";
    }
 
     void res.status(HttpStatusCodes.Forbidden).send({
+      requestId: req.id,
       statusCode: HttpStatusCodes.Forbidden,
       error: "TokenError",
-      message
+      message: errorMessage
     });
   } else {
     void res.status(HttpStatusCodes.InternalServerError).send({
+      requestId: req.id,
       statusCode: HttpStatusCodes.InternalServerError,
       error: "InternalServerError",
       message: "Something went wrong"
@@ -19,7 +19,7 @@ export const registerSecretScannerGhApp = async (server: FastifyZodProvider) =>
 
   app.on("installation", async (context) => {
     const { payload } = context;
-    logger.info("Installed secret scanner to:", { repositories: payload.repositories });
+    logger.info({ repositories: payload.repositories }, "Installed secret scanner to");
   });
 
   app.on("push", async (context) => {
@@ -30,26 +30,32 @@ export const integrationAuthPubSchema = IntegrationAuthsSchema.pick({
 
 export const DefaultResponseErrorsSchema = {
   400: z.object({
+    requestId: z.string(),
     statusCode: z.literal(400),
     message: z.string(),
     error: z.string()
   }),
   404: z.object({
+    requestId: z.string(),
     statusCode: z.literal(404),
     message: z.string(),
     error: z.string()
   }),
   401: z.object({
+    requestId: z.string(),
     statusCode: z.literal(401),
     message: z.any(),
     error: z.string()
   }),
   403: z.object({
+    requestId: z.string(),
     statusCode: z.literal(403),
     message: z.string(),
+    details: z.any().optional(),
     error: z.string()
   }),
   500: z.object({
+    requestId: z.string(),
     statusCode: z.literal(500),
     message: z.string(),
     error: z.string()
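Note: with these schema changes, a 403 from the CASL permission check now carries the request id and the rules that denied it. An illustrative payload (all field values below are made up):

{
  "requestId": "req-x7GpR2kQw1Lm4d",
  "statusCode": 403,
  "error": "PermissionDenied",
  "message": "You are not allowed to read on Secret",
  "details": [{ "action": "read", "inverted": true, "subject": "Secret", "conditions": { "environment": "prod" } }]
}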
@@ -9,6 +9,7 @@ import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { IntegrationMetadataSchema } from "@app/services/integration/integration-schema";
import { Integrations } from "@app/services/integration-auth/integration-list";
import { PostHogEventTypes, TIntegrationCreatedEvent } from "@app/services/telemetry/telemetry-types";

import {} from "../sanitizedSchemas";
@@ -206,6 +207,33 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
        id: req.params.integrationId
      });

      if (integration.region) {
        integration.metadata = {
          ...(integration.metadata || {}),
          region: integration.region
        };
      }

      if (
        integration.integration === Integrations.AWS_SECRET_MANAGER ||
        integration.integration === Integrations.AWS_PARAMETER_STORE
      ) {
        const awsRoleDetails = await server.services.integration.getIntegrationAWSIamRole({
          actorId: req.permission.id,
          actor: req.permission.type,
          actorAuthMethod: req.permission.authMethod,
          actorOrgId: req.permission.orgId,
          id: req.params.integrationId
        });

        if (awsRoleDetails) {
          integration.metadata = {
            ...(integration.metadata || {}),
            awsIamRole: awsRoleDetails.role
          };
        }
      }

      return { integration };
    }
  });
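For an AWS Secrets Manager integration that has a region and an assumable IAM role, the handler above returns metadata enriched roughly like this (all values illustrative):

```ts
// Illustrative response shape; every value here is made up
const integration = {
  id: "integration-id",
  integration: "aws-secret-manager",
  metadata: {
    region: "us-east-1", // copied from integration.region
    awsIamRole: "infisical-sync" // extracted from the assume-role ARN
  }
};
```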
@@ -29,7 +29,7 @@ import {
} from "./identity-aws-auth-types";

type TIdentityAwsAuthServiceFactoryDep = {
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
  identityAwsAuthDAL: Pick<TIdentityAwsAuthDALFactory, "findOne" | "transaction" | "create" | "updateById" | "delete">;
  identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
@@ -346,6 +348,8 @@ export const identityAwsAuthServiceFactory = ({

    const revokedIdentityAwsAuth = await identityAwsAuthDAL.transaction(async (tx) => {
      const deletedAwsAuth = await identityAwsAuthDAL.delete({ identityId }, tx);
      await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.AWS_AUTH }, tx);

      return { ...deletedAwsAuth?.[0], orgId: identityMembershipOrg.orgId };
    });
    return revokedIdentityAwsAuth;
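This change, repeated for each auth method below, makes revocation atomic: the auth configuration and every access token minted through it are deleted in one transaction, so a partially failed revoke cannot leave live tokens behind. A generalized sketch of the shared pattern; `authDAL` and `tokenDAL` are stand-ins for the per-method DALs, not the concrete factories:

```ts
// Generalized sketch of the revoke-with-tokens pattern used across
// the AWS, Azure, GCP, Kubernetes, and OIDC auth methods in this diff.
const revokeAuthMethod = async (identityId: string, authMethod: string) =>
  authDAL.transaction(async (tx) => {
    // 1. remove the auth method configuration
    const deleted = await authDAL.delete({ identityId }, tx);
    // 2. remove all tokens issued via that method, in the same transaction
    await tokenDAL.delete({ identityId, authMethod }, tx);
    return deleted?.[0];
  });
```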
@@ -30,7 +30,7 @@ type TIdentityAzureAuthServiceFactoryDep = {
    "findOne" | "transaction" | "create" | "updateById" | "delete"
  >;
  identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};
@@ -70,7 +70,9 @@ export const identityAzureAuthServiceFactory = ({
      .map((servicePrincipalId) => servicePrincipalId.trim())
      .some((servicePrincipalId) => servicePrincipalId === azureIdentity.oid);

    if (!isServicePrincipalAllowed) throw new UnauthorizedError({ message: "Service principal not allowed" });
    if (!isServicePrincipalAllowed) {
      throw new UnauthorizedError({ message: `Service principal '${azureIdentity.oid}' not allowed` });
    }
  }

  const identityAccessToken = await identityAzureAuthDAL.transaction(async (tx) => {
@@ -317,6 +319,8 @@ export const identityAzureAuthServiceFactory = ({

    const revokedIdentityAzureAuth = await identityAzureAuthDAL.transaction(async (tx) => {
      const deletedAzureAuth = await identityAzureAuthDAL.delete({ identityId }, tx);
      await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.AZURE_AUTH }, tx);

      return { ...deletedAzureAuth?.[0], orgId: identityMembershipOrg.orgId };
    });
    return revokedIdentityAzureAuth;
@@ -28,7 +28,7 @@ import {
type TIdentityGcpAuthServiceFactoryDep = {
  identityGcpAuthDAL: Pick<TIdentityGcpAuthDALFactory, "findOne" | "transaction" | "create" | "updateById" | "delete">;
  identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};
@@ -365,6 +365,8 @@ export const identityGcpAuthServiceFactory = ({

    const revokedIdentityGcpAuth = await identityGcpAuthDAL.transaction(async (tx) => {
      const deletedGcpAuth = await identityGcpAuthDAL.delete({ identityId }, tx);
      await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.GCP_AUTH }, tx);

      return { ...deletedGcpAuth?.[0], orgId: identityMembershipOrg.orgId };
    });
    return revokedIdentityGcpAuth;
@@ -41,7 +41,7 @@ type TIdentityKubernetesAuthServiceFactoryDep = {
    TIdentityKubernetesAuthDALFactory,
    "create" | "findOne" | "transaction" | "updateById" | "delete"
  >;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
  identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "findById">;
  orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "transaction" | "create">;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
@@ -622,6 +622,7 @@ export const identityKubernetesAuthServiceFactory = ({

    const revokedIdentityKubernetesAuth = await identityKubernetesAuthDAL.transaction(async (tx) => {
      const deletedKubernetesAuth = await identityKubernetesAuthDAL.delete({ identityId }, tx);
      await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.KUBERNETES_AUTH }, tx);
      return { ...deletedKubernetesAuth?.[0], orgId: identityMembershipOrg.orgId };
    });
    return revokedIdentityKubernetesAuth;
@@ -39,7 +39,7 @@ import {
type TIdentityOidcAuthServiceFactoryDep = {
  identityOidcAuthDAL: TIdentityOidcAuthDALFactory;
  identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
  orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "transaction" | "create">;
@@ -539,6 +539,8 @@ export const identityOidcAuthServiceFactory = ({

    const revokedIdentityOidcAuth = await identityOidcAuthDAL.transaction(async (tx) => {
      const deletedOidcAuth = await identityOidcAuthDAL.delete({ identityId }, tx);
      await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.OIDC_AUTH }, tx);

      return { ...deletedOidcAuth?.[0], orgId: identityMembershipOrg.orgId };
    });

@@ -182,7 +182,12 @@ export const identityProjectServiceFactory = ({

    // validate custom roles input
    const customInputRoles = roles.filter(
      ({ role }) => !Object.values(ProjectMembershipRole).includes(role as ProjectMembershipRole)
      ({ role }) =>
        !Object.values(ProjectMembershipRole)
          // we don't want to include custom in this check;
          // this unintentionally enables setting slug to custom which is reserved
          .filter((r) => r !== ProjectMembershipRole.Custom)
          .includes(role as ProjectMembershipRole)
    );
    const hasCustomRole = Boolean(customInputRoles.length);
    const customRoles = hasCustomRole
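The subtlety: `ProjectMembershipRole` itself contains the reserved `custom` member, so the old predicate treated an input slug of `custom` as a built-in role and skipped custom-role validation entirely. A worked comparison of the two checks, assuming the usual lowercase enum values (an assumption for illustration):

```ts
// Assumed enum values, for illustration only
enum ProjectMembershipRole {
  Admin = "admin",
  Member = "member",
  Custom = "custom"
}

const builtIn = Object.values(ProjectMembershipRole);

// Old check: "custom" is in the enum, so it was NOT flagged as custom input
builtIn.includes("custom" as ProjectMembershipRole); // true -> slipped through

// New check: "custom" is excluded first, so it IS flagged and validated
builtIn
  .filter((r) => r !== ProjectMembershipRole.Custom)
  .includes("custom" as ProjectMembershipRole); // false -> treated as custom
```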
@@ -385,8 +385,8 @@ export const identityTokenAuthServiceFactory = ({
    actorOrgId
  }: TUpdateTokenAuthTokenDTO) => {
    const foundToken = await identityAccessTokenDAL.findOne({
      id: tokenId,
      authMethod: IdentityAuthMethod.TOKEN_AUTH
      [`${TableName.IdentityAccessToken}.id` as "id"]: tokenId,
      [`${TableName.IdentityAccessToken}.authMethod` as "authMethod"]: IdentityAuthMethod.TOKEN_AUTH
    });
    if (!foundToken) throw new NotFoundError({ message: `Token with ID ${tokenId} not found` });

@@ -444,8 +444,8 @@ export const identityTokenAuthServiceFactory = ({
  }: TRevokeTokenAuthTokenDTO) => {
    const identityAccessToken = await identityAccessTokenDAL.findOne({
      [`${TableName.IdentityAccessToken}.id` as "id"]: tokenId,
      isAccessTokenRevoked: false,
      authMethod: IdentityAuthMethod.TOKEN_AUTH
      [`${TableName.IdentityAccessToken}.isAccessTokenRevoked` as "isAccessTokenRevoked"]: false,
      [`${TableName.IdentityAccessToken}.authMethod` as "authMethod"]: IdentityAuthMethod.TOKEN_AUTH
    });
    if (!identityAccessToken)
      throw new NotFoundError({
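Qualifying the filter keys with the table name matters once the DAL query joins another table that also exposes `id` or `authMethod` columns; an unqualified key produces ambiguous SQL. A rough knex-flavored sketch of the difference (the join shown is illustrative, not the actual DAL internals):

```ts
import knex from "knex";

const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });
const tokenId = "token-id";

// Unqualified: compiles to `where id = ?`, ambiguous after the join
db("identity_access_token")
  .join("identity", "identity.id", "identity_access_token.identityId")
  .where({ id: tokenId });

// Qualified: compiles to `where identity_access_token.id = ?`
db("identity_access_token")
  .join("identity", "identity.id", "identity_access_token.identityId")
  .where({ "identity_access_token.id": tokenId });
```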
@@ -3075,7 +3075,7 @@ const syncSecretsTerraformCloud = async ({
}) => {
  // get secrets from Terraform Cloud
  const terraformSecrets = (
    await request.get<{ data: { attributes: { key: string; value: string }; id: string }[] }>(
    await request.get<{ data: { attributes: { key: string; value: string; sensitive: boolean }; id: string }[] }>(
      `${IntegrationUrls.TERRAFORM_CLOUD_API_URL}/api/v2/workspaces/${integration.appId}/vars`,
      {
        headers: {
@@ -3089,7 +3089,7 @@ const syncSecretsTerraformCloud = async ({
      ...obj,
      [secret.attributes.key]: secret
    }),
    {} as Record<string, { attributes: { key: string; value: string }; id: string }>
    {} as Record<string, { attributes: { key: string; value: string; sensitive: boolean }; id: string }>
  );

  const secretsToAdd: { [key: string]: string } = {};
@@ -3170,7 +3170,8 @@ const syncSecretsTerraformCloud = async ({
          attributes: {
            key,
            value: secrets[key]?.value,
            category: integration.targetService
            category: integration.targetService,
            sensitive: true
          }
        }
      },
@@ -3183,7 +3184,11 @@ const syncSecretsTerraformCloud = async ({
        }
      );
      // case: secret exists in Terraform Cloud
    } else if (secrets[key]?.value !== terraformSecrets[key].attributes.value) {
    } else if (
      // we now set secrets to sensitive in Terraform Cloud, this checks if existing secrets are not sensitive and updates them accordingly
      !terraformSecrets[key].attributes.sensitive ||
      secrets[key]?.value !== terraformSecrets[key].attributes.value
    ) {
      // -> update secret
      await request.patch(
        `${IntegrationUrls.TERRAFORM_CLOUD_API_URL}/api/v2/workspaces/${integration.appId}/vars/${terraformSecrets[key].id}`,
@@ -3193,7 +3198,8 @@ const syncSecretsTerraformCloud = async ({
          id: terraformSecrets[key].id,
          attributes: {
            ...terraformSecrets[key],
            value: secrets[key]?.value
            value: secrets[key]?.value,
            sensitive: true
          }
        }
      },
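The widened condition doubles as a lazy migration: any pre-existing variable still stored as non-sensitive is re-pushed (and flipped to sensitive) on the next sync even if its value matches. Distilled into a standalone predicate, with types mirroring the shapes above:

```ts
type TerraformVar = {
  id: string;
  attributes: { key: string; value: string; sensitive: boolean };
};

// Update when the value drifted OR the variable is not yet marked sensitive
const shouldUpdate = (desiredValue: string | undefined, existing: TerraformVar) =>
  !existing.attributes.sensitive || desiredValue !== existing.attributes.value;
```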
@@ -9,6 +9,7 @@ import { TIntegrationAuthDALFactory } from "../integration-auth/integration-auth
import { TIntegrationAuthServiceFactory } from "../integration-auth/integration-auth-service";
import { deleteIntegrationSecrets } from "../integration-auth/integration-delete-secret";
import { TKmsServiceFactory } from "../kms/kms-service";
import { KmsDataKey } from "../kms/kms-types";
import { TProjectBotServiceFactory } from "../project-bot/project-bot-service";
import { TSecretDALFactory } from "../secret/secret-dal";
import { TSecretQueueFactory } from "../secret/secret-queue";
@@ -237,6 +238,46 @@ export const integrationServiceFactory = ({
    return { ...integration, envId: integration.environment.id };
  };

  const getIntegrationAWSIamRole = async ({ id, actor, actorAuthMethod, actorId, actorOrgId }: TGetIntegrationDTO) => {
    const integration = await integrationDAL.findById(id);

    if (!integration) {
      throw new NotFoundError({
        message: `Integration with ID '${id}' not found`
      });
    }

    const { permission } = await permissionService.getProjectPermission(
      actor,
      actorId,
      integration?.projectId || "",
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);

    const integrationAuth = await integrationAuthDAL.findById(integration.integrationAuthId);

    const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
      type: KmsDataKey.SecretManager,
      projectId: integration.projectId
    });
    let awsIamRole: string | null = null;
    if (integrationAuth.encryptedAwsAssumeIamRoleArn) {
      const awsAssumeRoleArn = secretManagerDecryptor({
        cipherTextBlob: Buffer.from(integrationAuth.encryptedAwsAssumeIamRoleArn)
      }).toString();
      if (awsAssumeRoleArn) {
        const [, role] = awsAssumeRoleArn.split(":role/");
        awsIamRole = role;
      }
    }

    return {
      role: awsIamRole
    };
  };

  const deleteIntegration = async ({
    actorId,
    id,
@@ -329,6 +370,7 @@ export const integrationServiceFactory = ({
    deleteIntegration,
    listIntegrationByProject,
    getIntegration,
    getIntegrationAWSIamRole,
    syncIntegration
  };
};
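The role name is recovered by splitting the decrypted ARN on the literal `:role/` separator and keeping everything after it, path segments included. A quick worked example with sample ARNs (account id and role names are made up):

```ts
const [, role] = "arn:aws:iam::123456789012:role/infisical-sync".split(":role/");
console.log(role); // "infisical-sync"

// Roles created under a path keep the path prefix
const [, pathed] = "arn:aws:iam::123456789012:role/ops/deployer".split(":role/");
console.log(pathed); // "ops/deployer"
```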
@@ -280,7 +280,12 @@ export const projectMembershipServiceFactory = ({

    // validate custom roles input
    const customInputRoles = roles.filter(
      ({ role }) => !Object.values(ProjectMembershipRole).includes(role as ProjectMembershipRole)
      ({ role }) =>
        !Object.values(ProjectMembershipRole)
          // we don't want to include custom in this check;
          // this unintentionally enables setting slug to custom which is reserved
          .filter((r) => r !== ProjectMembershipRole.Custom)
          .includes(role as ProjectMembershipRole)
    );
    const hasCustomRole = Boolean(customInputRoles.length);
    if (hasCustomRole) {
@@ -191,6 +191,10 @@ export const projectDALFactory = (db: TDbClient) => {

      return project;
    } catch (error) {
      if (error instanceof NotFoundError) {
        throw error;
      }

      throw new DatabaseError({ error, name: "Find all projects" });
    }
  };
@@ -240,6 +244,10 @@ export const projectDALFactory = (db: TDbClient) => {

      return project;
    } catch (error) {
      if (error instanceof NotFoundError || error instanceof UnauthorizedError) {
        throw error;
      }

      throw new DatabaseError({ error, name: "Find project by slug" });
    }
  };
@@ -260,7 +268,7 @@ export const projectDALFactory = (db: TDbClient) => {
      }
      throw new BadRequestError({ message: "Invalid filter type" });
    } catch (error) {
      if (error instanceof BadRequestError) {
      if (error instanceof BadRequestError || error instanceof NotFoundError || error instanceof UnauthorizedError) {
        throw error;
      }
      throw new DatabaseError({ error, name: `Failed to find project by ${filter.type}` });
@@ -285,11 +285,14 @@ export const projectQueueFactory = ({

      if (!orgMembership) {
        // This can happen. Since we don't remove project memberships and project keys when a user is removed from an org, this is a valid case.
        logger.info("User is not in organization", {
          userId: key.receiverId,
          orgId: project.orgId,
          projectId: project.id
        });
        logger.info(
          {
            userId: key.receiverId,
            orgId: project.orgId,
            projectId: project.id
          },
          "User is not in organization"
        );
        // eslint-disable-next-line no-continue
        continue;
      }
@@ -551,10 +554,10 @@ export const projectQueueFactory = ({
      .catch(() => [null]);

    if (!project) {
      logger.error("Failed to upgrade project, because no project was found", data);
      logger.error(data, "Failed to upgrade project, because no project was found");
    } else {
      await projectDAL.setProjectUpgradeStatus(data.projectId, ProjectUpgradeStatus.Failed);
      logger.error("Failed to upgrade project", err, {
      logger.error(err, "Failed to upgrade project", {
        extra: {
          project,
          jobData: data
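All of these logger fixes correct the same mistake: pino expects the merge object (or error) first and the message second, so `logger.info("msg", obj)` treats the object as an unused printf-style argument and drops it from the structured output. A minimal sketch:

```ts
import pino from "pino";

const logger = pino();

// Wrong for pino: the object is an ignored interpolation argument
logger.info("User is not in organization", { userId: "user-id" });

// Right: merge object first, message second -> userId lands in the JSON line
logger.info({ userId: "user-id" }, "User is not in organization");

// Errors follow the same rule and are serialized under the `err` key
logger.error(new Error("boom"), "Failed to upgrade project");
```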
@@ -414,12 +414,13 @@ export const secretV2BridgeServiceFactory = ({
      type: KmsDataKey.SecretManager,
      projectId
    });
    const encryptedValue = secretValue
      ? {
          encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(secretValue) }).cipherTextBlob,
          references: getAllSecretReferences(secretValue).nestedReferences
        }
      : {};
    const encryptedValue =
      typeof secretValue === "string"
        ? {
            encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(secretValue) }).cipherTextBlob,
            references: getAllSecretReferences(secretValue).nestedReferences
          }
        : {};

    if (secretValue) {
      const { nestedReferences, localReferences } = getAllSecretReferences(secretValue);
@@ -1165,7 +1166,7 @@ export const secretV2BridgeServiceFactory = ({
    const newSecrets = await secretDAL.transaction(async (tx) =>
      fnSecretBulkInsert({
        inputSecrets: inputSecrets.map((el) => {
          const references = secretReferencesGroupByInputSecretKey[el.secretKey].nestedReferences;
          const references = secretReferencesGroupByInputSecretKey[el.secretKey]?.nestedReferences;

          return {
            version: 1,
@@ -1372,7 +1373,7 @@ export const secretV2BridgeServiceFactory = ({
        typeof el.secretValue !== "undefined"
          ? {
              encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(el.secretValue) }).cipherTextBlob,
              references: secretReferencesGroupByInputSecretKey[el.secretKey].nestedReferences
              references: secretReferencesGroupByInputSecretKey[el.secretKey]?.nestedReferences
            }
          : {};
@@ -77,5 +77,21 @@ export const smtpServiceFactory = (cfg: TSmtpConfig) => {
    }
  };

  return { sendMail };
  const verify = async () => {
    const isConnected = smtp
      .verify()
      .then(async () => {
        logger.info("SMTP connected");
        return true;
      })
      .catch((err: Error) => {
        logger.error("SMTP error");
        logger.error(err);
        return false;
      });

    return isConnected;
  };

  return { sendMail, verify };
};
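Exposing `verify` lets the server check its SMTP connection once at boot instead of discovering a bad configuration on the first outgoing email. A hedged sketch of a startup hook; the surrounding bootstrap code is assumed, not shown in this diff:

```ts
// Hypothetical bootstrap usage; smtpService comes from smtpServiceFactory(cfg)
const smtpOk = await smtpService.verify();
if (!smtpOk) {
  logger.warn("SMTP connection could not be verified; emails may fail to send");
}
```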
@@ -142,7 +142,7 @@ export const fnTriggerWebhook = async ({
    !isDisabled && picomatch.isMatch(secretPath, hookSecretPath, { strictSlashes: false })
  );
  if (!toBeTriggeredHooks.length) return;
  logger.info("Secret webhook job started", { environment, secretPath, projectId });
  logger.info({ environment, secretPath, projectId }, "Secret webhook job started");
  const project = await projectDAL.findById(projectId);
  const webhooksTriggered = await Promise.allSettled(
    toBeTriggeredHooks.map((hook) =>
@@ -195,5 +195,5 @@ export const fnTriggerWebhook = async ({
      );
    }
  });
  logger.info("Secret webhook job ended", { environment, secretPath, projectId });
  logger.info({ environment, secretPath, projectId }, "Secret webhook job ended");
};
@@ -111,7 +111,7 @@ var exportCmd = &cobra.Command{
			accessToken = token.Token
		} else {
			log.Debug().Msg("GetAllEnvironmentVariables: Trying to fetch secrets using logged in details")
			loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
			loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
			if err != nil {
				util.HandleError(err)
			}
@@ -41,7 +41,7 @@ var initCmd = &cobra.Command{
			}
		}

		userCreds, err := util.GetCurrentLoggedInUserDetails()
		userCreds, err := util.GetCurrentLoggedInUserDetails(true)
		if err != nil {
			util.HandleError(err, "Unable to get your login details")
		}
@@ -154,6 +154,8 @@ var loginCmd = &cobra.Command{
	DisableFlagsInUseLine: true,
	Run: func(cmd *cobra.Command, args []string) {

		presetDomain := config.INFISICAL_URL

		clearSelfHostedDomains, err := cmd.Flags().GetBool("clear-domains")
		if err != nil {
			util.HandleError(err)
@@ -198,7 +200,7 @@ var loginCmd = &cobra.Command{

		// standalone user auth
		if loginMethod == "user" {
			currentLoggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
			currentLoggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
			// if the key can't be found or there is an error getting current credentials from key ring, allow them to override
			if err != nil && (strings.Contains(err.Error(), "we couldn't find your logged in details")) {
				log.Debug().Err(err)
@@ -216,11 +218,19 @@ var loginCmd = &cobra.Command{
				return
			}
		}

		usePresetDomain, err := usePresetDomain(presetDomain)

		if err != nil {
			util.HandleError(err)
		}

		//override domain
		domainQuery := true
		if config.INFISICAL_URL_MANUAL_OVERRIDE != "" &&
			config.INFISICAL_URL_MANUAL_OVERRIDE != fmt.Sprintf("%s/api", util.INFISICAL_DEFAULT_EU_URL) &&
			config.INFISICAL_URL_MANUAL_OVERRIDE != fmt.Sprintf("%s/api", util.INFISICAL_DEFAULT_US_URL) {
			config.INFISICAL_URL_MANUAL_OVERRIDE != fmt.Sprintf("%s/api", util.INFISICAL_DEFAULT_US_URL) &&
			!usePresetDomain {
			overrideDomain, err := DomainOverridePrompt()
			if err != nil {
				util.HandleError(err)
@@ -228,7 +238,7 @@ var loginCmd = &cobra.Command{

			//if not override set INFISICAL_URL to exported var
			//set domainQuery to false
			if !overrideDomain {
			if !overrideDomain && !usePresetDomain {
				domainQuery = false
				config.INFISICAL_URL = util.AppendAPIEndpoint(config.INFISICAL_URL_MANUAL_OVERRIDE)
				config.INFISICAL_LOGIN_URL = fmt.Sprintf("%s/login", strings.TrimSuffix(config.INFISICAL_URL, "/api"))
@@ -237,7 +247,7 @@ var loginCmd = &cobra.Command{
		}

		//prompt user to select domain between Infisical cloud and self-hosting
		if domainQuery {
		if domainQuery && !usePresetDomain {
			err = askForDomain()
			if err != nil {
				util.HandleError(err, "Unable to parse domain url")
@@ -526,6 +536,45 @@ func DomainOverridePrompt() (bool, error) {
	return selectedOption == OVERRIDE, err
}

func usePresetDomain(presetDomain string) (bool, error) {
	infisicalConfig, err := util.GetConfigFile()
	if err != nil {
		return false, fmt.Errorf("askForDomain: unable to get config file because [err=%s]", err)
	}

	preconfiguredUrl := strings.TrimSuffix(presetDomain, "/api")

	if preconfiguredUrl != "" && preconfiguredUrl != util.INFISICAL_DEFAULT_US_URL && preconfiguredUrl != util.INFISICAL_DEFAULT_EU_URL {
		parsedDomain := strings.TrimSuffix(strings.Trim(preconfiguredUrl, "/"), "/api")

		_, err := url.ParseRequestURI(parsedDomain)
		if err != nil {
			return false, errors.New(fmt.Sprintf("Invalid domain URL: '%s'", parsedDomain))
		}

		config.INFISICAL_URL = fmt.Sprintf("%s/api", parsedDomain)
		config.INFISICAL_LOGIN_URL = fmt.Sprintf("%s/login", parsedDomain)

		if !slices.Contains(infisicalConfig.Domains, parsedDomain) {
			infisicalConfig.Domains = append(infisicalConfig.Domains, parsedDomain)
			err = util.WriteConfigFile(&infisicalConfig)

			if err != nil {
				return false, fmt.Errorf("askForDomain: unable to write domains to config file because [err=%s]", err)
			}
		}

		whilte := color.New(color.FgGreen)
		boldWhite := whilte.Add(color.Bold)
		time.Sleep(time.Second * 1)
		boldWhite.Printf("[INFO] Using domain '%s' from domain flag or INFISICAL_API_URL environment variable\n", parsedDomain)

		return true, nil
	}

	return false, nil
}

func askForDomain() error {

	// query user to choose between Infisical cloud or self-hosting
@@ -54,7 +54,7 @@ func init() {
			util.CheckForUpdate()
		}

		loggedInDetails, err := util.GetCurrentLoggedInUserDetails()
		loggedInDetails, err := util.GetCurrentLoggedInUserDetails(false)

		if !silent && err == nil && loggedInDetails.IsUserLoggedIn && !loggedInDetails.LoginExpired {
			token, err := util.GetInfisicalToken(cmd)
@@ -194,7 +194,7 @@ var secretsSetCmd = &cobra.Command{
			projectId = workspaceFile.WorkspaceId
		}

		loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
		loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
		if err != nil {
			util.HandleError(err, "unable to authenticate [err=%v]")
		}
@@ -278,7 +278,7 @@ var secretsDeleteCmd = &cobra.Command{
		util.RequireLogin()
		util.RequireLocalWorkspaceFile()

		loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
		loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
		if err != nil {
			util.HandleError(err, "Unable to authenticate")
		}
@@ -41,7 +41,7 @@ var tokensCreateCmd = &cobra.Command{
	},
	Run: func(cmd *cobra.Command, args []string) {
		// get plain text workspace key
		loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
		loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)

		if err != nil {
			util.HandleError(err, "Unable to retrieve your logged in your details. Please login in then try again")
@@ -55,7 +55,7 @@ func GetUserCredsFromKeyRing(userEmail string) (credentials models.UserCredentia
	return userCredentials, err
}

func GetCurrentLoggedInUserDetails() (LoggedInUserDetails, error) {
func GetCurrentLoggedInUserDetails(setConfigVariables bool) (LoggedInUserDetails, error) {
	if ConfigFileExists() {
		configFile, err := GetConfigFile()
		if err != nil {
@@ -75,18 +75,20 @@ func GetCurrentLoggedInUserDetails() (LoggedInUserDetails, error) {
			}
		}

		if setConfigVariables {
			config.INFISICAL_URL_MANUAL_OVERRIDE = config.INFISICAL_URL
			//configFile.LoggedInUserDomain
			//if not empty set as infisical url
			if configFile.LoggedInUserDomain != "" {
				config.INFISICAL_URL = AppendAPIEndpoint(configFile.LoggedInUserDomain)
			}
		}

		// check to to see if the JWT is still valid
		httpClient := resty.New().
			SetAuthToken(userCreds.JTWToken).
			SetHeader("Accept", "application/json")

		config.INFISICAL_URL_MANUAL_OVERRIDE = config.INFISICAL_URL
		//configFile.LoggedInUserDomain
		//if not empty set as infisical url
		if configFile.LoggedInUserDomain != "" {
			config.INFISICAL_URL = AppendAPIEndpoint(configFile.LoggedInUserDomain)
		}

		isAuthenticated := api.CallIsAuthenticated(httpClient)
		// TODO: add refresh token
		// if !isAuthenticated {
@@ -20,7 +20,7 @@ func GetAllFolders(params models.GetAllFoldersParameters) ([]models.SingleFolder

	log.Debug().Msg("GetAllFolders: Trying to fetch folders using logged in details")

	loggedInUserDetails, err := GetCurrentLoggedInUserDetails()
	loggedInUserDetails, err := GetCurrentLoggedInUserDetails(true)
	if err != nil {
		return nil, err
	}
@@ -177,7 +177,7 @@ func CreateFolder(params models.CreateFolderParameters) (models.SingleFolder, er
	if params.InfisicalToken == "" {
		RequireLogin()
		RequireLocalWorkspaceFile()
		loggedInUserDetails, err := GetCurrentLoggedInUserDetails()
		loggedInUserDetails, err := GetCurrentLoggedInUserDetails(true)

		if err != nil {
			return models.SingleFolder{}, err
@@ -224,7 +224,7 @@ func DeleteFolder(params models.DeleteFolderParameters) ([]models.SingleFolder,
		RequireLogin()
		RequireLocalWorkspaceFile()

		loggedInUserDetails, err := GetCurrentLoggedInUserDetails()
		loggedInUserDetails, err := GetCurrentLoggedInUserDetails(true)

		if err != nil {
			return nil, err
@@ -246,7 +246,7 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectCo

		log.Debug().Msg("GetAllEnvironmentVariables: Trying to fetch secrets using logged in details")

		loggedInUserDetails, err := GetCurrentLoggedInUserDetails()
		loggedInUserDetails, err := GetCurrentLoggedInUserDetails(true)
		isConnected := ValidateInfisicalAPIConnection()

		if isConnected {
@@ -86,6 +86,7 @@ services:
      - .env
    ports:
      - 4000:4000
      - 9464:9464 # for OTEL collection of Prometheus metrics
    environment:
      - NODE_ENV=development
      - DB_CONNECTION_URI=postgres://infisical:infisical@db/infisical?sslmode=disable
@@ -95,6 +96,42 @@ services:
    extra_hosts:
      - "host.docker.internal:host-gateway"

  prometheus:
    image: prom/prometheus
    volumes:
      - ./prometheus.dev.yml:/etc/prometheus/prometheus.yml
    ports:
      - "9090:9090"
    command:
      - "--config.file=/etc/prometheus/prometheus.yml"
    profiles: [metrics]

  otel-collector:
    image: otel/opentelemetry-collector-contrib
    volumes:
      - ./otel-collector-config.yaml:/etc/otelcol-contrib/config.yaml
    ports:
      - 1888:1888 # pprof extension
      - 8888:8888 # Prometheus metrics exposed by the Collector
      - 8889:8889 # Prometheus exporter metrics
      - 13133:13133 # health_check extension
      - 4317:4317 # OTLP gRPC receiver
      - 4318:4318 # OTLP http receiver
      - 55679:55679 # zpages extension
    profiles: [metrics-otel]

  grafana:
    image: grafana/grafana
    container_name: grafana
    restart: unless-stopped
    environment:
      - GF_LOG_LEVEL=debug
    ports:
      - "3005:3000"
    volumes:
      - "grafana_storage:/var/lib/grafana"
    profiles: [metrics]

  frontend:
    container_name: infisical-dev-frontend
    restart: unless-stopped
@@ -166,3 +203,4 @@ volumes:
    driver: local
  ldap_data:
  ldap_config:
  grafana_storage:
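Port 9464 on the backend lines up with the new Prometheus service: the app is expected to expose a scrape endpoint there, with `prometheus.dev.yml` pointed at it. A minimal sketch of serving metrics on that port, assuming the `@opentelemetry/sdk-metrics` and `@opentelemetry/exporter-prometheus` packages (this is not the backend's actual wiring, just an illustration of the mechanism):

```ts
import { MeterProvider } from "@opentelemetry/sdk-metrics";
import { PrometheusExporter } from "@opentelemetry/exporter-prometheus";

// PrometheusExporter acts as a MetricReader and serves GET /metrics on :9464
const exporter = new PrometheusExporter({ port: 9464 }, () => {
  console.log("Prometheus scrape endpoint listening on :9464/metrics");
});

const meterProvider = new MeterProvider();
meterProvider.addMetricReader(exporter);

// Hypothetical counter, just to verify the pipeline end to end
const meter = meterProvider.getMeter("infisical-dev");
const apiHits = meter.createCounter("api_hits_total");
apiHits.add(1, { route: "/healthcheck" });
```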
@@ -3,6 +3,3 @@ title: "Bulk Create"
openapi: "POST /api/v3/secrets/batch/raw"
---

<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>
@@ -3,6 +3,3 @@ title: "Create"
openapi: "POST /api/v3/secrets/raw/{secretName}"
---

<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>
@@ -3,6 +3,3 @@ title: "Bulk Delete"
openapi: "DELETE /api/v3/secrets/batch/raw"
---

<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>
@@ -3,6 +3,3 @@ title: "Delete"
openapi: "DELETE /api/v3/secrets/raw/{secretName}"
---

<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>
@@ -2,7 +2,3 @@
title: "List"
openapi: "GET /api/v3/secrets/raw"
---

<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>
@@ -3,6 +3,3 @@ title: "Retrieve"
openapi: "GET /api/v3/secrets/raw/{secretName}"
---

<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>
@@ -3,6 +3,3 @@ title: "Bulk Update"
openapi: "PATCH /api/v3/secrets/batch/raw"
---

<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>
@@ -2,7 +2,3 @@
title: "Update"
openapi: "PATCH /api/v3/secrets/raw/{secretName}"
---

<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>
@@ -3,7 +3,7 @@ title: 'Install'
description: "Infisical's CLI is one of the best way to manage environments and secrets. Install it here"
---

The Infisical CLI is powerful command line tool that can be used to retrieve, modify, export and inject secrets into any process or application as environment variables.
The Infisical CLI is a powerful command line tool that can be used to retrieve, modify, export and inject secrets into any process or application as environment variables.
You can use it across various environments, whether it's local development, CI/CD, staging, or production.

## Installation
@@ -69,7 +69,7 @@ The Infisical AWS ElastiCache dynamic secret allows you to generate AWS ElastiCa
  </ParamField>

  <ParamField path="Default TTL" type="string" required>
    Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate)
    Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
  </ParamField>

  <ParamField path="Max TTL" type="string" required>
@@ -131,12 +131,12 @@ The Infisical AWS ElastiCache dynamic secret allows you to generate AWS ElastiCa

## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you see the expiration time of the lease or delete a lease before it's set time to live.
This will allow you to see the expiration time of the lease or delete a lease before it's set time to live.

![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)

## Renew Leases
To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** as illustrated below.
To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** button as illustrated below.
![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)

<Warning>
@@ -66,7 +66,7 @@ Replace **\<account id\>** with your AWS account id and **\<aws-scope-path\>** w
  </ParamField>

  <ParamField path="Default TTL" type="string" required>
    Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate)
    Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
  </ParamField>

  <ParamField path="Max TTL" type="string" required>
@@ -138,12 +138,12 @@ Replace **\<account id\>** with your AWS account id and **\<aws-scope-path\>** w

## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you see the lease details and delete the lease ahead of its expiration time.
This will allow you to see the lease details and delete the lease ahead of its expiration time.

![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)

## Renew Leases
To extend the life of the generated dynamic secret lease past its initial time to live, simply click on the **Renew** as illustrated below.
To extend the life of the generated dynamic secret lease past its initial time to live, simply click on the **Renew** button as illustrated below.
![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)

<Warning>
@@ -98,7 +98,7 @@ Click on Add assignments. Search for the application name you created and select
  </ParamField>

  <ParamField path="Default TTL" type="string" required>
    Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate)
    Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
  </ParamField>

  <ParamField path="Max TTL" type="string" required>
@@ -151,12 +151,12 @@ Click on Add assignments. Search for the application name you created and select

## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you see the expiration time of the lease or delete a lease before it's set time to live.
This will allow you to see the expiration time of the lease or delete a lease before it's set time to live.

![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)

## Renew Leases
To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** as illustrated below.
To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** button as illustrated below.
![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)

<Warning>
@@ -39,7 +39,7 @@ The above configuration allows user creation and granting permissions.
  </ParamField>

  <ParamField path="Default TTL" type="string" required>
    Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate)
    Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
  </ParamField>

  <ParamField path="Max TTL" type="string" required>
@@ -116,12 +116,12 @@ The above configuration allows user creation and granting permissions.

## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you see the lease details and delete the lease ahead of its expiration time.
This will allow you to see the lease details and delete the lease ahead of its expiration time.

![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)

## Renew Leases
To extend the life of the generated dynamic secret lease past its initial time to live, simply click on the **Renew** as illustrated below.
To extend the life of the generated dynamic secret lease past its initial time to live, simply click on the **Renew** button as illustrated below.
![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)

<Warning>
@@ -34,7 +34,7 @@ The Infisical Elasticsearch dynamic secret allows you to generate Elasticsearch
  </ParamField>

  <ParamField path="Default TTL" type="string" required>
    Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate)
    Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
  </ParamField>

  <ParamField path="Max TTL" type="string" required>
@@ -114,12 +114,12 @@ The Infisical Elasticsearch dynamic secret allows you to generate Elasticsearch

## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you see the expiration time of the lease or delete a lease before it's set time to live.
This will allow you to see the expiration time of the lease or delete a lease before it's set time to live.

![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)

## Renew Leases
To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** as illustrated below.
To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** button as illustrated below.
![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)

<Warning>
@@ -31,7 +31,7 @@ The Infisical LDAP dynamic secret allows you to generate user credentials on dem
  </ParamField>

  <ParamField path="Default TTL" type="string" required>
    Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate)
    Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
  </ParamField>

  <ParamField path="Max TTL" type="string" required>
@@ -171,7 +171,7 @@ The Infisical LDAP dynamic secret allows you to generate user credentials on dem
  </ParamField>

  <ParamField path="Default TTL" type="string" required>
    Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate)
    Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
  </ParamField>

  <ParamField path="Max TTL" type="string" required>
@@ -30,7 +30,7 @@ Create a project scopped API Key with the required permission in your Mongo Atla
  </ParamField>

  <ParamField path="Default TTL" type="string" required>
    Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate)
    Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
  </ParamField>

  <ParamField path="Max TTL" type="string" required>
@@ -101,12 +101,12 @@ Create a project scopped API Key with the required permission in your Mongo Atla

## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you see the expiration time of the lease or delete a lease before it's set time to live.
This will allow you to see the expiration time of the lease or delete a lease before it's set time to live.

![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)

## Renew Leases
To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** as illustrated below.
To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** button as illustrated below.
![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)

<Warning>
@@ -31,7 +31,7 @@ Create a user with the required permission in your MongoDB instance. This user w
  </ParamField>

  <ParamField path="Default TTL" type="string" required>
    Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate)
    Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
  </ParamField>

  <ParamField path="Max TTL" type="string" required>
@@ -103,12 +103,12 @@ Create a user with the required permission in your MongoDB instance. This user w

## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you see the expiration time of the lease or delete a lease before it's set time to live.
This will allow you to see the expiration time of the lease or delete a lease before it's set time to live.

![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)

## Renew Leases
To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** as illustrated below.
To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** button as illustrated below.
![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)

<Warning>
@@ -28,7 +28,7 @@ Create a user with the required permission in your SQL instance. This user will
  </ParamField>

  <ParamField path="Default TTL" type="string" required>
    Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate)
    Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
  </ParamField>

  <ParamField path="Max TTL" type="string" required>
@@ -105,12 +105,12 @@ Create a user with the required permission in your SQL instance. This user will

## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you see the expiration time of the lease or delete the lease before it's set time to live.
This will allow you to see the expiration time of the lease or delete the lease before it's set time to live.

![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)

## Renew Leases
To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** as illustrated below.
To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** button as illustrated below.
![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)

<Warning>
@@ -27,7 +27,7 @@ Create a user with the required permission in your SQL instance. This user will
  </ParamField>

  <ParamField path="Default TTL" type="string" required>
    Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate)
    Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
  </ParamField>

  <ParamField path="Max TTL" type="string" required>
@@ -102,12 +102,12 @@ Create a user with the required permission in your SQL instance. This user will

## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you see the expiration time of the lease or delete a lease before it's set time to live.
This will allow you to see the expiration time of the lease or delete a lease before it's set time to live.

![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)

## Renew Leases
To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** as illustrated below.
To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** button as illustrated below.
![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)

<Warning>
@@ -27,7 +27,7 @@ Create a user with the required permission in your SQL instance. This user will
  </ParamField>

  <ParamField path="Default TTL" type="string" required>
    Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate)
    Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
  </ParamField>

  <ParamField path="Max TTL" type="string" required>
@@ -102,12 +102,12 @@ Create a user with the required permission in your SQL instance. This user will

## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you see the expiration time of the lease or delete a lease before it's set time to live.
This will allow you to see the expiration time of the lease or delete a lease before it's set time to live.

![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)

## Renew Leases
To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** as illustrated below.
To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** button as illustrated below.
![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)

<Warning>
@@ -28,7 +28,7 @@ Create a user with the required permission in your SQL instance. This user will
  </ParamField>

  <ParamField path="Default TTL" type="string" required>
    Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate)
    Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
  </ParamField>

  <ParamField path="Max TTL" type="string" required>
@@ -105,12 +105,12 @@ Create a user with the required permission in your SQL instance. This user will

## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you see the expiration time of the lease or delete the lease before it's set time to live.
This will allow you to see the expiration time of the lease or delete the lease before it's set time to live.

![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)

## Renew Leases
To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** as illustrated below.
To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** button as illustrated below.
![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)

<Warning>
@@ -28,7 +28,7 @@ The Infisical RabbitMQ dynamic secret allows you to generate RabbitMQ credential
  </ParamField>

  <ParamField path="Default TTL" type="string" required>
    Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate)
    Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
  </ParamField>

  <ParamField path="Max TTL" type="string" required>
@@ -103,12 +103,12 @@ The Infisical RabbitMQ dynamic secret allows you to generate RabbitMQ credential

## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you see the expiration time of the lease or delete a lease before it's set time to live.
This will allow you to see the expiration time of the lease or delete a lease before it's set time to live.

![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)

## Renew Leases
To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** as illustrated below.
To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** button as illustrated below.
![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)

<Warning>
@@ -27,7 +27,7 @@ Create a user with the required permission in your Redis instance. This user wil
  </ParamField>

  <ParamField path="Default TTL" type="string" required>
    Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate)
    Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
  </ParamField>

  <ParamField path="Max TTL" type="string" required>
@@ -93,12 +93,12 @@ Create a user with the required permission in your Redis instance. This user wil

## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you see the expiration time of the lease or delete a lease before it's set time to live.
This will allow you to see the expiration time of the lease or delete a lease before it's set time to live.

![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)

## Renew Leases
To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** as illustrated below.
To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** button as illustrated below.
![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)

<Warning>
@@ -30,7 +30,7 @@ The Infisical SAP HANA dynamic secret allows you to generate SAP HANA database c
  </ParamField>

  <ParamField path="Default TTL" type="string" required>
    Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate)
    Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
  </ParamField>

  <ParamField path="Max TTL" type="string" required>
@@ -106,13 +106,13 @@ The Infisical SAP HANA dynamic secret allows you to generate SAP HANA database c
## Audit or Revoke Leases

Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you see the lease details and delete the lease ahead of its expiration time.
This will allow you to see the lease details and delete the lease ahead of its expiration time.

![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)

## Renew Leases

To extend the life of the generated dynamic secret lease past its initial time to live, simply click on the **Renew** as illustrated below.
To extend the life of the generated dynamic secret lease past its initial time to live, simply click on the **Renew** button as illustrated below.
![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)

<Warning>
@@ -109,7 +109,7 @@ Infisical's Snowflake dynamic secrets allow you to generate Snowflake user crede
## Audit or Revoke Leases

Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you see the lease details and delete the lease ahead of its expiration time.
This will allow you to see the lease details and delete the lease ahead of its expiration time.

![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)

docs/documentation/platform/dynamic-secrets/totp.mdx (new file, 70 lines)
@@ -0,0 +1,70 @@
---
title: "TOTP"
description: "Learn how to dynamically generate time-based one-time passwords."
---

The Infisical TOTP dynamic secret allows you to generate time-based one-time passwords on demand.

## Prerequisite

- Infisical requires either an OTP URL or a secret key from a TOTP provider.

## Set up Dynamic Secrets with TOTP

<Steps>
  <Step title="Open Secret Overview Dashboard">
    Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
  </Step>
  <Step title="Click on the 'Add Dynamic Secret' button">
  </Step>
  <Step title="Select TOTP">
  </Step>
  <Step title="Provide the inputs for dynamic secret parameters">
    <ParamField path="Secret Name" type="string" required>
      Name by which you want the secret to be referenced
    </ParamField>
    <ParamField path="Configuration Type" type="string" required>
      There are two supported configuration types - `url` and `manual`.

      When `url` is selected, you can configure the TOTP generator using the OTP URL.

      When `manual` is selected, you can configure the TOTP generator using the secret key along with other configurations like period, number of digits, and algorithm.
    </ParamField>
    <ParamField path="URL" type="string">
      OTP URL in `otpauth://` format used to generate TOTP codes.
    </ParamField>
    <ParamField path="Secret Key" type="string">
      Base32-encoded secret used to generate TOTP codes.
    </ParamField>
    <ParamField path="Period" type="number">
      Time interval in seconds between generating new TOTP codes.
    </ParamField>
    <ParamField path="Digits" type="number">
      Number of digits to generate in each TOTP code.
    </ParamField>
    <ParamField path="Algorithm" type="string">
      Hash algorithm to use when generating TOTP codes. The supported algorithms are sha1, sha256, and sha512.
    </ParamField>
  </Step>
  <Step title="Click 'Submit'">
    After submitting the form, you will see a dynamic secret created in the dashboard.
  </Step>
  <Step title="Generate dynamic secrets">
    Once you've successfully configured the dynamic secret, you're ready to generate on-demand TOTPs.
    To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.

    Once you click the `Generate` button, a new secret lease will be generated and the TOTP will be shown to you.
  </Step>
</Steps>
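As a side note on how the `manual` parameters above combine, below is a minimal RFC 6238 sketch in TypeScript that uses only Node's built-in `crypto` module. It illustrates the standard TOTP algorithm rather than Infisical's implementation; the sample secret is a common demo value, and an equivalent `url` configuration would carry the same parameters in a URI such as `otpauth://totp/Infisical:admin@example.com?secret=JBSWY3DPEHPK3PXP&algorithm=SHA1&digits=6&period=30` (a hypothetical example).

```ts
// Minimal RFC 6238 sketch (illustrative, not Infisical's implementation).
import { createHmac } from "node:crypto";

const BASE32_ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567";

// Decode a base32 secret (the `Secret Key` field) into raw bytes.
function base32Decode(input: string): Buffer {
  const clean = input.toUpperCase().replace(/=+$/, "");
  let bits = 0;
  let value = 0;
  const out: number[] = [];
  for (const char of clean) {
    const idx = BASE32_ALPHABET.indexOf(char);
    if (idx === -1) throw new Error(`Invalid base32 character: ${char}`);
    value = (value << 5) | idx;
    bits += 5;
    if (bits >= 8) {
      out.push((value >>> (bits - 8)) & 0xff);
      bits -= 8;
    }
  }
  return Buffer.from(out);
}

function generateTotp(
  secret: string,
  period = 30, // `Period`: seconds each code stays valid
  digits = 6, // `Digits`: length of the code
  algorithm: "sha1" | "sha256" | "sha512" = "sha1" // `Algorithm`
): string {
  // Counter = number of whole periods elapsed since the Unix epoch.
  const counter = Math.floor(Date.now() / 1000 / period);
  const counterBuf = Buffer.alloc(8);
  counterBuf.writeBigUInt64BE(BigInt(counter));

  // HMAC the counter with the decoded secret, then apply dynamic truncation (RFC 4226).
  const hmac = createHmac(algorithm, base32Decode(secret)).update(counterBuf).digest();
  const offset = hmac[hmac.length - 1] & 0x0f;
  const binary =
    ((hmac[offset] & 0x7f) << 24) |
    (hmac[offset + 1] << 16) |
    (hmac[offset + 2] << 8) |
    hmac[offset + 3];

  return String(binary % 10 ** digits).padStart(digits, "0");
}

// Example with a commonly used demo secret:
console.log(generateTotp("JBSWY3DPEHPK3PXP"));
```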
[Binary image changes not shown: one screenshot updated and four added, including docs/images/platform/dynamic-secrets/totp-lease-value.png.]
@@ -1,8 +1,9 @@
---
title: "Terraform"
title: "Terraform Provider"
description: "Learn how to fetch Secrets From Infisical With Terraform."
url: "https://registry.terraform.io/providers/Infisical/infisical/latest/docs"
---

{/*
This guide provides step-by-step guidance on how to fetch secrets from Infisical using Terraform.

## Prerequisites

@@ -98,4 +99,4 @@ Terraform will now fetch your secrets from Infisical and display them as output

## Conclusion

You have now successfully set up and used the Infisical provider with Terraform to fetch secrets. For more information, visit the [Infisical documentation](https://registry.terraform.io/providers/Infisical/infisical/latest/docs).
You have now successfully set up and used the Infisical provider with Terraform to fetch secrets. For more information, visit the [Infisical documentation](https://registry.terraform.io/providers/Infisical/infisical/latest/docs). */}
@@ -94,7 +94,7 @@ spec:
  projectSlug: new-ob-em
  envSlug: dev # "dev", "staging", "prod", etc..
  secretsPath: "/" # Root is "/"
  recursive: true # Wether or not to use recursive mode (Fetches all secrets in an environment from a given secret path, and all folders inside the path) / defaults to false
  recursive: true # Whether or not to use recursive mode (Fetches all secrets in an environment from a given secret path, and all folders inside the path) / defaults to false
  credentialsRef:
    secretName: universal-auth-credentials
    secretNamespace: default
@@ -189,7 +189,8 @@
        "documentation/platform/dynamic-secrets/azure-entra-id",
        "documentation/platform/dynamic-secrets/ldap",
        "documentation/platform/dynamic-secrets/sap-hana",
        "documentation/platform/dynamic-secrets/snowflake"
        "documentation/platform/dynamic-secrets/snowflake",
        "documentation/platform/dynamic-secrets/totp"
      ]
    },
    "documentation/platform/project-templates",
@@ -30,7 +30,8 @@ Used to configure platform-specific security and operational settings
</ParamField>

<ParamField query="TELEMETRY_ENABLED" type="string" default="true" optional>
  Telemetry helps us improve Infisical but if you want to dsiable it you may set this to `false`.
  Telemetry helps us improve Infisical but if you want to disable it you may set
  this to `false`.
</ParamField>

## Data Layer

@@ -65,8 +66,9 @@ DB_READ_REPLICAS=[{"DB_CONNECTION_URI":""}]
  Use the command below to encode your certificate:
  `echo "<certificate>" | base64`

  If not provided it will use master SSL certificate.
</ParamField>

</Expandable>
</ParamField>
@@ -350,8 +352,9 @@ Optional (for TLS/SSL):

  TLS: Available on the same ports (2525, 80, 25, 8025, or 587)
  SSL: Available on ports 465, 8465, and 443

</Note>
</Accordion>
</Accordion>

## Authentication

@@ -518,3 +521,36 @@ To help you sync secrets from Infisical to services such as Github and Gitlab, I
  OAuth2 client secret for Gitlab integration
</ParamField>
</Accordion>

## Observability

You can configure Infisical to collect and expose telemetry data for analytics and monitoring.

<ParamField
  query="OTEL_TELEMETRY_COLLECTION_ENABLED"
  type="string"
  default="false"
>
  Whether or not to collect and expose telemetry data.
</ParamField>

<ParamField query="OTEL_EXPORT_TYPE" type="enum" optional>
  Supported types are `prometheus` and `otlp`.

  If the export type is set to `prometheus`, metric data will be exposed on port 9464 at the `/metrics` path.

  If the export type is set to `otlp`, you will have to configure a value for `OTEL_EXPORT_OTLP_ENDPOINT`.
</ParamField>

<ParamField query="OTEL_EXPORT_OTLP_ENDPOINT" type="string">
  The endpoint telemetry data is pushed to for collection. This is only
  applicable when `OTEL_EXPORT_TYPE` is set to `otlp`.
</ParamField>

<ParamField query="OTEL_COLLECTOR_BASIC_AUTH_USERNAME" type="string">
  The username for authenticating with the telemetry collector.
</ParamField>
<ParamField query="OTEL_COLLECTOR_BASIC_AUTH_PASSWORD" type="string">
  The password for authenticating with the telemetry collector.
</ParamField>
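To make the relationships between these variables concrete, here is a hypothetical TypeScript sketch of how a service might validate them at startup. It is not Infisical's actual bootstrap code; it simply encodes the rules stated above: `otlp` requires an endpoint, and basic auth credentials translate into a standard `Authorization: Basic` header.

```ts
// Hypothetical startup validation of the OTEL_* variables (illustrative only).
type OtelConfig =
  | { enabled: false }
  | { enabled: true; exportType: "prometheus" } // scraped on :9464 at /metrics
  | { enabled: true; exportType: "otlp"; endpoint: string; headers: Record<string, string> };

function loadOtelConfig(env: NodeJS.ProcessEnv): OtelConfig {
  if (env.OTEL_TELEMETRY_COLLECTION_ENABLED !== "true") return { enabled: false };

  if (env.OTEL_EXPORT_TYPE === "prometheus") {
    return { enabled: true, exportType: "prometheus" };
  }

  if (env.OTEL_EXPORT_TYPE === "otlp") {
    const endpoint = env.OTEL_EXPORT_OTLP_ENDPOINT;
    if (!endpoint) throw new Error("OTEL_EXPORT_OTLP_ENDPOINT is required for otlp export");

    // A collector behind basic auth expects a standard Authorization header.
    const headers: Record<string, string> = {};
    const user = env.OTEL_COLLECTOR_BASIC_AUTH_USERNAME;
    const pass = env.OTEL_COLLECTOR_BASIC_AUTH_PASSWORD;
    if (user && pass) {
      headers.Authorization = `Basic ${Buffer.from(`${user}:${pass}`).toString("base64")}`;
    }
    return { enabled: true, exportType: "otlp", endpoint, headers };
  }

  throw new Error(`Unsupported OTEL_EXPORT_TYPE: ${env.OTEL_EXPORT_TYPE}`);
}
```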
@@ -1,5 +1,6 @@
import { useEffect, useState } from "react";
import QRCode from "qrcode";
import { twMerge } from "tailwind-merge";

import { useGetUserTotpRegistration } from "@app/hooks/api";
import { useVerifyUserTotpRegistration } from "@app/hooks/api/users/mutation";

@@ -9,9 +10,10 @@ import { Button, ContentLoader, Input } from "../v2";

type Props = {
  onComplete?: () => Promise<void>;
  shouldCenterQr?: boolean;
};

const TotpRegistration = ({ onComplete }: Props) => {
const TotpRegistration = ({ onComplete, shouldCenterQr }: Props) => {
  const { data: registration, isLoading } = useGetUserTotpRegistration();
  const { mutateAsync: verifyUserTotp, isLoading: isVerifyLoading } =
    useVerifyUserTotpRegistration();

@@ -48,16 +50,18 @@ const TotpRegistration = ({ onComplete }: Props) => {
  if (isLoading) {
    return <ContentLoader />;
  }

  return (
    <div className="flex max-w-sm flex-col text-bunker-200">
      <div className="mb-4 text-center">
        Download a two-step verification app (Duo, Google Authenticator, etc.) and scan the QR code.
    <div className="flex max-w-lg flex-col text-bunker-200">
      <div className="mb-8">
        1. Download a two-step verification app (Duo, Google Authenticator, etc.) and scan the QR
        code.
      </div>
      <div className="mb-10 flex items-center justify-center">
      <div className={twMerge("mb-8 flex items-center", shouldCenterQr && "justify-center")}>
        <img src={qrCodeUrl} alt="registration-qr" />
      </div>
      <form onSubmit={handleTotpVerify}>
        <div className="mb-4 text-center">Enter the resulting verification code</div>
        <div className="mb-4">2. Enter the resulting verification code</div>
        <div className="mb-4 flex flex-row gap-2">
          <Input
            onChange={(e) => setTotp(e.target.value)}
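Because `shouldCenterQr` is optional, existing call sites of the component compile unchanged. A hypothetical consumer that wants the centered layout would opt in like this (the page name and handler body are illustrative):

```tsx
// Hypothetical usage of the updated component; not an actual call site.
const MfaSetupPage = () => (
  <TotpRegistration
    shouldCenterQr
    onComplete={async () => {
      // e.g. refetch the user's MFA state or navigate onward after verification
    }}
  />
);
```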
@@ -4,13 +4,15 @@ import { Id, toast, ToastContainer, ToastOptions, TypeOptions } from "react-toas

export type TNotification = {
  title?: string;
  text: ReactNode;
  children?: ReactNode;
};

export const NotificationContent = ({ title, text }: TNotification) => {
export const NotificationContent = ({ title, text, children }: TNotification) => {
  return (
    <div className="msg-container">
      {title && <div className="text-md mb-1 font-medium">{title}</div>}
      <div className={title ? "text-sm" : "text-md"}>{text}</div>
      <div className={title ? "text-sm text-neutral-400" : "text-md"}>{text}</div>
      {children && <div className="mt-2">{children}</div>}
    </div>
  );
};

@@ -23,7 +25,13 @@ export const createNotification = (
  position: "bottom-right",
  ...toastProps,
  theme: "dark",
  type: myProps?.type || "info",
  type: myProps?.type || "info"
});

export const NotificationContainer = () => <ToastContainer pauseOnHover toastClassName="border border-mineshaft-500" style={{ width: "400px" }} />;
export const NotificationContainer = () => (
  <ToastContainer
    pauseOnHover
    toastClassName="border border-mineshaft-500"
    style={{ width: "400px" }}
  />
);
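The new `children` field gives callers a slot for actions beneath the toast text. Assuming `createNotification` accepts the full `TNotification` shape, as the diff suggests, a hypothetical call site could look like this (title, text, and button are illustrative):

```tsx
// Hypothetical call site; `children` renders below the text with the
// mt-2 spacing added in the diff above.
createNotification({
  type: "info",
  title: "Secret scan complete",
  text: "2 potential leaks were found in staged files.",
  children: <Button>View findings</Button>
});
```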
@@ -33,6 +33,15 @@ export enum PermissionConditionOperators {
  $GLOB = "$glob"
}

export const formatedConditionsOperatorNames: { [K in PermissionConditionOperators]: string } = {
  [PermissionConditionOperators.$EQ]: "equal to",
  [PermissionConditionOperators.$IN]: "contains",
  [PermissionConditionOperators.$ALL]: "contains all",
  [PermissionConditionOperators.$NEQ]: "not equal to",
  [PermissionConditionOperators.$GLOB]: "matches glob pattern",
  [PermissionConditionOperators.$REGEX]: "matches regex pattern"
};

export type TPermissionConditionOperators = {
  [PermissionConditionOperators.$IN]: string[];
  [PermissionConditionOperators.$ALL]: string[];
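To see what the operator-name map above is for, here is a small helper sketch, not taken from the codebase, that renders a permission condition as a human-readable sentence using the map:

```ts
// Hypothetical helper built on the exported map above (illustrative only).
const describeCondition = (
  field: string,
  operator: PermissionConditionOperators,
  value: string | string[]
): string =>
  `${field} ${formatedConditionsOperatorNames[operator]} ${
    Array.isArray(value) ? value.join(", ") : value
  }`;

// e.g. describeCondition("environment", PermissionConditionOperators.$IN, ["dev", "staging"])
// => "environment contains dev, staging"
```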
Some files were not shown because too many files have changed in this diff.