Compare commits


1 Commit

Author: Maidul Islam | SHA1: 646ceba089 | Message: update old local dev docs | Date: 2024-02-27 01:01:17 -05:00
128 changed files with 370 additions and 1201 deletions

.github/values.yaml (vendored): 13 changed lines

@@ -13,10 +13,11 @@ fullnameOverride: ""
##
infisical:
autoDatabaseSchemaMigration: false
## @param backend.enabled Enable backend
##
enabled: false
## @param backend.name Backend name
##
name: infisical
replicaCount: 3
image:
@@ -49,9 +50,3 @@ ingress:
- secretName: letsencrypt-prod
hosts:
- gamma.infisical.com
postgresql:
enabled: false
redis:
enabled: false


@@ -11,4 +11,4 @@ up-prod:
docker-compose -f docker-compose.prod.yml up --build
down:
docker compose -f docker-compose.dev.yml down
docker-compose down


@@ -1,30 +0,0 @@
import { TKeyStoreFactory } from "@app/keystore/keystore";
export const mockKeyStore = (): TKeyStoreFactory => {
const store: Record<string, string | number | Buffer> = {};
return {
setItem: async (key, value) => {
store[key] = value;
return "OK";
},
setItemWithExpiry: async (key, value) => {
store[key] = value;
return "OK";
},
deleteItem: async (key) => {
delete store[key];
return 1;
},
getItem: async (key) => {
const value = store[key];
if (typeof value === "string") {
return value;
}
return null;
},
incrementBy: async () => {
return 1;
}
};
};


@@ -14,7 +14,6 @@ import { AuthTokenType } from "@app/services/auth/auth-type";
import { mockQueue } from "./mocks/queue";
import { mockSmtpServer } from "./mocks/smtp";
import { mockKeyStore } from "./mocks/keystore";
dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
@@ -42,8 +41,7 @@ export default {
await db.seed.run();
const smtp = mockSmtpServer();
const queue = mockQueue();
const keyStore = mockKeyStore();
const server = await main({ db, smtp, logger, queue, keyStore });
const server = await main({ db, smtp, logger, queue });
// @ts-expect-error type
globalThis.testServer = server;
// @ts-expect-error type


@@ -11,7 +11,7 @@
"dependencies": {
"@aws-sdk/client-secrets-manager": "^3.504.0",
"@casl/ability": "^6.5.0",
"@fastify/cookie": "^9.3.1",
"@fastify/cookie": "^9.2.0",
"@fastify/cors": "^8.5.0",
"@fastify/etag": "^5.1.0",
"@fastify/formbody": "^7.4.0",
@@ -29,11 +29,11 @@
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.31.2",
"aws-sdk": "^2.1553.0",
"aws-sdk": "^2.1549.0",
"axios": "^1.6.7",
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"bullmq": "^5.3.3",
"bullmq": "^5.1.6",
"dotenv": "^16.4.1",
"fastify": "^4.26.0",
"fastify-plugin": "^4.5.1",
@@ -56,7 +56,7 @@
"pg": "^8.11.3",
"picomatch": "^3.0.1",
"pino": "^8.16.2",
"posthog-node": "^3.6.2",
"posthog-node": "^3.6.0",
"probot": "^13.0.0",
"smee-client": "^2.0.0",
"tweetnacl": "^1.0.3",
@@ -1687,9 +1687,9 @@
}
},
"node_modules/@fastify/cookie": {
"version": "9.3.1",
"resolved": "https://registry.npmjs.org/@fastify/cookie/-/cookie-9.3.1.tgz",
"integrity": "sha512-h1NAEhB266+ZbZ0e9qUE6NnNR07i7DnNXWG9VbbZ8uC6O/hxHpl+Zoe5sw1yfdZ2U6XhToUGDnzQtWJdCaPwfg==",
"version": "9.2.0",
"resolved": "https://registry.npmjs.org/@fastify/cookie/-/cookie-9.2.0.tgz",
"integrity": "sha512-fkg1yjjQRHPFAxSHeLC8CqYuNzvR6Lwlj/KjrzQcGjNBK+K82nW+UfCjfN71g1GkoVoc1GTOgIWkFJpcMfMkHQ==",
"dependencies": {
"cookie-signature": "^1.1.0",
"fastify-plugin": "^4.0.0"
@@ -2193,6 +2193,7 @@
"version": "2.1.5",
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
"integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==",
"dev": true,
"dependencies": {
"@nodelib/fs.stat": "2.0.5",
"run-parallel": "^1.1.9"
@@ -2205,6 +2206,7 @@
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz",
"integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==",
"dev": true,
"engines": {
"node": ">= 8"
}
@@ -2213,6 +2215,7 @@
"version": "1.2.8",
"resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz",
"integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==",
"dev": true,
"dependencies": {
"@nodelib/fs.scandir": "2.1.5",
"fastq": "^1.6.0"
@@ -5186,9 +5189,9 @@
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
},
"node_modules/aws-sdk": {
"version": "2.1553.0",
"resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1553.0.tgz",
"integrity": "sha512-CfZaw8dR9e642aBOeFhkFL7KoQApeLR15uH2IQqfL/12snWYayAAesYh0tEaU+XbhrH0CUsf2Zro5IraEXEZMg==",
"version": "2.1549.0",
"resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1549.0.tgz",
"integrity": "sha512-SoVfrrV3A2mxH+NV2tA0eMtG301glhewvhL3Ob4107qLWjvwjy/CoWLclMLmfXniTGxbI8tsgN0r5mLZUKey3Q==",
"dependencies": {
"buffer": "4.9.2",
"events": "1.1.1",
@@ -5439,6 +5442,7 @@
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
"dev": true,
"dependencies": {
"fill-range": "^7.0.1"
},
@@ -5488,15 +5492,14 @@
}
},
"node_modules/bullmq": {
"version": "5.3.3",
"resolved": "https://registry.npmjs.org/bullmq/-/bullmq-5.3.3.tgz",
"integrity": "sha512-Gc/68HxiCHLMPBiGIqtINxcf8HER/5wvBYMY/6x3tFejlvldUBFaAErMTLDv4TnPsTyzNPrfBKmFCEM58uVnJg==",
"version": "5.1.6",
"resolved": "https://registry.npmjs.org/bullmq/-/bullmq-5.1.6.tgz",
"integrity": "sha512-VkLfig+xm4U3hc4QChzuuAy0NGQ9dfPB8o54hmcZHCX9ofp0Zn6bEY+W3Ytkk76eYwPAgXfywDBlAb2Unjl1Rg==",
"dependencies": {
"cron-parser": "^4.6.0",
"fast-glob": "^3.3.2",
"glob": "^8.0.3",
"ioredis": "^5.3.2",
"lodash": "^4.17.21",
"minimatch": "^9.0.3",
"msgpackr": "^1.10.1",
"node-abort-controller": "^3.1.1",
"semver": "^7.5.4",
@@ -5504,28 +5507,6 @@
"uuid": "^9.0.0"
}
},
"node_modules/bullmq/node_modules/brace-expansion": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
"dependencies": {
"balanced-match": "^1.0.0"
}
},
"node_modules/bullmq/node_modules/minimatch": {
"version": "9.0.3",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz",
"integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==",
"dependencies": {
"brace-expansion": "^2.0.1"
},
"engines": {
"node": ">=16 || 14 >=14.17"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/bundle-require": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/bundle-require/-/bundle-require-4.0.2.tgz",
@@ -6925,6 +6906,7 @@
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz",
"integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==",
"dev": true,
"dependencies": {
"@nodelib/fs.stat": "^2.0.2",
"@nodelib/fs.walk": "^1.2.3",
@@ -7076,6 +7058,7 @@
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
"integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
"dev": true,
"dependencies": {
"to-regex-range": "^5.0.1"
},
@@ -7527,6 +7510,7 @@
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
"integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
"dev": true,
"dependencies": {
"is-glob": "^4.0.1"
},
@@ -8127,6 +8111,7 @@
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
"integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
"dev": true,
"engines": {
"node": ">=0.10.0"
}
@@ -8157,6 +8142,7 @@
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
"integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
"dev": true,
"dependencies": {
"is-extglob": "^2.1.1"
},
@@ -8191,6 +8177,7 @@
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
"dev": true,
"engines": {
"node": ">=0.12.0"
}
@@ -8947,6 +8934,7 @@
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz",
"integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==",
"dev": true,
"engines": {
"node": ">= 8"
}
@@ -8963,6 +8951,7 @@
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz",
"integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==",
"dev": true,
"dependencies": {
"braces": "^3.0.2",
"picomatch": "^2.3.1"
@@ -8975,6 +8964,7 @@
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
"integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
"dev": true,
"engines": {
"node": ">=8.6"
},
@@ -10318,9 +10308,9 @@
"dev": true
},
"node_modules/posthog-node": {
"version": "3.6.2",
"resolved": "https://registry.npmjs.org/posthog-node/-/posthog-node-3.6.2.tgz",
"integrity": "sha512-tVIaShR3SxBx17AlAUS86jQTweKuJIFRedBB504fCz7YPnXJTYSrVcUHn5IINE2wu4jUQimQK6ihQr90Djrdrg==",
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/posthog-node/-/posthog-node-3.6.0.tgz",
"integrity": "sha512-N/4//SIQR4fhwbHnDdJ2rQCYdu9wo0EVPK4lVgZswp5R/E42RKlpuO6ZfPsBl+Bcg06OYiOd/WR/jLV90FCoSw==",
"dependencies": {
"axios": "^1.6.2",
"rusha": "^0.8.14"
@@ -10567,6 +10557,7 @@
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
"integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==",
"dev": true,
"funding": [
{
"type": "github",
@@ -10913,6 +10904,7 @@
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz",
"integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==",
"dev": true,
"funding": [
{
"type": "github",
@@ -11713,6 +11705,7 @@
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
"integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
"dev": true,
"dependencies": {
"is-number": "^7.0.0"
},


@@ -72,7 +72,7 @@
"dependencies": {
"@aws-sdk/client-secrets-manager": "^3.504.0",
"@casl/ability": "^6.5.0",
"@fastify/cookie": "^9.3.1",
"@fastify/cookie": "^9.2.0",
"@fastify/cors": "^8.5.0",
"@fastify/etag": "^5.1.0",
"@fastify/formbody": "^7.4.0",
@@ -90,11 +90,11 @@
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.31.2",
"aws-sdk": "^2.1553.0",
"aws-sdk": "^2.1549.0",
"axios": "^1.6.7",
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"bullmq": "^5.3.3",
"bullmq": "^5.1.6",
"dotenv": "^16.4.1",
"fastify": "^4.26.0",
"fastify-plugin": "^4.5.1",
@@ -117,7 +117,7 @@
"pg": "^8.11.3",
"picomatch": "^3.0.1",
"pino": "^8.16.2",
"posthog-node": "^3.6.2",
"posthog-node": "^3.6.0",
"probot": "^13.0.0",
"smee-client": "^2.0.0",
"tweetnacl": "^1.0.3",


@@ -44,7 +44,7 @@ const getZodDefaultValue = (type: unknown, value: string | number | boolean | Ob
if (!value || value === "null") return;
switch (type) {
case "uuid":
return `.default("00000000-0000-0000-0000-000000000000")`;
return;
case "character varying": {
if (value === "gen_random_uuid()") return;
if (typeof value === "string" && value.includes("::")) {
@@ -100,8 +100,7 @@ const main = async () => {
const columnName = columnNames[colNum];
const colInfo = columns[columnName];
let ztype = getZodPrimitiveType(colInfo.type);
// don't put optional on id
if (colInfo.defaultValue && columnName !== "id") {
if (colInfo.defaultValue) {
const { defaultValue } = colInfo;
const zSchema = getZodDefaultValue(colInfo.type, defaultValue);
if (zSchema) {
@@ -121,7 +120,6 @@ const main = async () => {
.split("_")
.reduce((prev, curr) => prev + `${curr.at(0)?.toUpperCase()}${curr.slice(1).toLowerCase()}`, "");
// the insert and update are changed to zod input type to use default cases
writeFileSync(
path.join(__dirname, "../src/db/schemas", `${dashcase}.ts`),
`// Code generated by automation script, DO NOT EDIT.
@@ -136,8 +134,8 @@ import { TImmutableDBKeys } from "./models";
export const ${pascalCase}Schema = z.object({${schema}});
export type T${pascalCase} = z.infer<typeof ${pascalCase}Schema>;
export type T${pascalCase}Insert = Omit<z.input<typeof ${pascalCase}Schema>, TImmutableDBKeys>;
export type T${pascalCase}Update = Partial<Omit<z.input<typeof ${pascalCase}Schema>, TImmutableDBKeys>>;
export type T${pascalCase}Insert = Omit<T${pascalCase}, TImmutableDBKeys>;
export type T${pascalCase}Update = Partial<Omit<T${pascalCase}, TImmutableDBKeys>>;
`
);
}
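Note on the recurring Insert/Update type change in the generator above and in the schema files below: for a Zod object, z.input marks columns that carry a .default() as optional, while z.infer (the output type) keeps them required. A minimal sketch with a hypothetical schema (not one of the project's tables):

import { z } from "zod";

const DemoSchema = z.object({
  id: z.string().uuid(),
  isActive: z.boolean().default(true)
});

type TDemo = z.infer<typeof DemoSchema>;      // { id: string; isActive: boolean }
type TDemoInput = z.input<typeof DemoSchema>; // { id: string; isActive?: boolean }

// Built from the input type, an insert may omit defaulted columns...
const insertRow: Omit<TDemoInput, "id"> = {};
// ...while the output-type variant requires them to be present.
const strictRow: Omit<TDemo, "id"> = { isActive: false };

console.log(insertRow, strictRow, DemoSchema.parse({ id: "00000000-0000-0000-0000-000000000000" }));
// parse fills the default: { id: '00000000-0000-0000-0000-000000000000', isActive: true }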

backend/src/cache/redis.ts (vendored, new file): 6 changed lines

@@ -0,0 +1,6 @@
import Redis from "ioredis";
export const initRedisConnection = (redisUrl: string) => {
const redis = new Redis(redisUrl);
return redis;
};


@@ -17,15 +17,7 @@ dotenv.config({
export default {
development: {
client: "postgres",
connection: {
connectionString: process.env.DB_CONNECTION_URI,
ssl: process.env.DB_ROOT_CERT
? {
rejectUnauthorized: true,
ca: Buffer.from(process.env.DB_ROOT_CERT, "base64").toString("ascii")
}
: false
},
connection: process.env.DB_CONNECTION_URI,
pool: {
min: 2,
max: 10
@@ -39,15 +31,7 @@ export default {
},
production: {
client: "postgres",
connection: {
connectionString: process.env.DB_CONNECTION_URI,
ssl: process.env.DB_ROOT_CERT
? {
rejectUnauthorized: true,
ca: Buffer.from(process.env.DB_ROOT_CERT, "base64").toString("ascii")
}
: false
},
connection: process.env.DB_CONNECTION_URI,
pool: {
min: 2,
max: 10


@@ -1,25 +0,0 @@
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-nocheck
import { Knex } from "knex";
import { TableName } from "../schemas";
const ADMIN_CONFIG_UUID = "00000000-0000-0000-0000-000000000000";
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.uuid("instanceId").notNullable().defaultTo(knex.fn.uuid());
});
const superUserConfigExists = await knex(TableName.SuperAdmin).where("id", ADMIN_CONFIG_UUID).first();
if (!superUserConfigExists) {
// eslint-disable-next-line
await knex(TableName.SuperAdmin).update({ id: ADMIN_CONFIG_UUID }).whereNotNull("id").limit(1);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.dropColumn("instanceId");
});
}


@@ -19,5 +19,5 @@ export const ApiKeysSchema = z.object({
});
export type TApiKeys = z.infer<typeof ApiKeysSchema>;
export type TApiKeysInsert = Omit<z.input<typeof ApiKeysSchema>, TImmutableDBKeys>;
export type TApiKeysUpdate = Partial<Omit<z.input<typeof ApiKeysSchema>, TImmutableDBKeys>>;
export type TApiKeysInsert = Omit<TApiKeys, TImmutableDBKeys>;
export type TApiKeysUpdate = Partial<Omit<TApiKeys, TImmutableDBKeys>>;


@@ -24,5 +24,5 @@ export const AuditLogsSchema = z.object({
});
export type TAuditLogs = z.infer<typeof AuditLogsSchema>;
export type TAuditLogsInsert = Omit<z.input<typeof AuditLogsSchema>, TImmutableDBKeys>;
export type TAuditLogsUpdate = Partial<Omit<z.input<typeof AuditLogsSchema>, TImmutableDBKeys>>;
export type TAuditLogsInsert = Omit<TAuditLogs, TImmutableDBKeys>;
export type TAuditLogsUpdate = Partial<Omit<TAuditLogs, TImmutableDBKeys>>;


@@ -20,5 +20,5 @@ export const AuthTokenSessionsSchema = z.object({
});
export type TAuthTokenSessions = z.infer<typeof AuthTokenSessionsSchema>;
export type TAuthTokenSessionsInsert = Omit<z.input<typeof AuthTokenSessionsSchema>, TImmutableDBKeys>;
export type TAuthTokenSessionsUpdate = Partial<Omit<z.input<typeof AuthTokenSessionsSchema>, TImmutableDBKeys>>;
export type TAuthTokenSessionsInsert = Omit<TAuthTokenSessions, TImmutableDBKeys>;
export type TAuthTokenSessionsUpdate = Partial<Omit<TAuthTokenSessions, TImmutableDBKeys>>;


@@ -21,5 +21,5 @@ export const AuthTokensSchema = z.object({
});
export type TAuthTokens = z.infer<typeof AuthTokensSchema>;
export type TAuthTokensInsert = Omit<z.input<typeof AuthTokensSchema>, TImmutableDBKeys>;
export type TAuthTokensUpdate = Partial<Omit<z.input<typeof AuthTokensSchema>, TImmutableDBKeys>>;
export type TAuthTokensInsert = Omit<TAuthTokens, TImmutableDBKeys>;
export type TAuthTokensUpdate = Partial<Omit<TAuthTokens, TImmutableDBKeys>>;


@@ -22,5 +22,5 @@ export const BackupPrivateKeySchema = z.object({
});
export type TBackupPrivateKey = z.infer<typeof BackupPrivateKeySchema>;
export type TBackupPrivateKeyInsert = Omit<z.input<typeof BackupPrivateKeySchema>, TImmutableDBKeys>;
export type TBackupPrivateKeyUpdate = Partial<Omit<z.input<typeof BackupPrivateKeySchema>, TImmutableDBKeys>>;
export type TBackupPrivateKeyInsert = Omit<TBackupPrivateKey, TImmutableDBKeys>;
export type TBackupPrivateKeyUpdate = Partial<Omit<TBackupPrivateKey, TImmutableDBKeys>>;


@@ -17,5 +17,5 @@ export const GitAppInstallSessionsSchema = z.object({
});
export type TGitAppInstallSessions = z.infer<typeof GitAppInstallSessionsSchema>;
export type TGitAppInstallSessionsInsert = Omit<z.input<typeof GitAppInstallSessionsSchema>, TImmutableDBKeys>;
export type TGitAppInstallSessionsUpdate = Partial<Omit<z.input<typeof GitAppInstallSessionsSchema>, TImmutableDBKeys>>;
export type TGitAppInstallSessionsInsert = Omit<TGitAppInstallSessions, TImmutableDBKeys>;
export type TGitAppInstallSessionsUpdate = Partial<Omit<TGitAppInstallSessions, TImmutableDBKeys>>;


@@ -17,5 +17,5 @@ export const GitAppOrgSchema = z.object({
});
export type TGitAppOrg = z.infer<typeof GitAppOrgSchema>;
export type TGitAppOrgInsert = Omit<z.input<typeof GitAppOrgSchema>, TImmutableDBKeys>;
export type TGitAppOrgUpdate = Partial<Omit<z.input<typeof GitAppOrgSchema>, TImmutableDBKeys>>;
export type TGitAppOrgInsert = Omit<TGitAppOrg, TImmutableDBKeys>;
export type TGitAppOrgUpdate = Partial<Omit<TGitAppOrg, TImmutableDBKeys>>;


@@ -16,5 +16,5 @@ export const IdentitiesSchema = z.object({
});
export type TIdentities = z.infer<typeof IdentitiesSchema>;
export type TIdentitiesInsert = Omit<z.input<typeof IdentitiesSchema>, TImmutableDBKeys>;
export type TIdentitiesUpdate = Partial<Omit<z.input<typeof IdentitiesSchema>, TImmutableDBKeys>>;
export type TIdentitiesInsert = Omit<TIdentities, TImmutableDBKeys>;
export type TIdentitiesUpdate = Partial<Omit<TIdentities, TImmutableDBKeys>>;


@@ -23,5 +23,5 @@ export const IdentityAccessTokensSchema = z.object({
});
export type TIdentityAccessTokens = z.infer<typeof IdentityAccessTokensSchema>;
export type TIdentityAccessTokensInsert = Omit<z.input<typeof IdentityAccessTokensSchema>, TImmutableDBKeys>;
export type TIdentityAccessTokensUpdate = Partial<Omit<z.input<typeof IdentityAccessTokensSchema>, TImmutableDBKeys>>;
export type TIdentityAccessTokensInsert = Omit<TIdentityAccessTokens, TImmutableDBKeys>;
export type TIdentityAccessTokensUpdate = Partial<Omit<TIdentityAccessTokens, TImmutableDBKeys>>;


@@ -18,7 +18,5 @@ export const IdentityOrgMembershipsSchema = z.object({
});
export type TIdentityOrgMemberships = z.infer<typeof IdentityOrgMembershipsSchema>;
export type TIdentityOrgMembershipsInsert = Omit<z.input<typeof IdentityOrgMembershipsSchema>, TImmutableDBKeys>;
export type TIdentityOrgMembershipsUpdate = Partial<
Omit<z.input<typeof IdentityOrgMembershipsSchema>, TImmutableDBKeys>
>;
export type TIdentityOrgMembershipsInsert = Omit<TIdentityOrgMemberships, TImmutableDBKeys>;
export type TIdentityOrgMembershipsUpdate = Partial<Omit<TIdentityOrgMemberships, TImmutableDBKeys>>;


@@ -18,10 +18,5 @@ export const IdentityProjectMembershipsSchema = z.object({
});
export type TIdentityProjectMemberships = z.infer<typeof IdentityProjectMembershipsSchema>;
export type TIdentityProjectMembershipsInsert = Omit<
z.input<typeof IdentityProjectMembershipsSchema>,
TImmutableDBKeys
>;
export type TIdentityProjectMembershipsUpdate = Partial<
Omit<z.input<typeof IdentityProjectMembershipsSchema>, TImmutableDBKeys>
>;
export type TIdentityProjectMembershipsInsert = Omit<TIdentityProjectMemberships, TImmutableDBKeys>;
export type TIdentityProjectMembershipsUpdate = Partial<Omit<TIdentityProjectMemberships, TImmutableDBKeys>>;


@@ -23,7 +23,5 @@ export const IdentityUaClientSecretsSchema = z.object({
});
export type TIdentityUaClientSecrets = z.infer<typeof IdentityUaClientSecretsSchema>;
export type TIdentityUaClientSecretsInsert = Omit<z.input<typeof IdentityUaClientSecretsSchema>, TImmutableDBKeys>;
export type TIdentityUaClientSecretsUpdate = Partial<
Omit<z.input<typeof IdentityUaClientSecretsSchema>, TImmutableDBKeys>
>;
export type TIdentityUaClientSecretsInsert = Omit<TIdentityUaClientSecrets, TImmutableDBKeys>;
export type TIdentityUaClientSecretsUpdate = Partial<Omit<TIdentityUaClientSecrets, TImmutableDBKeys>>;


@@ -21,7 +21,5 @@ export const IdentityUniversalAuthsSchema = z.object({
});
export type TIdentityUniversalAuths = z.infer<typeof IdentityUniversalAuthsSchema>;
export type TIdentityUniversalAuthsInsert = Omit<z.input<typeof IdentityUniversalAuthsSchema>, TImmutableDBKeys>;
export type TIdentityUniversalAuthsUpdate = Partial<
Omit<z.input<typeof IdentityUniversalAuthsSchema>, TImmutableDBKeys>
>;
export type TIdentityUniversalAuthsInsert = Omit<TIdentityUniversalAuths, TImmutableDBKeys>;
export type TIdentityUniversalAuthsUpdate = Partial<Omit<TIdentityUniversalAuths, TImmutableDBKeys>>;


@@ -16,5 +16,5 @@ export const IncidentContactsSchema = z.object({
});
export type TIncidentContacts = z.infer<typeof IncidentContactsSchema>;
export type TIncidentContactsInsert = Omit<z.input<typeof IncidentContactsSchema>, TImmutableDBKeys>;
export type TIncidentContactsUpdate = Partial<Omit<z.input<typeof IncidentContactsSchema>, TImmutableDBKeys>>;
export type TIncidentContactsInsert = Omit<TIncidentContacts, TImmutableDBKeys>;
export type TIncidentContactsUpdate = Partial<Omit<TIncidentContacts, TImmutableDBKeys>>;


@@ -33,5 +33,5 @@ export const IntegrationAuthsSchema = z.object({
});
export type TIntegrationAuths = z.infer<typeof IntegrationAuthsSchema>;
export type TIntegrationAuthsInsert = Omit<z.input<typeof IntegrationAuthsSchema>, TImmutableDBKeys>;
export type TIntegrationAuthsUpdate = Partial<Omit<z.input<typeof IntegrationAuthsSchema>, TImmutableDBKeys>>;
export type TIntegrationAuthsInsert = Omit<TIntegrationAuths, TImmutableDBKeys>;
export type TIntegrationAuthsUpdate = Partial<Omit<TIntegrationAuths, TImmutableDBKeys>>;


@@ -31,5 +31,5 @@ export const IntegrationsSchema = z.object({
});
export type TIntegrations = z.infer<typeof IntegrationsSchema>;
export type TIntegrationsInsert = Omit<z.input<typeof IntegrationsSchema>, TImmutableDBKeys>;
export type TIntegrationsUpdate = Partial<Omit<z.input<typeof IntegrationsSchema>, TImmutableDBKeys>>;
export type TIntegrationsInsert = Omit<TIntegrations, TImmutableDBKeys>;
export type TIntegrationsUpdate = Partial<Omit<TIntegrations, TImmutableDBKeys>>;


@@ -27,5 +27,5 @@ export const OrgBotsSchema = z.object({
});
export type TOrgBots = z.infer<typeof OrgBotsSchema>;
export type TOrgBotsInsert = Omit<z.input<typeof OrgBotsSchema>, TImmutableDBKeys>;
export type TOrgBotsUpdate = Partial<Omit<z.input<typeof OrgBotsSchema>, TImmutableDBKeys>>;
export type TOrgBotsInsert = Omit<TOrgBots, TImmutableDBKeys>;
export type TOrgBotsUpdate = Partial<Omit<TOrgBots, TImmutableDBKeys>>;


@@ -20,5 +20,5 @@ export const OrgMembershipsSchema = z.object({
});
export type TOrgMemberships = z.infer<typeof OrgMembershipsSchema>;
export type TOrgMembershipsInsert = Omit<z.input<typeof OrgMembershipsSchema>, TImmutableDBKeys>;
export type TOrgMembershipsUpdate = Partial<Omit<z.input<typeof OrgMembershipsSchema>, TImmutableDBKeys>>;
export type TOrgMembershipsInsert = Omit<TOrgMemberships, TImmutableDBKeys>;
export type TOrgMembershipsUpdate = Partial<Omit<TOrgMemberships, TImmutableDBKeys>>;


@@ -19,5 +19,5 @@ export const OrgRolesSchema = z.object({
});
export type TOrgRoles = z.infer<typeof OrgRolesSchema>;
export type TOrgRolesInsert = Omit<z.input<typeof OrgRolesSchema>, TImmutableDBKeys>;
export type TOrgRolesUpdate = Partial<Omit<z.input<typeof OrgRolesSchema>, TImmutableDBKeys>>;
export type TOrgRolesInsert = Omit<TOrgRoles, TImmutableDBKeys>;
export type TOrgRolesUpdate = Partial<Omit<TOrgRoles, TImmutableDBKeys>>;


@@ -19,5 +19,5 @@ export const OrganizationsSchema = z.object({
});
export type TOrganizations = z.infer<typeof OrganizationsSchema>;
export type TOrganizationsInsert = Omit<z.input<typeof OrganizationsSchema>, TImmutableDBKeys>;
export type TOrganizationsUpdate = Partial<Omit<z.input<typeof OrganizationsSchema>, TImmutableDBKeys>>;
export type TOrganizationsInsert = Omit<TOrganizations, TImmutableDBKeys>;
export type TOrganizationsUpdate = Partial<Omit<TOrganizations, TImmutableDBKeys>>;


@@ -26,5 +26,5 @@ export const ProjectBotsSchema = z.object({
});
export type TProjectBots = z.infer<typeof ProjectBotsSchema>;
export type TProjectBotsInsert = Omit<z.input<typeof ProjectBotsSchema>, TImmutableDBKeys>;
export type TProjectBotsUpdate = Partial<Omit<z.input<typeof ProjectBotsSchema>, TImmutableDBKeys>>;
export type TProjectBotsInsert = Omit<TProjectBots, TImmutableDBKeys>;
export type TProjectBotsUpdate = Partial<Omit<TProjectBots, TImmutableDBKeys>>;


@@ -18,5 +18,5 @@ export const ProjectEnvironmentsSchema = z.object({
});
export type TProjectEnvironments = z.infer<typeof ProjectEnvironmentsSchema>;
export type TProjectEnvironmentsInsert = Omit<z.input<typeof ProjectEnvironmentsSchema>, TImmutableDBKeys>;
export type TProjectEnvironmentsUpdate = Partial<Omit<z.input<typeof ProjectEnvironmentsSchema>, TImmutableDBKeys>>;
export type TProjectEnvironmentsInsert = Omit<TProjectEnvironments, TImmutableDBKeys>;
export type TProjectEnvironmentsUpdate = Partial<Omit<TProjectEnvironments, TImmutableDBKeys>>;


@@ -19,5 +19,5 @@ export const ProjectKeysSchema = z.object({
});
export type TProjectKeys = z.infer<typeof ProjectKeysSchema>;
export type TProjectKeysInsert = Omit<z.input<typeof ProjectKeysSchema>, TImmutableDBKeys>;
export type TProjectKeysUpdate = Partial<Omit<z.input<typeof ProjectKeysSchema>, TImmutableDBKeys>>;
export type TProjectKeysInsert = Omit<TProjectKeys, TImmutableDBKeys>;
export type TProjectKeysUpdate = Partial<Omit<TProjectKeys, TImmutableDBKeys>>;


@@ -18,5 +18,5 @@ export const ProjectMembershipsSchema = z.object({
});
export type TProjectMemberships = z.infer<typeof ProjectMembershipsSchema>;
export type TProjectMembershipsInsert = Omit<z.input<typeof ProjectMembershipsSchema>, TImmutableDBKeys>;
export type TProjectMembershipsUpdate = Partial<Omit<z.input<typeof ProjectMembershipsSchema>, TImmutableDBKeys>>;
export type TProjectMembershipsInsert = Omit<TProjectMemberships, TImmutableDBKeys>;
export type TProjectMembershipsUpdate = Partial<Omit<TProjectMemberships, TImmutableDBKeys>>;


@@ -19,5 +19,5 @@ export const ProjectRolesSchema = z.object({
});
export type TProjectRoles = z.infer<typeof ProjectRolesSchema>;
export type TProjectRolesInsert = Omit<z.input<typeof ProjectRolesSchema>, TImmutableDBKeys>;
export type TProjectRolesUpdate = Partial<Omit<z.input<typeof ProjectRolesSchema>, TImmutableDBKeys>>;
export type TProjectRolesInsert = Omit<TProjectRoles, TImmutableDBKeys>;
export type TProjectRolesUpdate = Partial<Omit<TProjectRoles, TImmutableDBKeys>>;


@@ -20,5 +20,5 @@ export const ProjectsSchema = z.object({
});
export type TProjects = z.infer<typeof ProjectsSchema>;
export type TProjectsInsert = Omit<z.input<typeof ProjectsSchema>, TImmutableDBKeys>;
export type TProjectsUpdate = Partial<Omit<z.input<typeof ProjectsSchema>, TImmutableDBKeys>>;
export type TProjectsInsert = Omit<TProjects, TImmutableDBKeys>;
export type TProjectsUpdate = Partial<Omit<TProjects, TImmutableDBKeys>>;


@@ -27,5 +27,5 @@ export const SamlConfigsSchema = z.object({
});
export type TSamlConfigs = z.infer<typeof SamlConfigsSchema>;
export type TSamlConfigsInsert = Omit<z.input<typeof SamlConfigsSchema>, TImmutableDBKeys>;
export type TSamlConfigsUpdate = Partial<Omit<z.input<typeof SamlConfigsSchema>, TImmutableDBKeys>>;
export type TSamlConfigsInsert = Omit<TSamlConfigs, TImmutableDBKeys>;
export type TSamlConfigsUpdate = Partial<Omit<TSamlConfigs, TImmutableDBKeys>>;


@@ -17,5 +17,5 @@ export const ScimTokensSchema = z.object({
});
export type TScimTokens = z.infer<typeof ScimTokensSchema>;
export type TScimTokensInsert = Omit<z.input<typeof ScimTokensSchema>, TImmutableDBKeys>;
export type TScimTokensUpdate = Partial<Omit<z.input<typeof ScimTokensSchema>, TImmutableDBKeys>>;
export type TScimTokensInsert = Omit<TScimTokens, TImmutableDBKeys>;
export type TScimTokensUpdate = Partial<Omit<TScimTokens, TImmutableDBKeys>>;


@@ -16,10 +16,5 @@ export const SecretApprovalPoliciesApproversSchema = z.object({
});
export type TSecretApprovalPoliciesApprovers = z.infer<typeof SecretApprovalPoliciesApproversSchema>;
export type TSecretApprovalPoliciesApproversInsert = Omit<
z.input<typeof SecretApprovalPoliciesApproversSchema>,
TImmutableDBKeys
>;
export type TSecretApprovalPoliciesApproversUpdate = Partial<
Omit<z.input<typeof SecretApprovalPoliciesApproversSchema>, TImmutableDBKeys>
>;
export type TSecretApprovalPoliciesApproversInsert = Omit<TSecretApprovalPoliciesApprovers, TImmutableDBKeys>;
export type TSecretApprovalPoliciesApproversUpdate = Partial<Omit<TSecretApprovalPoliciesApprovers, TImmutableDBKeys>>;


@@ -18,7 +18,5 @@ export const SecretApprovalPoliciesSchema = z.object({
});
export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;
export type TSecretApprovalPoliciesInsert = Omit<z.input<typeof SecretApprovalPoliciesSchema>, TImmutableDBKeys>;
export type TSecretApprovalPoliciesUpdate = Partial<
Omit<z.input<typeof SecretApprovalPoliciesSchema>, TImmutableDBKeys>
>;
export type TSecretApprovalPoliciesInsert = Omit<TSecretApprovalPolicies, TImmutableDBKeys>;
export type TSecretApprovalPoliciesUpdate = Partial<Omit<TSecretApprovalPolicies, TImmutableDBKeys>>;


@@ -16,10 +16,5 @@ export const SecretApprovalRequestSecretTagsSchema = z.object({
});
export type TSecretApprovalRequestSecretTags = z.infer<typeof SecretApprovalRequestSecretTagsSchema>;
export type TSecretApprovalRequestSecretTagsInsert = Omit<
z.input<typeof SecretApprovalRequestSecretTagsSchema>,
TImmutableDBKeys
>;
export type TSecretApprovalRequestSecretTagsUpdate = Partial<
Omit<z.input<typeof SecretApprovalRequestSecretTagsSchema>, TImmutableDBKeys>
>;
export type TSecretApprovalRequestSecretTagsInsert = Omit<TSecretApprovalRequestSecretTags, TImmutableDBKeys>;
export type TSecretApprovalRequestSecretTagsUpdate = Partial<Omit<TSecretApprovalRequestSecretTags, TImmutableDBKeys>>;


@@ -17,10 +17,5 @@ export const SecretApprovalRequestsReviewersSchema = z.object({
});
export type TSecretApprovalRequestsReviewers = z.infer<typeof SecretApprovalRequestsReviewersSchema>;
export type TSecretApprovalRequestsReviewersInsert = Omit<
z.input<typeof SecretApprovalRequestsReviewersSchema>,
TImmutableDBKeys
>;
export type TSecretApprovalRequestsReviewersUpdate = Partial<
Omit<z.input<typeof SecretApprovalRequestsReviewersSchema>, TImmutableDBKeys>
>;
export type TSecretApprovalRequestsReviewersInsert = Omit<TSecretApprovalRequestsReviewers, TImmutableDBKeys>;
export type TSecretApprovalRequestsReviewersUpdate = Partial<Omit<TSecretApprovalRequestsReviewers, TImmutableDBKeys>>;


@@ -35,10 +35,5 @@ export const SecretApprovalRequestsSecretsSchema = z.object({
});
export type TSecretApprovalRequestsSecrets = z.infer<typeof SecretApprovalRequestsSecretsSchema>;
export type TSecretApprovalRequestsSecretsInsert = Omit<
z.input<typeof SecretApprovalRequestsSecretsSchema>,
TImmutableDBKeys
>;
export type TSecretApprovalRequestsSecretsUpdate = Partial<
Omit<z.input<typeof SecretApprovalRequestsSecretsSchema>, TImmutableDBKeys>
>;
export type TSecretApprovalRequestsSecretsInsert = Omit<TSecretApprovalRequestsSecrets, TImmutableDBKeys>;
export type TSecretApprovalRequestsSecretsUpdate = Partial<Omit<TSecretApprovalRequestsSecrets, TImmutableDBKeys>>;


@@ -22,7 +22,5 @@ export const SecretApprovalRequestsSchema = z.object({
});
export type TSecretApprovalRequests = z.infer<typeof SecretApprovalRequestsSchema>;
export type TSecretApprovalRequestsInsert = Omit<z.input<typeof SecretApprovalRequestsSchema>, TImmutableDBKeys>;
export type TSecretApprovalRequestsUpdate = Partial<
Omit<z.input<typeof SecretApprovalRequestsSchema>, TImmutableDBKeys>
>;
export type TSecretApprovalRequestsInsert = Omit<TSecretApprovalRequests, TImmutableDBKeys>;
export type TSecretApprovalRequestsUpdate = Partial<Omit<TSecretApprovalRequests, TImmutableDBKeys>>;


@@ -20,5 +20,5 @@ export const SecretBlindIndexesSchema = z.object({
});
export type TSecretBlindIndexes = z.infer<typeof SecretBlindIndexesSchema>;
export type TSecretBlindIndexesInsert = Omit<z.input<typeof SecretBlindIndexesSchema>, TImmutableDBKeys>;
export type TSecretBlindIndexesUpdate = Partial<Omit<z.input<typeof SecretBlindIndexesSchema>, TImmutableDBKeys>>;
export type TSecretBlindIndexesInsert = Omit<TSecretBlindIndexes, TImmutableDBKeys>;
export type TSecretBlindIndexesUpdate = Partial<Omit<TSecretBlindIndexes, TImmutableDBKeys>>;


@@ -18,5 +18,5 @@ export const SecretFolderVersionsSchema = z.object({
});
export type TSecretFolderVersions = z.infer<typeof SecretFolderVersionsSchema>;
export type TSecretFolderVersionsInsert = Omit<z.input<typeof SecretFolderVersionsSchema>, TImmutableDBKeys>;
export type TSecretFolderVersionsUpdate = Partial<Omit<z.input<typeof SecretFolderVersionsSchema>, TImmutableDBKeys>>;
export type TSecretFolderVersionsInsert = Omit<TSecretFolderVersions, TImmutableDBKeys>;
export type TSecretFolderVersionsUpdate = Partial<Omit<TSecretFolderVersions, TImmutableDBKeys>>;


@@ -18,5 +18,5 @@ export const SecretFoldersSchema = z.object({
});
export type TSecretFolders = z.infer<typeof SecretFoldersSchema>;
export type TSecretFoldersInsert = Omit<z.input<typeof SecretFoldersSchema>, TImmutableDBKeys>;
export type TSecretFoldersUpdate = Partial<Omit<z.input<typeof SecretFoldersSchema>, TImmutableDBKeys>>;
export type TSecretFoldersInsert = Omit<TSecretFolders, TImmutableDBKeys>;
export type TSecretFoldersUpdate = Partial<Omit<TSecretFolders, TImmutableDBKeys>>;


@@ -19,5 +19,5 @@ export const SecretImportsSchema = z.object({
});
export type TSecretImports = z.infer<typeof SecretImportsSchema>;
export type TSecretImportsInsert = Omit<z.input<typeof SecretImportsSchema>, TImmutableDBKeys>;
export type TSecretImportsUpdate = Partial<Omit<z.input<typeof SecretImportsSchema>, TImmutableDBKeys>>;
export type TSecretImportsInsert = Omit<TSecretImports, TImmutableDBKeys>;
export type TSecretImportsUpdate = Partial<Omit<TSecretImports, TImmutableDBKeys>>;


@@ -15,5 +15,5 @@ export const SecretRotationOutputsSchema = z.object({
});
export type TSecretRotationOutputs = z.infer<typeof SecretRotationOutputsSchema>;
export type TSecretRotationOutputsInsert = Omit<z.input<typeof SecretRotationOutputsSchema>, TImmutableDBKeys>;
export type TSecretRotationOutputsUpdate = Partial<Omit<z.input<typeof SecretRotationOutputsSchema>, TImmutableDBKeys>>;
export type TSecretRotationOutputsInsert = Omit<TSecretRotationOutputs, TImmutableDBKeys>;
export type TSecretRotationOutputsUpdate = Partial<Omit<TSecretRotationOutputs, TImmutableDBKeys>>;


@@ -26,5 +26,5 @@ export const SecretRotationsSchema = z.object({
});
export type TSecretRotations = z.infer<typeof SecretRotationsSchema>;
export type TSecretRotationsInsert = Omit<z.input<typeof SecretRotationsSchema>, TImmutableDBKeys>;
export type TSecretRotationsUpdate = Partial<Omit<z.input<typeof SecretRotationsSchema>, TImmutableDBKeys>>;
export type TSecretRotationsInsert = Omit<TSecretRotations, TImmutableDBKeys>;
export type TSecretRotationsUpdate = Partial<Omit<TSecretRotations, TImmutableDBKeys>>;


@@ -42,7 +42,5 @@ export const SecretScanningGitRisksSchema = z.object({
});
export type TSecretScanningGitRisks = z.infer<typeof SecretScanningGitRisksSchema>;
export type TSecretScanningGitRisksInsert = Omit<z.input<typeof SecretScanningGitRisksSchema>, TImmutableDBKeys>;
export type TSecretScanningGitRisksUpdate = Partial<
Omit<z.input<typeof SecretScanningGitRisksSchema>, TImmutableDBKeys>
>;
export type TSecretScanningGitRisksInsert = Omit<TSecretScanningGitRisks, TImmutableDBKeys>;
export type TSecretScanningGitRisksUpdate = Partial<Omit<TSecretScanningGitRisks, TImmutableDBKeys>>;


@@ -17,5 +17,5 @@ export const SecretSnapshotFoldersSchema = z.object({
});
export type TSecretSnapshotFolders = z.infer<typeof SecretSnapshotFoldersSchema>;
export type TSecretSnapshotFoldersInsert = Omit<z.input<typeof SecretSnapshotFoldersSchema>, TImmutableDBKeys>;
export type TSecretSnapshotFoldersUpdate = Partial<Omit<z.input<typeof SecretSnapshotFoldersSchema>, TImmutableDBKeys>>;
export type TSecretSnapshotFoldersInsert = Omit<TSecretSnapshotFolders, TImmutableDBKeys>;
export type TSecretSnapshotFoldersUpdate = Partial<Omit<TSecretSnapshotFolders, TImmutableDBKeys>>;


@@ -17,5 +17,5 @@ export const SecretSnapshotSecretsSchema = z.object({
});
export type TSecretSnapshotSecrets = z.infer<typeof SecretSnapshotSecretsSchema>;
export type TSecretSnapshotSecretsInsert = Omit<z.input<typeof SecretSnapshotSecretsSchema>, TImmutableDBKeys>;
export type TSecretSnapshotSecretsUpdate = Partial<Omit<z.input<typeof SecretSnapshotSecretsSchema>, TImmutableDBKeys>>;
export type TSecretSnapshotSecretsInsert = Omit<TSecretSnapshotSecrets, TImmutableDBKeys>;
export type TSecretSnapshotSecretsUpdate = Partial<Omit<TSecretSnapshotSecrets, TImmutableDBKeys>>;


@@ -17,5 +17,5 @@ export const SecretSnapshotsSchema = z.object({
});
export type TSecretSnapshots = z.infer<typeof SecretSnapshotsSchema>;
export type TSecretSnapshotsInsert = Omit<z.input<typeof SecretSnapshotsSchema>, TImmutableDBKeys>;
export type TSecretSnapshotsUpdate = Partial<Omit<z.input<typeof SecretSnapshotsSchema>, TImmutableDBKeys>>;
export type TSecretSnapshotsInsert = Omit<TSecretSnapshots, TImmutableDBKeys>;
export type TSecretSnapshotsUpdate = Partial<Omit<TSecretSnapshots, TImmutableDBKeys>>;


@@ -14,5 +14,5 @@ export const SecretTagJunctionSchema = z.object({
});
export type TSecretTagJunction = z.infer<typeof SecretTagJunctionSchema>;
export type TSecretTagJunctionInsert = Omit<z.input<typeof SecretTagJunctionSchema>, TImmutableDBKeys>;
export type TSecretTagJunctionUpdate = Partial<Omit<z.input<typeof SecretTagJunctionSchema>, TImmutableDBKeys>>;
export type TSecretTagJunctionInsert = Omit<TSecretTagJunction, TImmutableDBKeys>;
export type TSecretTagJunctionUpdate = Partial<Omit<TSecretTagJunction, TImmutableDBKeys>>;


@@ -19,5 +19,5 @@ export const SecretTagsSchema = z.object({
});
export type TSecretTags = z.infer<typeof SecretTagsSchema>;
export type TSecretTagsInsert = Omit<z.input<typeof SecretTagsSchema>, TImmutableDBKeys>;
export type TSecretTagsUpdate = Partial<Omit<z.input<typeof SecretTagsSchema>, TImmutableDBKeys>>;
export type TSecretTagsInsert = Omit<TSecretTags, TImmutableDBKeys>;
export type TSecretTagsUpdate = Partial<Omit<TSecretTags, TImmutableDBKeys>>;


@@ -14,7 +14,5 @@ export const SecretVersionTagJunctionSchema = z.object({
});
export type TSecretVersionTagJunction = z.infer<typeof SecretVersionTagJunctionSchema>;
export type TSecretVersionTagJunctionInsert = Omit<z.input<typeof SecretVersionTagJunctionSchema>, TImmutableDBKeys>;
export type TSecretVersionTagJunctionUpdate = Partial<
Omit<z.input<typeof SecretVersionTagJunctionSchema>, TImmutableDBKeys>
>;
export type TSecretVersionTagJunctionInsert = Omit<TSecretVersionTagJunction, TImmutableDBKeys>;
export type TSecretVersionTagJunctionUpdate = Partial<Omit<TSecretVersionTagJunction, TImmutableDBKeys>>;


@@ -36,5 +36,5 @@ export const SecretVersionsSchema = z.object({
});
export type TSecretVersions = z.infer<typeof SecretVersionsSchema>;
export type TSecretVersionsInsert = Omit<z.input<typeof SecretVersionsSchema>, TImmutableDBKeys>;
export type TSecretVersionsUpdate = Partial<Omit<z.input<typeof SecretVersionsSchema>, TImmutableDBKeys>>;
export type TSecretVersionsInsert = Omit<TSecretVersions, TImmutableDBKeys>;
export type TSecretVersionsUpdate = Partial<Omit<TSecretVersions, TImmutableDBKeys>>;


@@ -34,5 +34,5 @@ export const SecretsSchema = z.object({
});
export type TSecrets = z.infer<typeof SecretsSchema>;
export type TSecretsInsert = Omit<z.input<typeof SecretsSchema>, TImmutableDBKeys>;
export type TSecretsUpdate = Partial<Omit<z.input<typeof SecretsSchema>, TImmutableDBKeys>>;
export type TSecretsInsert = Omit<TSecrets, TImmutableDBKeys>;
export type TSecretsUpdate = Partial<Omit<TSecrets, TImmutableDBKeys>>;


@@ -25,5 +25,5 @@ export const ServiceTokensSchema = z.object({
});
export type TServiceTokens = z.infer<typeof ServiceTokensSchema>;
export type TServiceTokensInsert = Omit<z.input<typeof ServiceTokensSchema>, TImmutableDBKeys>;
export type TServiceTokensUpdate = Partial<Omit<z.input<typeof ServiceTokensSchema>, TImmutableDBKeys>>;
export type TServiceTokensInsert = Omit<TServiceTokens, TImmutableDBKeys>;
export type TServiceTokensUpdate = Partial<Omit<TServiceTokens, TImmutableDBKeys>>;


@@ -13,10 +13,9 @@ export const SuperAdminSchema = z.object({
allowSignUp: z.boolean().default(true).nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
allowedSignUpDomain: z.string().nullable().optional(),
instanceId: z.string().uuid().default("00000000-0000-0000-0000-000000000000")
allowedSignUpDomain: z.string().nullable().optional()
});
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;
export type TSuperAdminInsert = Omit<z.input<typeof SuperAdminSchema>, TImmutableDBKeys>;
export type TSuperAdminUpdate = Partial<Omit<z.input<typeof SuperAdminSchema>, TImmutableDBKeys>>;
export type TSuperAdminInsert = Omit<TSuperAdmin, TImmutableDBKeys>;
export type TSuperAdminUpdate = Partial<Omit<TSuperAdmin, TImmutableDBKeys>>;


@@ -20,5 +20,5 @@ export const TrustedIpsSchema = z.object({
});
export type TTrustedIps = z.infer<typeof TrustedIpsSchema>;
export type TTrustedIpsInsert = Omit<z.input<typeof TrustedIpsSchema>, TImmutableDBKeys>;
export type TTrustedIpsUpdate = Partial<Omit<z.input<typeof TrustedIpsSchema>, TImmutableDBKeys>>;
export type TTrustedIpsInsert = Omit<TTrustedIps, TImmutableDBKeys>;
export type TTrustedIpsUpdate = Partial<Omit<TTrustedIps, TImmutableDBKeys>>;


@@ -16,5 +16,5 @@ export const UserActionsSchema = z.object({
});
export type TUserActions = z.infer<typeof UserActionsSchema>;
export type TUserActionsInsert = Omit<z.input<typeof UserActionsSchema>, TImmutableDBKeys>;
export type TUserActionsUpdate = Partial<Omit<z.input<typeof UserActionsSchema>, TImmutableDBKeys>>;
export type TUserActionsInsert = Omit<TUserActions, TImmutableDBKeys>;
export type TUserActionsUpdate = Partial<Omit<TUserActions, TImmutableDBKeys>>;


@@ -25,5 +25,5 @@ export const UserEncryptionKeysSchema = z.object({
});
export type TUserEncryptionKeys = z.infer<typeof UserEncryptionKeysSchema>;
export type TUserEncryptionKeysInsert = Omit<z.input<typeof UserEncryptionKeysSchema>, TImmutableDBKeys>;
export type TUserEncryptionKeysUpdate = Partial<Omit<z.input<typeof UserEncryptionKeysSchema>, TImmutableDBKeys>>;
export type TUserEncryptionKeysInsert = Omit<TUserEncryptionKeys, TImmutableDBKeys>;
export type TUserEncryptionKeysUpdate = Partial<Omit<TUserEncryptionKeys, TImmutableDBKeys>>;


@@ -24,5 +24,5 @@ export const UsersSchema = z.object({
});
export type TUsers = z.infer<typeof UsersSchema>;
export type TUsersInsert = Omit<z.input<typeof UsersSchema>, TImmutableDBKeys>;
export type TUsersUpdate = Partial<Omit<z.input<typeof UsersSchema>, TImmutableDBKeys>>;
export type TUsersInsert = Omit<TUsers, TImmutableDBKeys>;
export type TUsersUpdate = Partial<Omit<TUsers, TImmutableDBKeys>>;


@@ -25,5 +25,5 @@ export const WebhooksSchema = z.object({
});
export type TWebhooks = z.infer<typeof WebhooksSchema>;
export type TWebhooksInsert = Omit<z.input<typeof WebhooksSchema>, TImmutableDBKeys>;
export type TWebhooksUpdate = Partial<Omit<z.input<typeof WebhooksSchema>, TImmutableDBKeys>>;
export type TWebhooksInsert = Omit<TWebhooks, TImmutableDBKeys>;
export type TWebhooksUpdate = Partial<Omit<TWebhooks, TImmutableDBKeys>>;


@@ -505,9 +505,6 @@ export const licenseServiceFactory = ({ orgDAL, permissionService, licenseDAL }:
get isValidLicense() {
return isValidLicense;
},
getInstanceType() {
return instanceType;
},
getPlan,
updateSubscriptionOrgMemberCount,
refreshPlan,


@@ -240,7 +240,7 @@ export const secretRotationQueueFactory = ({
);
});
await telemetryService.sendPostHogEvents({
telemetryService.sendPostHogEvents({
event: PostHogEventTypes.SecretRotated,
distinctId: "",
properties: {
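The telemetry hunks in this and the following files toggle between awaiting sendPostHogEvents and calling it without await. A self-contained illustration of the behavioural difference; the names below are placeholders, not the project's actual API:

// Awaited vs fire-and-forget async calls (placeholder names, not the project's API).
async function sendEvent(event: string): Promise<void> {
  await new Promise((resolve) => setTimeout(resolve, 10)); // stand-in for a network call
  if (!event) throw new Error("missing event name");
}

async function run() {
  // Awaited: the caller waits for delivery and a failure rejects this function.
  await sendEvent("secret.rotated");

  // Fire-and-forget: execution continues immediately; handle rejection explicitly
  // so it does not surface as an unhandled promise rejection.
  void sendEvent("secret.rotated").catch((err) => console.error("telemetry failed", err));
}

run().catch(console.error);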


@@ -158,7 +158,7 @@ export const secretScanningQueueFactory = ({
});
}
await telemetryService.sendPostHogEvents({
telemetryService.sendPostHogEvents({
event: PostHogEventTypes.SecretScannerPush,
distinctId: repository.fullName,
properties: {
@@ -228,7 +228,7 @@ export const secretScanningQueueFactory = ({
});
}
await telemetryService.sendPostHogEvents({
telemetryService.sendPostHogEvents({
event: PostHogEventTypes.SecretScannerFull,
distinctId: repository.fullName,
properties: {


@@ -1,20 +0,0 @@
import { Redis } from "ioredis";
export type TKeyStoreFactory = ReturnType<typeof keyStoreFactory>;
export const keyStoreFactory = (redisUrl: string) => {
const redis = new Redis(redisUrl);
const setItem = async (key: string, value: string | number | Buffer) => redis.set(key, value);
const getItem = async (key: string) => redis.get(key);
const setItemWithExpiry = async (key: string, exp: number | string, value: string | number | Buffer) =>
redis.setex(key, exp, value);
const deleteItem = async (key: string) => redis.del(key);
const incrementBy = async (key: string, value: number) => redis.incrby(key, value);
return { setItem, getItem, setItemWithExpiry, deleteItem, incrementBy };
};
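A usage sketch for the key-store factory in the hunk above, assuming a reachable Redis instance; the method signatures are the ones shown in the hunk:

import { keyStoreFactory } from "@app/keystore/keystore";

const keyStore = keyStoreFactory(process.env.REDIS_URL ?? "redis://localhost:6379");

async function demo() {
  await keyStore.setItem("greeting", "hello");             // redis SET
  await keyStore.setItemWithExpiry("session:1", 60, "x");  // redis SETEX, expires in 60 seconds
  const count = await keyStore.incrementBy("pulls", 1);    // atomic redis INCRBY
  console.log(await keyStore.getItem("greeting"), count);
  await keyStore.deleteItem("session:1");                  // redis DEL
}

demo().catch(console.error);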


@@ -94,17 +94,14 @@ const envSchema = z
SECRET_SCANNING_WEBHOOK_SECRET: zpStr(z.string().optional()),
SECRET_SCANNING_GIT_APP_ID: zpStr(z.string().optional()),
SECRET_SCANNING_PRIVATE_KEY: zpStr(z.string().optional()),
// LICENSE
// LICENCE
LICENSE_SERVER_URL: zpStr(z.string().optional().default("https://portal.infisical.com")),
LICENSE_SERVER_KEY: zpStr(z.string().optional()),
LICENSE_KEY: zpStr(z.string().optional()),
// GENERIC
STANDALONE_MODE: z
.enum(["true", "false"])
.transform((val) => val === "true")
.optional(),
INFISICAL_CLOUD: zodStrBool.default("false")
.optional()
})
.transform((data) => ({
...data,
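For the env hunk above, a small sketch of how the string-boolean parsing behaves; zodStrBool is assumed here to be the same enum/transform pattern the STANDALONE_MODE lines spell out:

import { z } from "zod";

// Assumed definition, mirroring the inline STANDALONE_MODE pattern.
const zodStrBool = z.enum(["true", "false"]).transform((val) => val === "true");

const EnvSchema = z.object({
  STANDALONE_MODE: zodStrBool.optional(),
  INFISICAL_CLOUD: zodStrBool.default("false")
});

console.log(EnvSchema.parse({ INFISICAL_CLOUD: "true" }));
// logs { INFISICAL_CLOUD: true } — the string env value is coerced to a boolean,
// and the omitted STANDALONE_MODE stays absent.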


@@ -1,7 +1,6 @@
import dotenv from "dotenv";
import { initDbConnection } from "./db";
import { keyStoreFactory } from "./keystore/keystore";
import { formatSmtpConfig, initEnvConfig } from "./lib/config/env";
import { initLogger } from "./lib/logger";
import { queueServiceFactory } from "./queue";
@@ -20,9 +19,8 @@ const run = async () => {
const smtp = smtpServiceFactory(formatSmtpConfig());
const queue = queueServiceFactory(appCfg.REDIS_URL);
const keyStore = keyStoreFactory(appCfg.REDIS_URL);
const server = await main({ db, smtp, logger, queue, keyStore });
const server = await main({ db, smtp, logger, queue });
const bootstrap = await bootstrapCheck({ db });
// eslint-disable-next-line
process.on("SIGINT", async () => {


@@ -13,7 +13,6 @@ export enum QueueName {
SecretReminder = "secret-reminder",
AuditLog = "audit-log",
AuditLogPrune = "audit-log-prune",
TelemetryInstanceStats = "telemtry-self-hosted-stats",
IntegrationSync = "sync-integrations",
SecretWebhook = "secret-webhook",
SecretFullRepoScan = "secret-full-repo-scan",
@@ -27,7 +26,6 @@ export enum QueueJobs {
AuditLog = "audit-log-job",
AuditLogPrune = "audit-log-prune-job",
SecWebhook = "secret-webhook-trigger",
TelemetryInstanceStats = "telemetry-self-hosted-stats",
IntegrationSync = "secret-integration-pull",
SecretScan = "secret-scan",
UpgradeProjectToGhost = "upgrade-project-to-ghost-job"
@@ -69,6 +67,7 @@ export type TQueueJobTypes = {
payload: TScanFullRepoEventPayload;
};
[QueueName.SecretPushEventScan]: { name: QueueJobs.SecretScan; payload: TScanPushEventPayload };
[QueueName.UpgradeProjectToGhost]: {
name: QueueJobs.UpgradeProjectToGhost;
payload: {
@@ -82,10 +81,6 @@ export type TQueueJobTypes = {
};
};
};
[QueueName.TelemetryInstanceStats]: {
name: QueueJobs.TelemetryInstanceStats;
payload: undefined;
};
};
export type TQueueServiceFactory = ReturnType<typeof queueServiceFactory>;


@@ -14,7 +14,6 @@ import fasitfy from "fastify";
import { Knex } from "knex";
import { Logger } from "pino";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { TQueueServiceFactory } from "@app/queue";
import { TSmtpService } from "@app/services/smtp/smtp-service";
@@ -32,11 +31,10 @@ type TMain = {
smtp: TSmtpService;
logger?: Logger;
queue: TQueueServiceFactory;
keyStore: TKeyStoreFactory;
};
// Run the server!
export const main = async ({ db, smtp, logger, queue, keyStore }: TMain) => {
export const main = async ({ db, smtp, logger, queue }: TMain) => {
const appCfg = getConfig();
const server = fasitfy({
logger: appCfg.NODE_ENV === "test" ? false : logger,
@@ -72,7 +70,7 @@ export const main = async ({ db, smtp, logger, queue, keyStore }: TMain) => {
}
await server.register(helmet, { contentSecurityPolicy: false });
await server.register(registerRoutes, { smtp, queue, db, keyStore });
await server.register(registerRoutes, { smtp, queue, db });
if (appCfg.isProductionMode) {
await server.register(registerExternalNextjs, {


@@ -34,7 +34,6 @@ import { snapshotFolderDALFactory } from "@app/ee/services/secret-snapshot/snaps
import { snapshotSecretDALFactory } from "@app/ee/services/secret-snapshot/snapshot-secret-dal";
import { trustedIpDALFactory } from "@app/ee/services/trusted-ip/trusted-ip-dal";
import { trustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { TQueueServiceFactory } from "@app/queue";
import { apiKeyDALFactory } from "@app/services/api-key/api-key-dal";
@@ -97,8 +96,6 @@ import { serviceTokenServiceFactory } from "@app/services/service-token/service-
import { TSmtpService } from "@app/services/smtp/smtp-service";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { getServerCfg, superAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
import { telemetryDALFactory } from "@app/services/telemetry/telemetry-dal";
import { telemetryQueueServiceFactory } from "@app/services/telemetry/telemetry-queue";
import { telemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
import { userDALFactory } from "@app/services/user/user-dal";
import { userServiceFactory } from "@app/services/user/user-service";
@@ -115,12 +112,7 @@ import { registerV3Routes } from "./v3";
export const registerRoutes = async (
server: FastifyZodProvider,
{
db,
smtp: smtpService,
queue: queueService,
keyStore
}: { db: Knex; smtp: TSmtpService; queue: TQueueServiceFactory; keyStore: TKeyStoreFactory }
{ db, smtp: smtpService, queue: queueService }: { db: Knex; smtp: TSmtpService; queue: TQueueServiceFactory }
) => {
await server.register(registerSecretScannerGhApp, { prefix: "/ss-webhook" });
@@ -167,7 +159,6 @@ export const registerRoutes = async (
const auditLogDAL = auditLogDALFactory(db);
const trustedIpDAL = trustedIpDALFactory(db);
const scimDAL = scimDALFactory(db);
const telemetryDAL = telemetryDALFactory(db);
// ee db layer ops
const permissionDAL = permissionDALFactory(db);
@@ -235,16 +226,7 @@ export const registerRoutes = async (
smtpService
});
const telemetryService = telemetryServiceFactory({
keyStore,
licenseService
});
const telemetryQueue = telemetryQueueServiceFactory({
keyStore,
telemetryDAL,
queueService
});
const telemetryService = telemetryServiceFactory();
const tokenService = tokenServiceFactory({ tokenDAL: authTokenDAL, userDAL });
const userService = userServiceFactory({ userDAL });
const loginService = authLoginServiceFactory({ userDAL, smtpService, tokenService });
@@ -281,8 +263,7 @@ export const registerRoutes = async (
userDAL,
authService: loginService,
serverCfgDAL: superAdminDAL,
orgService,
keyStore
orgService
});
const apiKeyService = apiKeyServiceFactory({ apiKeyDAL, userDAL });
@@ -510,13 +491,9 @@ export const registerRoutes = async (
});
await superAdminService.initServerCfg();
//
await auditLogQueue.startAuditLogPruneJob();
// setup the communication with license key server
await licenseService.init();
await auditLogQueue.startAuditLogPruneJob();
await telemetryQueue.startTelemetryCheck();
// inject all services
server.decorate<FastifyZodProvider["services"]>("services", {
login: loginService,


@@ -16,7 +16,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
schema: {
response: {
200: z.object({
config: SuperAdminSchema.omit({ createdAt: true, updatedAt: true })
config: SuperAdminSchema
})
}
},
@@ -90,7 +90,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
userAgent: req.headers["user-agent"] || ""
});
await server.services.telemetry.sendPostHogEvents({
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.AdminInit,
distinctId: user.user.email,
properties: {


@@ -51,7 +51,7 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.MachineIdentityCreated,
distinctId: getTelemetryDistinctId(req),
properties: {


@@ -82,7 +82,7 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.IntegrationCreated,
distinctId: getTelemetryDistinctId(req),
properties: {


@@ -32,7 +32,7 @@ export const registerInviteOrgRouter = async (server: FastifyZodProvider) => {
actorOrgId: req.permission.orgId
});
await server.services.telemetry.sendPostHogEvents({
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.UserOrgInvitation,
distinctId: getTelemetryDistinctId(req),
properties: {


@@ -154,7 +154,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
slug: req.body.slug
});
await server.services.telemetry.sendPostHogEvents({
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.ProjectCreated,
distinctId: getTelemetryDistinctId(req),
properties: {


@@ -95,7 +95,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretPulled,
distinctId: getTelemetryDistinctId(req),
properties: {
@@ -185,7 +185,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretPulled,
distinctId: getTelemetryDistinctId(req),
properties: {
@@ -261,7 +261,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretCreated,
distinctId: getTelemetryDistinctId(req),
properties: {
@@ -336,7 +336,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretUpdated,
distinctId: getTelemetryDistinctId(req),
properties: {
@@ -406,7 +406,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretDeleted,
distinctId: getTelemetryDistinctId(req),
properties: {
@@ -512,7 +512,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
(req.headers["user-agent"] !== "k8-operator" || shouldRecordK8Event);
const approximateNumberTotalSecrets = secrets.length * 20;
if (shouldCapture) {
await server.services.telemetry.sendPostHogEvents({
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretPulled,
distinctId: getTelemetryDistinctId(req),
properties: {
@@ -589,7 +589,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretPulled,
distinctId: getTelemetryDistinctId(req),
properties: {
@@ -752,7 +752,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretCreated,
distinctId: getTelemetryDistinctId(req),
properties: {
@@ -934,7 +934,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretUpdated,
distinctId: getTelemetryDistinctId(req),
properties: {
@@ -1052,7 +1052,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretDeleted,
distinctId: getTelemetryDistinctId(req),
properties: {
@@ -1172,7 +1172,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretCreated,
distinctId: getTelemetryDistinctId(req),
properties: {
@@ -1292,7 +1292,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretUpdated,
distinctId: getTelemetryDistinctId(req),
properties: {
@@ -1400,7 +1400,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretDeleted,
distinctId: getTelemetryDistinctId(req),
properties: {
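
Note: the hunks above toggle the await in front of server.services.telemetry.sendPostHogEvents(...), i.e., between awaiting the capture on the request path and firing it in the background. A minimal TypeScript sketch of how such a call can be detached safely (helper name and types are illustrative, not the actual service wiring):

type PostHogEvent = { event: string; distinctId: string; properties: Record<string, unknown> };

// Detach a telemetry capture from the request lifecycle; errors are logged and
// swallowed so analytics can never fail an API call or leak an unhandled rejection.
function captureInBackground(
  send: (event: PostHogEvent) => Promise<void> | void,
  event: PostHogEvent,
  log: (msg: string) => void = console.error
): void {
  try {
    // Promise.resolve normalizes sync and async implementations of send().
    Promise.resolve(send(event)).catch((err) => log(`telemetry capture failed: ${String(err)}`));
  } catch (err) {
    log(`telemetry capture failed: ${String(err)}`);
  }
}

// usage inside a route handler (sketch):
// captureInBackground((e) => server.services.telemetry.sendPostHogEvents(e), event);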

View File

@@ -238,8 +238,6 @@ export const projectMembershipServiceFactory = ({
if (orgMembers.length !== emails.length) throw new BadRequestError({ message: "Some users are not part of org" });
if (!orgMembers.length) return [];
const existingMembers = await projectMembershipDAL.find({
projectId,
$in: { userId: orgMembers.map(({ user }) => user.id).filter(Boolean) }

View File

@@ -19,7 +19,7 @@ export const secretTagServiceFactory = ({ secretTagDAL, permissionService }: TSe
const { permission } = await permissionService.getProjectPermission(actor, actorId, projectId, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.Tags);
const existingTag = await secretTagDAL.findOne({ slug, projectId });
const existingTag = await secretTagDAL.findOne({ slug });
if (existingTag) throw new BadRequestError({ message: "Tag already exists" });
const newTag = await secretTagDAL.create({

View File

@@ -7,7 +7,7 @@ import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/
import { getConfig } from "@app/lib/config/env";
import { buildSecretBlindIndexFromName, encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { BadRequestError } from "@app/lib/errors";
import { groupBy, pick, unique } from "@app/lib/fn";
import { groupBy, pick } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { ActorType } from "../auth/auth-type";
@@ -202,13 +202,12 @@ export const secretServiceFactory = ({
return deletedSecrets;
};
/**
* Checks and handles secrets using a blind index method.
* The function generates mappings between secret names and their blind indexes, validates user IDs for personal secrets, and retrieves secrets from the database based on their blind indexes.
* For new secrets (isNew = true), it ensures they don't already exist in the database.
* For existing secrets, it verifies their presence in the database.
* If discrepancies are found, errors are thrown. The function returns mappings and the fetched secrets.
*/
// this is a utility function for secret modification
// it checks whether the blind index for a given secret name exists or not
// for a newly created secret, set isNew to true: if the blind index already exists, an error is thrown
// conversely, for an updated secret, a missing blind index throws an error
// it also returns the blind indexes grouped by secretName
const fnSecretBlindIndexCheck = async ({
inputSecrets,
folderId,
@@ -243,18 +242,10 @@ export const secretServiceFactory = ({
if (isNew) {
if (secrets.length) throw new BadRequestError({ message: "Secret already exists" });
} else {
const secretKeysInDB = unique(secrets, (el) => el.secretBlindIndex as string).map(
(el) => blindIndex2KeyName[el.secretBlindIndex as string]
);
const hasUnknownSecretsProvided = secretKeysInDB.length !== inputSecrets.length;
if (hasUnknownSecretsProvided) {
const keysMissingInDB = Object.keys(keyName2BlindIndex).filter((key) => !secretKeysInDB.includes(key));
throw new BadRequestError({
message: `Secret not found: blind index ${keysMissingInDB.join(",")}`
});
}
}
} else if (secrets.length !== inputSecrets.length)
throw new BadRequestError({
message: `Secret not found: blind index ${JSON.stringify(keyName2BlindIndex)}`
});
return { blindIndex2KeyName, keyName2BlindIndex, secrets };
};
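
The comment above describes how secret writes are validated through blind indexes. A minimal TypeScript sketch of the same check, assuming hypothetical buildBlindIndex and findByBlindIndexes helpers (the real service builds these from its DAL and buildSecretBlindIndexFromName):

type SecretRecord = { secretBlindIndex: string };

// For new secrets the blind index must not exist yet; for updates every
// provided name must already be present in the database.
async function checkBlindIndexes(
  names: string[],
  isNew: boolean,
  buildBlindIndex: (name: string) => Promise<string>,
  findByBlindIndexes: (indexes: string[]) => Promise<SecretRecord[]>
) {
  const keyName2BlindIndex: Record<string, string> = {};
  const blindIndex2KeyName: Record<string, string> = {};
  for (const name of names) {
    const idx = await buildBlindIndex(name);
    keyName2BlindIndex[name] = idx;
    blindIndex2KeyName[idx] = name;
  }

  const secrets = await findByBlindIndexes(Object.values(keyName2BlindIndex));
  if (isNew) {
    if (secrets.length) throw new Error("Secret already exists");
  } else if (secrets.length !== names.length) {
    const found = new Set(secrets.map((s) => blindIndex2KeyName[s.secretBlindIndex]));
    const missing = names.filter((n) => !found.has(n));
    throw new Error(`Secret not found: blind index ${missing.join(",")}`);
  }
  return { blindIndex2KeyName, keyName2BlindIndex, secrets };
}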

View File

@@ -1,5 +1,4 @@
import { TSuperAdmin, TSuperAdminUpdate } from "@app/db/schemas";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
@@ -15,7 +14,6 @@ type TSuperAdminServiceFactoryDep = {
userDAL: TUserDALFactory;
authService: Pick<TAuthLoginFactory, "generateUserTokens">;
orgService: Pick<TOrgServiceFactory, "createOrganization">;
keyStore: Pick<TKeyStoreFactory, "getItem" | "setItemWithExpiry" | "deleteItem">;
};
export type TSuperAdminServiceFactory = ReturnType<typeof superAdminServiceFactory>;
@@ -23,53 +21,26 @@ export type TSuperAdminServiceFactory = ReturnType<typeof superAdminServiceFacto
// eslint-disable-next-line
export let getServerCfg: () => Promise<TSuperAdmin>;
const ADMIN_CONFIG_KEY = "infisical-admin-cfg";
const ADMIN_CONFIG_KEY_EXP = 60; // 60s
const ADMIN_CONFIG_DB_UUID = "00000000-0000-0000-0000-000000000000";
export const superAdminServiceFactory = ({
serverCfgDAL,
userDAL,
authService,
orgService,
keyStore
orgService
}: TSuperAdminServiceFactoryDep) => {
const initServerCfg = async () => {
// TODO(akhilmhdh): bad pattern; change this later when time permits
getServerCfg = async () => {
const config = await keyStore.getItem(ADMIN_CONFIG_KEY);
// missing in keystore means fetch from db
if (!config) {
const serverCfg = await serverCfgDAL.findById(ADMIN_CONFIG_DB_UUID);
if (serverCfg) {
await keyStore.setItemWithExpiry(ADMIN_CONFIG_KEY, ADMIN_CONFIG_KEY_EXP, JSON.stringify(serverCfg)); // insert it back to keystore
}
return serverCfg;
}
getServerCfg = () => serverCfgDAL.findOne({});
const keyStoreServerCfg = JSON.parse(config) as TSuperAdmin;
return {
...keyStoreServerCfg,
// this is to allow admin router to work
createdAt: new Date(keyStoreServerCfg.createdAt),
updatedAt: new Date(keyStoreServerCfg.updatedAt)
};
};
// reset on initialized
await keyStore.deleteItem(ADMIN_CONFIG_KEY);
const serverCfg = await serverCfgDAL.findById(ADMIN_CONFIG_DB_UUID);
const serverCfg = await serverCfgDAL.findOne({});
if (serverCfg) return;
// @ts-expect-error id is kept as fixed for idempotence and to avoid race condition
const newCfg = await serverCfgDAL.create({ initialized: false, allowSignUp: true, id: ADMIN_CONFIG_DB_UUID });
const newCfg = await serverCfgDAL.create({ initialized: false, allowSignUp: true });
return newCfg;
};
const updateServerCfg = async (data: TSuperAdminUpdate) => {
const updatedServerCfg = await serverCfgDAL.updateById(ADMIN_CONFIG_DB_UUID, data);
await keyStore.setItemWithExpiry(ADMIN_CONFIG_KEY, ADMIN_CONFIG_KEY_EXP, JSON.stringify(updatedServerCfg));
return updatedServerCfg;
const serverCfg = await getServerCfg();
const cfg = await serverCfgDAL.updateById(serverCfg.id, data);
return cfg;
};
const adminSignUp = async ({
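
The keystore-backed variant in the hunk above implements a cache-aside read of the server config: check the keystore (Redis) first, fall back to the database on a miss, and write the row back with a 60-second expiry. A minimal TypeScript sketch of that pattern, assuming the getItem/setItemWithExpiry keystore shape used elsewhere in this diff:

type KeyStore = {
  getItem: (key: string) => Promise<string | null>;
  setItemWithExpiry: (key: string, expiryInSeconds: number, value: string) => Promise<unknown>;
};

type ServerCfg = { initialized: boolean; allowSignUp: boolean; createdAt: Date; updatedAt: Date };

const ADMIN_CONFIG_KEY = "infisical-admin-cfg";
const ADMIN_CONFIG_KEY_EXP = 60; // seconds, as in the code above

// Cache-aside read: keystore first, DB on miss, then re-populate with a TTL.
async function readServerCfg(
  keyStore: KeyStore,
  findInDb: () => Promise<ServerCfg | undefined>
): Promise<ServerCfg | undefined> {
  const cached = await keyStore.getItem(ADMIN_CONFIG_KEY);
  if (cached) {
    // dates come back as ISO strings after the JSON round trip, so revive them
    const parsed = JSON.parse(cached) as Omit<ServerCfg, "createdAt" | "updatedAt"> & {
      createdAt: string;
      updatedAt: string;
    };
    return { ...parsed, createdAt: new Date(parsed.createdAt), updatedAt: new Date(parsed.updatedAt) };
  }
  const fromDb = await findInDb();
  if (fromDb) {
    await keyStore.setItemWithExpiry(ADMIN_CONFIG_KEY, ADMIN_CONFIG_KEY_EXP, JSON.stringify(fromDb));
  }
  return fromDb;
}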

View File

@@ -1,39 +0,0 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
export type TTelemetryDALFactory = ReturnType<typeof telemetryDALFactory>;
export const telemetryDALFactory = (db: TDbClient) => {
const getTelemetryInstanceStats = async () => {
try {
const userCount = (await db(TableName.Users).where({ isGhost: false }).count().first())?.count as string;
const users = parseInt(userCount || "0", 10);
const identityCount = (await db(TableName.Identity).count().first())?.count as string;
const identities = parseInt(identityCount || "0", 10);
const projectCount = (await db(TableName.Project).count().first())?.count as string;
const projects = parseInt(projectCount || "0", 10);
const secretCount = (await db(TableName.Secret).count().first())?.count as string;
const secrets = parseInt(secretCount || "0", 10);
const organizationNames = await db(TableName.Organization).select("name");
const organizations = organizationNames.length;
return {
users,
identities,
projects,
secrets,
organizations,
organizationNames: organizationNames.map(({ name }) => name)
};
} catch (error) {
throw new DatabaseError({ error, name: "TelemetryInstanceStats" });
}
};
return { getTelemetryInstanceStats };
};

View File

@@ -1,78 +0,0 @@
import { PostHog } from "posthog-node";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { getServerCfg } from "../super-admin/super-admin-service";
import { TTelemetryDALFactory } from "./telemetry-dal";
import { TELEMETRY_SECRET_OPERATIONS_KEY, TELEMETRY_SECRET_PROCESSED_KEY } from "./telemetry-service";
import { PostHogEventTypes } from "./telemetry-types";
type TTelemetryQueueServiceFactoryDep = {
queueService: TQueueServiceFactory;
keyStore: Pick<TKeyStoreFactory, "getItem" | "deleteItem">;
telemetryDAL: TTelemetryDALFactory;
};
export type TTelemetryQueueServiceFactory = ReturnType<typeof telemetryQueueServiceFactory>;
export const telemetryQueueServiceFactory = ({
queueService,
keyStore,
telemetryDAL
}: TTelemetryQueueServiceFactoryDep) => {
const appCfg = getConfig();
const postHog =
appCfg.isProductionMode && appCfg.TELEMETRY_ENABLED
? new PostHog(appCfg.POSTHOG_PROJECT_API_KEY, { host: appCfg.POSTHOG_HOST, flushAt: 1, flushInterval: 0 })
: undefined;
queueService.start(QueueName.TelemetryInstanceStats, async () => {
const { instanceId } = await getServerCfg();
const telemetryStats = await telemetryDAL.getTelemetryInstanceStats();
// parse the redis values into integers
const numberOfSecretOperationsMade = parseInt((await keyStore.getItem(TELEMETRY_SECRET_OPERATIONS_KEY)) || "0", 10);
const numberOfSecretProcessed = parseInt((await keyStore.getItem(TELEMETRY_SECRET_PROCESSED_KEY)) || "0", 10);
const stats = { ...telemetryStats, numberOfSecretProcessed, numberOfSecretOperationsMade };
// send to postHog
postHog?.capture({
event: PostHogEventTypes.TelemetryInstanceStats,
distinctId: instanceId,
properties: stats
});
// reset the stats
await keyStore.deleteItem(TELEMETRY_SECRET_PROCESSED_KEY);
await keyStore.deleteItem(TELEMETRY_SECRET_OPERATIONS_KEY);
});
// every day at midnight a telemetry job executes on self-hosted instances
// it sends telemetry information such as the instance id and the number of secret operations
const startTelemetryCheck = async () => {
// this is a fast way to check whether the instance is cloud or not
if (appCfg.INFISICAL_CLOUD) return;
// clear previous job
await queueService.stopRepeatableJob(
QueueName.TelemetryInstanceStats,
QueueJobs.TelemetryInstanceStats,
{ pattern: "0 0 * * *", utc: true },
QueueName.TelemetryInstanceStats // just a job id
);
if (postHog) {
await queueService.queue(QueueName.TelemetryInstanceStats, QueueJobs.TelemetryInstanceStats, undefined, {
jobId: QueueName.TelemetryInstanceStats,
repeat: { pattern: "0 0 * * *", utc: true }
});
}
};
queueService.listen(QueueName.TelemetryInstanceStats, "failed", (err) => {
logger.error(err?.failedReason, `${QueueName.TelemetryInstanceStats}: failed`);
});
return {
startTelemetryCheck
};
};
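
The deleted telemetry-queue file above registers a repeatable BullMQ job that fires at midnight UTC ("0 0 * * *") to collect and ship instance stats. A minimal standalone TypeScript sketch of that scheduling idea with BullMQ directly (queue name and Redis connection are assumptions, not the service's real configuration):

import { Queue, Worker } from "bullmq";

// Assumed local Redis; the real service takes this from its app config.
const connection = { host: "127.0.0.1", port: 6379 };

const statsQueue = new Queue("telemetry-instance-stats", { connection });

// Repeatable job: one run every day at midnight UTC, deduplicated via jobId.
export async function scheduleInstanceStats() {
  await statsQueue.add(
    "telemetry-instance-stats",
    {},
    { jobId: "telemetry-instance-stats", repeat: { pattern: "0 0 * * *", utc: true } }
  );
}

// Worker body elided: in the code above it reads the aggregate counters from
// the keystore, captures a PostHog event, and resets the counters.
const worker = new Worker(
  "telemetry-instance-stats",
  async () => {
    console.log("collect instance stats and capture a PostHog event here");
  },
  { connection }
);

worker.on("failed", (_job, err) => console.error("telemetry-instance-stats: failed", err));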

View File

@@ -1,24 +1,15 @@
import { PostHog } from "posthog-node";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { InstanceType } from "@app/ee/services/license/license-types";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { logger } from "@app/lib/logger";
import { PostHogEventTypes, TPostHogEvent, TSecretModifiedEvent } from "./telemetry-types";
export const TELEMETRY_SECRET_PROCESSED_KEY = "telemetry-secret-processed";
export const TELEMETRY_SECRET_OPERATIONS_KEY = "telemetry-secret-operations";
import { TPostHogEvent } from "./telemetry-types";
export type TTelemetryServiceFactory = ReturnType<typeof telemetryServiceFactory>;
export type TTelemetryServiceFactoryDep = {
keyStore: Pick<TKeyStoreFactory, "getItem" | "incrementBy">;
licenseService: Pick<TLicenseServiceFactory, "getInstanceType">;
};
export const telemetryServiceFactory = ({ keyStore, licenseService }: TTelemetryServiceFactoryDep) => {
// type TTelemetryServiceFactoryDep = {};
export const telemetryServiceFactory = () => {
const appCfg = getConfig();
if (appCfg.isProductionMode && !appCfg.TELEMETRY_ENABLED) {
@@ -30,9 +21,10 @@ To opt into telemetry, you can set "TELEMETRY_ENABLED=true" within the environme
`);
}
const postHog = appCfg.TELEMETRY_ENABLED
? new PostHog(appCfg.POSTHOG_PROJECT_API_KEY, { host: appCfg.POSTHOG_HOST })
: undefined;
const postHog =
appCfg.isProductionMode && appCfg.TELEMETRY_ENABLED
? new PostHog(appCfg.POSTHOG_PROJECT_API_KEY, { host: appCfg.POSTHOG_HOST })
: undefined;
// used for sending marketing emails
const sendLoopsEvent = async (email: string, firstName?: string, lastName?: string) => {
@@ -59,33 +51,13 @@ To opt into telemetry, you can set "TELEMETRY_ENABLED=true" within the environme
}
};
const sendPostHogEvents = async (event: TPostHogEvent) => {
const sendPostHogEvents = (event: TPostHogEvent) => {
if (postHog) {
const instanceType = licenseService.getInstanceType();
// capture in PostHog only when the instance is cloud or when a signup event happens on self-hosted
if (instanceType === InstanceType.Cloud || event.event === PostHogEventTypes.UserSignedUp) {
postHog.capture({
event: event.event,
distinctId: event.distinctId,
properties: event.properties
});
return;
}
if (
[
PostHogEventTypes.SecretPulled,
PostHogEventTypes.SecretCreated,
PostHogEventTypes.SecretDeleted,
PostHogEventTypes.SecretUpdated
].includes(event.event)
) {
await keyStore.incrementBy(
TELEMETRY_SECRET_PROCESSED_KEY,
(event as TSecretModifiedEvent).properties.numberOfSecrets
);
await keyStore.incrementBy(TELEMETRY_SECRET_OPERATIONS_KEY, 1);
}
postHog.capture({
event: event.event,
distinctId: event.distinctId,
properties: event.properties
});
}
};
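
One side of the hunk above gates PostHog capture by instance type: cloud instances (and self-hosted signups) are captured directly, while self-hosted secret operations are only counted in the keystore so the nightly instance-stats job can report aggregates. A minimal TypeScript sketch of that routing, with the event-type checks abstracted into predicates so no real enum values have to be assumed here:

type TelemetryEvent = { event: string; distinctId: string; properties: { numberOfSecrets?: number } };

async function routeTelemetryEvent(opts: {
  event: TelemetryEvent;
  isCloud: boolean;
  isSignupEvent: (e: TelemetryEvent) => boolean;
  isSecretEvent: (e: TelemetryEvent) => boolean;
  capture: (e: TelemetryEvent) => void;
  incrementBy: (key: string, amount: number) => Promise<number>;
}) {
  const { event, isCloud, isSignupEvent, isSecretEvent, capture, incrementBy } = opts;
  // cloud instances and self-hosted signups go straight to PostHog
  if (isCloud || isSignupEvent(event)) {
    capture(event);
    return;
  }
  // self-hosted secret operations are only tallied locally; the keys match the
  // TELEMETRY_SECRET_* constants shown above
  if (isSecretEvent(event)) {
    await incrementBy("telemetry-secret-processed", event.properties.numberOfSecrets ?? 1);
    await incrementBy("telemetry-secret-operations", 1);
  }
}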

View File

@@ -12,8 +12,7 @@ export enum PostHogEventTypes {
ProjectCreated = "Project Created",
IntegrationCreated = "Integration Created",
MachineIdentityCreated = "Machine Identity Created",
UserOrgInvitation = "User Org Invitation",
TelemetryInstanceStats = "Self Hosted Instance Stats"
UserOrgInvitation = "User Org Invitation"
}
export type TSecretModifiedEvent = {
@@ -102,20 +101,6 @@ export type TUserOrgInvitedEvent = {
};
};
export type TTelemetryInstanceStatsEvent = {
event: PostHogEventTypes.TelemetryInstanceStats;
properties: {
users: number;
identities: number;
projects: number;
secrets: number;
organizations: number;
organizationNames: string[];
numberOfSecretOperationsMade: number;
numberOfSecretProcessed: number;
};
};
export type TPostHogEvent = { distinctId: string } & (
| TSecretModifiedEvent
| TAdminInitEvent
@@ -125,5 +110,4 @@ export type TPostHogEvent = { distinctId: string } & (
| TMachineIdentityCreatedEvent
| TIntegrationCreatedEvent
| TProjectCreateEvent
| TTelemetryInstanceStatsEvent
);

View File

@@ -1,5 +1,5 @@
infisical:
address: "https://app.infisical.com/"
address: "http://localhost:8080"
auth:
type: "universal-auth"
config:
@@ -13,12 +13,3 @@ sinks:
templates:
- source-path: my-dot-ev-secret-template
destination-path: my-dot-env.env
config:
polling-interval: 60s
execute:
command: docker-compose -f docker-compose.prod.yml down && docker-compose -f docker-compose.prod.yml up -d
- source-path: my-dot-ev-secret-template1
destination-path: my-dot-env-1.env
config:
exec:
command: mkdir hello-world1

View File

@@ -490,7 +490,5 @@ func CallGetRawSecretsV3(httpClient *resty.Client, request GetRawSecretsV3Reques
return GetRawSecretsV3Response{}, fmt.Errorf("CallGetRawSecretsV3: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
}
getRawSecretsV3Response.ETag = response.Header().Get("etag")
return getRawSecretsV3Response, nil
}

View File

@@ -505,5 +505,4 @@ type GetRawSecretsV3Response struct {
SecretComment string `json:"secretComment"`
} `json:"secrets"`
Imports []any `json:"imports"`
ETag string
}

View File

@@ -5,15 +5,12 @@ package cmd
import (
"bytes"
"context"
"encoding/base64"
"fmt"
"io/ioutil"
"os"
"os/exec"
"os/signal"
"path"
"runtime"
"strings"
"sync"
"syscall"
@@ -74,56 +71,12 @@ type Template struct {
SourcePath string `yaml:"source-path"`
Base64TemplateContent string `yaml:"base64-template-content"`
DestinationPath string `yaml:"destination-path"`
Config struct { // Configurations for the template
PollingInterval string `yaml:"polling-interval"` // How often to poll for changes in the secret
Execute struct {
Command string `yaml:"command"` // Command to execute once the template has been rendered
Timeout int64 `yaml:"timeout"` // Timeout for the command
} `yaml:"execute"` // Command to execute once the template has been rendered
} `yaml:"config"`
}
func ReadFile(filePath string) ([]byte, error) {
return ioutil.ReadFile(filePath)
}
func ExecuteCommandWithTimeout(command string, timeout int64) error {
shell := [2]string{"sh", "-c"}
if runtime.GOOS == "windows" {
shell = [2]string{"cmd", "/C"}
} else {
currentShell := os.Getenv("SHELL")
if currentShell != "" {
shell[0] = currentShell
}
}
ctx := context.Background()
if timeout > 0 {
var cancel context.CancelFunc
ctx, cancel = context.WithTimeout(context.Background(), time.Duration(timeout)*time.Second)
defer cancel()
}
cmd := exec.CommandContext(ctx, shell[0], shell[1], command)
cmd.Stdin = os.Stdin
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
if err := cmd.Run(); err != nil {
if exitError, ok := err.(*exec.ExitError); ok { // type assertion
if exitError.ProcessState.ExitCode() == -1 {
return fmt.Errorf("command timed out")
}
}
return err
} else {
return nil
}
}
func FileExists(filepath string) bool {
info, err := os.Stat(filepath)
if os.IsNotExist(err) {
@@ -217,24 +170,20 @@ func ParseAgentConfig(configFile []byte) (*Config, error) {
return config, nil
}
func secretTemplateFunction(accessToken string, existingEtag string, currentEtag *string) func(string, string, string) ([]models.SingleEnvironmentVariable, error) {
func secretTemplateFunction(accessToken string) func(string, string, string) ([]models.SingleEnvironmentVariable, error) {
return func(projectID, envSlug, secretPath string) ([]models.SingleEnvironmentVariable, error) {
res, err := util.GetPlainTextSecretsViaMachineIdentity(accessToken, projectID, envSlug, secretPath, false)
secrets, err := util.GetPlainTextSecretsViaMachineIdentity(accessToken, projectID, envSlug, secretPath, false)
if err != nil {
return nil, err
}
if existingEtag != res.Etag {
*currentEtag = res.Etag
}
return res.Secrets, nil
return secrets, nil
}
}
func ProcessTemplate(templatePath string, data interface{}, accessToken string, existingEtag string, currentEtag *string) (*bytes.Buffer, error) {
func ProcessTemplate(templatePath string, data interface{}, accessToken string) (*bytes.Buffer, error) {
// custom template function to fetch secrets from Infisical
secretFunction := secretTemplateFunction(accessToken, existingEtag, currentEtag)
secretFunction := secretTemplateFunction(accessToken)
funcs := template.FuncMap{
"secret": secretFunction,
}
@@ -254,7 +203,7 @@ func ProcessTemplate(templatePath string, data interface{}, accessToken string,
return &buf, nil
}
func ProcessBase64Template(encodedTemplate string, data interface{}, accessToken string, existingEtag string, currentEtag *string) (*bytes.Buffer, error) {
func ProcessBase64Template(encodedTemplate string, data interface{}, accessToken string) (*bytes.Buffer, error) {
// custom template function to fetch secrets from Infisical
decoded, err := base64.StdEncoding.DecodeString(encodedTemplate)
if err != nil {
@@ -263,7 +212,7 @@ func ProcessBase64Template(encodedTemplate string, data interface{}, accessToken
templateString := string(decoded)
secretFunction := secretTemplateFunction(accessToken, existingEtag, currentEtag) // TODO: Fix this
secretFunction := secretTemplateFunction(accessToken)
funcs := template.FuncMap{
"secret": secretFunction,
}
@@ -301,16 +250,7 @@ type TokenManager struct {
}
func NewTokenManager(fileDeposits []Sink, templates []Template, clientIdPath string, clientSecretPath string, newAccessTokenNotificationChan chan bool, removeClientSecretOnRead bool, exitAfterAuth bool) *TokenManager {
return &TokenManager{
filePaths: fileDeposits,
templates: templates,
clientIdPath: clientIdPath,
clientSecretPath: clientSecretPath,
newAccessTokenNotificationChan: newAccessTokenNotificationChan,
removeClientSecretOnRead: removeClientSecretOnRead,
exitAfterAuth: exitAfterAuth,
}
return &TokenManager{filePaths: fileDeposits, templates: templates, clientIdPath: clientIdPath, clientSecretPath: clientSecretPath, newAccessTokenNotificationChan: newAccessTokenNotificationChan, removeClientSecretOnRead: removeClientSecretOnRead, exitAfterAuth: exitAfterAuth}
}
func (tm *TokenManager) SetToken(token string, accessTokenTTL time.Duration, accessTokenMaxTTL time.Duration) {
@@ -488,80 +428,38 @@ func (tm *TokenManager) WriteTokenToFiles() {
}
}
func (tm *TokenManager) WriteTemplateToFile(bytes *bytes.Buffer, template *Template) {
if err := WriteBytesToFile(bytes, template.DestinationPath); err != nil {
log.Error().Msgf("template engine: unable to write secrets to path because %s. Will try again on next cycle", err)
return
}
log.Info().Msgf("template engine: secret template at path %s has been rendered and saved to path %s", template.SourcePath, template.DestinationPath)
}
func (tm *TokenManager) MonitorSecretChanges(secretTemplate Template, sigChan chan os.Signal) {
pollingInterval := time.Duration(5 * time.Minute)
if secretTemplate.Config.PollingInterval != "" {
interval, err := util.ConvertPollingIntervalToTime(secretTemplate.Config.PollingInterval)
if err != nil {
log.Error().Msgf("unable to convert polling interval to time because %v", err)
sigChan <- syscall.SIGINT
return
} else {
pollingInterval = interval
}
}
var existingEtag string
var currentEtag string
var firstRun = true
execTimeout := secretTemplate.Config.Execute.Timeout
execCommand := secretTemplate.Config.Execute.Command
func (tm *TokenManager) FetchSecrets() {
log.Info().Msgf("template engine started...")
for {
token := tm.GetToken()
if token != "" {
var processedTemplate *bytes.Buffer
var err error
if secretTemplate.SourcePath != "" {
processedTemplate, err = ProcessTemplate(secretTemplate.SourcePath, nil, token, existingEtag, &currentEtag)
} else {
processedTemplate, err = ProcessBase64Template(secretTemplate.Base64TemplateContent, nil, token, existingEtag, &currentEtag)
}
if err != nil {
log.Error().Msgf("unable to process template because %v", err)
} else {
if (existingEtag != currentEtag) || firstRun {
tm.WriteTemplateToFile(processedTemplate, &secretTemplate)
existingEtag = currentEtag
if !firstRun && execCommand != "" {
log.Info().Msgf("executing command: %s", execCommand)
err := ExecuteCommandWithTimeout(execCommand, execTimeout)
if err != nil {
log.Error().Msgf("unable to execute command because %v", err)
}
}
if firstRun {
firstRun = false
}
for _, secretTemplate := range tm.templates {
var processedTemplate *bytes.Buffer
var err error
if secretTemplate.SourcePath != "" {
processedTemplate, err = ProcessTemplate(secretTemplate.SourcePath, nil, token)
} else {
processedTemplate, err = ProcessBase64Template(secretTemplate.Base64TemplateContent, nil, token)
}
}
time.Sleep(pollingInterval)
} else {
// Failed to get the access token, so retry in 3 seconds; otherwise the user would have to wait a full polling interval for the first secret render.
time.Sleep(3 * time.Second)
}
if err != nil {
log.Error().Msgf("template engine: unable to render secrets because %s. Will try again on next cycle", err)
continue
}
if err := WriteBytesToFile(processedTemplate, secretTemplate.DestinationPath); err != nil {
log.Error().Msgf("template engine: unable to write secrets to path because %s. Will try again on next cycle", err)
continue
}
log.Info().Msgf("template engine: secret template at path %s has been rendered and saved to path %s", secretTemplate.SourcePath, secretTemplate.DestinationPath)
}
// fetch new secrets every 5 minutes (TODO: add PubSub in the future)
time.Sleep(5 * time.Minute)
}
}
}
@@ -646,11 +544,7 @@ var agentCmd = &cobra.Command{
tm := NewTokenManager(filePaths, agentConfig.Templates, configUniversalAuthType.ClientIDPath, configUniversalAuthType.ClientSecretPath, tokenRefreshNotifier, configUniversalAuthType.RemoveClientSecretOnRead, agentConfig.Infisical.ExitAfterAuth)
go tm.ManageTokenLifecycle()
for i, template := range agentConfig.Templates {
log.Info().Msgf("template engine started for template %v...", i+1)
go tm.MonitorSecretChanges(template, sigChan)
}
go tm.FetchSecrets()
for {
select {
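
One side of the hunk above (MonitorSecretChanges) re-renders a template only when the ETag returned by the raw-secrets endpoint changes, and runs the configured command after a change. The agent itself is Go; below is a minimal sketch of the same change-detection idea, written in TypeScript to stay consistent with the other examples in this document (function names are illustrative):

type FetchResult = { etag: string; rendered: string };

// Poll on an interval, re-render only when the ETag changes, and always render once on the first run.
async function pollForChanges(
  fetchAndRender: () => Promise<FetchResult>,
  onChange: (rendered: string, firstRun: boolean) => Promise<void>,
  intervalMs = 5 * 60 * 1000
): Promise<void> {
  let lastEtag = "";
  let firstRun = true;
  for (;;) {
    try {
      const { etag, rendered } = await fetchAndRender();
      if (firstRun || etag !== lastEtag) {
        await onChange(rendered, firstRun);
        lastEtag = etag;
        firstRun = false;
      }
    } catch (err) {
      console.error("template engine: unable to render secrets, will try again on next cycle", err);
    }
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
}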

View File

@@ -87,12 +87,16 @@ var exportCmd = &cobra.Command{
var output string
if shouldExpandSecrets {
secrets = util.ExpandSecrets(secrets, infisicalToken, "")
}
secrets = util.FilterSecretsByTag(secrets, tagSlugs)
output, err = formatEnvs(secrets, format)
if err != nil {
util.HandleError(err)
substitutions := util.ExpandSecrets(secrets, infisicalToken, "")
output, err = formatEnvs(substitutions, format)
if err != nil {
util.HandleError(err)
}
} else {
output, err = formatEnvs(secrets, format)
if err != nil {
util.HandleError(err)
}
}
fmt.Print(output)

View File

@@ -34,11 +34,6 @@ type SingleEnvironmentVariable struct {
Comment string `json:"comment"`
}
type PlaintextSecretResult struct {
Secrets []SingleEnvironmentVariable
Etag string
}
type SingleFolder struct {
ID string `json:"_id"`
Name string `json:"name"`

View File

@@ -1,41 +0,0 @@
package util
import (
"fmt"
"strconv"
"time"
)
// ConvertPollingIntervalToTime converts a string representation of a polling interval to a time.Duration
func ConvertPollingIntervalToTime(pollingInterval string) (time.Duration, error) {
length := len(pollingInterval)
if length < 2 {
return 0, fmt.Errorf("invalid format")
}
unit := pollingInterval[length-1:]
numberPart := pollingInterval[:length-1]
number, err := strconv.Atoi(numberPart)
if err != nil {
return 0, err
}
switch unit {
case "s":
if number < 60 {
return 0, fmt.Errorf("polling interval should be at least 60 seconds")
}
return time.Duration(number) * time.Second, nil
case "m":
return time.Duration(number) * time.Minute, nil
case "h":
return time.Duration(number) * time.Hour, nil
case "d":
return time.Duration(number) * 24 * time.Hour, nil
case "w":
return time.Duration(number) * 7 * 24 * time.Hour, nil
default:
return 0, fmt.Errorf("invalid time unit")
}
}

View File

@@ -152,7 +152,7 @@ func GetPlainTextSecretsViaJTW(JTWToken string, receiversPrivateKey string, work
return plainTextSecrets, nil
}
func GetPlainTextSecretsViaMachineIdentity(accessToken string, workspaceId string, environmentName string, secretsPath string, includeImports bool) (models.PlaintextSecretResult, error) {
func GetPlainTextSecretsViaMachineIdentity(accessToken string, workspaceId string, environmentName string, secretsPath string, includeImports bool) ([]models.SingleEnvironmentVariable, error) {
httpClient := resty.New()
httpClient.SetAuthToken(accessToken).
SetHeader("Accept", "application/json")
@@ -170,12 +170,12 @@ func GetPlainTextSecretsViaMachineIdentity(accessToken string, workspaceId strin
rawSecrets, err := api.CallGetRawSecretsV3(httpClient, api.GetRawSecretsV3Request{WorkspaceId: workspaceId, SecretPath: secretsPath, Environment: environmentName})
if err != nil {
return models.PlaintextSecretResult{}, err
return nil, err
}
plainTextSecrets := []models.SingleEnvironmentVariable{}
if err != nil {
return models.PlaintextSecretResult{}, fmt.Errorf("unable to decrypt your secrets [err=%v]", err)
return nil, fmt.Errorf("unable to decrypt your secrets [err=%v]", err)
}
for _, secret := range rawSecrets.Secrets {
@@ -189,10 +189,7 @@ func GetPlainTextSecretsViaMachineIdentity(accessToken string, workspaceId strin
// }
// }
return models.PlaintextSecretResult{
Secrets: plainTextSecrets,
Etag: rawSecrets.ETag,
}, nil
return plainTextSecrets, nil
}
func InjectImportedSecret(plainTextWorkspaceKey []byte, secrets []models.SingleEnvironmentVariable, importedSecrets []api.ImportedSecretV3) ([]models.SingleEnvironmentVariable, error) {
@@ -223,30 +220,6 @@ func InjectImportedSecret(plainTextWorkspaceKey []byte, secrets []models.SingleE
return secrets, nil
}
func FilterSecretsByTag(plainTextSecrets []models.SingleEnvironmentVariable, tagSlugs string) []models.SingleEnvironmentVariable {
if tagSlugs == "" {
return plainTextSecrets
}
tagSlugsMap := make(map[string]bool)
tagSlugsList := strings.Split(tagSlugs, ",")
for _, slug := range tagSlugsList {
tagSlugsMap[slug] = true
}
filteredSecrets := []models.SingleEnvironmentVariable{}
for _, secret := range plainTextSecrets {
for _, tag := range secret.Tags {
if tagSlugsMap[tag.Slug] {
filteredSecrets = append(filteredSecrets, secret)
break
}
}
}
return filteredSecrets
}
func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectConfigFilePath string) ([]models.SingleEnvironmentVariable, error) {
var infisicalToken string
if params.InfisicalToken == "" {

View File

@@ -86,7 +86,6 @@ services:
environment:
- NODE_ENV=development
- DB_CONNECTION_URI=postgres://infisical:infisical@db/infisical?sslmode=disable
- TELEMETRY_ENABLED=false
volumes:
- ./backend/src:/app/src

View File

@@ -52,7 +52,7 @@ services:
restart: always
env_file: .env
volumes:
- pg_data:/var/lib/postgresql/data
- pg_data:/data/db
networks:
- infisical
healthcheck:

View File

@@ -4,22 +4,7 @@ title: "Changelog"
The changelog below reflects new product developments and updates on a monthly basis.
## Feb 2024
- Added org-scoped authentication enforcement for SAML
- Added support for [SCIM](https://infisical.com/docs/documentation/platform/scim/overview) along with instructions for setting it up with [Okta](https://infisical.com/docs/documentation/platform/scim/okta), [Azure](https://infisical.com/docs/documentation/platform/scim/azure), and [JumpCloud](https://infisical.com/docs/documentation/platform/scim/jumpcloud).
- Pushed out a project update for non-E2EE with new endpoints, including ones for project creation and member invitation.
- Added API Integration testing for new backend.
- Added capability to create projects in Terraform.
- Added slug-based capabilities to both organizations and projects to gradually make the API more developer-friendly moving forward.
- Fixed + improved various analytics/telemetry-related items.
- Fixed various issues associated with the Python SDK, including the build during installation on macOS and a Rust dependency issue.
- Updated self-hosting documentation to reflect [new backend](https://infisical.com/docs/self-hosting/overview).
- Released [Postgres-based Infisical helm chart](https://cloudsmith.io/~infisical/repos/helm-charts/packages/detail/helm/infisical-standalone/).
- Added checks to ensure that breaking API changes don't get released.
- Automated API reference documentation to stay in line with the latest releases of Infisical.
## Jan 2024
- Completed Postgres migration initiative with restructured Fastify-based backend.
## January 2024
- Reduced size of Infisical Node.js SDK by ≈90%.
- Added secret fallback support to all SDK's.
- Added Machine Identity support to [Terraform Provider](https://github.com/Infisical/terraform-provider-infisical).
@@ -27,21 +12,21 @@ The changelog below reflects new product developments and updates on a monthly b
- Added symmetric encryption support to all SDK's.
- Fixed secret reminders bug, where reminders were not being updated correctly.
## Dec 2023
## December 2023
- Released [(machine) identities](https://infisical.com/docs/documentation/platform/identities/overview) and [universal auth](https://infisical.com/docs/documentation/platform/identities/universal-auth) features.
- Created new cross-language SDKs for [Python](https://infisical.com/docs/sdks/languages/python), [Node](https://infisical.com/docs/sdks/languages/node), and [Java](https://infisical.com/docs/sdks/languages/java).
- Released first version of the [Infisical Agent](https://infisical.com/docs/infisical-agent/overview)
- Added ability to [manage folders via CLI](https://infisical.com/docs/cli/commands/secrets).
## Nov 2023
## November 2023
- Replaced the internal [Winston](https://github.com/winstonjs/winston) logger with the [Pino](https://github.com/pinojs/pino) logging library, with external logging to AWS CloudWatch.
- Added admin panel to self-hosting experience.
- Released [secret rotation](https://infisical.com/docs/documentation/platform/secret-rotation/overview) feature with preliminary support for rotating [SendGrid](https://infisical.com/docs/documentation/platform/secret-rotation/sendgrid), [PostgreSQL/CockroachDB](https://infisical.com/docs/documentation/platform/secret-rotation/postgres), and [MySQL/MariaDB](https://infisical.com/docs/documentation/platform/secret-rotation/mysql) credentials.
- Released secret reminders feature.
## Oct 2023
## October 2023
- Added support for [GitLab SSO](https://infisical.com/docs/documentation/platform/sso/gitlab).
- Became SOC 2 (Type II) certified.
@@ -50,7 +35,7 @@ The changelog below reflects new product developments and updates on a monthly b
- Added native [Hasura Cloud integration](https://infisical.com/docs/integrations/cloud/hasura-cloud).
- Updated resource deletion logic for user, organization, and project deletion.
## Sep 2023
## September 2023
- Released [secret approvals](https://infisical.com/docs/documentation/platform/pr-workflows) feature.
- Released an update to access controls; every user role now clearly defines and enforces a certain set of conditions across Infisical.
@@ -58,7 +43,7 @@ The changelog below reflects new product developments and updates on a monthly b
- Added a native integration with [Qovery](https://infisical.com/docs/integrations/cloud/qovery).
- Added service token generation capability for the CLI.
## Aug 2023
## August 2023
- Released Audit Logs V2.
- Added support for [GitHub SSO](https://infisical.com/docs/documentation/platform/sso/github).
@@ -186,7 +171,7 @@ The changelog below reflects new product developments and updates on a monthly b
- Added sorting capability to sort keys by name alphabetically in dashboard.
- Added downloading secrets back as `.env` file capability.
## Aug 2022
## August 2022
- Released first version of the Infisical platform with push/pull capability and end-to-end encryption.
- Improved security handling of authentication tokens by storing refresh tokens in HttpOnly cookies.

Some files were not shown because too many files have changed in this diff.