Compare commits
83 commits
daniel/nat ... feat/add-o
| SHA1 |
|---|
| f4a1a00b59 |
| b9933d711c |
| 847c2c67ec |
| 76a424dcfb |
| c1ca2a6f8c |
| 9b6602a8e9 |
| 22db286dda |
| 9fd0373e39 |
| 62f92b0bfa |
| abbef4fc44 |
| 34ca942f9d |
| 1acf25dd53 |
| a0653883b6 |
| f3a4c32e3a |
| 6a6fe3e202 |
| 8f4963839e |
| 4c06f134fb |
| 12d3632a03 |
| c34c13887a |
| 378d6c259b |
| 2a538d9560 |
| eafa50747b |
| 77f794e1d0 |
| 3b9afb8b5b |
| 8bf763dd5a |
| e93b465004 |
| 000d87075b |
| 2291bdc036 |
| 791361d2c3 |
| 2a18844ef8 |
| 1dfad876cf |
| 7ddf4492a7 |
| 3c92a2a256 |
| 45683dc4c6 |
| c6043568cf |
| cf690e2e16 |
| c67642786f |
| 41914e0027 |
| a13d4a4970 |
| 5db6ac711c |
| f426025fd5 |
| d6fcba9169 |
| 51d4fcf9ee |
| 316259f218 |
| 7311cf8f6c |
| 5560c18a09 |
| b0c472b5e1 |
| 25a615cbb3 |
| 4502d394a3 |
| 531d3751a8 |
| 2d0d90785f |
| cec884ce34 |
| 346dbee96a |
| d5229a27b2 |
| a11f120a83 |
| 51c1487ed1 |
| c9d6c5c5f7 |
| 3541ddf8ac |
| 0ae286a80e |
| 36b7911bcc |
| 520167a8ff |
| 8c2f709f2a |
| 804314cc18 |
| 0c9557b8b5 |
| fb4f12fa37 |
| 29b106c5bd |
| e7d32b5f2d |
| 862e0437e7 |
| 89eff65124 |
| 2347242776 |
| 3438dbc70d |
| d79d7ca5e8 |
| c097c918ed |
| b801c1e48f |
| cd2b81cb9f |
| bdd65784a1 |
| 73195b07a4 |
| bdff2cd33d |
| 40e7ab33cb |
| 686a28cc09 |
| f11c2d6b3e |
| 3e9ce79398 |
| 75bb651b1d |
@@ -40,13 +40,14 @@ jobs:
          REDIS_URL: redis://172.17.0.1:6379
          DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
          JWT_AUTH_SECRET: something-random
          ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
      - uses: actions/setup-go@v5
        with:
          go-version: '1.21.5'
      - name: Wait for container to be stable and check logs
        run: |
          SECONDS=0
          HEALTHY=0
          while [ $SECONDS -lt 60 ]; do
            if docker ps | grep infisical-api | grep -q healthy; then
              echo "Container is healthy."
@@ -73,4 +74,4 @@ jobs:
        run: |
          docker-compose -f "docker-compose.dev.yml" down
          docker stop infisical-api
          docker remove infisical-api
@@ -1,4 +1,5 @@
import { TKeyStoreFactory } from "@app/keystore/keystore";
+import { Lock } from "@app/lib/red-lock";

export const mockKeyStore = (): TKeyStoreFactory => {
  const store: Record<string, string | number | Buffer> = {};
@@ -25,6 +26,12 @@ export const mockKeyStore = (): TKeyStoreFactory => {
    },
    incrementBy: async () => {
      return 1;
    }
  },
+ acquireLock: () => {
+   return Promise.resolve({
+     release: () => {}
+   }) as Promise<Lock>;
+ },
+ waitTillReady: async () => {}
  };
};
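The new `acquireLock` stub means service code that serializes work through the keystore runs unchanged under this mock. A minimal sketch of the calling pattern it satisfies (the import path, lock key, and `withLock` helper are illustrative, not part of the diff):

```ts
import { mockKeyStore } from "./mocks/keystore"; // path assumed for illustration

const keyStore = mockKeyStore();

// Typical pattern in code under test: acquire, work, release.
async function withLock(resource: string, work: () => Promise<void>) {
  const lock = await keyStore.acquireLock([resource], 5000); // the mock resolves immediately
  try {
    await work();
  } finally {
    await lock.release(); // a no-op in the mock
  }
}

void withLock("REPLICATION_SECRET_example", async () => {
  /* ... */
});
```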
backend/package-lock.json (9 changes, generated)
@@ -51,7 +51,7 @@
        "libsodium-wrappers": "^0.7.13",
        "lodash.isequal": "^4.5.0",
        "ms": "^2.1.3",
-       "mysql2": "^3.9.7",
+       "mysql2": "^3.9.8",
        "nanoid": "^5.0.4",
        "nodemailer": "^6.9.9",
        "ora": "^7.0.1",
@@ -10290,9 +10290,10 @@
      }
    },
    "node_modules/mysql2": {
-     "version": "3.9.7",
-     "resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.9.7.tgz",
-     "integrity": "sha512-KnJT8vYRcNAZv73uf9zpXqNbvBG7DJrs+1nACsjZP1HMJ1TgXEy8wnNilXAn/5i57JizXKtrUtwDB7HxT9DDpw==",
+     "version": "3.9.8",
+     "resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.9.8.tgz",
+     "integrity": "sha512-+5JKNjPuks1FNMoy9TYpl77f+5frbTklz7eb3XDwbpsERRLEeXiW2PDEkakYF50UuKU2qwfGnyXpKYvukv8mGA==",
+     "license": "MIT",
      "dependencies": {
        "denque": "^2.1.0",
        "generate-function": "^2.3.1",
@@ -112,7 +112,7 @@
    "libsodium-wrappers": "^0.7.13",
    "lodash.isequal": "^4.5.0",
    "ms": "^2.1.3",
-   "mysql2": "^3.9.7",
+   "mysql2": "^3.9.8",
    "nanoid": "^5.0.4",
    "nodemailer": "^6.9.9",
    "ora": "^7.0.1",
@@ -35,6 +35,8 @@ const getZodPrimitiveType = (type: string) => {
      return "z.coerce.number()";
    case "text":
      return "z.string()";
+   case "bytea":
+     return "zodBuffer";
    default:
      throw new Error(`Invalid type: ${type}`);
  }
@@ -96,10 +98,15 @@ const main = async () => {
  const columnNames = Object.keys(columns);

  let schema = "";
+ const zodImportSet = new Set<string>();
  for (let colNum = 0; colNum < columnNames.length; colNum++) {
    const columnName = columnNames[colNum];
    const colInfo = columns[columnName];
    let ztype = getZodPrimitiveType(colInfo.type);
+   if (["zodBuffer"].includes(ztype)) {
+     zodImportSet.add(ztype);
+   }
+
    // don't put optional on id
    if (colInfo.defaultValue && columnName !== "id") {
      const { defaultValue } = colInfo;
@@ -121,6 +128,8 @@ const main = async () => {
      .split("_")
      .reduce((prev, curr) => prev + `${curr.at(0)?.toUpperCase()}${curr.slice(1).toLowerCase()}`, "");

+   const zodImports = Array.from(zodImportSet);
+
    // the insert and update are changed to zod input type to use default cases
    writeFileSync(
      path.join(__dirname, "../src/db/schemas", `${dashcase}.ts`),
@@ -131,6 +140,8 @@ const main = async () => {

import { z } from "zod";

+${zodImports.length ? `import { ${zodImports.join(",")} } from \"@app/lib/zod\";` : ""}
+
import { TImmutableDBKeys } from "./models";

export const ${pascalCase}Schema = z.object({${schema}});
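For a concrete picture of what the new `bytea` branch produces, here is roughly the file the generator would now emit for a hypothetical table with a binary column (`example_blobs` and `payload` are invented names):

```ts
// Hypothetical output of `npm run generate:schema` for a table `example_blobs`
// with a bytea column; the shape mirrors the template changes above.
import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const ExampleBlobsSchema = z.object({
  id: z.string().uuid(),
  payload: zodBuffer // bytea column mapped by the new case
});

export type TExampleBlobs = z.infer<typeof ExampleBlobsSchema>;
export type TExampleBlobsInsert = Omit<z.input<typeof ExampleBlobsSchema>, TImmutableDBKeys>;
export type TExampleBlobsUpdate = Partial<Omit<z.input<typeof ExampleBlobsSchema>, TImmutableDBKeys>>;
```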
backend/src/@types/fastify.d.ts (2 changes, vendored)
@@ -52,6 +52,7 @@ import { TSecretServiceFactory } from "@app/services/secret/secret-service";
import { TSecretBlindIndexServiceFactory } from "@app/services/secret-blind-index/secret-blind-index-service";
import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-folder-service";
import { TSecretImportServiceFactory } from "@app/services/secret-import/secret-import-service";
+import { TSecretReplicationServiceFactory } from "@app/services/secret-replication/secret-replication-service";
import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
@@ -108,6 +109,7 @@ declare module "fastify" {
      projectKey: TProjectKeyServiceFactory;
      projectRole: TProjectRoleServiceFactory;
      secret: TSecretServiceFactory;
+     secretReplication: TSecretReplicationServiceFactory;
      secretTag: TSecretTagServiceFactory;
      secretImport: TSecretImportServiceFactory;
      projectBot: TProjectBotServiceFactory;
backend/src/@types/knex.d.ts (21 changes, vendored)
@@ -98,6 +98,15 @@ import {
  TIntegrations,
  TIntegrationsInsert,
  TIntegrationsUpdate,
+ TKmsKeys,
+ TKmsKeysInsert,
+ TKmsKeysUpdate,
+ TKmsKeyVersions,
+ TKmsKeyVersionsInsert,
+ TKmsKeyVersionsUpdate,
+ TKmsRootConfig,
+ TKmsRootConfigInsert,
+ TKmsRootConfigUpdate,
  TLdapConfigs,
  TLdapConfigsInsert,
  TLdapConfigsUpdate,
@@ -176,6 +185,9 @@ import {
  TSecretImports,
  TSecretImportsInsert,
  TSecretImportsUpdate,
+ TSecretReferences,
+ TSecretReferencesInsert,
+ TSecretReferencesUpdate,
  TSecretRotationOutputs,
  TSecretRotationOutputsInsert,
  TSecretRotationOutputsUpdate,
@@ -240,7 +252,6 @@ import {
  TWebhooksInsert,
  TWebhooksUpdate
} from "@app/db/schemas";
-import { TSecretReferences, TSecretReferencesInsert, TSecretReferencesUpdate } from "@app/db/schemas/secret-references";

declare module "knex/types/tables" {
  interface Tables {
@@ -514,5 +525,13 @@ declare module "knex/types/tables" {
      TSecretVersionTagJunctionInsert,
      TSecretVersionTagJunctionUpdate
    >;
+   // KMS service
+   [TableName.KmsServerRootConfig]: Knex.CompositeTableType<
+     TKmsRootConfig,
+     TKmsRootConfigInsert,
+     TKmsRootConfigUpdate
+   >;
+   [TableName.KmsKey]: Knex.CompositeTableType<TKmsKeys, TKmsKeysInsert, TKmsKeysUpdate>;
+   [TableName.KmsKeyVersion]: Knex.CompositeTableType<TKmsKeyVersions, TKmsKeyVersionsInsert, TKmsKeyVersionsUpdate>;
  }
}
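With these registrations, Knex queries against the KMS tables type-check end to end. A small sketch (assumes a configured Knex instance; the algorithm string is illustrative):

```ts
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

// Sketch: the insert payload is checked against TKmsKeysInsert via the
// CompositeTableType registration above; unset columns fall back to DB defaults.
async function createKmsKey(db: Knex, orgId: string) {
  const [key] = await db(TableName.KmsKey)
    .insert({
      encryptedKey: Buffer.from("encrypted-key-material"), // binary column
      encryptionAlgorithm: "aes-256-gcm", // illustrative value
      orgId
    })
    .returning("*");
  return key; // typed as TKmsKeys
}
```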
@@ -0,0 +1,33 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasExpiresAfterViewsColumn = await knex.schema.hasColumn(TableName.SecretSharing, "expiresAfterViews");
  const hasSecretNameColumn = await knex.schema.hasColumn(TableName.SecretSharing, "name");

  await knex.schema.alterTable(TableName.SecretSharing, (t) => {
    if (!hasExpiresAfterViewsColumn) {
      t.integer("expiresAfterViews");
    }

    if (hasSecretNameColumn) {
      t.dropColumn("name");
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasExpiresAfterViewsColumn = await knex.schema.hasColumn(TableName.SecretSharing, "expiresAfterViews");
  const hasSecretNameColumn = await knex.schema.hasColumn(TableName.SecretSharing, "name");

  await knex.schema.alterTable(TableName.SecretSharing, (t) => {
    if (hasExpiresAfterViewsColumn) {
      t.dropColumn("expiresAfterViews");
    }

    if (!hasSecretNameColumn) {
      t.string("name").notNullable();
    }
  });
}
@@ -0,0 +1,85 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesSecretImportIsReplicationExist = await knex.schema.hasColumn(TableName.SecretImport, "isReplication");
  const doesSecretImportIsReplicationSuccessExist = await knex.schema.hasColumn(
    TableName.SecretImport,
    "isReplicationSuccess"
  );
  const doesSecretImportReplicationStatusExist = await knex.schema.hasColumn(
    TableName.SecretImport,
    "replicationStatus"
  );
  const doesSecretImportLastReplicatedExist = await knex.schema.hasColumn(TableName.SecretImport, "lastReplicated");
  const doesSecretImportIsReservedExist = await knex.schema.hasColumn(TableName.SecretImport, "isReserved");

  if (await knex.schema.hasTable(TableName.SecretImport)) {
    await knex.schema.alterTable(TableName.SecretImport, (t) => {
      if (!doesSecretImportIsReplicationExist) t.boolean("isReplication").defaultTo(false);
      if (!doesSecretImportIsReplicationSuccessExist) t.boolean("isReplicationSuccess").nullable();
      if (!doesSecretImportReplicationStatusExist) t.text("replicationStatus").nullable();
      if (!doesSecretImportLastReplicatedExist) t.datetime("lastReplicated").nullable();
      if (!doesSecretImportIsReservedExist) t.boolean("isReserved").defaultTo(false);
    });
  }

  const doesSecretFolderReservedExist = await knex.schema.hasColumn(TableName.SecretFolder, "isReserved");
  if (await knex.schema.hasTable(TableName.SecretFolder)) {
    await knex.schema.alterTable(TableName.SecretFolder, (t) => {
      if (!doesSecretFolderReservedExist) t.boolean("isReserved").defaultTo(false);
    });
  }

  const doesSecretApprovalRequestIsReplicatedExist = await knex.schema.hasColumn(
    TableName.SecretApprovalRequest,
    "isReplicated"
  );
  if (await knex.schema.hasTable(TableName.SecretApprovalRequest)) {
    await knex.schema.alterTable(TableName.SecretApprovalRequest, (t) => {
      if (!doesSecretApprovalRequestIsReplicatedExist) t.boolean("isReplicated");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesSecretImportIsReplicationExist = await knex.schema.hasColumn(TableName.SecretImport, "isReplication");
  const doesSecretImportIsReplicationSuccessExist = await knex.schema.hasColumn(
    TableName.SecretImport,
    "isReplicationSuccess"
  );
  const doesSecretImportReplicationStatusExist = await knex.schema.hasColumn(
    TableName.SecretImport,
    "replicationStatus"
  );
  const doesSecretImportLastReplicatedExist = await knex.schema.hasColumn(TableName.SecretImport, "lastReplicated");
  const doesSecretImportIsReservedExist = await knex.schema.hasColumn(TableName.SecretImport, "isReserved");

  if (await knex.schema.hasTable(TableName.SecretImport)) {
    await knex.schema.alterTable(TableName.SecretImport, (t) => {
      if (doesSecretImportIsReplicationExist) t.dropColumn("isReplication");
      if (doesSecretImportIsReplicationSuccessExist) t.dropColumn("isReplicationSuccess");
      if (doesSecretImportReplicationStatusExist) t.dropColumn("replicationStatus");
      if (doesSecretImportLastReplicatedExist) t.dropColumn("lastReplicated");
      if (doesSecretImportIsReservedExist) t.dropColumn("isReserved");
    });
  }

  const doesSecretFolderReservedExist = await knex.schema.hasColumn(TableName.SecretFolder, "isReserved");
  if (await knex.schema.hasTable(TableName.SecretFolder)) {
    await knex.schema.alterTable(TableName.SecretFolder, (t) => {
      if (doesSecretFolderReservedExist) t.dropColumn("isReserved");
    });
  }

  const doesSecretApprovalRequestIsReplicatedExist = await knex.schema.hasColumn(
    TableName.SecretApprovalRequest,
    "isReplicated"
  );
  if (await knex.schema.hasTable(TableName.SecretApprovalRequest)) {
    await knex.schema.alterTable(TableName.SecretApprovalRequest, (t) => {
      if (doesSecretApprovalRequestIsReplicatedExist) t.dropColumn("isReplicated");
    });
  }
}
backend/src/db/migrations/20240603075514_kms.ts (56 changes, new file)
@@ -0,0 +1,56 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.KmsServerRootConfig))) {
    await knex.schema.createTable(TableName.KmsServerRootConfig, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.binary("encryptedRootKey").notNullable();
    });
  }

  await createOnUpdateTrigger(knex, TableName.KmsServerRootConfig);

  if (!(await knex.schema.hasTable(TableName.KmsKey))) {
    await knex.schema.createTable(TableName.KmsKey, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.binary("encryptedKey").notNullable();
      t.string("encryptionAlgorithm").notNullable();
      t.integer("version").defaultTo(1).notNullable();
      t.string("description");
      t.boolean("isDisabled").defaultTo(false);
      t.boolean("isReserved").defaultTo(true);
      t.string("projectId");
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.uuid("orgId");
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
    });
  }

  await createOnUpdateTrigger(knex, TableName.KmsKey);

  if (!(await knex.schema.hasTable(TableName.KmsKeyVersion))) {
    await knex.schema.createTable(TableName.KmsKeyVersion, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.binary("encryptedKey").notNullable();
      t.integer("version").notNullable();
      t.uuid("kmsKeyId").notNullable();
      t.foreign("kmsKeyId").references("id").inTable(TableName.KmsKey).onDelete("CASCADE");
    });
  }

  await createOnUpdateTrigger(knex, TableName.KmsKeyVersion);
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.KmsServerRootConfig);
  await dropOnUpdateTrigger(knex, TableName.KmsServerRootConfig);

  await knex.schema.dropTableIfExists(TableName.KmsKeyVersion);
  await dropOnUpdateTrigger(knex, TableName.KmsKeyVersion);

  await knex.schema.dropTableIfExists(TableName.KmsKey);
  await dropOnUpdateTrigger(knex, TableName.KmsKey);
}
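These guarded migrations are exercised through Knex's migration runner: `up` on migrate, `down` on rollback, with the `hasTable`/`hasColumn` checks keeping both idempotent. A minimal programmatic sketch (the connection string is a placeholder):

```ts
import knex from "knex";

const db = knex({
  client: "pg",
  connection: "postgres://infisical:infisical@localhost:5432/infisical" // placeholder
});

async function run() {
  await db.migrate.latest(); // applies pending up() functions, including the KMS tables
  await db.migrate.rollback(); // runs the matching down() functions
  await db.destroy();
}

void run();
```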
@@ -30,6 +30,9 @@ export * from "./identity-universal-auths";
export * from "./incident-contacts";
export * from "./integration-auths";
export * from "./integrations";
+export * from "./kms-key-versions";
+export * from "./kms-keys";
+export * from "./kms-root-config";
export * from "./ldap-configs";
export * from "./ldap-group-maps";
export * from "./models";
@@ -57,6 +60,7 @@ export * from "./secret-blind-indexes";
export * from "./secret-folder-versions";
export * from "./secret-folders";
export * from "./secret-imports";
+export * from "./secret-references";
export * from "./secret-rotation-outputs";
export * from "./secret-rotations";
export * from "./secret-scanning-git-risks";
backend/src/db/schemas/kms-key-versions.ts (21 changes, new file)
@@ -0,0 +1,21 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const KmsKeyVersionsSchema = z.object({
  id: z.string().uuid(),
  encryptedKey: zodBuffer,
  version: z.number(),
  kmsKeyId: z.string().uuid()
});

export type TKmsKeyVersions = z.infer<typeof KmsKeyVersionsSchema>;
export type TKmsKeyVersionsInsert = Omit<z.input<typeof KmsKeyVersionsSchema>, TImmutableDBKeys>;
export type TKmsKeyVersionsUpdate = Partial<Omit<z.input<typeof KmsKeyVersionsSchema>, TImmutableDBKeys>>;
backend/src/db/schemas/kms-keys.ts (26 changes, new file)
@@ -0,0 +1,26 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const KmsKeysSchema = z.object({
  id: z.string().uuid(),
  encryptedKey: zodBuffer,
  encryptionAlgorithm: z.string(),
  version: z.number().default(1),
  description: z.string().nullable().optional(),
  isDisabled: z.boolean().default(false).nullable().optional(),
  isReserved: z.boolean().default(true).nullable().optional(),
  projectId: z.string().nullable().optional(),
  orgId: z.string().uuid().nullable().optional()
});

export type TKmsKeys = z.infer<typeof KmsKeysSchema>;
export type TKmsKeysInsert = Omit<z.input<typeof KmsKeysSchema>, TImmutableDBKeys>;
export type TKmsKeysUpdate = Partial<Omit<z.input<typeof KmsKeysSchema>, TImmutableDBKeys>>;
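These generated schemas validate rows at the DB boundary; `zodBuffer` (added to `@app/lib/zod` by this PR) accepts Node `Buffer` values for `bytea` columns. A small usage sketch with invented values:

```ts
import { randomUUID } from "node:crypto";

import { KmsKeysSchema } from "@app/db/schemas";

// Sketch: validating a row-shaped object. All values below are invented.
const row = KmsKeysSchema.parse({
  id: randomUUID(),
  encryptedKey: Buffer.from("ciphertext-bytes"), // zodBuffer accepts a Buffer
  encryptionAlgorithm: "aes-256-gcm" // illustrative algorithm name
});

console.log(row.version); // 1, filled in by .default(1)
```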
backend/src/db/schemas/kms-root-config.ts (19 changes, new file)
@@ -0,0 +1,19 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const KmsRootConfigSchema = z.object({
  id: z.string().uuid(),
  encryptedRootKey: zodBuffer
});

export type TKmsRootConfig = z.infer<typeof KmsRootConfigSchema>;
export type TKmsRootConfigInsert = Omit<z.input<typeof KmsRootConfigSchema>, TImmutableDBKeys>;
export type TKmsRootConfigUpdate = Partial<Omit<z.input<typeof KmsRootConfigSchema>, TImmutableDBKeys>>;
@@ -81,7 +81,11 @@ export enum TableName {
  DynamicSecretLease = "dynamic_secret_leases",
  // junction tables with tags
  JnSecretTag = "secret_tag_junction",
- SecretVersionTag = "secret_version_tag_junction"
+ SecretVersionTag = "secret_version_tag_junction",
+ // KMS Service
+ KmsServerRootConfig = "kms_root_config",
+ KmsKey = "kms_keys",
+ KmsKeyVersion = "kms_key_versions"
}

export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
@@ -18,7 +18,8 @@ export const SecretApprovalRequestsSchema = z.object({
  statusChangeBy: z.string().uuid().nullable().optional(),
  committerId: z.string().uuid(),
  createdAt: z.date(),
- updatedAt: z.date()
+ updatedAt: z.date(),
+ isReplicated: z.boolean().nullable().optional()
});

export type TSecretApprovalRequests = z.infer<typeof SecretApprovalRequestsSchema>;
@@ -14,7 +14,8 @@ export const SecretFoldersSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  envId: z.string().uuid(),
- parentId: z.string().uuid().nullable().optional()
+ parentId: z.string().uuid().nullable().optional(),
+ isReserved: z.boolean().default(false).nullable().optional()
});

export type TSecretFolders = z.infer<typeof SecretFoldersSchema>;
@@ -15,7 +15,12 @@ export const SecretImportsSchema = z.object({
  position: z.number(),
  createdAt: z.date(),
  updatedAt: z.date(),
- folderId: z.string().uuid()
+ folderId: z.string().uuid(),
+ isReplication: z.boolean().default(false).nullable().optional(),
+ isReplicationSuccess: z.boolean().nullable().optional(),
+ replicationStatus: z.string().nullable().optional(),
+ lastReplicated: z.date().nullable().optional(),
+ isReserved: z.boolean().default(false).nullable().optional()
});

export type TSecretImports = z.infer<typeof SecretImportsSchema>;
@@ -9,7 +9,6 @@ import { TImmutableDBKeys } from "./models";

export const SecretSharingSchema = z.object({
  id: z.string().uuid(),
- name: z.string(),
  encryptedValue: z.string(),
  iv: z.string(),
  tag: z.string(),
@@ -18,7 +17,8 @@ export const SecretSharingSchema = z.object({
  userId: z.string().uuid(),
  orgId: z.string().uuid(),
  createdAt: z.date(),
- updatedAt: z.date()
+ updatedAt: z.date(),
+ expiresAfterViews: z.number().nullable().optional()
});

export type TSecretSharing = z.infer<typeof SecretSharingSchema>;
@@ -23,8 +23,8 @@ export const UsersSchema = z.object({
  isGhost: z.boolean().default(false),
  username: z.string(),
  isEmailVerified: z.boolean().default(false).nullable().optional(),
- consecutiveFailedMfaAttempts: z.number().optional(),
- isLocked: z.boolean().optional(),
+ consecutiveFailedMfaAttempts: z.number().default(0).nullable().optional(),
+ isLocked: z.boolean().default(false).nullable().optional(),
  temporaryLockDateEnd: z.date().nullable().optional()
});
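The regenerated user fields change validation behavior: SQL NULLs now pass, and the column defaults are documented in the schema. A minimal sketch of the difference, using zod directly:

```ts
import { z } from "zod";

// Before: a NULL from the DB failed validation.
const before = z.object({ isLocked: z.boolean().optional() });
// After: NULL is tolerated and the column default is encoded in the schema.
const after = z.object({ isLocked: z.boolean().default(false).nullable().optional() });

console.log(before.safeParse({ isLocked: null }).success); // false
console.log(after.safeParse({ isLocked: null }).success); // true
```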
@@ -32,22 +32,20 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
      }),
      response: {
        200: z.object({
-         approvals: SecretApprovalRequestsSchema.merge(
-           z.object({
-             // secretPath: z.string(),
-             policy: z.object({
-               id: z.string(),
-               name: z.string(),
-               approvals: z.number(),
-               approvers: z.string().array(),
-               secretPath: z.string().optional().nullable()
-             }),
-             commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
-             environment: z.string(),
-             reviewers: z.object({ member: z.string(), status: z.string() }).array(),
-             approvers: z.string().array()
-           })
-         ).array()
+         approvals: SecretApprovalRequestsSchema.extend({
+           // secretPath: z.string(),
+           policy: z.object({
+             id: z.string(),
+             name: z.string(),
+             approvals: z.number(),
+             approvers: z.string().array(),
+             secretPath: z.string().optional().nullable()
+           }),
+           commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
+           environment: z.string(),
+           reviewers: z.object({ member: z.string(), status: z.string() }).array(),
+           approvers: z.string().array()
+         }).array()
        })
      }
    },
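The only change here is mechanical: `SecretApprovalRequestsSchema.merge(z.object({ ... }))` becomes `SecretApprovalRequestsSchema.extend({ ... })`. Both produce the same output schema; `.extend` just takes the raw shape and avoids the intermediate `z.object()`. A minimal illustration:

```ts
import { z } from "zod";

const Base = z.object({ id: z.string(), updatedAt: z.date() });

// Equivalent ways to add response-only fields to a base schema.
const viaMerge = Base.merge(z.object({ environment: z.string() }));
const viaExtend = Base.extend({ environment: z.string() });

type A = z.infer<typeof viaMerge>; // { id: string; updatedAt: Date; environment: string }
type B = z.infer<typeof viaExtend>; // same shape
```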
@@ -16,6 +16,8 @@ export const licenseDALFactory = (db: TDbClient) => {
          void bd.where({ orgId });
        }
      })
+     .join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
+     .where(`${TableName.Users}.isGhost`, false)
      .count();
    return doc?.[0].count;
  } catch (error) {
@@ -15,9 +15,16 @@ import { ActorType } from "@app/services/auth/auth-type";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TSecretDALFactory } from "@app/services/secret/secret-dal";
-import { getAllNestedSecretReferences } from "@app/services/secret/secret-fns";
+import {
+ fnSecretBlindIndexCheck,
+ fnSecretBlindIndexCheckV2,
+ fnSecretBulkDelete,
+ fnSecretBulkInsert,
+ fnSecretBulkUpdate,
+ getAllNestedSecretReferences
+} from "@app/services/secret/secret-fns";
import { TSecretQueueFactory } from "@app/services/secret/secret-queue";
import { TSecretServiceFactory } from "@app/services/secret/secret-service";
+import { SecretOperations } from "@app/services/secret/secret-types";
import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal";
import { TSecretVersionTagDALFactory } from "@app/services/secret/secret-version-tag-dal";
import { TSecretBlindIndexDALFactory } from "@app/services/secret-blind-index/secret-blind-index-dal";
@@ -32,7 +39,6 @@ import { TSecretApprovalRequestReviewerDALFactory } from "./secret-approval-requ
import { TSecretApprovalRequestSecretDALFactory } from "./secret-approval-request-secret-dal";
import {
  ApprovalStatus,
- CommitType,
  RequestState,
  TApprovalRequestCountDTO,
  TGenerateSecretApprovalRequestDTO,
@@ -45,10 +51,11 @@ import {

type TSecretApprovalRequestServiceFactoryDep = {
  permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
+ projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
  secretApprovalRequestDAL: TSecretApprovalRequestDALFactory;
  secretApprovalRequestSecretDAL: TSecretApprovalRequestSecretDALFactory;
  secretApprovalRequestReviewerDAL: TSecretApprovalRequestReviewerDALFactory;
- folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findById" | "findSecretPathByFolderIds">;
+ folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findSecretPathByFolderIds">;
  secretDAL: TSecretDALFactory;
  secretTagDAL: Pick<TSecretTagDALFactory, "findManyTagsById" | "saveTagsToSecret" | "deleteTagsManySecret">;
  secretBlindIndexDAL: Pick<TSecretBlindIndexDALFactory, "findOne">;
@@ -56,16 +63,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
  secretVersionDAL: Pick<TSecretVersionDALFactory, "findLatestVersionMany" | "insertMany">;
  secretVersionTagDAL: Pick<TSecretVersionTagDALFactory, "insertMany">;
  projectDAL: Pick<TProjectDALFactory, "checkProjectUpgradeStatus">;
- projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
- secretService: Pick<
-   TSecretServiceFactory,
-   | "fnSecretBulkInsert"
-   | "fnSecretBulkUpdate"
-   | "fnSecretBlindIndexCheck"
-   | "fnSecretBulkDelete"
-   | "fnSecretBlindIndexCheckV2"
- >;
- secretQueueService: Pick<TSecretQueueFactory, "syncSecrets">;
+ secretQueueService: Pick<TSecretQueueFactory, "syncSecrets" | "removeSecretReminder">;
};

export type TSecretApprovalRequestServiceFactory = ReturnType<typeof secretApprovalRequestServiceFactory>;
@@ -82,7 +80,6 @@ export const secretApprovalRequestServiceFactory = ({
  projectDAL,
  permissionService,
  snapshotService,
- secretService,
  secretVersionDAL,
  secretQueueService,
  projectBotService
@@ -302,11 +299,12 @@ export const secretApprovalRequestServiceFactory = ({
    const secretApprovalSecrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id);
    if (!secretApprovalSecrets) throw new BadRequestError({ message: "No secrets found" });

-   const conflicts: Array<{ secretId: string; op: CommitType }> = [];
-   let secretCreationCommits = secretApprovalSecrets.filter(({ op }) => op === CommitType.Create);
+   const conflicts: Array<{ secretId: string; op: SecretOperations }> = [];
+   let secretCreationCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Create);
    if (secretCreationCommits.length) {
-     const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await secretService.fnSecretBlindIndexCheckV2({
+     const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await fnSecretBlindIndexCheckV2({
        folderId,
+       secretDAL,
        inputSecrets: secretCreationCommits.map(({ secretBlindIndex }) => {
          if (!secretBlindIndex) {
            throw new BadRequestError({
@@ -319,17 +317,19 @@ export const secretApprovalRequestServiceFactory = ({
      secretCreationCommits
        .filter(({ secretBlindIndex }) => conflictGroupByBlindIndex[secretBlindIndex || ""])
        .forEach((el) => {
-         conflicts.push({ op: CommitType.Create, secretId: el.id });
+         conflicts.push({ op: SecretOperations.Create, secretId: el.id });
        });
      secretCreationCommits = secretCreationCommits.filter(
        ({ secretBlindIndex }) => !conflictGroupByBlindIndex[secretBlindIndex || ""]
      );
    }

-   let secretUpdationCommits = secretApprovalSecrets.filter(({ op }) => op === CommitType.Update);
+   let secretUpdationCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Update);
    if (secretUpdationCommits.length) {
-     const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await secretService.fnSecretBlindIndexCheckV2({
+     const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await fnSecretBlindIndexCheckV2({
        folderId,
+       secretDAL,
+       userId: "",
        inputSecrets: secretUpdationCommits
          .filter(({ secretBlindIndex, secret }) => secret && secret.secretBlindIndex !== secretBlindIndex)
          .map(({ secretBlindIndex }) => {
@@ -347,7 +347,7 @@ export const secretApprovalRequestServiceFactory = ({
          (secretBlindIndex && conflictGroupByBlindIndex[secretBlindIndex]) || !secretId
        )
        .forEach((el) => {
-         conflicts.push({ op: CommitType.Update, secretId: el.id });
+         conflicts.push({ op: SecretOperations.Update, secretId: el.id });
        });

      secretUpdationCommits = secretUpdationCommits.filter(
@@ -356,11 +356,11 @@ export const secretApprovalRequestServiceFactory = ({
      );
    }

-   const secretDeletionCommits = secretApprovalSecrets.filter(({ op }) => op === CommitType.Delete);
+   const secretDeletionCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Delete);
    const botKey = await projectBotService.getBotKey(projectId).catch(() => null);
    const mergeStatus = await secretApprovalRequestDAL.transaction(async (tx) => {
      const newSecrets = secretCreationCommits.length
-       ? await secretService.fnSecretBulkInsert({
+       ? await fnSecretBulkInsert({
            tx,
            folderId,
            inputSecrets: secretCreationCommits.map((el) => ({
@@ -403,7 +403,7 @@ export const secretApprovalRequestServiceFactory = ({
          })
        : [];
      const updatedSecrets = secretUpdationCommits.length
-       ? await secretService.fnSecretBulkUpdate({
+       ? await fnSecretBulkUpdate({
            folderId,
            projectId,
            tx,
@@ -449,11 +449,13 @@ export const secretApprovalRequestServiceFactory = ({
          })
        : [];
      const deletedSecret = secretDeletionCommits.length
-       ? await secretService.fnSecretBulkDelete({
+       ? await fnSecretBulkDelete({
            projectId,
            folderId,
            tx,
            actorId: "",
+           secretDAL,
+           secretQueueService,
            inputSecrets: secretDeletionCommits.map(({ secretBlindIndex }) => {
              if (!secretBlindIndex) {
                throw new BadRequestError({
@@ -480,12 +482,14 @@ export const secretApprovalRequestServiceFactory = ({
      };
    });
    await snapshotService.performSnapshot(folderId);
-   const folder = await folderDAL.findById(folderId);
+   // TODO(akhilmhdh-pg): change query to do secret path from folder
+   const [folder] = await folderDAL.findSecretPathByFolderIds(projectId, [folderId]);
    if (!folder) throw new BadRequestError({ message: "Folder not found" });
    await secretQueueService.syncSecrets({
      projectId,
-     secretPath: "/",
-     environment: folder?.environment.envSlug as string
+     secretPath: folder.path,
+     environmentSlug: folder.environmentSlug,
+     actorId,
+     actor
    });
    return mergeStatus;
  };
@@ -533,9 +537,9 @@ export const secretApprovalRequestServiceFactory = ({
    const commits: Omit<TSecretApprovalRequestsSecretsInsert, "requestId">[] = [];
    const commitTagIds: Record<string, string[]> = {};
    // for created secret approval change
-   const createdSecrets = data[CommitType.Create];
+   const createdSecrets = data[SecretOperations.Create];
    if (createdSecrets && createdSecrets?.length) {
-     const { keyName2BlindIndex } = await secretService.fnSecretBlindIndexCheck({
+     const { keyName2BlindIndex } = await fnSecretBlindIndexCheck({
        inputSecrets: createdSecrets,
        folderId,
        isNew: true,
@@ -546,7 +550,7 @@ export const secretApprovalRequestServiceFactory = ({
      commits.push(
        ...createdSecrets.map(({ secretName, ...el }) => ({
          ...el,
-         op: CommitType.Create as const,
+         op: SecretOperations.Create as const,
          version: 1,
          secretBlindIndex: keyName2BlindIndex[secretName],
          algorithm: SecretEncryptionAlgo.AES_256_GCM,
@@ -558,12 +562,12 @@ export const secretApprovalRequestServiceFactory = ({
      });
    }
    // now, secret approval for update operations
-   const updatedSecrets = data[CommitType.Update];
+   const updatedSecrets = data[SecretOperations.Update];
    if (updatedSecrets && updatedSecrets?.length) {
      // get all blind index
      // Find all those secrets
      // if not throw not found
-     const { keyName2BlindIndex, secrets: secretsToBeUpdated } = await secretService.fnSecretBlindIndexCheck({
+     const { keyName2BlindIndex, secrets: secretsToBeUpdated } = await fnSecretBlindIndexCheck({
        inputSecrets: updatedSecrets,
        folderId,
        isNew: false,
@@ -574,8 +578,8 @@ export const secretApprovalRequestServiceFactory = ({
      // now find any secret that needs to update its name
      // same process as above
      const nameUpdatedSecrets = updatedSecrets.filter(({ newSecretName }) => Boolean(newSecretName));
-     const { keyName2BlindIndex: newKeyName2BlindIndex } = await secretService.fnSecretBlindIndexCheck({
-       inputSecrets: nameUpdatedSecrets,
+     const { keyName2BlindIndex: newKeyName2BlindIndex } = await fnSecretBlindIndexCheck({
+       inputSecrets: nameUpdatedSecrets.map(({ newSecretName }) => ({ secretName: newSecretName as string })),
        folderId,
        isNew: true,
        blindIndexCfg,
@@ -592,14 +596,14 @@ export const secretApprovalRequestServiceFactory = ({
        const secretId = secsGroupedByBlindIndex[keyName2BlindIndex[secretName]][0].id;
        const secretBlindIndex =
          newSecretName && newKeyName2BlindIndex[newSecretName]
-           ? newKeyName2BlindIndex?.[secretName]
+           ? newKeyName2BlindIndex?.[newSecretName]
            : keyName2BlindIndex[secretName];
        // add tags
        if (tagIds?.length) commitTagIds[keyName2BlindIndex[secretName]] = tagIds;
        return {
          ...latestSecretVersions[secretId],
          ...el,
-         op: CommitType.Update as const,
+         op: SecretOperations.Update as const,
          secret: secretId,
          secretVersion: latestSecretVersions[secretId].id,
          secretBlindIndex,
@@ -609,12 +613,12 @@ export const secretApprovalRequestServiceFactory = ({
      );
    }
    // deleted secrets
-   const deletedSecrets = data[CommitType.Delete];
+   const deletedSecrets = data[SecretOperations.Delete];
    if (deletedSecrets && deletedSecrets.length) {
      // get all blind index
      // Find all those secrets
      // if not throw not found
-     const { keyName2BlindIndex, secrets } = await secretService.fnSecretBlindIndexCheck({
+     const { keyName2BlindIndex, secrets } = await fnSecretBlindIndexCheck({
        inputSecrets: deletedSecrets,
        folderId,
        isNew: false,
@@ -635,7 +639,7 @@ export const secretApprovalRequestServiceFactory = ({
      if (!latestSecretVersions[secretId].secretBlindIndex)
        throw new BadRequestError({ message: "Failed to find secret blind index" });
      return {
-       op: CommitType.Delete as const,
+       op: SecretOperations.Delete as const,
        ...latestSecretVersions[secretId],
        secretBlindIndex: latestSecretVersions[secretId].secretBlindIndex as string,
        secret: secretId,
@@ -1,11 +1,6 @@
import { TImmutableDBKeys, TSecretApprovalPolicies, TSecretApprovalRequestsSecrets } from "@app/db/schemas";
import { TProjectPermission } from "@app/lib/types";

-export enum CommitType {
- Create = "create",
- Update = "update",
- Delete = "delete"
-}
+import { SecretOperations } from "@app/services/secret/secret-types";

export enum RequestState {
  Open = "open",
@@ -18,14 +13,14 @@ export enum ApprovalStatus {
  REJECTED = "rejected"
}

-type TApprovalCreateSecret = Omit<
+export type TApprovalCreateSecret = Omit<
  TSecretApprovalRequestsSecrets,
  TImmutableDBKeys | "version" | "algorithm" | "keyEncoding" | "requestId" | "op" | "secretVersion" | "secretBlindIndex"
> & {
  secretName: string;
  tagIds?: string[];
};
-type TApprovalUpdateSecret = Partial<TApprovalCreateSecret> & {
+export type TApprovalUpdateSecret = Partial<TApprovalCreateSecret> & {
  secretName: string;
  newSecretName?: string;
  tagIds?: string[];
@@ -36,9 +31,9 @@ export type TGenerateSecretApprovalRequestDTO = {
  secretPath: string;
  policy: TSecretApprovalPolicies;
  data: {
-   [CommitType.Create]?: TApprovalCreateSecret[];
-   [CommitType.Update]?: TApprovalUpdateSecret[];
-   [CommitType.Delete]?: { secretName: string }[];
+   [SecretOperations.Create]?: TApprovalCreateSecret[];
+   [SecretOperations.Update]?: TApprovalUpdateSecret[];
+   [SecretOperations.Delete]?: { secretName: string }[];
  };
} & TProjectPermission;
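`CommitType` is deleted in favor of `SecretOperations` from the secrets service. Its definition is not part of this diff; for it to be a drop-in replacement in the `data[...]` keys above, it presumably carries the same string values as the removed enum:

```ts
// Presumed shape of SecretOperations (lives in backend/src/services/secret/secret-types.ts,
// not shown in this diff). The string values must match the removed CommitType
// for data[SecretOperations.Create] et al. to keep working.
export enum SecretOperations {
  Create = "create",
  Update = "update",
  Delete = "delete"
}
```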
@@ -0,0 +1 @@
export const MAX_REPLICATION_DEPTH = 5;
@@ -0,0 +1,10 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TSecretReplicationDALFactory = ReturnType<typeof secretReplicationDALFactory>;

export const secretReplicationDALFactory = (db: TDbClient) => {
  const orm = ormify(db, TableName.SecretVersion);
  return orm;
};
@@ -0,0 +1,485 @@
import { SecretType, TSecrets } from "@app/db/schemas";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
import { TSecretApprovalRequestDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-dal";
import { TSecretApprovalRequestSecretDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-secret-dal";
import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { BadRequestError } from "@app/lib/errors";
import { groupBy, unique } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { QueueName, TQueueServiceFactory } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { TSecretDALFactory } from "@app/services/secret/secret-dal";
import { fnSecretBulkInsert, fnSecretBulkUpdate } from "@app/services/secret/secret-fns";
import { TSecretQueueFactory, uniqueSecretQueueKey } from "@app/services/secret/secret-queue";
import { SecretOperations } from "@app/services/secret/secret-types";
import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal";
import { TSecretVersionTagDALFactory } from "@app/services/secret/secret-version-tag-dal";
import { TSecretBlindIndexDALFactory } from "@app/services/secret-blind-index/secret-blind-index-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { ReservedFolders } from "@app/services/secret-folder/secret-folder-types";
import { TSecretImportDALFactory } from "@app/services/secret-import/secret-import-dal";
import { fnSecretsFromImports } from "@app/services/secret-import/secret-import-fns";
import { TSecretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal";

import { MAX_REPLICATION_DEPTH } from "./secret-replication-constants";

type TSecretReplicationServiceFactoryDep = {
  secretDAL: Pick<
    TSecretDALFactory,
    "find" | "findByBlindIndexes" | "insertMany" | "bulkUpdate" | "delete" | "upsertSecretReferences" | "transaction"
  >;
  secretVersionDAL: Pick<TSecretVersionDALFactory, "find" | "insertMany" | "update" | "findLatestVersionMany">;
  secretImportDAL: Pick<TSecretImportDALFactory, "find" | "updateById" | "findByFolderIds">;
  folderDAL: Pick<
    TSecretFolderDALFactory,
    "findSecretPathByFolderIds" | "findBySecretPath" | "create" | "findOne" | "findByManySecretPath"
  >;
  secretVersionTagDAL: Pick<TSecretVersionTagDALFactory, "find" | "insertMany">;
  secretQueueService: Pick<TSecretQueueFactory, "syncSecrets" | "replicateSecrets">;
  queueService: Pick<TQueueServiceFactory, "start" | "listen" | "queue" | "stopJobById">;
  secretApprovalPolicyService: Pick<TSecretApprovalPolicyServiceFactory, "getSecretApprovalPolicy">;
  keyStore: Pick<TKeyStoreFactory, "acquireLock" | "setItemWithExpiry" | "getItem">;
  secretBlindIndexDAL: Pick<TSecretBlindIndexDALFactory, "findOne">;
  secretTagDAL: Pick<TSecretTagDALFactory, "findManyTagsById" | "saveTagsToSecret" | "deleteTagsManySecret" | "find">;
  secretApprovalRequestDAL: Pick<TSecretApprovalRequestDALFactory, "create" | "transaction">;
  projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findOne">;
  secretApprovalRequestSecretDAL: Pick<
    TSecretApprovalRequestSecretDALFactory,
    "insertMany" | "insertApprovalSecretTags"
  >;
  projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
};

export type TSecretReplicationServiceFactory = ReturnType<typeof secretReplicationServiceFactory>;
const SECRET_IMPORT_SUCCESS_LOCK = 10;

const keystoreReplicationSuccessKey = (jobId: string, secretImportId: string) => `${jobId}-${secretImportId}`;
const getReplicationKeyLockPrefix = (projectId: string, environmentSlug: string, secretPath: string) =>
  `REPLICATION_SECRET_${projectId}-${environmentSlug}-${secretPath}`;
export const getReplicationFolderName = (importId: string) => `${ReservedFolders.SecretReplication}${importId}`;

const getDecryptedKeyValue = (key: string, secret: TSecrets) => {
  const secretKey = decryptSymmetric128BitHexKeyUTF8({
    ciphertext: secret.secretKeyCiphertext,
    iv: secret.secretKeyIV,
    tag: secret.secretKeyTag,
    key
  });

  const secretValue = decryptSymmetric128BitHexKeyUTF8({
    ciphertext: secret.secretValueCiphertext,
    iv: secret.secretValueIV,
    tag: secret.secretValueTag,
    key
  });
  return { key: secretKey, value: secretValue };
};

export const secretReplicationServiceFactory = ({
  secretDAL,
  queueService,
  secretVersionDAL,
  secretImportDAL,
  keyStore,
  secretVersionTagDAL,
  secretTagDAL,
  folderDAL,
  secretApprovalPolicyService,
  secretApprovalRequestSecretDAL,
  secretApprovalRequestDAL,
  secretQueueService,
  projectMembershipDAL,
  projectBotService
}: TSecretReplicationServiceFactoryDep) => {
  const getReplicatedSecrets = (
    botKey: string,
    localSecrets: TSecrets[],
    importedSecrets: { secrets: TSecrets[] }[]
  ) => {
    const deDupe = new Set<string>();
    const secrets = localSecrets
      .filter(({ secretBlindIndex }) => Boolean(secretBlindIndex))
      .map((el) => {
        const decryptedSecret = getDecryptedKeyValue(botKey, el);
        deDupe.add(decryptedSecret.key);
        return { ...el, secretKey: decryptedSecret.key, secretValue: decryptedSecret.value };
      });

    for (let i = importedSecrets.length - 1; i >= 0; i -= 1) {
      importedSecrets[i].secrets.forEach((el) => {
        const decryptedSecret = getDecryptedKeyValue(botKey, el);
        if (deDupe.has(decryptedSecret.key) || !el.secretBlindIndex) {
          return;
        }
        deDupe.add(decryptedSecret.key);
        secrets.push({ ...el, secretKey: decryptedSecret.key, secretValue: decryptedSecret.value });
      });
    }
    return secrets;
  };

  // IMPORTANT NOTE BEFORE READING THE FUNCTION
  // SOURCE - where secrets are copied from
  // DESTINATION - the folders whose replicated imports point to SOURCE and receive its secrets
  queueService.start(QueueName.SecretReplication, async (job) => {
    logger.info(job.data, "Replication started");
    const {
      secretPath,
      environmentSlug,
      projectId,
      actorId,
      actor,
      pickOnlyImportIds,
      _deDupeReplicationQueue: deDupeReplicationQueue,
      _deDupeQueue: deDupeQueue,
      _depth: depth = 0
    } = job.data;
    if (depth > MAX_REPLICATION_DEPTH) return;

    const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, secretPath);
    if (!folder) return;

    // the replicated imports made to the source; these are the destinations
    const destinationSecretImports = await secretImportDAL.find({
      importPath: secretPath,
      importEnv: folder.envId
    });

    // CASE: normal mode <- link import <- replicated import
    const nonReplicatedDestinationImports = destinationSecretImports.filter(({ isReplication }) => !isReplication);
    if (nonReplicatedDestinationImports.length) {
      // keep calling sync secret for all the imports made
      const importedFolderIds = unique(nonReplicatedDestinationImports, (i) => i.folderId).map(
        ({ folderId }) => folderId
      );
      const importedFolders = await folderDAL.findSecretPathByFolderIds(projectId, importedFolderIds);
      const foldersGroupedById = groupBy(importedFolders.filter(Boolean), (i) => i?.id as string);
      await Promise.all(
        nonReplicatedDestinationImports
          .filter(({ folderId }) => Boolean(foldersGroupedById[folderId][0]?.path as string))
          // filter out already synced ones
          .filter(
            ({ folderId }) =>
              !deDupeQueue?.[
                uniqueSecretQueueKey(
                  foldersGroupedById[folderId][0]?.environmentSlug as string,
                  foldersGroupedById[folderId][0]?.path as string
                )
              ]
          )
          .map(({ folderId }) =>
            secretQueueService.replicateSecrets({
              projectId,
              secretPath: foldersGroupedById[folderId][0]?.path as string,
              environmentSlug: foldersGroupedById[folderId][0]?.environmentSlug as string,
              actorId,
              actor,
              _depth: depth + 1,
              _deDupeReplicationQueue: deDupeReplicationQueue,
              _deDupeQueue: deDupeQueue
            })
          )
      );
    }

    let destinationReplicatedSecretImports = destinationSecretImports.filter(({ isReplication }) =>
      Boolean(isReplication)
    );
    destinationReplicatedSecretImports = pickOnlyImportIds
      ? destinationReplicatedSecretImports.filter(({ id }) => pickOnlyImportIds?.includes(id))
      : destinationReplicatedSecretImports;
    if (!destinationReplicatedSecretImports.length) return;

    const botKey = await projectBotService.getBotKey(projectId);

    // these are the secrets to be added in replicated folders
    const sourceLocalSecrets = await secretDAL.find({ folderId: folder.id, type: SecretType.Shared });
    const sourceSecretImports = await secretImportDAL.find({ folderId: folder.id });
    const sourceImportedSecrets = await fnSecretsFromImports({
      allowedImports: sourceSecretImports,
      secretDAL,
      folderDAL,
      secretImportDAL
    });
    // secrets that get replicated across imports
    const sourceSecrets = getReplicatedSecrets(botKey, sourceLocalSecrets, sourceImportedSecrets);
    const sourceSecretsGroupByBlindIndex = groupBy(sourceSecrets, (i) => i.secretBlindIndex as string);

    const lock = await keyStore.acquireLock(
      [getReplicationKeyLockPrefix(projectId, environmentSlug, secretPath)],
      5000
    );

    try {
      /* eslint-disable no-await-in-loop */
      for (const destinationSecretImport of destinationReplicatedSecretImports) {
        try {
          const hasJobCompleted = await keyStore.getItem(
            keystoreReplicationSuccessKey(job.id as string, destinationSecretImport.id),
            KeyStorePrefixes.SecretReplication
          );
          if (hasJobCompleted) {
            logger.info(
              { jobId: job.id, importId: destinationSecretImport.id },
              "Skipping this job as this has been successfully replicated."
            );
            // eslint-disable-next-line
            continue;
          }

          const [destinationFolder] = await folderDAL.findSecretPathByFolderIds(projectId, [
            destinationSecretImport.folderId
          ]);
          if (!destinationFolder) throw new BadRequestError({ message: "Imported folder not found" });

          let destinationReplicationFolder = await folderDAL.findOne({
            parentId: destinationFolder.id,
            name: getReplicationFolderName(destinationSecretImport.id),
            isReserved: true
          });
          if (!destinationReplicationFolder) {
            destinationReplicationFolder = await folderDAL.create({
              parentId: destinationFolder.id,
              name: getReplicationFolderName(destinationSecretImport.id),
              envId: destinationFolder.envId,
              isReserved: true
            });
          }
          const destinationReplicationFolderId = destinationReplicationFolder.id;

          const destinationLocalSecretsFromDB = await secretDAL.find({
            folderId: destinationReplicationFolderId
          });
          const destinationLocalSecrets = destinationLocalSecretsFromDB.map((el) => {
            const decryptedSecret = getDecryptedKeyValue(botKey, el);
            return { ...el, secretKey: decryptedSecret.key, secretValue: decryptedSecret.value };
          });

          const destinationLocalSecretsGroupedByBlindIndex = groupBy(
            destinationLocalSecrets.filter(({ secretBlindIndex }) => Boolean(secretBlindIndex)),
            (i) => i.secretBlindIndex as string
          );

          const locallyCreatedSecrets = sourceSecrets
            .filter(
              ({ secretBlindIndex }) => !destinationLocalSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0]
            )
            .map((el) => ({ ...el, operation: SecretOperations.Create })); // rewrite update ops to create

          const locallyUpdatedSecrets = sourceSecrets
            .filter(
              ({ secretBlindIndex, secretKey, secretValue }) =>
                destinationLocalSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0] &&
                // if key or value changed
                (destinationLocalSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0]?.secretKey !== secretKey ||
                  destinationLocalSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0]?.secretValue !==
                    secretValue)
            )
            .map((el) => ({ ...el, operation: SecretOperations.Update })); // mark changed secrets as update ops

          const locallyDeletedSecrets = destinationLocalSecrets
            .filter(({ secretBlindIndex }) => !sourceSecretsGroupByBlindIndex[secretBlindIndex as string]?.[0])
            .map((el) => ({ ...el, operation: SecretOperations.Delete }));

          const isEmpty =
            locallyCreatedSecrets.length + locallyUpdatedSecrets.length + locallyDeletedSecrets.length === 0;
          // eslint-disable-next-line
          if (isEmpty) continue;

          const policy = await secretApprovalPolicyService.getSecretApprovalPolicy(
            projectId,
            destinationFolder.environmentSlug,
            destinationFolder.path
          );
          // this means it should be an approval request rather than direct replication
          if (policy && actor === ActorType.USER) {
            const membership = await projectMembershipDAL.findOne({ projectId, userId: actorId });
            if (!membership) {
              logger.error("Project membership not found in %s for user %s", projectId, actorId);
              return;
            }

            const localSecretsLatestVersions = destinationLocalSecrets.map(({ id }) => id);
            const latestSecretVersions = await secretVersionDAL.findLatestVersionMany(
              destinationReplicationFolderId,
              localSecretsLatestVersions
            );
            await secretApprovalRequestDAL.transaction(async (tx) => {
              const approvalRequestDoc = await secretApprovalRequestDAL.create(
                {
                  folderId: destinationReplicationFolderId,
                  slug: alphaNumericNanoId(),
                  policyId: policy.id,
                  status: "open",
                  hasMerged: false,
                  committerId: membership.id,
                  isReplicated: true
                },
                tx
              );
              const commits = locallyCreatedSecrets
                .concat(locallyUpdatedSecrets)
                .concat(locallyDeletedSecrets)
                .map((doc) => {
                  const { operation } = doc;
                  const localSecret = destinationLocalSecretsGroupedByBlindIndex[doc.secretBlindIndex as string]?.[0];

                  return {
                    op: operation,
                    keyEncoding: doc.keyEncoding,
                    algorithm: doc.algorithm,
                    requestId: approvalRequestDoc.id,
                    metadata: doc.metadata,
                    secretKeyIV: doc.secretKeyIV,
                    secretKeyTag: doc.secretKeyTag,
                    secretKeyCiphertext: doc.secretKeyCiphertext,
                    secretValueIV: doc.secretValueIV,
                    secretValueTag: doc.secretValueTag,
                    secretValueCiphertext: doc.secretValueCiphertext,
                    secretBlindIndex: doc.secretBlindIndex,
                    secretCommentIV: doc.secretCommentIV,
                    secretCommentTag: doc.secretCommentTag,
                    secretCommentCiphertext: doc.secretCommentCiphertext,
                    skipMultilineEncoding: doc.skipMultilineEncoding,
                    // except for create operations, the other two need the secret id and version id
                    ...(operation !== SecretOperations.Create
                      ? { secretId: localSecret.id, secretVersion: latestSecretVersions[localSecret.id].id }
                      : {})
                  };
                });
              const approvalCommits = await secretApprovalRequestSecretDAL.insertMany(commits, tx);

              return { ...approvalRequestDoc, commits: approvalCommits };
            });
          } else {
            await secretDAL.transaction(async (tx) => {
              if (locallyCreatedSecrets.length) {
                await fnSecretBulkInsert({
                  folderId: destinationReplicationFolderId,
                  secretVersionDAL,
                  secretDAL,
                  tx,
                  secretTagDAL,
                  secretVersionTagDAL,
                  inputSecrets: locallyCreatedSecrets.map((doc) => {
                    return {
                      keyEncoding: doc.keyEncoding,
                      algorithm: doc.algorithm,
                      type: doc.type,
                      metadata: doc.metadata,
                      secretKeyIV: doc.secretKeyIV,
                      secretKeyTag: doc.secretKeyTag,
                      secretKeyCiphertext: doc.secretKeyCiphertext,
                      secretValueIV: doc.secretValueIV,
                      secretValueTag: doc.secretValueTag,
                      secretValueCiphertext: doc.secretValueCiphertext,
                      secretBlindIndex: doc.secretBlindIndex,
                      secretCommentIV: doc.secretCommentIV,
                      secretCommentTag: doc.secretCommentTag,
                      secretCommentCiphertext: doc.secretCommentCiphertext,
                      skipMultilineEncoding: doc.skipMultilineEncoding
                    };
                  })
                });
              }
              if (locallyUpdatedSecrets.length) {
                await fnSecretBulkUpdate({
                  projectId,
                  folderId: destinationReplicationFolderId,
                  secretVersionDAL,
                  secretDAL,
                  tx,
                  secretTagDAL,
                  secretVersionTagDAL,
                  inputSecrets: locallyUpdatedSecrets.map((doc) => {
                    return {
                      filter: {
                        folderId: destinationReplicationFolderId,
                        id: destinationLocalSecretsGroupedByBlindIndex[doc.secretBlindIndex as string][0].id
                      },
                      data: {
                        keyEncoding: doc.keyEncoding,
                        algorithm: doc.algorithm,
                        type: doc.type,
                        metadata: doc.metadata,
                        secretKeyIV: doc.secretKeyIV,
                        secretKeyTag: doc.secretKeyTag,
                        secretKeyCiphertext: doc.secretKeyCiphertext,
                        secretValueIV: doc.secretValueIV,
                        secretValueTag: doc.secretValueTag,
                        secretValueCiphertext: doc.secretValueCiphertext,
                        secretBlindIndex: doc.secretBlindIndex,
                        secretCommentIV: doc.secretCommentIV,
                        secretCommentTag: doc.secretCommentTag,
                        secretCommentCiphertext: doc.secretCommentCiphertext,
                        skipMultilineEncoding: doc.skipMultilineEncoding
                      }
                    };
                  })
                });
              }
              if (locallyDeletedSecrets.length) {
                await secretDAL.delete(
                  {
                    $in: {
                      id: locallyDeletedSecrets.map(({ id }) => id)
                    },
                    folderId: destinationReplicationFolderId
                  },
                  tx
                );
              }
            });

            await secretQueueService.syncSecrets({
              projectId,
              secretPath: destinationFolder.path,
              environmentSlug: destinationFolder.environmentSlug,
              actorId,
              actor,
              _depth: depth + 1,
              _deDupeReplicationQueue: deDupeReplicationQueue,
              _deDupeQueue: deDupeQueue
            });
          }

          // this is used to avoid generating the secret approval multiple times when a failed job is retried
||||
await keyStore.setItemWithExpiry(
|
||||
keystoreReplicationSuccessKey(job.id as string, destinationSecretImport.id),
|
||||
SECRET_IMPORT_SUCCESS_LOCK,
|
||||
1,
|
||||
KeyStorePrefixes.SecretReplication
|
||||
);
|
||||
|
||||
await secretImportDAL.updateById(destinationSecretImport.id, {
|
||||
lastReplicated: new Date(),
|
||||
replicationStatus: null,
|
||||
isReplicationSuccess: true
|
||||
});
|
||||
} catch (err) {
|
||||
logger.error(
|
||||
err,
|
||||
`Failed to replicate secret with import id=[${destinationSecretImport.id}] env=[${destinationSecretImport.importEnv.slug}] path=[${destinationSecretImport.importPath}]`
|
||||
);
|
||||
await secretImportDAL.updateById(destinationSecretImport.id, {
|
||||
lastReplicated: new Date(),
|
||||
replicationStatus: (err as Error)?.message.slice(0, 500),
|
||||
isReplicationSuccess: false
|
||||
});
|
||||
}
|
||||
}
|
||||
/* eslint-enable no-await-in-loop */
|
||||
} finally {
|
||||
await lock.release();
|
||||
logger.info(job.data, "Replication finished");
|
||||
}
|
||||
});
|
||||
|
||||
queueService.listen(QueueName.SecretReplication, "failed", (job, err) => {
|
||||
logger.error(err, "Failed to replicate secret", job?.data);
|
||||
});
|
||||
};
|
@@ -0,0 +1,3 @@
export type TSyncSecretReplicationDTO = {
  id: string;
};
@@ -220,7 +220,7 @@ export const secretSnapshotServiceFactory = ({
      const deletedTopLevelSecsGroupById = groupBy(deletedTopLevelSecs, (item) => item.id);
      // this will remove all secrets and folders on the child
      // due to SQL foreign keys and the linked-list connection, removing the folders removes everything below them too
      const deletedFolders = await folderDAL.delete({ parentId: snapshot.folderId }, tx);
      const deletedFolders = await folderDAL.delete({ parentId: snapshot.folderId, isReserved: false }, tx);
      const deletedTopLevelFolders = groupBy(
        deletedFolders.filter(({ parentId }) => parentId === snapshot.folderId),
        (item) => item.id
@@ -1,20 +1,75 @@
import { Redis } from "ioredis";

import { Redlock, Settings } from "@app/lib/red-lock";

export type TKeyStoreFactory = ReturnType<typeof keyStoreFactory>;

// all the key prefixes used must be set here to avoid conflicts
export enum KeyStorePrefixes {
  SecretReplication = "secret-replication-import-lock"
}

type TWaitTillReady = {
  key: string;
  waitingCb?: () => void;
  keyCheckCb: (val: string | null) => boolean;
  waitIteration?: number;
  delay?: number;
  jitter?: number;
};

export const keyStoreFactory = (redisUrl: string) => {
  const redis = new Redis(redisUrl);
  const redisLock = new Redlock([redis], { retryCount: 2, retryDelay: 200 });

  const setItem = async (key: string, value: string | number | Buffer) => redis.set(key, value);
  const setItem = async (key: string, value: string | number | Buffer, prefix?: string) =>
    redis.set(prefix ? `${prefix}:${key}` : key, value);

  const getItem = async (key: string) => redis.get(key);
  const getItem = async (key: string, prefix?: string) => redis.get(prefix ? `${prefix}:${key}` : key);

  const setItemWithExpiry = async (key: string, exp: number | string, value: string | number | Buffer) =>
    redis.setex(key, exp, value);
  const setItemWithExpiry = async (
    key: string,
    exp: number | string,
    value: string | number | Buffer,
    prefix?: string
  ) => redis.setex(prefix ? `${prefix}:${key}` : key, exp, value);

  const deleteItem = async (key: string) => redis.del(key);

  const incrementBy = async (key: string, value: number) => redis.incrby(key, value);

  return { setItem, getItem, setItemWithExpiry, deleteItem, incrementBy };
  const waitTillReady = async ({
    key,
    waitingCb,
    keyCheckCb,
    waitIteration = 10,
    delay = 1000,
    jitter = 200
  }: TWaitTillReady) => {
    let attempts = 0;
    let isReady = keyCheckCb(await getItem(key));
    while (!isReady) {
      if (attempts > waitIteration) return;
      // eslint-disable-next-line
      await new Promise((resolve) => {
        waitingCb?.();
        setTimeout(resolve, Math.max(0, delay + Math.floor((Math.random() * 2 - 1) * jitter)));
      });
      attempts += 1;
      // eslint-disable-next-line
      isReady = keyCheckCb(await getItem(key, "wait_till_ready"));
    }
  };

  return {
    setItem,
    getItem,
    setItemWithExpiry,
    deleteItem,
    incrementBy,
    acquireLock(resources: string[], duration: number, settings?: Partial<Settings>) {
      return redisLock.acquire(resources, duration, settings);
    },
    waitTillReady
  };
};
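A minimal usage sketch of the new lock helpers; the resource name, TTL, and readiness key below are illustrative assumptions, not values taken from this diff:

```ts
// Sketch only: resource name, TTL, and readiness key are assumptions.
const keyStore = keyStoreFactory(process.env.REDIS_URL as string);

const lock = await keyStore.acquireLock(
  [`${KeyStorePrefixes.SecretReplication}:import-123`], // one lock per secret import
  15000 // Redlock requires an integer duration in milliseconds
);
try {
  // Poll (with bounded retries and jitter) until another worker flips the flag to "1".
  await keyStore.waitTillReady({
    key: "replication-ready:import-123",
    keyCheckCb: (val) => val === "1"
  });
  // ...critical section...
} finally {
  await lock.release();
}
```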
@@ -225,7 +225,8 @@ export const PROJECT_IDENTITIES = {
    roles: {
      description: "A list of role slugs to assign to the identity project membership.",
      role: "The role slug to assign to the newly created identity project membership.",
      isTemporary: "Whether the assigned role is temporary.",
      isTemporary:
        "Whether the assigned role is temporary. If isTemporary is set true, must provide temporaryMode, temporaryRange and temporaryAccessStartTime.",
      temporaryMode: "Type of temporary expiry.",
      temporaryRange: "Expiry time for temporary access. In relative mode it could be 1s, 2m, 3h",
      temporaryAccessStartTime: "Time at which the temporary access starts"
@@ -242,7 +243,8 @@ export const PROJECT_IDENTITIES = {
    roles: {
      description: "A list of role slugs to assign to the newly created identity project membership.",
      role: "The role slug to assign to the newly created identity project membership.",
      isTemporary: "Whether the assigned role is temporary.",
      isTemporary:
        "Whether the assigned role is temporary. If isTemporary is set true, must provide temporaryMode, temporaryRange and temporaryAccessStartTime.",
      temporaryMode: "Type of temporary expiry.",
      temporaryRange: "Expiry time for temporary access. In relative mode it could be 1s, 2m, 3h",
      temporaryAccessStartTime: "Time at which the temporary access starts"
@@ -659,6 +661,7 @@ export const INTEGRATION = {
    targetServiceId:
      "The service based grouping identifier ID of the external provider. Used in Terraform Cloud, Checkly, Railway and NorthFlank",
    owner: "External integration provider's service entity owner. Used in Github.",
    url: "The self-hosted URL of the platform to integrate with",
    path: "Path to save the synced secrets. Used by Gitlab, AWS Parameter Store, Vault",
    region: "AWS region to sync secrets to.",
    scope: "Scope of the provider. Used by Github, Qovery",
@@ -671,7 +674,8 @@ export const INTEGRATION = {
      secretGCPLabel: "The label for GCP secrets.",
      secretAWSTag: "The tags for AWS secrets.",
      kmsKeyId: "The ID of the encryption key from AWS KMS.",
      shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store."
      shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store.",
      shouldEnableDelete: "The flag to enable deletion of secrets"
    }
  },
  UPDATE: {
49
backend/src/lib/crypto/cipher/cipher.ts
Normal file
@@ -0,0 +1,49 @@
import crypto from "crypto";

import { SymmetricEncryption, TSymmetricEncryptionFns } from "./types";

const getIvLength = () => {
  return 12;
};

const getTagLength = () => {
  return 16;
};

export const symmetricCipherService = (type: SymmetricEncryption): TSymmetricEncryptionFns => {
  const IV_LENGTH = getIvLength();
  const TAG_LENGTH = getTagLength();

  const encrypt = (text: Buffer, key: Buffer) => {
    const iv = crypto.randomBytes(IV_LENGTH);
    const cipher = crypto.createCipheriv(type, key, iv);

    let encrypted = cipher.update(text);
    encrypted = Buffer.concat([encrypted, cipher.final()]);

    // Get the authentication tag
    const tag = cipher.getAuthTag();

    // Concatenate IV, encrypted text, and tag into a single buffer
    const ciphertextBlob = Buffer.concat([iv, encrypted, tag]);
    return ciphertextBlob;
  };

  const decrypt = (ciphertextBlob: Buffer, key: Buffer) => {
    // Extract the IV, encrypted text, and tag from the buffer
    const iv = ciphertextBlob.subarray(0, IV_LENGTH);
    const tag = ciphertextBlob.subarray(-TAG_LENGTH);
    const encrypted = ciphertextBlob.subarray(IV_LENGTH, -TAG_LENGTH);

    const decipher = crypto.createDecipheriv(type, key, iv);
    decipher.setAuthTag(tag);

    const decrypted = Buffer.concat([decipher.update(encrypted), decipher.final()]);
    return decrypted;
  };

  return {
    encrypt,
    decrypt
  };
};
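A round trip through the new cipher service might look like this (the inline key generation is purely illustrative):

```ts
import { randomBytes } from "crypto";

import { symmetricCipherService, SymmetricEncryption } from "@app/lib/crypto/cipher";

const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
const key = randomBytes(32); // aes-256-gcm expects a 32-byte key

const blob = cipher.encrypt(Buffer.from("hello world"), key);
// blob layout: 12-byte IV | ciphertext | 16-byte auth tag
const plaintext = cipher.decrypt(blob, key); // Buffer containing "hello world"
```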
2
backend/src/lib/crypto/cipher/index.ts
Normal file
@@ -0,0 +1,2 @@
export { symmetricCipherService } from "./cipher";
export { SymmetricEncryption } from "./types";
9
backend/src/lib/crypto/cipher/types.ts
Normal file
@@ -0,0 +1,9 @@
export enum SymmetricEncryption {
  AES_GCM_256 = "aes-256-gcm",
  AES_GCM_128 = "aes-128-gcm"
}

export type TSymmetricEncryptionFns = {
  encrypt: (text: Buffer, key: Buffer) => Buffer;
  decrypt: (blob: Buffer, key: Buffer) => Buffer;
};
@@ -11,6 +11,8 @@ import { getConfig } from "../config/env";
export const decodeBase64 = (s: string) => naclUtils.decodeBase64(s);
export const encodeBase64 = (u: Uint8Array) => naclUtils.encodeBase64(u);

export const randomSecureBytes = (length = 32) => crypto.randomBytes(length);

export type TDecryptSymmetricInput = {
  ciphertext: string;
  iv: string;
@@ -9,7 +9,8 @@ export {
  encryptAsymmetric,
  encryptSymmetric,
  encryptSymmetric128BitHexKeyUTF8,
  generateAsymmetricKeyPair
  generateAsymmetricKeyPair,
  randomSecureBytes
} from "./encryption";
export {
  decryptIntegrationAuths,
@@ -128,7 +128,7 @@ export const ormify = <DbOps extends object, Tname extends keyof Tables>(db: Kne
      }
      if ($decr) {
        Object.entries($decr).forEach(([incrementField, incrementValue]) => {
          void query.increment(incrementField, incrementValue);
          void query.decrement(incrementField, incrementValue);
        });
      }
      const [docs] = await query;
682
backend/src/lib/red-lock/index.ts
Normal file
@@ -0,0 +1,682 @@
/* eslint-disable */
// Source code credits: https://github.com/mike-marcacci/node-redlock
// Taken to avoid external dependency
import { randomBytes, createHash } from "crypto";
import { EventEmitter } from "events";

// AbortController became available as a global in node version 16. Once version
// 14 reaches its end-of-life, this can be removed.

import { Redis as IORedisClient, Cluster as IORedisCluster } from "ioredis";

type Client = IORedisClient | IORedisCluster;

// Define script constants.
const ACQUIRE_SCRIPT = `
  -- Return 0 if an entry already exists.
  for i, key in ipairs(KEYS) do
    if redis.call("exists", key) == 1 then
      return 0
    end
  end

  -- Create an entry for each provided key.
  for i, key in ipairs(KEYS) do
    redis.call("set", key, ARGV[1], "PX", ARGV[2])
  end

  -- Return the number of entries added.
  return #KEYS
`;

const EXTEND_SCRIPT = `
  -- Return 0 if an entry exists with a *different* lock value.
  for i, key in ipairs(KEYS) do
    if redis.call("get", key) ~= ARGV[1] then
      return 0
    end
  end

  -- Update the entry for each provided key.
  for i, key in ipairs(KEYS) do
    redis.call("set", key, ARGV[1], "PX", ARGV[2])
  end

  -- Return the number of entries updated.
  return #KEYS
`;

const RELEASE_SCRIPT = `
  local count = 0
  for i, key in ipairs(KEYS) do
    -- Only remove entries for *this* lock value.
    if redis.call("get", key) == ARGV[1] then
      redis.pcall("del", key)
      count = count + 1
    end
  end

  -- Return the number of entries removed.
  return count
`;

export type ClientExecutionResult =
  | {
      client: Client;
      vote: "for";
      value: number;
    }
  | {
      client: Client;
      vote: "against";
      error: Error;
    };

/*
 * This object contains a summary of results.
 */
export type ExecutionStats = {
  readonly membershipSize: number;
  readonly quorumSize: number;
  readonly votesFor: Set<Client>;
  readonly votesAgainst: Map<Client, Error>;
};

/*
 * This object contains a summary of results. Because the result of an attempt
 * can sometimes be determined before all requests are finished, each attempt
 * contains a Promise that will resolve ExecutionStats once all requests are
 * finished. A rejection of these promises should be considered undefined
 * behavior and should cause a crash.
 */
export type ExecutionResult = {
  attempts: ReadonlyArray<Promise<ExecutionStats>>;
  start: number;
};

/**
 *
 */
export interface Settings {
  readonly driftFactor: number;
  readonly retryCount: number;
  readonly retryDelay: number;
  readonly retryJitter: number;
  readonly automaticExtensionThreshold: number;
}

// Define default settings.
const defaultSettings: Readonly<Settings> = {
  driftFactor: 0.01,
  retryCount: 10,
  retryDelay: 200,
  retryJitter: 100,
  automaticExtensionThreshold: 500
};

// Modifying this object is forbidden.
Object.freeze(defaultSettings);

/*
 * This error indicates a failure due to the existence of another lock for one
 * or more of the requested resources.
 */
export class ResourceLockedError extends Error {
  constructor(public readonly message: string) {
    super();
    this.name = "ResourceLockedError";
  }
}

/*
 * This error indicates a failure of an operation to pass with a quorum.
 */
export class ExecutionError extends Error {
  constructor(
    public readonly message: string,
    public readonly attempts: ReadonlyArray<Promise<ExecutionStats>>
  ) {
    super();
    this.name = "ExecutionError";
  }
}

/*
 * An object of this type is returned when a resource is successfully locked. It
 * contains convenience methods `release` and `extend` which perform the
 * associated Redlock method on itself.
 */
export class Lock {
  constructor(
    public readonly redlock: Redlock,
    public readonly resources: string[],
    public readonly value: string,
    public readonly attempts: ReadonlyArray<Promise<ExecutionStats>>,
    public expiration: number
  ) {}

  async release(): Promise<ExecutionResult> {
    return this.redlock.release(this);
  }

  async extend(duration: number): Promise<Lock> {
    return this.redlock.extend(this, duration);
  }
}

export type RedlockAbortSignal = AbortSignal & { error?: Error };

/**
 * A redlock object is instantiated with an array of at least one redis client
 * and an optional `options` object. Properties of the Redlock object should NOT
 * be changed after it is first used, as doing so could have unintended
 * consequences for live locks.
 */
export class Redlock extends EventEmitter {
  public readonly clients: Set<Client>;
  public readonly settings: Settings;
  public readonly scripts: {
    readonly acquireScript: { value: string; hash: string };
    readonly extendScript: { value: string; hash: string };
    readonly releaseScript: { value: string; hash: string };
  };

  public constructor(
    clients: Iterable<Client>,
    settings: Partial<Settings> = {},
    scripts: {
      readonly acquireScript?: string | ((script: string) => string);
      readonly extendScript?: string | ((script: string) => string);
      readonly releaseScript?: string | ((script: string) => string);
    } = {}
  ) {
    super();

    // Prevent crashes on error events.
    this.on("error", () => {
      // Because redlock is designed for high availability, it does not care if
      // a minority of redis instances/clusters fail at an operation.
      //
      // However, it can be helpful to monitor and log such cases. Redlock emits
      // an "error" event whenever it encounters an error, even if the error is
      // ignored in its normal operation.
      //
      // This function serves to prevent node's default behavior of crashing
      // when an "error" event is emitted in the absence of listeners.
    });

    // Create a new set of clients, to ensure no accidental mutation.
    this.clients = new Set(clients);
    if (this.clients.size === 0) {
      throw new Error("Redlock must be instantiated with at least one redis client.");
    }

    // Customize the settings for this instance.
    this.settings = {
      driftFactor: typeof settings.driftFactor === "number" ? settings.driftFactor : defaultSettings.driftFactor,
      retryCount: typeof settings.retryCount === "number" ? settings.retryCount : defaultSettings.retryCount,
      retryDelay: typeof settings.retryDelay === "number" ? settings.retryDelay : defaultSettings.retryDelay,
      retryJitter: typeof settings.retryJitter === "number" ? settings.retryJitter : defaultSettings.retryJitter,
      automaticExtensionThreshold:
        typeof settings.automaticExtensionThreshold === "number"
          ? settings.automaticExtensionThreshold
          : defaultSettings.automaticExtensionThreshold
    };

    // Use custom scripts and script modifiers.
    const acquireScript =
      typeof scripts.acquireScript === "function" ? scripts.acquireScript(ACQUIRE_SCRIPT) : ACQUIRE_SCRIPT;
    const extendScript =
      typeof scripts.extendScript === "function" ? scripts.extendScript(EXTEND_SCRIPT) : EXTEND_SCRIPT;
    const releaseScript =
      typeof scripts.releaseScript === "function" ? scripts.releaseScript(RELEASE_SCRIPT) : RELEASE_SCRIPT;

    this.scripts = {
      acquireScript: {
        value: acquireScript,
        hash: this._hash(acquireScript)
      },
      extendScript: {
        value: extendScript,
        hash: this._hash(extendScript)
      },
      releaseScript: {
        value: releaseScript,
        hash: this._hash(releaseScript)
      }
    };
  }

  /**
   * Generate a sha1 hash compatible with redis evalsha.
   */
  private _hash(value: string): string {
    return createHash("sha1").update(value).digest("hex");
  }

  /**
   * Generate a cryptographically random string.
   */
  private _random(): string {
    return randomBytes(16).toString("hex");
  }

  /**
   * This method runs `.quit()` on all client connections.
   */
  public async quit(): Promise<void> {
    const results = [];
    for (const client of this.clients) {
      results.push(client.quit());
    }

    await Promise.all(results);
  }

  /**
   * This method acquires a lock on the resources for the duration specified by
   * the `duration`.
   */
  public async acquire(resources: string[], duration: number, settings?: Partial<Settings>): Promise<Lock> {
    if (Math.floor(duration) !== duration) {
      throw new Error("Duration must be an integer value in milliseconds.");
    }

    const value = this._random();

    try {
      const { attempts, start } = await this._execute(
        this.scripts.acquireScript,
        resources,
        [value, duration],
        settings
      );

      // Add 2 milliseconds to the drift to account for Redis expires precision,
      // which is 1 ms, plus the configured allowable drift factor.
      const drift = Math.round((settings?.driftFactor ?? this.settings.driftFactor) * duration) + 2;

      return new Lock(this, resources, value, attempts, start + duration - drift);
    } catch (error) {
      // If there was an error acquiring the lock, release any partial lock
      // state that may exist on a minority of clients.
      await this._execute(this.scripts.releaseScript, resources, [value], {
        retryCount: 0
      }).catch(() => {
        // Any error here will be ignored.
      });

      throw error;
    }
  }

  /**
   * This method unlocks the provided lock from all servers still persisting it.
   * It will fail with an error if it is unable to release the lock on a quorum
   * of nodes, but will make no attempt to restore the lock in the case of a
   * failure to release. It is safe to re-attempt a release or to ignore the
   * error, as the lock will automatically expire after its timeout.
   */
  public async release(lock: Lock, settings?: Partial<Settings>): Promise<ExecutionResult> {
    // Immediately invalidate the lock.
    lock.expiration = 0;

    // Attempt to release the lock.
    return this._execute(this.scripts.releaseScript, lock.resources, [lock.value], settings);
  }

  /**
   * This method extends a valid lock by the provided `duration`.
   */
  public async extend(existing: Lock, duration: number, settings?: Partial<Settings>): Promise<Lock> {
    if (Math.floor(duration) !== duration) {
      throw new Error("Duration must be an integer value in milliseconds.");
    }

    // The lock has already expired.
    if (existing.expiration < Date.now()) {
      throw new ExecutionError("Cannot extend an already-expired lock.", []);
    }

    const { attempts, start } = await this._execute(
      this.scripts.extendScript,
      existing.resources,
      [existing.value, duration],
      settings
    );

    // Invalidate the existing lock.
    existing.expiration = 0;

    // Add 2 milliseconds to the drift to account for Redis expires precision,
    // which is 1 ms, plus the configured allowable drift factor.
    const drift = Math.round((settings?.driftFactor ?? this.settings.driftFactor) * duration) + 2;

    const replacement = new Lock(this, existing.resources, existing.value, attempts, start + duration - drift);

    return replacement;
  }

  /**
   * Execute a script on all clients. The resulting promise is resolved or
   * rejected as soon as this quorum is reached; the resolution or rejection
   * will contain a `stats` property that is resolved once all votes are in.
   */
  private async _execute(
    script: { value: string; hash: string },
    keys: string[],
    args: (string | number)[],
    _settings?: Partial<Settings>
  ): Promise<ExecutionResult> {
    const settings = _settings
      ? {
          ...this.settings,
          ..._settings
        }
      : this.settings;

    // For the purpose of easy config serialization, we treat a retryCount of
    // -1 as equivalent to Infinity.
    const maxAttempts = settings.retryCount === -1 ? Infinity : settings.retryCount + 1;

    const attempts: Promise<ExecutionStats>[] = [];

    while (true) {
      const { vote, stats, start } = await this._attemptOperation(script, keys, args);

      attempts.push(stats);

      // The operation achieved a quorum in favor.
      if (vote === "for") {
        return { attempts, start };
      }

      // Wait before reattempting.
      if (attempts.length < maxAttempts) {
        await new Promise((resolve) => {
          setTimeout(
            resolve,
            Math.max(0, settings.retryDelay + Math.floor((Math.random() * 2 - 1) * settings.retryJitter)),
            undefined
          );
        });
      } else {
        throw new ExecutionError("The operation was unable to achieve a quorum during its retry window.", attempts);
      }
    }
  }

  private async _attemptOperation(
    script: { value: string; hash: string },
    keys: string[],
    args: (string | number)[]
  ): Promise<
    | { vote: "for"; stats: Promise<ExecutionStats>; start: number }
    | { vote: "against"; stats: Promise<ExecutionStats>; start: number }
  > {
    const start = Date.now();

    return await new Promise((resolve) => {
      const clientResults = [];
      for (const client of this.clients) {
        clientResults.push(this._attemptOperationOnClient(client, script, keys, args));
      }

      const stats: ExecutionStats = {
        membershipSize: clientResults.length,
        quorumSize: Math.floor(clientResults.length / 2) + 1,
        votesFor: new Set<Client>(),
        votesAgainst: new Map<Client, Error>()
      };

      let done: () => void;
      const statsPromise = new Promise<typeof stats>((resolve) => {
        done = () => resolve(stats);
      });

      // This is the expected flow for all successful and unsuccessful requests.
      const onResultResolve = (clientResult: ClientExecutionResult): void => {
        switch (clientResult.vote) {
          case "for":
            stats.votesFor.add(clientResult.client);
            break;
          case "against":
            stats.votesAgainst.set(clientResult.client, clientResult.error);
            break;
        }

        // A quorum has determined a success.
        if (stats.votesFor.size === stats.quorumSize) {
          resolve({
            vote: "for",
            stats: statsPromise,
            start
          });
        }

        // A quorum has determined a failure.
        if (stats.votesAgainst.size === stats.quorumSize) {
          resolve({
            vote: "against",
            stats: statsPromise,
            start
          });
        }

        // All votes are in.
        if (stats.votesFor.size + stats.votesAgainst.size === stats.membershipSize) {
          done();
        }
      };

      // This is unexpected and should crash to prevent undefined behavior.
      const onResultReject = (error: Error): void => {
        throw error;
      };

      for (const result of clientResults) {
        result.then(onResultResolve, onResultReject);
      }
    });
  }

  private async _attemptOperationOnClient(
    client: Client,
    script: { value: string; hash: string },
    keys: string[],
    args: (string | number)[]
  ): Promise<ClientExecutionResult> {
    try {
      let result: number;
      try {
        // Attempt to evaluate the script by its hash.
        // @ts-expect-error
        const shaResult = (await client.evalsha(script.hash, keys.length, [...keys, ...args])) as unknown;

        if (typeof shaResult !== "number") {
          throw new Error(`Unexpected result of type ${typeof shaResult} returned from redis.`);
        }

        result = shaResult;
      } catch (error) {
        // If the redis server does not already have the script cached,
        // reattempt the request with the script's raw text.
        if (!(error instanceof Error) || !error.message.startsWith("NOSCRIPT")) {
          throw error;
        }
        // @ts-expect-error
        const rawResult = (await client.eval(script.value, keys.length, [...keys, ...args])) as unknown;

        if (typeof rawResult !== "number") {
          throw new Error(`Unexpected result of type ${typeof rawResult} returned from redis.`);
        }

        result = rawResult;
      }

      // One or more of the resources was already locked.
      if (result !== keys.length) {
        throw new ResourceLockedError(
          `The operation was applied to: ${result} of the ${keys.length} requested resources.`
        );
      }

      return {
        vote: "for",
        client,
        value: result
      };
    } catch (error) {
      if (!(error instanceof Error)) {
        throw new Error(`Unexpected type ${typeof error} thrown with value: ${error}`);
      }

      // Emit the error on the redlock instance for observability.
      this.emit("error", error);

      return {
        vote: "against",
        client,
        error
      };
    }
  }

  /**
   * Wrap and execute a routine in the context of an auto-extending lock,
   * returning a promise of the routine's value. In the case that auto-extension
   * fails, an AbortSignal will be updated to indicate that abortion of the
   * routine is in order, and to pass along the encountered error.
   *
   * @example
   * ```ts
   * await redlock.using([senderId, recipientId], 5000, { retryCount: 5 }, async (signal) => {
   *   const senderBalance = await getBalance(senderId);
   *   const recipientBalance = await getBalance(recipientId);
   *
   *   if (senderBalance < amountToSend) {
   *     throw new Error("Insufficient balance.");
   *   }
   *
   *   // The abort signal will be true if:
   *   // 1. the above took long enough that the lock needed to be extended
   *   // 2. redlock was unable to extend the lock
   *   //
   *   // In such a case, exclusivity can no longer be guaranteed for further
   *   // operations, and should be handled as an exceptional case.
   *   if (signal.aborted) {
   *     throw signal.error;
   *   }
   *
   *   await setBalances([
   *     {id: senderId, balance: senderBalance - amountToSend},
   *     {id: recipientId, balance: recipientBalance + amountToSend},
   *   ]);
   * });
   * ```
   */

  public async using<T>(
    resources: string[],
    duration: number,
    settings: Partial<Settings>,
    routine?: (signal: RedlockAbortSignal) => Promise<T>
  ): Promise<T>;

  public async using<T>(
    resources: string[],
    duration: number,
    routine: (signal: RedlockAbortSignal) => Promise<T>
  ): Promise<T>;

  public async using<T>(
    resources: string[],
    duration: number,
    settingsOrRoutine: undefined | Partial<Settings> | ((signal: RedlockAbortSignal) => Promise<T>),
    optionalRoutine?: (signal: RedlockAbortSignal) => Promise<T>
  ): Promise<T> {
    if (Math.floor(duration) !== duration) {
      throw new Error("Duration must be an integer value in milliseconds.");
    }

    const settings =
      settingsOrRoutine && typeof settingsOrRoutine !== "function"
        ? {
            ...this.settings,
            ...settingsOrRoutine
          }
        : this.settings;

    const routine = optionalRoutine ?? settingsOrRoutine;
    if (typeof routine !== "function") {
      throw new Error("INVARIANT: routine is not a function.");
    }

    if (settings.automaticExtensionThreshold > duration - 100) {
      throw new Error(
        "A lock `duration` must be at least 100ms greater than the `automaticExtensionThreshold` setting."
      );
    }

    // The AbortController/AbortSignal pattern allows the routine to be notified
    // of a failure to extend the lock, and subsequent expiration. In the event
    // of an abort, the error object will be made available at `signal.error`.
    const controller = new AbortController();

    const signal = controller.signal as RedlockAbortSignal;

    function queue(): void {
      timeout = setTimeout(
        () => (extension = extend()),
        lock.expiration - Date.now() - settings.automaticExtensionThreshold
      );
    }

    async function extend(): Promise<void> {
      timeout = undefined;

      try {
        lock = await lock.extend(duration);
        queue();
      } catch (error) {
        if (!(error instanceof Error)) {
          throw new Error(`Unexpected thrown ${typeof error}: ${error}.`);
        }

        if (lock.expiration > Date.now()) {
          return (extension = extend());
        }

        signal.error = error instanceof Error ? error : new Error(`${error}`);
        controller.abort();
      }
    }

    let timeout: undefined | NodeJS.Timeout;
    let extension: undefined | Promise<void>;
    let lock = await this.acquire(resources, duration, settings);
    queue();

    try {
      return await routine(signal);
    } finally {
      // Clean up the timer.
      if (timeout) {
        clearTimeout(timeout);
        timeout = undefined;
      }

      // Wait for an in-flight extension to finish.
      if (extension) {
        await extension.catch(() => {
          // An error here doesn't matter at all, because the routine has
          // already completed, and a release will be attempted regardless. The
          // only reason for waiting here is to prevent possible contention
          // between the extension and release.
        });
      }

      await lock.release();
    }
  }
}
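Besides the auto-extending `using` helper documented above, the class can also be driven manually; a minimal sketch with illustrative resource names and durations:

```ts
// Sketch only: the resource name and durations are assumptions.
const redlock = new Redlock([new Redis(process.env.REDIS_URL as string)], { retryCount: 3 });

let lock = await redlock.acquire(["locks:secret-import:abc"], 5000);
try {
  // ...critical section...
  lock = await lock.extend(5000); // push the expiration out if more time is needed
} finally {
  // Release failures are safe to retry or ignore; the keys expire on their own.
  await lock.release();
}
```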
@@ -7,3 +7,7 @@ export const zpStr = <T extends ZodTypeAny>(schema: T, opt: { stripNull: boolean
  if (typeof val !== "string") return val;
  return val.trim() || undefined;
}, schema);

export const zodBuffer = z.custom<Buffer>((data) => Buffer.isBuffer(data) || data instanceof Uint8Array, {
  message: "Expected binary data (Buffer or Uint8Array)"
});
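A quick sketch of how the new `zodBuffer` helper composes into a schema (the schema and field names are hypothetical):

```ts
// Sketch only: the schema and field names are assumptions.
const KmsRootConfigSchema = z.object({
  encryptedRootKey: zodBuffer // accepts a Buffer or Uint8Array, rejects anything else
});

KmsRootConfigSchema.parse({ encryptedRootKey: Buffer.from("secret") }); // passes
KmsRootConfigSchema.parse({ encryptedRootKey: "not-binary" }); // throws a ZodError
```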
@@ -7,6 +7,7 @@ import {
  TScanFullRepoEventPayload,
  TScanPushEventPayload
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { TSyncSecretsDTO } from "@app/services/secret/secret-types";

export enum QueueName {
  SecretRotation = "secret-rotation",
@@ -21,7 +22,9 @@ export enum QueueName {
  SecretFullRepoScan = "secret-full-repo-scan",
  SecretPushEventScan = "secret-push-event-scan",
  UpgradeProjectToGhost = "upgrade-project-to-ghost",
  DynamicSecretRevocation = "dynamic-secret-revocation"
  DynamicSecretRevocation = "dynamic-secret-revocation",
  SecretReplication = "secret-replication",
  SecretSync = "secret-sync" // parent queue to push integration sync, webhook, and secret replication
}

export enum QueueJobs {
@@ -37,7 +40,9 @@ export enum QueueJobs {
  SecretScan = "secret-scan",
  UpgradeProjectToGhost = "upgrade-project-to-ghost-job",
  DynamicSecretRevocation = "dynamic-secret-revocation",
  DynamicSecretPruning = "dynamic-secret-pruning"
  DynamicSecretPruning = "dynamic-secret-pruning",
  SecretReplication = "secret-replication",
  SecretSync = "secret-sync" // parent queue to push integration sync, webhook, and secret replication
}

export type TQueueJobTypes = {
@@ -116,6 +121,14 @@ export type TQueueJobTypes = {
      dynamicSecretCfgId: string;
    };
  };
  [QueueName.SecretReplication]: {
    name: QueueJobs.SecretReplication;
    payload: TSyncSecretsDTO;
  };
  [QueueName.SecretSync]: {
    name: QueueJobs.SecretSync;
    payload: TSyncSecretsDTO;
  };
};

export type TQueueServiceFactory = ReturnType<typeof queueServiceFactory>;
@@ -132,7 +145,7 @@ export const queueServiceFactory = (redisUrl: string) => {

  const start = <T extends QueueName>(
    name: T,
    jobFn: (job: Job<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>) => Promise<void>,
    jobFn: (job: Job<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>, token?: string) => Promise<void>,
    queueSettings: Omit<QueueOptions, "connection"> = {}
  ) => {
    if (queueContainer[name]) {
@@ -166,7 +179,7 @@ export const queueServiceFactory = (redisUrl: string) => {
    name: T,
    job: TQueueJobTypes[T]["name"],
    data: TQueueJobTypes[T]["payload"],
    opts: JobsOptions & { jobId?: string }
    opts?: JobsOptions & { jobId?: string }
  ) => {
    const q = queueContainer[name];
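Assuming the surrounding function is the service's `queue` method, enqueueing one of the new job types with the now-optional `opts` would look roughly like this; the payload fields are assumptions, since `TSyncSecretsDTO` itself lives in `@app/services/secret/secret-types` and is not shown in this diff:

```ts
// Sketch only: payload fields are assumed, not taken from this diff.
await queueService.queue(QueueName.SecretReplication, QueueJobs.SecretReplication, {
  projectId: "project-id",
  secretPath: "/",
  environmentSlug: "dev"
} as TSyncSecretsDTO);
```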
@@ -28,7 +28,7 @@ export const readLimit: RateLimitOptions = {
// POST, PATCH, PUT, DELETE endpoints
export const writeLimit: RateLimitOptions = {
  timeWindow: 60 * 1000,
  max: 50,
  max: 200, // (too low, FA having issues so increasing it - maidul)
  keyGenerator: (req) => req.realIp
};
@@ -44,6 +44,7 @@ import { secretApprovalRequestDALFactory } from "@app/ee/services/secret-approva
import { secretApprovalRequestReviewerDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-reviewer-dal";
import { secretApprovalRequestSecretDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-secret-dal";
import { secretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service";
import { secretReplicationServiceFactory } from "@app/ee/services/secret-replication/secret-replication-service";
import { secretRotationDALFactory } from "@app/ee/services/secret-rotation/secret-rotation-dal";
import { secretRotationQueueFactory } from "@app/ee/services/secret-rotation/secret-rotation-queue";
import { secretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
@@ -96,6 +97,9 @@ import { integrationDALFactory } from "@app/services/integration/integration-dal
import { integrationServiceFactory } from "@app/services/integration/integration-service";
import { integrationAuthDALFactory } from "@app/services/integration-auth/integration-auth-dal";
import { integrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service";
import { kmsDALFactory } from "@app/services/kms/kms-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { incidentContactDALFactory } from "@app/services/org/incident-contacts-dal";
import { orgBotDALFactory } from "@app/services/org/org-bot-dal";
import { orgDALFactory } from "@app/services/org/org-dal";
@@ -240,8 +244,8 @@ export const registerRoutes = async (
  const sapApproverDAL = secretApprovalPolicyApproverDALFactory(db);
  const secretApprovalPolicyDAL = secretApprovalPolicyDALFactory(db);
  const secretApprovalRequestDAL = secretApprovalRequestDALFactory(db);
  const sarReviewerDAL = secretApprovalRequestReviewerDALFactory(db);
  const sarSecretDAL = secretApprovalRequestSecretDALFactory(db);
  const secretApprovalRequestReviewerDAL = secretApprovalRequestReviewerDALFactory(db);
  const secretApprovalRequestSecretDAL = secretApprovalRequestSecretDALFactory(db);

  const secretRotationDAL = secretRotationDALFactory(db);
  const snapshotDAL = snapshotDALFactory(db);
@@ -260,6 +264,9 @@ export const registerRoutes = async (
  const dynamicSecretDAL = dynamicSecretDALFactory(db);
  const dynamicSecretLeaseDAL = dynamicSecretLeaseDALFactory(db);

  const kmsDAL = kmsDALFactory(db);
  const kmsRootConfigDAL = kmsRootConfigDALFactory(db);

  const permissionService = permissionServiceFactory({
    permissionDAL,
    orgRoleDAL,
@@ -268,6 +275,12 @@ export const registerRoutes = async (
    projectDAL
  });
  const licenseService = licenseServiceFactory({ permissionService, orgDAL, licenseDAL, keyStore });
  const kmsService = kmsServiceFactory({
    kmsRootConfigDAL,
    keyStore,
    kmsDAL
  });

  const trustedIpService = trustedIpServiceFactory({
    licenseService,
    projectDAL,
@@ -288,7 +301,7 @@ export const registerRoutes = async (
    permissionService,
    auditLogStreamDAL
  });
  const sapService = secretApprovalPolicyServiceFactory({
  const secretApprovalPolicyService = secretApprovalPolicyServiceFactory({
    projectMembershipDAL,
    projectEnvDAL,
    secretApprovalPolicyApproverDAL: sapApproverDAL,
@@ -489,7 +502,7 @@ export const registerRoutes = async (
    projectBotDAL,
    projectMembershipDAL,
    secretApprovalRequestDAL,
    secretApprovalSecretDAL: sarSecretDAL,
    secretApprovalSecretDAL: secretApprovalRequestSecretDAL,
    projectUserMembershipRoleDAL
  });

@@ -587,6 +600,7 @@ export const registerRoutes = async (
    secretVersionTagDAL
  });
  const secretImportService = secretImportServiceFactory({
    licenseService,
    projectEnvDAL,
    folderDAL,
    permissionService,
@@ -621,19 +635,18 @@ export const registerRoutes = async (
    secretSharingDAL
  });

  const sarService = secretApprovalRequestServiceFactory({
  const secretApprovalRequestService = secretApprovalRequestServiceFactory({
    permissionService,
    projectBotService,
    folderDAL,
    secretDAL,
    secretTagDAL,
    secretApprovalRequestSecretDAL: sarSecretDAL,
    secretApprovalRequestReviewerDAL: sarReviewerDAL,
    secretApprovalRequestSecretDAL,
    secretApprovalRequestReviewerDAL,
    projectDAL,
    secretVersionDAL,
    secretBlindIndexDAL,
    secretApprovalRequestDAL,
    secretService,
    snapshotService,
    secretVersionTagDAL,
    secretQueueService
@@ -662,6 +675,23 @@ export const registerRoutes = async (
    accessApprovalPolicyApproverDAL
  });

  const secretReplicationService = secretReplicationServiceFactory({
    secretTagDAL,
    secretVersionTagDAL,
    secretDAL,
    secretVersionDAL,
    secretImportDAL,
    keyStore,
    queueService,
    folderDAL,
    secretApprovalPolicyService,
    secretBlindIndexDAL,
    secretApprovalRequestDAL,
    secretApprovalRequestSecretDAL,
    secretQueueService,
    projectMembershipDAL,
    projectBotService
  });
  const secretRotationQueue = secretRotationQueueFactory({
    telemetryService,
    secretRotationDAL,
@@ -805,6 +835,7 @@ export const registerRoutes = async (

  await telemetryQueue.startTelemetryCheck();
  await dailyResourceCleanUp.startCleanUp();
  await kmsService.startService();

  // inject all services
  server.decorate<FastifyZodProvider["services"]>("services", {
@@ -826,6 +857,7 @@ export const registerRoutes = async (
    projectEnv: projectEnvService,
    projectRole: projectRoleService,
    secret: secretService,
    secretReplication: secretReplicationService,
    secretTag: secretTagService,
    folder: folderService,
    secretImport: secretImportService,
@@ -842,10 +874,10 @@ export const registerRoutes = async (
    identityGcpAuth: identityGcpAuthService,
    identityAwsAuth: identityAwsAuthService,
    identityAzureAuth: identityAzureAuthService,
    secretApprovalPolicy: sapService,
    accessApprovalPolicy: accessApprovalPolicyService,
    accessApprovalRequest: accessApprovalRequestService,
    secretApprovalRequest: sarService,
    secretApprovalPolicy: secretApprovalPolicyService,
    secretApprovalRequest: secretApprovalRequestService,
    secretRotation: secretRotationService,
    dynamicSecret: dynamicSecretService,
    dynamicSecretLease: dynamicSecretLeaseService,
@@ -8,7 +8,7 @@ import { writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { IntegrationMappingBehavior } from "@app/services/integration-auth/integration-list";
import { IntegrationMetadataSchema } from "@app/services/integration/integration-schema";
import { PostHogEventTypes, TIntegrationCreatedEvent } from "@app/services/telemetry/telemetry-types";

export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
@@ -42,39 +42,11 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
        targetService: z.string().trim().optional().describe(INTEGRATION.CREATE.targetService),
        targetServiceId: z.string().trim().optional().describe(INTEGRATION.CREATE.targetServiceId),
        owner: z.string().trim().optional().describe(INTEGRATION.CREATE.owner),
        url: z.string().trim().optional().describe(INTEGRATION.CREATE.url),
        path: z.string().trim().optional().describe(INTEGRATION.CREATE.path),
        region: z.string().trim().optional().describe(INTEGRATION.CREATE.region),
        scope: z.string().trim().optional().describe(INTEGRATION.CREATE.scope),
        metadata: z
          .object({
            secretPrefix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretPrefix),
            secretSuffix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretSuffix),
            initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir),
            mappingBehavior: z
              .nativeEnum(IntegrationMappingBehavior)
              .optional()
              .describe(INTEGRATION.CREATE.metadata.mappingBehavior),
            shouldAutoRedeploy: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldAutoRedeploy),
            secretGCPLabel: z
              .object({
                labelName: z.string(),
                labelValue: z.string()
              })
              .optional()
              .describe(INTEGRATION.CREATE.metadata.secretGCPLabel),
            secretAWSTag: z
              .array(
                z.object({
                  key: z.string(),
                  value: z.string()
                })
              )
              .optional()
              .describe(INTEGRATION.CREATE.metadata.secretAWSTag),
            kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId),
            shouldDisableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete)
          })
          .default({})
        metadata: IntegrationMetadataSchema.default({})
      }),
      response: {
        200: z.object({
@@ -160,33 +132,7 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
        targetEnvironment: z.string().trim().describe(INTEGRATION.UPDATE.targetEnvironment),
        owner: z.string().trim().describe(INTEGRATION.UPDATE.owner),
        environment: z.string().trim().describe(INTEGRATION.UPDATE.environment),
        metadata: z
          .object({
            secretPrefix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretPrefix),
            secretSuffix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretSuffix),
            initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir),
            mappingBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.mappingBehavior),
            shouldAutoRedeploy: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldAutoRedeploy),
            secretGCPLabel: z
              .object({
                labelName: z.string(),
                labelValue: z.string()
              })
              .optional()
              .describe(INTEGRATION.CREATE.metadata.secretGCPLabel),
            secretAWSTag: z
              .array(
                z.object({
                  key: z.string(),
                  value: z.string()
                })
              )
              .optional()
              .describe(INTEGRATION.CREATE.metadata.secretAWSTag),
            kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId),
            shouldDisableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete)
          })
          .optional()
        metadata: IntegrationMetadataSchema.optional()
      }),
      response: {
        200: z.object({
@@ -29,7 +29,8 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) =>
      import: z.object({
        environment: z.string().trim().describe(SECRET_IMPORTS.CREATE.import.environment),
        path: z.string().trim().transform(removeTrailingSlash).describe(SECRET_IMPORTS.CREATE.import.path)
      })
      }),
      isReplication: z.boolean().default(false)
    }),
    response: {
      200: z.object({
@@ -210,6 +211,49 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) =>
    }
  });

  server.route({
    method: "POST",
    url: "/:secretImportId/replication-resync",
    config: {
      rateLimit: secretsLimit
    },
    schema: {
      description: "Resync secret replication of secret imports",
      security: [
        {
          bearerAuth: []
        }
      ],
      params: z.object({
        secretImportId: z.string().trim().describe(SECRET_IMPORTS.UPDATE.secretImportId)
      }),
      body: z.object({
        workspaceId: z.string().trim().describe(SECRET_IMPORTS.UPDATE.workspaceId),
        environment: z.string().trim().describe(SECRET_IMPORTS.UPDATE.environment),
        path: z.string().trim().default("/").transform(removeTrailingSlash).describe(SECRET_IMPORTS.UPDATE.path)
      }),
      response: {
        200: z.object({
          message: z.string()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const { message } = await server.services.secretImport.resyncSecretImportReplication({
        actorId: req.permission.id,
        actor: req.permission.type,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        id: req.params.secretImportId,
        ...req.body,
        projectId: req.body.workspaceId
      });

      return { message };
    }
  });
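The new route can be exercised with a plain HTTP call; a sketch where the host, mount prefix, ids, and token are placeholders (the router's mount prefix is not visible in this diff):

```ts
// Sketch only: host, mount prefix, ids, and token are placeholders.
const res = await fetch(
  `https://app.infisical.com/api/v1/secret-imports/${secretImportId}/replication-resync`,
  {
    method: "POST",
    headers: { Authorization: `Bearer ${jwt}`, "Content-Type": "application/json" },
    body: JSON.stringify({ workspaceId, environment: "dev", path: "/" })
  }
);
const { message } = await res.json(); // 200 -> { message: string }
```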
  server.route({
    method: "GET",
    url: "/",
@@ -232,11 +276,9 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) =>
      200: z.object({
        message: z.string(),
        secretImports: SecretImportsSchema.omit({ importEnv: true })
          .merge(
            z.object({
              importEnv: z.object({ name: z.string(), slug: z.string(), id: z.string() })
            })
          )
          .extend({
            importEnv: z.object({ name: z.string(), slug: z.string(), id: z.string() })
          })
          .array()
      })
    }
@@ -45,7 +45,13 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
hashedHex: z.string()
}),
response: {
200: SecretSharingSchema.pick({ name: true, encryptedValue: true, iv: true, tag: true, expiresAt: true })
200: SecretSharingSchema.pick({
encryptedValue: true,
iv: true,
tag: true,
expiresAt: true,
expiresAfterViews: true
})
}
},
handler: async (req) => {
@@ -55,11 +61,11 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
);
if (!sharedSecret) return undefined;
return {
name: sharedSecret.name,
encryptedValue: sharedSecret.encryptedValue,
iv: sharedSecret.iv,
tag: sharedSecret.tag,
expiresAt: sharedSecret.expiresAt
expiresAt: sharedSecret.expiresAt,
expiresAfterViews: sharedSecret.expiresAfterViews
};
}
});
@@ -72,14 +78,14 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
},
schema: {
body: z.object({
name: z.string(),
encryptedValue: z.string(),
iv: z.string(),
tag: z.string(),
hashedHex: z.string(),
expiresAt: z.string().refine((date) => new Date(date) > new Date(), {
message: "Expires at should be a future date"
})
expiresAt: z
.string()
.refine((date) => date === undefined || new Date(date) > new Date(), "Expires at should be a future date"),
expiresAfterViews: z.number()
}),
response: {
200: z.object({
@@ -89,19 +95,19 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { name, encryptedValue, iv, tag, hashedHex, expiresAt } = req.body;
const { encryptedValue, iv, tag, hashedHex, expiresAt, expiresAfterViews } = req.body;
const sharedSecret = await req.server.services.secretSharing.createSharedSecret({
actor: req.permission.type,
actorId: req.permission.id,
orgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
name,
encryptedValue,
iv,
tag,
hashedHex,
expiresAt: new Date(expiresAt)
expiresAt: new Date(expiresAt),
expiresAfterViews
});
return { id: sharedSecret.id };
}
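The shape of the new request body is easiest to see end-to-end in a small sketch; every value below is a placeholder, and the client-side AES-GCM encryption step that produces encryptedValue/iv/tag/hashedHex is assumed, not part of this diff.

// Hedged sketch of a request body matching the updated zod schema.
const body = {
  name: "staging-db-password", // still accepted by the schema, but no longer persisted by the service
  encryptedValue: "<base64-ciphertext>", // placeholders for the client-side encryption output
  iv: "<base64-iv>",
  tag: "<base64-auth-tag>",
  hashedHex: "<sha256-hex>",
  expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000).toISOString(), // must be in the future
  expiresAfterViews: 5 // new: the secret self-destructs after five reads
};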
@@ -9,7 +9,6 @@ import {
ServiceTokenScopes
} from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { CommitType } from "@app/ee/services/secret-approval-request/secret-approval-request-types";
import { RAW_SECRETS, SECRETS } from "@app/lib/api-docs";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
@@ -19,6 +18,7 @@ import { getUserAgentType } from "@app/server/plugins/audit-log";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
import { ProjectFilterType } from "@app/services/project/project-types";
import { SecretOperations } from "@app/services/secret/secret-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";

import { secretRawSchema } from "../sanitizedSchemas";
@@ -902,7 +902,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
projectId,
policy,
data: {
[CommitType.Create]: [
[SecretOperations.Create]: [
{
secretName: req.params.secretName,
secretValueCiphertext,
@@ -1084,7 +1084,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
projectId,
policy,
data: {
[CommitType.Update]: [
[SecretOperations.Update]: [
{
secretName: req.params.secretName,
newSecretName,
@@ -1234,7 +1234,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
projectId,
policy,
data: {
[CommitType.Delete]: [
[SecretOperations.Delete]: [
{
secretName: req.params.secretName
}
@@ -1364,7 +1364,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
projectId,
policy,
data: {
[CommitType.Create]: inputSecrets
[SecretOperations.Create]: inputSecrets
}
});

@@ -1491,7 +1491,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
projectId,
policy,
data: {
[CommitType.Update]: inputSecrets.filter(({ type }) => type === "shared")
[SecretOperations.Update]: inputSecrets.filter(({ type }) => type === "shared")
}
});

@@ -1606,7 +1606,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
projectId,
policy,
data: {
[CommitType.Delete]: inputSecrets.filter(({ type }) => type === "shared")
[SecretOperations.Delete]: inputSecrets.filter(({ type }) => type === "shared")
}
});
await server.services.auditLog.createAuditLog({

@@ -199,6 +199,7 @@ export const integrationAuthServiceFactory = ({
projectId,
namespace,
integration,
url,
algorithm: SecretEncryptionAlgo.AES_256_GCM,
keyEncoding: SecretKeyEncoding.UTF8,
...(integration === Integrations.GCP_SECRET_MANAGER

@@ -30,7 +30,8 @@ export enum Integrations {
DIGITAL_OCEAN_APP_PLATFORM = "digital-ocean-app-platform",
CLOUD_66 = "cloud-66",
NORTHFLANK = "northflank",
HASURA_CLOUD = "hasura-cloud"
HASURA_CLOUD = "hasura-cloud",
RUNDECK = "rundeck"
}

export enum IntegrationType {
@@ -368,6 +369,15 @@ export const getIntegrationOptions = async () => {
type: "pat",
clientId: "",
docsLink: ""
},
{
name: "Rundeck",
slug: "rundeck",
image: "Rundeck.svg",
isAvailable: true,
type: "pat",
clientId: "",
docsLink: ""
}
];

@@ -27,9 +27,11 @@ import { z } from "zod";
import { SecretType, TIntegrationAuths, TIntegrations, TSecrets } from "@app/db/schemas";
import { request } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TCreateManySecretsRawFn, TUpdateManySecretsRawFn } from "@app/services/secret/secret-types";

import { TIntegrationDALFactory } from "../integration/integration-dal";
import { IntegrationMetadataSchema } from "../integration/integration-schema";
import {
IntegrationInitialSyncBehavior,
IntegrationMappingBehavior,
@@ -521,18 +523,42 @@ const syncSecretsAWSParameterStore = async ({
.promise();
}
// case: secret exists in AWS parameter store
} else if (awsParameterStoreSecretsObj[key].Value !== secrets[key].value) {
// case: secret value doesn't match one in AWS parameter store
} else {
// -> update secret
await ssm
.putParameter({
Name: `${integration.path}${key}`,
Type: "SecureString",
Value: secrets[key].value,
Overwrite: true
// Tags: metadata.secretAWSTag ? [{ Key: metadata.secretAWSTag.key, Value: metadata.secretAWSTag.value }] : []
})
.promise();
if (awsParameterStoreSecretsObj[key].Value !== secrets[key].value) {
await ssm
.putParameter({
Name: `${integration.path}${key}`,
Type: "SecureString",
Value: secrets[key].value,
Overwrite: true
})
.promise();
}

if (awsParameterStoreSecretsObj[key].Name) {
try {
await ssm
.addTagsToResource({
ResourceType: "Parameter",
ResourceId: awsParameterStoreSecretsObj[key].Name as string,
Tags: metadata.secretAWSTag
? metadata.secretAWSTag.map((tag: { key: string; value: string }) => ({
Key: tag.key,
Value: tag.value
}))
: []
})
.promise();
} catch (err) {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
if ((err as any).code === "AccessDeniedException") {
logger.error(
`AWS Parameter Store Error [integration=${integration.id}]: double check AWS account permissions (refer to the Infisical docs)`
);
}
}
}
}

await new Promise((resolve) => {
@@ -1338,38 +1364,41 @@ const syncSecretsGitHub = async ({
}
}

for await (const encryptedSecret of encryptedSecrets) {
if (
!(encryptedSecret.name in secrets) &&
!(appendices?.prefix !== undefined && !encryptedSecret.name.startsWith(appendices?.prefix)) &&
!(appendices?.suffix !== undefined && !encryptedSecret.name.endsWith(appendices?.suffix))
) {
switch (integration.scope) {
case GithubScope.Org: {
await octokit.request("DELETE /orgs/{org}/actions/secrets/{secret_name}", {
org: integration.owner as string,
secret_name: encryptedSecret.name
});
break;
}
case GithubScope.Env: {
await octokit.request(
"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}",
{
repository_id: Number(integration.appId),
environment_name: integration.targetEnvironmentId as string,
const metadata = IntegrationMetadataSchema.parse(integration.metadata);
if (metadata.shouldEnableDelete) {
for await (const encryptedSecret of encryptedSecrets) {
if (
!(encryptedSecret.name in secrets) &&
!(appendices?.prefix !== undefined && !encryptedSecret.name.startsWith(appendices?.prefix)) &&
!(appendices?.suffix !== undefined && !encryptedSecret.name.endsWith(appendices?.suffix))
) {
switch (integration.scope) {
case GithubScope.Org: {
await octokit.request("DELETE /orgs/{org}/actions/secrets/{secret_name}", {
org: integration.owner as string,
secret_name: encryptedSecret.name
}
);
break;
}
default: {
await octokit.request("DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", {
owner: integration.owner as string,
repo: integration.app as string,
secret_name: encryptedSecret.name
});
break;
});
break;
}
case GithubScope.Env: {
await octokit.request(
"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}",
{
repository_id: Number(integration.appId),
environment_name: integration.targetEnvironmentId as string,
secret_name: encryptedSecret.name
}
);
break;
}
default: {
await octokit.request("DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", {
owner: integration.owner as string,
repo: integration.app as string,
secret_name: encryptedSecret.name
});
break;
}
}
}
}
@@ -3330,6 +3359,82 @@ const syncSecretsHasuraCloud = async ({
}
};

/** Sync/push [secrets] to Rundeck
* @param {Object} obj
* @param {TIntegrations} obj.integration - integration details
* @param {Object} obj.secrets - secrets to push to integration (object where keys are secret keys and values are secret values)
* @param {String} obj.accessToken - access token for Rundeck integration
*/
const syncSecretsRundeck = async ({
integration,
secrets,
accessToken
}: {
integration: TIntegrations;
secrets: Record<string, { value: string; comment?: string }>;
accessToken: string;
}) => {
interface RundeckSecretResource {
name: string;
}
interface RundeckSecretsGetRes {
resources: RundeckSecretResource[];
}

let existingRundeckSecrets: string[] = [];

try {
const listResult = await request.get<RundeckSecretsGetRes>(
`${integration.url}/api/44/storage/${integration.path}`,
{
headers: {
"X-Rundeck-Auth-Token": accessToken
}
}
);

existingRundeckSecrets = listResult.data.resources.map((res) => res.name);
} catch (err) {
logger.info("No existing rundeck secrets");
}

try {
for await (const [key, value] of Object.entries(secrets)) {
if (existingRundeckSecrets.includes(key)) {
await request.put(`${integration.url}/api/44/storage/${integration.path}/${key}`, value.value, {
headers: {
"X-Rundeck-Auth-Token": accessToken,
"Content-Type": "application/x-rundeck-data-password"
}
});
} else {
await request.post(`${integration.url}/api/44/storage/${integration.path}/${key}`, value.value, {
headers: {
"X-Rundeck-Auth-Token": accessToken,
"Content-Type": "application/x-rundeck-data-password"
}
});
}
}

for await (const existingSecret of existingRundeckSecrets) {
if (!(existingSecret in secrets)) {
await request.delete(`${integration.url}/api/44/storage/${integration.path}/${existingSecret}`, {
headers: {
"X-Rundeck-Auth-Token": accessToken
}
});
}
}
} catch (err: unknown) {
throw new Error(
`Ensure that the provided Rundeck URL is accessible by Infisical and that the linked API token has sufficient permissions.\n\n${
(err as Error).message
}`
);
}
};
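A usage sketch for the new Rundeck sync follows; the integration row and token are fabricated for illustration (only url and path are actually read from the integration in the calls above), and the cast narrows a full TIntegrations row to the two fields the sketch supplies.

// Hedged usage sketch: all values are illustrative, not from this diff.
// `integration.url` is the Rundeck base URL and `integration.path` a Key Storage
// path such as "keys/project/my-project", matching the REST calls above.
await syncSecretsRundeck({
  integration: { url: "https://rundeck.example.com", path: "keys/project/my-project" } as TIntegrations,
  secrets: { DB_PASSWORD: { value: "s3cr3t" }, API_KEY: { value: "abc123" } },
  accessToken: "<rundeck-api-token>"
});
// Net effect: upsert DB_PASSWORD and API_KEY as Rundeck password entries, then delete
// any key under the storage path that no longer exists in Infisical.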

/**
* Sync/push [secrets] to [app] in integration named [integration]
*
@@ -3596,6 +3701,13 @@ export const syncIntegrationSecrets = async ({
accessToken
});
break;
case Integrations.RUNDECK:
await syncSecretsRundeck({
integration,
secrets,
accessToken
});
break;
default:
throw new BadRequestError({ message: "Invalid integration" });
}

35
backend/src/services/integration/integration-schema.ts
Normal file
@@ -0,0 +1,35 @@
import { z } from "zod";

import { INTEGRATION } from "@app/lib/api-docs";

import { IntegrationMappingBehavior } from "../integration-auth/integration-list";

export const IntegrationMetadataSchema = z.object({
secretPrefix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretPrefix),
secretSuffix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretSuffix),
initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir),
mappingBehavior: z
.nativeEnum(IntegrationMappingBehavior)
.optional()
.describe(INTEGRATION.CREATE.metadata.mappingBehavior),
shouldAutoRedeploy: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldAutoRedeploy),
secretGCPLabel: z
.object({
labelName: z.string(),
labelValue: z.string()
})
.optional()
.describe(INTEGRATION.CREATE.metadata.secretGCPLabel),
secretAWSTag: z
.array(
z.object({
key: z.string(),
value: z.string()
})
)
.optional()
.describe(INTEGRATION.CREATE.metadata.secretAWSTag),
kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId),
shouldDisableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete),
shouldEnableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldEnableDelete)
});
@@ -43,6 +43,7 @@ export const integrationServiceFactory = ({
scope,
actorId,
region,
url,
isActive,
metadata,
secretPath,
@@ -87,6 +88,7 @@ export const integrationServiceFactory = ({
region,
scope,
owner,
url,
appId,
path,
app,

@@ -12,6 +12,7 @@ export type TCreateIntegrationDTO = {
targetService?: string;
targetServiceId?: string;
owner?: string;
url?: string;
path?: string;
region?: string;
scope?: string;
@@ -28,6 +29,7 @@ export type TCreateIntegrationDTO = {
}[];
kmsKeyId?: string;
shouldDisableDelete?: boolean;
shouldEnableDelete?: boolean;
};
} & Omit<TProjectPermission, "projectId">;

@@ -53,6 +55,7 @@ export type TUpdateIntegrationDTO = {
}[];
kmsKeyId?: string;
shouldDisableDelete?: boolean;
shouldEnableDelete?: boolean;
};
} & Omit<TProjectPermission, "projectId">;

10
backend/src/services/kms/kms-dal.ts
Normal file
@@ -0,0 +1,10 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TKmsDALFactory = ReturnType<typeof kmsDALFactory>;

export const kmsDALFactory = (db: TDbClient) => {
const kmsOrm = ormify(db, TableName.KmsKey);
return kmsOrm;
};
10
backend/src/services/kms/kms-root-config-dal.ts
Normal file
@@ -0,0 +1,10 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TKmsRootConfigDALFactory = ReturnType<typeof kmsRootConfigDALFactory>;

export const kmsRootConfigDALFactory = (db: TDbClient) => {
const kmsOrm = ormify(db, TableName.KmsServerRootConfig);
return kmsOrm;
};
126
backend/src/services/kms/kms-service.ts
Normal file
@@ -0,0 +1,126 @@
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { randomSecureBytes } from "@app/lib/crypto";
import { symmetricCipherService, SymmetricEncryption } from "@app/lib/crypto/cipher";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";

import { TKmsDALFactory } from "./kms-dal";
import { TKmsRootConfigDALFactory } from "./kms-root-config-dal";
import { TDecryptWithKmsDTO, TEncryptWithKmsDTO, TGenerateKMSDTO } from "./kms-types";

type TKmsServiceFactoryDep = {
kmsDAL: TKmsDALFactory;
kmsRootConfigDAL: Pick<TKmsRootConfigDALFactory, "findById" | "create">;
keyStore: Pick<TKeyStoreFactory, "acquireLock" | "waitTillReady" | "setItemWithExpiry">;
};

export type TKmsServiceFactory = ReturnType<typeof kmsServiceFactory>;

const KMS_ROOT_CONFIG_UUID = "00000000-0000-0000-0000-000000000000";

const KMS_ROOT_CREATION_WAIT_KEY = "wait_till_ready_kms_root_key";
const KMS_ROOT_CREATION_WAIT_TIME = 10;

// akhilmhdh: Don't edit this value. This is measured for blob concatenation in kms
const KMS_VERSION = "v01";
const KMS_VERSION_BLOB_LENGTH = 3;
export const kmsServiceFactory = ({ kmsDAL, kmsRootConfigDAL, keyStore }: TKmsServiceFactoryDep) => {
let ROOT_ENCRYPTION_KEY = Buffer.alloc(0);

// this is used for symmetric encryption
const generateKmsKey = async ({ scopeId, scopeType, isReserved = true }: TGenerateKMSDTO) => {
const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
const kmsKeyMaterial = randomSecureBytes(32);
const encryptedKeyMaterial = cipher.encrypt(kmsKeyMaterial, ROOT_ENCRYPTION_KEY);

const { encryptedKey, ...doc } = await kmsDAL.create({
version: 1,
encryptedKey: encryptedKeyMaterial,
encryptionAlgorithm: SymmetricEncryption.AES_GCM_256,
isReserved,
orgId: scopeType === "org" ? scopeId : undefined,
projectId: scopeType === "project" ? scopeId : undefined
});
return doc;
};

const encrypt = async ({ kmsId, plainText }: TEncryptWithKmsDTO) => {
const kmsDoc = await kmsDAL.findById(kmsId);
if (!kmsDoc) throw new BadRequestError({ message: "KMS ID not found" });
// akhilmhdh: as more encryption algorithms are added, do a check here on kmsDoc.encryptionAlgorithm
const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);

const kmsKey = cipher.decrypt(kmsDoc.encryptedKey, ROOT_ENCRYPTION_KEY);
const encryptedPlainTextBlob = cipher.encrypt(plainText, kmsKey);

// Buffer#1 encrypted text + Buffer#2 version number
const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
const cipherTextBlob = Buffer.concat([encryptedPlainTextBlob, versionBlob]);
return { cipherTextBlob };
};

const decrypt = async ({ cipherTextBlob: versionedCipherTextBlob, kmsId }: TDecryptWithKmsDTO) => {
const kmsDoc = await kmsDAL.findById(kmsId);
if (!kmsDoc) throw new BadRequestError({ message: "KMS ID not found" });
// akhilmhdh: as more encryption algorithms are added, do a check here on kmsDoc.encryptionAlgorithm
const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
const kmsKey = cipher.decrypt(kmsDoc.encryptedKey, ROOT_ENCRYPTION_KEY);

const cipherTextBlob = versionedCipherTextBlob.subarray(0, -KMS_VERSION_BLOB_LENGTH);
const decryptedBlob = cipher.decrypt(cipherTextBlob, kmsKey);
return decryptedBlob;
};

const startService = async () => {
const appCfg = getConfig();
// This will switch to a seal process and HSM flow in the future
const encryptionKey = appCfg.ENCRYPTION_KEY || appCfg.ROOT_ENCRYPTION_KEY;
// if the root key is used, it is base64 encoded
const isBase64 = Boolean(appCfg.ROOT_ENCRYPTION_KEY);
if (!encryptionKey) throw new Error("Root encryption key not found for KMS service.");
const encryptionKeyBuffer = Buffer.from(encryptionKey, isBase64 ? "base64" : "utf8");

const lock = await keyStore.acquireLock([`KMS_ROOT_CFG_LOCK`], 3000, { retryCount: 3 }).catch(() => null);
if (!lock) {
await keyStore.waitTillReady({
key: KMS_ROOT_CREATION_WAIT_KEY,
keyCheckCb: (val) => val === "true",
waitingCb: () => logger.info("KMS. Waiting for leader to finish creation of KMS Root Key")
});
}

// check if KMS root key was already generated and saved in DB
const kmsRootConfig = await kmsRootConfigDAL.findById(KMS_ROOT_CONFIG_UUID);
const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
if (kmsRootConfig) {
if (lock) await lock.release();
logger.info("KMS: Encrypted ROOT Key found from DB. Decrypting.");
const decryptedRootKey = cipher.decrypt(kmsRootConfig.encryptedRootKey, encryptionKeyBuffer);
// set the flag so that other instance nodes can start
await keyStore.setItemWithExpiry(KMS_ROOT_CREATION_WAIT_KEY, KMS_ROOT_CREATION_WAIT_TIME, "true");
logger.info("KMS: Loading ROOT Key into Memory.");
ROOT_ENCRYPTION_KEY = decryptedRootKey;
return;
}

logger.info("KMS: Generating ROOT Key");
const newRootKey = randomSecureBytes(32);
const encryptedRootKey = cipher.encrypt(newRootKey, encryptionKeyBuffer);
// @ts-expect-error id is kept as fixed for idempotence and to avoid race condition
await kmsRootConfigDAL.create({ encryptedRootKey, id: KMS_ROOT_CONFIG_UUID });

// set the flag so that other instance nodes can start
await keyStore.setItemWithExpiry(KMS_ROOT_CREATION_WAIT_KEY, KMS_ROOT_CREATION_WAIT_TIME, "true");
logger.info("KMS: Saved and loaded ROOT Key into memory");
if (lock) await lock.release();
ROOT_ENCRYPTION_KEY = newRootKey;
};

return {
startService,
generateKmsKey,
encrypt,
decrypt
};
};
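Because every ciphertext is stored with a fixed three-byte version suffix, decrypt can strip it blindly with subarray(0, -KMS_VERSION_BLOB_LENGTH). A minimal standalone sketch of that blob layout, independent of the cipher service:

// Sketch of the versioned-blob layout used by encrypt/decrypt above.
// [ ...ciphertext ][ 3-byte version tag "v01" ] — KMS_VERSION_BLOB_LENGTH must equal
// Buffer.byteLength(KMS_VERSION, "utf8"), or decrypt would slice into the ciphertext.
const appendVersion = (cipherText: Buffer, version = "v01"): Buffer =>
  Buffer.concat([cipherText, Buffer.from(version, "utf8")]);

const splitVersion = (blob: Buffer, versionLength = 3): { cipherText: Buffer; version: string } => ({
  cipherText: blob.subarray(0, -versionLength),
  version: blob.subarray(-versionLength).toString("utf8")
});
// splitVersion(appendVersion(ct)).cipherText is byte-identical to ct — the invariant
// the "Don't edit this value" comment is protecting.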
15
backend/src/services/kms/kms-types.ts
Normal file
@@ -0,0 +1,15 @@
export type TGenerateKMSDTO = {
scopeType: "project" | "org";
scopeId: string;
isReserved?: boolean;
};

export type TEncryptWithKmsDTO = {
kmsId: string;
plainText: Buffer;
};

export type TDecryptWithKmsDTO = {
kmsId: string;
cipherTextBlob: Buffer;
};
@@ -169,6 +169,7 @@ const sqlFindSecretPathByFolderId = (db: Knex, projectId: string, folderIds: str
// this is for root condition
// if the given folder id is the root folder id then the initial path is set as / instead of /root
// if not root folder the path here will be /<folder name>
depth: 1,
path: db.raw(`CONCAT('/', (CASE WHEN "parentId" is NULL THEN '' ELSE ${TableName.SecretFolder}.name END))`),
child: db.raw("NULL::uuid"),
environmentSlug: `${TableName.Environment}.slug`
@@ -185,6 +186,7 @@ const sqlFindSecretPathByFolderId = (db: Knex, projectId: string, folderIds: str
.select({
// then we join this folder's name behind the previous one, as we are going from child to parent
// the root folder check is used to avoid the trailing / and the root name in folders
depth: db.raw("parent.depth + 1"),
path: db.raw(
`CONCAT( CASE
WHEN ${TableName.SecretFolder}."parentId" is NULL THEN ''
@@ -199,7 +201,7 @@ const sqlFindSecretPathByFolderId = (db: Knex, projectId: string, folderIds: str
);
})
.select("*")
.from<TSecretFolders & { child: string | null; path: string; environmentSlug: string }>("parent");
.from<TSecretFolders & { child: string | null; path: string; environmentSlug: string; depth: number }>("parent");

export type TSecretFolderDALFactory = ReturnType<typeof secretFolderDALFactory>;
// never change this. If you do, write a migration for it
@@ -260,12 +262,23 @@ export const secretFolderDALFactory = (db: TDbClient) => {
try {
const folders = await sqlFindSecretPathByFolderId(tx || db, projectId, folderIds);

// travelling all the way from leaf node to root yields the real path
const rootFolders = groupBy(
folders.filter(({ parentId }) => parentId === null),
(i) => i.child || i.id // in the root condition, child and parent will be null
);
const actualFolders = groupBy(
folders.filter(({ depth }) => depth === 1),
(i) => i.id // in the root condition, child and parent will be null
);

return folderIds.map((folderId) => rootFolders[folderId]?.[0]);
return folderIds.map((folderId) => {
if (!rootFolders[folderId]?.[0]) return;

const actualId = rootFolders[folderId][0].child || rootFolders[folderId][0].id;
const folder = actualFolders[actualId][0];
return { ...folder, path: rootFolders[folderId]?.[0].path };
});
} catch (error) {
throw new DatabaseError({ error, name: "Find by secret path" });
}
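To see what the two groupBy passes do, consider a leaf folder b inside a under the root: the recursive CTE emits one row per ancestor, rootFolders keys the terminal (parentId = null) row by the leaf id carried in child, and actualFolders recovers the depth-1 row that holds the leaf folder's own columns. A toy illustration with hypothetical ids:

// Hedged illustration of the two-pass lookup (ids and names are hypothetical).
type CteRow = { id: string; depth: number; parentId: string | null; child: string | null; path: string };
const rows: CteRow[] = [
  { id: "B", depth: 1, parentId: "A", child: null, path: "/b" }, // leaf's own row, partial path
  { id: "A", depth: 2, parentId: "R", child: "B", path: "/a/b" }, // parent row, path grows toward root
  { id: "R", depth: 3, parentId: null, child: "B", path: "/a/b" } // root row carries the full path
];
// rootFolders groups parentId === null rows by (child || id): { B: [row R] }
// actualFolders groups depth === 1 rows by id: { B: [row B] }
// lookup for folderId "B" merges the two: row B's columns + row R's complete path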
@@ -253,7 +253,7 @@ export const secretFolderServiceFactory = ({
const env = await projectEnvDAL.findOne({ projectId, slug: environment });
if (!env) throw new BadRequestError({ message: "Environment not found", name: "Update folder" });
const folder = await folderDAL
.findOne({ envId: env.id, id, parentId: parentFolder.id })
.findOne({ envId: env.id, id, parentId: parentFolder.id, isReserved: false })
// now folder api accepts id based change
// this is for cli backward compatibility; when the cli removes this, we will remove this logic
.catch(() => folderDAL.findOne({ envId: env.id, name: id, parentId: parentFolder.id }));
@@ -276,7 +276,11 @@ export const secretFolderServiceFactory = ({
}

const newFolder = await folderDAL.transaction(async (tx) => {
const [doc] = await folderDAL.update({ envId: env.id, id: folder.id, parentId: parentFolder.id }, { name }, tx);
const [doc] = await folderDAL.update(
{ envId: env.id, id: folder.id, parentId: parentFolder.id, isReserved: false },
{ name },
tx
);
await folderVersionDAL.create(
{
name: doc.name,
@@ -324,7 +328,12 @@ export const secretFolderServiceFactory = ({
if (!parentFolder) throw new BadRequestError({ message: "Secret path not found" });

const [doc] = await folderDAL.delete(
{ envId: env.id, [uuidValidate(idOrName) ? "id" : "name"]: idOrName, parentId: parentFolder.id },
{
envId: env.id,
[uuidValidate(idOrName) ? "id" : "name"]: idOrName,
parentId: parentFolder.id,
isReserved: false
},
tx
);
if (!doc) throw new BadRequestError({ message: "Folder not found", name: "Delete folder" });
@@ -354,7 +363,7 @@ export const secretFolderServiceFactory = ({
const parentFolder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
if (!parentFolder) return [];

const folders = await folderDAL.find({ envId: env.id, parentId: parentFolder.id });
const folders = await folderDAL.find({ envId: env.id, parentId: parentFolder.id, isReserved: false });

return folders;
};
@@ -1,5 +1,9 @@
import { TProjectPermission } from "@app/lib/types";

export enum ReservedFolders {
SecretReplication = "__reserve_replication_"
}

export type TCreateFolderDTO = {
environment: string;
path: string;
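getReplicationFolderName is imported from the secret-replication service and its body is not part of this diff; from the ReservedFolders entry above, it plausibly concatenates the reserved prefix with the import id. A hedged sketch of that assumption:

// ASSUMPTION: the real implementation lives in secret-replication-service and is not
// shown here; this sketch only illustrates how the reserved prefix could be combined
// with an import id to name the hidden replication folder.
const getReplicationFolderNameSketch = (importId: string) =>
  `${ReservedFolders.SecretReplication}${importId}`;
// e.g. "__reserve_replication_7f0a..." — folders flagged isReserved: true are filtered
// out of normal listings elsewhere in this diff, so they stay invisible to users.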
@@ -15,7 +15,7 @@ export const secretFolderVersionDALFactory = (db: TDbClient) => {
try {
const docs = await (tx || db)(TableName.SecretFolderVersion)
.join(TableName.SecretFolder, `${TableName.SecretFolderVersion}.folderId`, `${TableName.SecretFolder}.id`)
.where({ parentId: folderId })
.where({ parentId: folderId, isReserved: false })
.join<TSecretFolderVersions>(
(tx || db)(TableName.SecretFolderVersion)
.groupBy("envId", "folderId")

@@ -20,14 +20,14 @@ export const secretImportDALFactory = (db: TDbClient) => {
return lastPos?.position || 0;
};

const updateAllPosition = async (folderId: string, pos: number, targetPos: number, tx?: Knex) => {
const updateAllPosition = async (folderId: string, pos: number, targetPos: number, positionInc = 1, tx?: Knex) => {
try {
if (targetPos === -1) {
// this means delete
await (tx || db)(TableName.SecretImport)
.where({ folderId })
.andWhere("position", ">", pos)
.decrement("position", 1);
.decrement("position", positionInc);
return;
}

@@ -36,13 +36,13 @@ export const secretImportDALFactory = (db: TDbClient) => {
.where({ folderId })
.where("position", "<=", targetPos)
.andWhere("position", ">", pos)
.decrement("position", 1);
.decrement("position", positionInc);
} else {
await (tx || db)(TableName.SecretImport)
.where({ folderId })
.where("position", ">=", targetPos)
.andWhere("position", "<", pos)
.increment("position", 1);
.increment("position", positionInc);
}
} catch (error) {
throw new DatabaseError({ error, name: "Update position" });
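The new positionInc parameter exists because a replicated import occupies two adjacent position slots: the visible import and the reserved companion created right behind it (position lastPos + 2 in the service code below). Shifting by 1 would interleave other imports between the pair, so callers pass 2 when the moved or deleted import isReplication. A worked example of the delete branch:

// Worked example of the positionInc semantics (positions are illustrative).
// Before: [A@1, R@2, B@3, C@4] where A is a replicated import and R its reserved row.
// Deleting A (and R) calls updateAllPosition(folderId, 1, -1, 2):
// every row with position > 1 shifts down by 2, closing both freed slots.
const shiftAfterDelete = (positions: number[], pos: number, positionInc: number) =>
  positions.map((p) => (p > pos ? p - positionInc : p));
// shiftAfterDelete([3, 4], 1, 2) -> [1, 2]; a plain import would pass positionInc = 1.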

@@ -74,6 +74,7 @@ export const secretImportDALFactory = (db: TDbClient) => {
try {
const docs = await (tx || db)(TableName.SecretImport)
.whereIn("folderId", folderIds)
.where("isReplication", false)
.join(TableName.Environment, `${TableName.SecretImport}.importEnv`, `${TableName.Environment}.id`)
.select(
db.ref("*").withSchema(TableName.SecretImport) as unknown as keyof TSecretImports,

@@ -79,7 +79,7 @@ export const fnSecretsFromImports = async ({
let secretsFromDeeperImports: TSecretImportSecrets[] = [];
if (deeperImports.length) {
secretsFromDeeperImports = await fnSecretsFromImports({
allowedImports: deeperImports,
allowedImports: deeperImports.filter(({ isReplication }) => !isReplication),
secretImportDAL,
folderDAL,
secretDAL,

@@ -1,7 +1,12 @@
import path from "node:path";

import { ForbiddenError, subject } from "@casl/ability";

import { TableName } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { getReplicationFolderName } from "@app/ee/services/secret-replication/secret-replication-service";
import { BadRequestError } from "@app/lib/errors";

import { TProjectDALFactory } from "../project/project-dal";
@@ -16,6 +21,7 @@ import {
TDeleteSecretImportDTO,
TGetSecretImportsDTO,
TGetSecretsFromImportDTO,
TResyncSecretImportReplicationDTO,
TUpdateSecretImportDTO
} from "./secret-import-types";

@@ -26,7 +32,8 @@ type TSecretImportServiceFactoryDep = {
projectDAL: Pick<TProjectDALFactory, "checkProjectUpgradeStatus">;
projectEnvDAL: TProjectEnvDALFactory;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
secretQueueService: Pick<TSecretQueueFactory, "syncSecrets">;
secretQueueService: Pick<TSecretQueueFactory, "syncSecrets" | "replicateSecrets">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};

const ERR_SEC_IMP_NOT_FOUND = new BadRequestError({ message: "Secret import not found" });
@@ -40,7 +47,8 @@ export const secretImportServiceFactory = ({
folderDAL,
projectDAL,
secretDAL,
secretQueueService
secretQueueService,
licenseService
}: TSecretImportServiceFactoryDep) => {
const createImport = async ({
environment,
@@ -50,7 +58,8 @@ export const secretImportServiceFactory = ({
actorOrgId,
actorAuthMethod,
projectId,
path
isReplication,
path: secretPath
}: TCreateSecretImportDTO) => {
const { permission } = await permissionService.getProjectPermission(
actor,
@@ -63,7 +72,7 @@ export const secretImportServiceFactory = ({
// check if user has permission to import into destination path
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
);

// check if user has permission to import from target path
@@ -74,10 +83,18 @@ export const secretImportServiceFactory = ({
secretPath: data.path
})
);
if (isReplication) {
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.secretApproval) {
throw new BadRequestError({
message: "Failed to create secret replication due to plan restriction. Upgrade plan to create replication."
});
}
}

await projectDAL.checkProjectUpgradeStatus(projectId);

const folder = await folderDAL.findBySecretPath(projectId, environment, path);
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Create import" });

const [importEnv] = await projectEnvDAL.findBySlugs(projectId, [data.environment]);
@@ -88,35 +105,62 @@ export const secretImportServiceFactory = ({
const existingImport = await secretImportDAL.findOne({
folderId: sourceFolder.id,
importEnv: folder.environment.id,
importPath: path
importPath: secretPath
});
if (existingImport) throw new BadRequestError({ message: "Cyclic import not allowed" });
}

const secImport = await secretImportDAL.transaction(async (tx) => {
const lastPos = await secretImportDAL.findLastImportPosition(folder.id, tx);
return secretImportDAL.create(
const doc = await secretImportDAL.create(
{
folderId: folder.id,
position: lastPos + 1,
importEnv: importEnv.id,
importPath: data.path
importPath: data.path,
isReplication
},
tx
);
if (doc.isReplication) {
await secretImportDAL.create(
{
folderId: folder.id,
position: lastPos + 2,
isReserved: true,
importEnv: folder.environment.id,
importPath: path.join(secretPath, getReplicationFolderName(doc.id))
},
tx
);
}
return doc;
});

await secretQueueService.syncSecrets({
secretPath: secImport.importPath,
projectId,
environment: importEnv.slug
});
if (secImport.isReplication && sourceFolder) {
await secretQueueService.replicateSecrets({
secretPath: secImport.importPath,
projectId,
environmentSlug: importEnv.slug,
pickOnlyImportIds: [secImport.id],
actorId,
actor
});
} else {
await secretQueueService.syncSecrets({
secretPath,
projectId,
environmentSlug: environment,
actorId,
actor
});
}

return { ...secImport, importEnv };
};

const updateImport = async ({
path,
path: secretPath,
environment,
projectId,
actor,
@@ -135,10 +179,10 @@ export const secretImportServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
);

const folder = await folderDAL.findBySecretPath(projectId, environment, path);
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Update import" });

const secImpDoc = await secretImportDAL.findOne({ folderId: folder.id, id });
@@ -158,7 +202,7 @@ export const secretImportServiceFactory = ({
const existingImport = await secretImportDAL.findOne({
folderId: sourceFolder.id,
importEnv: folder.environment.id,
importPath: path
importPath: secretPath
});
if (existingImport) throw new BadRequestError({ message: "Cyclic import not allowed" });
}
@@ -167,12 +211,31 @@ export const secretImportServiceFactory = ({
const secImp = await secretImportDAL.findOne({ folderId: folder.id, id });
if (!secImp) throw ERR_SEC_IMP_NOT_FOUND;
if (data.position) {
await secretImportDAL.updateAllPosition(folder.id, secImp.position, data.position, tx);
if (secImp.isReplication) {
await secretImportDAL.updateAllPosition(folder.id, secImp.position, data.position, 2, tx);
} else {
await secretImportDAL.updateAllPosition(folder.id, secImp.position, data.position, 1, tx);
}
}
if (secImp.isReplication) {
const replicationFolderPath = path.join(secretPath, getReplicationFolderName(secImp.id));
await secretImportDAL.update(
{
folderId: folder.id,
importEnv: folder.environment.id,
importPath: replicationFolderPath,
isReserved: true
},
{ position: data?.position ? data.position + 1 : undefined },
tx
);
}
const [doc] = await secretImportDAL.update(
{ id, folderId: folder.id },
{
position: data?.position,
// when moving a replicated import, the position is meant for the reserved import
// the replicated one should always sit behind the reserved import
position: data.position,
importEnv: data?.environment ? importedEnv.id : undefined,
importPath: data?.path
},
@@ -184,7 +247,7 @@ export const secretImportServiceFactory = ({
};

const deleteImport = async ({
path,
path: secretPath,
environment,
projectId,
actor,
@@ -202,16 +265,34 @@ export const secretImportServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Delete,
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
);

const folder = await folderDAL.findBySecretPath(projectId, environment, path);
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Delete import" });

const secImport = await secretImportDAL.transaction(async (tx) => {
const [doc] = await secretImportDAL.delete({ folderId: folder.id, id }, tx);
if (!doc) throw new BadRequestError({ name: "Sec imp del", message: "Secret import doc not found" });
await secretImportDAL.updateAllPosition(folder.id, doc.position, -1, tx);
if (doc.isReplication) {
const replicationFolderPath = path.join(secretPath, getReplicationFolderName(doc.id));
const replicatedFolder = await folderDAL.findBySecretPath(projectId, environment, replicationFolderPath, tx);
if (replicatedFolder) {
await secretImportDAL.delete(
{
folderId: folder.id,
importEnv: folder.environment.id,
importPath: replicationFolderPath,
isReserved: true
},
tx
);
await folderDAL.deleteById(replicatedFolder.id, tx);
}
await secretImportDAL.updateAllPosition(folder.id, doc.position, -1, 2, tx);
} else {
await secretImportDAL.updateAllPosition(folder.id, doc.position, -1, 1, tx);
}

const importEnv = await projectEnvDAL.findById(doc.importEnv);
if (!importEnv) throw new BadRequestError({ error: "Imported env not found", name: "Create import" });
@@ -219,16 +300,91 @@ export const secretImportServiceFactory = ({
});

await secretQueueService.syncSecrets({
secretPath: path,
secretPath,
projectId,
environment
environmentSlug: environment,
actor,
actorId
});

return secImport;
};

const resyncSecretImportReplication = async ({
environment,
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
path: secretPath,
id: secretImportDocId
}: TResyncSecretImportReplicationDTO) => {
const { permission, membership } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);

// check if user has permission to import into destination path
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
);

const plan = await licenseService.getPlan(actorOrgId);
if (!plan.secretApproval) {
throw new BadRequestError({
message: "Failed to create secret replication due to plan restriction. Upgrade plan to create replication."
});
}

const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Update import" });

const [secretImportDoc] = await secretImportDAL.find({
folderId: folder.id,
[`${TableName.SecretImport}.id` as "id"]: secretImportDocId
});
if (!secretImportDoc) throw new BadRequestError({ message: "Failed to find secret import" });

if (!secretImportDoc.isReplication) throw new BadRequestError({ message: "Import is not in replication mode" });

// check if user has permission to import from target path
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
subject(ProjectPermissionSub.Secrets, {
environment: secretImportDoc.importEnv.slug,
secretPath: secretImportDoc.importPath
})
);

await projectDAL.checkProjectUpgradeStatus(projectId);

const sourceFolder = await folderDAL.findBySecretPath(
projectId,
secretImportDoc.importEnv.slug,
secretImportDoc.importPath
);

if (membership && sourceFolder) {
await secretQueueService.replicateSecrets({
secretPath: secretImportDoc.importPath,
projectId,
environmentSlug: secretImportDoc.importEnv.slug,
pickOnlyImportIds: [secretImportDoc.id],
actorId,
actor
});
}

return { message: "replication started" };
};

const getImports = async ({
path,
path: secretPath,
environment,
projectId,
actor,
@@ -245,10 +401,10 @@ export const secretImportServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
);

const folder = await folderDAL.findBySecretPath(projectId, environment, path);
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Get imports" });

const secImports = await secretImportDAL.find({ folderId: folder.id });
@@ -256,7 +412,7 @@ export const secretImportServiceFactory = ({
};

const getSecretsFromImports = async ({
path,
path: secretPath,
environment,
projectId,
actor,
@@ -273,13 +429,13 @@ export const secretImportServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
);
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
if (!folder) return [];
// this will already be ordered by position
// so anything based on this order will also be in the right position
const secretImports = await secretImportDAL.find({ folderId: folder.id });
const secretImports = await secretImportDAL.find({ folderId: folder.id, isReplication: false });

const allowedImports = secretImports.filter(({ importEnv, importPath }) =>
permission.can(
@@ -299,6 +455,7 @@ export const secretImportServiceFactory = ({
deleteImport,
getImports,
getSecretsFromImports,
resyncSecretImportReplication,
fnSecretsFromImports
};
};

@@ -7,6 +7,7 @@ export type TCreateSecretImportDTO = {
environment: string;
path: string;
};
isReplication?: boolean;
} & TProjectPermission;

export type TUpdateSecretImportDTO = {
@@ -16,6 +17,12 @@ export type TUpdateSecretImportDTO = {
data: Partial<{ environment: string; path: string; position: number }>;
} & TProjectPermission;

export type TResyncSecretImportReplicationDTO = {
environment: string;
path: string;
id: string;
} & TProjectPermission;

export type TDeleteSecretImportDTO = {
environment: string;
path: string;

@@ -16,17 +16,28 @@ export const secretSharingServiceFactory = ({
secretSharingDAL
}: TSecretSharingServiceFactoryDep) => {
const createSharedSecret = async (createSharedSecretInput: TCreateSharedSecretDTO) => {
const { actor, actorId, orgId, actorAuthMethod, actorOrgId, name, encryptedValue, iv, tag, hashedHex, expiresAt } =
createSharedSecretInput;
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
if (!permission) throw new UnauthorizedError({ name: "User not in org" });
const newSharedSecret = await secretSharingDAL.create({
name,
const {
actor,
actorId,
orgId,
actorAuthMethod,
actorOrgId,
encryptedValue,
iv,
tag,
hashedHex,
expiresAt,
expiresAfterViews
} = createSharedSecretInput;
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
if (!permission) throw new UnauthorizedError({ name: "User not in org" });
const newSharedSecret = await secretSharingDAL.create({
encryptedValue,
iv,
tag,
hashedHex,
expiresAt,
expiresAfterViews,
userId: actorId,
orgId
});
@@ -43,9 +54,16 @@ export const secretSharingServiceFactory = ({

const getActiveSharedSecretByIdAndHashedHex = async (sharedSecretId: string, hashedHex: string) => {
const sharedSecret = await secretSharingDAL.findOne({ id: sharedSecretId, hashedHex });
if (sharedSecret && sharedSecret.expiresAt < new Date()) {
if (sharedSecret.expiresAt && sharedSecret.expiresAt < new Date()) {
return;
}
if (sharedSecret.expiresAfterViews != null && sharedSecret.expiresAfterViews >= 0) {
if (sharedSecret.expiresAfterViews === 0) {
await secretSharingDAL.deleteById(sharedSecretId);
return;
}
await secretSharingDAL.updateById(sharedSecretId, { $decr: { expiresAfterViews: 1 } });
}
return sharedSecret;
};
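The view-count gate reads as: a null expiresAfterViews means time-based expiry only; 0 means the secret is exhausted and is deleted on touch; any positive count is decremented per read. A pure-function sketch of that policy, assuming the same semantics:

// Hedged sketch of the burn-after-reading policy above, as a pure function.
type SharedSecretLike = { expiresAt?: Date | null; expiresAfterViews?: number | null };

const viewOutcome = (s: SharedSecretLike, now = new Date()): "expired" | "last-view-delete" | "serve" => {
  if (s.expiresAt && s.expiresAt < now) return "expired"; // time-based expiry wins
  if (s.expiresAfterViews === 0) return "last-view-delete"; // exhausted: delete, return nothing
  return "serve"; // null/undefined = unlimited views; a positive count is decremented
};
// Note the service decrements *after* serving, so expiresAfterViews: 1 permits exactly
// one successful read; the next attempt deletes the row and returns nothing.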
@@ -9,12 +9,12 @@ export type TSharedSecretPermission = {
};

export type TCreateSharedSecretDTO = {
name: string;
encryptedValue: string;
iv: string;
tag: string;
hashedHex: string;
expiresAt: Date;
expiresAfterViews: number;
} & TSharedSecretPermission;

export type TDeleteSharedSecretDTO = {

@@ -32,6 +32,8 @@ import {
TCreateManySecretsRawFn,
TCreateManySecretsRawFnFactory,
TFnSecretBlindIndexCheck,
TFnSecretBlindIndexCheckV2,
TFnSecretBulkDelete,
TFnSecretBulkInsert,
TFnSecretBulkUpdate,
TUpdateManySecretsRawFn,
@@ -149,7 +151,8 @@ export const recursivelyGetSecretPaths = ({

// Fetch all folders in env once with a single query
const folders = await folderDAL.find({
envId: env.id
envId: env.id,
isReserved: false
});

// Build the folder hierarchy map
@@ -396,6 +399,30 @@ export const decryptSecretRaw = (
};
};

// this is used when the secret blind index already exists
// mainly for secret approval
export const fnSecretBlindIndexCheckV2 = async ({
inputSecrets,
folderId,
userId,
secretDAL
}: TFnSecretBlindIndexCheckV2) => {
if (inputSecrets.some(({ type }) => type === SecretType.Personal) && !userId) {
throw new BadRequestError({ message: "Missing user id for personal secret" });
}
const secrets = await secretDAL.findByBlindIndexes(
folderId,
inputSecrets.map(({ secretBlindIndex, type }) => ({
blindIndex: secretBlindIndex,
type: type || SecretType.Shared
})),
userId
);
const secsGroupedByBlindIndex = groupBy(secrets, (i) => i.secretBlindIndex as string);

return { secsGroupedByBlindIndex, secrets };
};

/**
* Grabs and processes nested secret references from a string
*
@@ -598,6 +625,35 @@ export const fnSecretBulkUpdate = async ({
return newSecrets.map((secret) => ({ ...secret, _id: secret.id }));
};

export const fnSecretBulkDelete = async ({
folderId,
inputSecrets,
tx,
actorId,
secretDAL,
secretQueueService
}: TFnSecretBulkDelete) => {
const deletedSecrets = await secretDAL.deleteMany(
inputSecrets.map(({ type, secretBlindIndex }) => ({
blindIndex: secretBlindIndex,
type
})),
folderId,
actorId,
tx
);

await Promise.allSettled(
deletedSecrets
.filter(({ secretReminderRepeatDays }) => Boolean(secretReminderRepeatDays))
.map(({ id, secretReminderRepeatDays }) =>
secretQueueService.removeSecretReminder({ secretId: id, repeatDays: secretReminderRepeatDays as number })
)
);

return deletedSecrets;
};

export const createManySecretsRawFnFactory = ({
projectDAL,
projectBotDAL,

@@ -28,7 +28,12 @@ import { TWebhookDALFactory } from "../webhook/webhook-dal";
import { fnTriggerWebhook } from "../webhook/webhook-fns";
import { TSecretDALFactory } from "./secret-dal";
import { interpolateSecrets } from "./secret-fns";
import { TCreateSecretReminderDTO, THandleReminderDTO, TRemoveSecretReminderDTO } from "./secret-types";
import {
TCreateSecretReminderDTO,
THandleReminderDTO,
TRemoveSecretReminderDTO,
TSyncSecretsDTO
} from "./secret-types";

export type TSecretQueueFactory = ReturnType<typeof secretQueueFactory>;
type TSecretQueueFactoryDep = {
@@ -59,8 +64,10 @@ export type TGetSecrets = {
};

const MAX_SYNC_SECRET_DEPTH = 5;
const uniqueIntegrationKey = (environment: string, secretPath: string) => `integration-${environment}-${secretPath}`;
export const uniqueSecretQueueKey = (environment: string, secretPath: string) =>
`secret-queue-dedupe-${environment}-${secretPath}`;

type TIntegrationSecret = Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean }>;
export const secretQueueFactory = ({
queueService,
integrationDAL,
@@ -81,68 +88,6 @@ export const secretQueueFactory = ({
secretTagDAL,
secretVersionTagDAL
}: TSecretQueueFactoryDep) => {
const createManySecretsRawFn = createManySecretsRawFnFactory({
projectDAL,
projectBotDAL,
secretDAL,
secretVersionDAL,
secretBlindIndexDAL,
secretTagDAL,
secretVersionTagDAL,
folderDAL
});

const updateManySecretsRawFn = updateManySecretsRawFnFactory({
projectDAL,
projectBotDAL,
secretDAL,
secretVersionDAL,
secretBlindIndexDAL,
secretTagDAL,
secretVersionTagDAL,
folderDAL
});

const syncIntegrations = async (dto: TGetSecrets & { deDupeQueue?: Record<string, boolean> }) => {
await queueService.queue(QueueName.IntegrationSync, QueueJobs.IntegrationSync, dto, {
attempts: 3,
delay: 1000,
backoff: {
type: "exponential",
delay: 3000
},
removeOnComplete: true,
removeOnFail: true
});
};

const syncSecrets = async ({
deDupeQueue = {},
...dto
}: TGetSecrets & { depth?: number; deDupeQueue?: Record<string, boolean> }) => {
const deDuplicationKey = uniqueIntegrationKey(dto.environment, dto.secretPath);
if (deDupeQueue?.[deDuplicationKey]) {
return;
}
// eslint-disable-next-line
deDupeQueue[deDuplicationKey] = true;
logger.info(
`syncSecrets: syncing project secrets where [projectId=${dto.projectId}] [environment=${dto.environment}] [path=${dto.secretPath}]`
);
await queueService.queue(QueueName.SecretWebhook, QueueJobs.SecWebhook, dto, {
jobId: `secret-webhook-${dto.environment}-${dto.projectId}-${dto.secretPath}`,
removeOnFail: true,
removeOnComplete: true,
delay: 1000,
attempts: 5,
backoff: {
type: "exponential",
delay: 3000
}
});
await syncIntegrations({ ...dto, deDupeQueue });
};

const removeSecretReminder = async (dto: TRemoveSecretReminderDTO) => {
const appCfg = getConfig();
await queueService.stopRepeatableJob(
@@ -237,8 +182,27 @@ export const secretQueueFactory = ({
}
}
};
const createManySecretsRawFn = createManySecretsRawFnFactory({
projectDAL,
projectBotDAL,
secretDAL,
secretVersionDAL,
secretBlindIndexDAL,
secretTagDAL,
secretVersionTagDAL,
folderDAL
});

type Content = Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean }>;
const updateManySecretsRawFn = updateManySecretsRawFnFactory({
projectDAL,
projectBotDAL,
secretDAL,
secretVersionDAL,
secretBlindIndexDAL,
secretTagDAL,
secretVersionTagDAL,
folderDAL
});

/**
* Return the secrets in a given [folderId] including secrets from
@@ -251,7 +215,7 @@ export const secretQueueFactory = ({
key: string;
depth: number;
}) => {
let content: Content = {};
let content: TIntegrationSecret = {};
if (dto.depth > MAX_SYNC_SECRET_DEPTH) {
logger.info(
`getIntegrationSecrets: secret depth exceeded for [projectId=${dto.projectId}] [folderId=${dto.folderId}] [depth=${dto.depth}]`
@@ -301,7 +265,7 @@ export const secretQueueFactory = ({
await expandSecrets(content);

// check if current folder has any imports from other folders
const secretImport = await secretImportDAL.find({ folderId: dto.folderId });
const secretImport = await secretImportDAL.find({ folderId: dto.folderId, isReplication: false });

// if no imports then return secrets in the current folder
if (!secretImport) return content;
@@ -333,8 +297,122 @@ export const secretQueueFactory = ({
return content;
};

const syncIntegrations = async (dto: TGetSecrets & { deDupeQueue?: Record<string, boolean> }) => {
await queueService.queue(QueueName.IntegrationSync, QueueJobs.IntegrationSync, dto, {
attempts: 3,
delay: 1000,
backoff: {
type: "exponential",
delay: 3000
},
removeOnComplete: true,
removeOnFail: true
});
};

const replicateSecrets = async (dto: Omit<TSyncSecretsDTO, "deDupeQueue">) => {
await queueService.queue(QueueName.SecretReplication, QueueJobs.SecretReplication, dto, {
attempts: 3,
backoff: {
type: "exponential",
delay: 2000
},
removeOnComplete: true,
removeOnFail: true
});
};

const syncSecrets = async <T extends boolean = false>({
// separate de-dupe queue for integration sync and replication sync
_deDupeQueue: deDupeQueue = {},
_depth: depth = 0,
_deDupeReplicationQueue: deDupeReplicationQueue = {},
...dto
}: TSyncSecretsDTO<T>) => {
logger.info(
`syncSecrets: syncing project secrets where [projectId=${dto.projectId}] [environment=${dto.environmentSlug}] [path=${dto.secretPath}]`
);
const deDuplicationKey = uniqueSecretQueueKey(dto.environmentSlug, dto.secretPath);
if (
!dto.excludeReplication
? deDupeReplicationQueue?.[deDuplicationKey]
: deDupeQueue?.[deDuplicationKey] || depth > MAX_SYNC_SECRET_DEPTH
) {
return;
}
// eslint-disable-next-line
deDupeQueue[deDuplicationKey] = true;
// eslint-disable-next-line
deDupeReplicationQueue[deDuplicationKey] = true;
await queueService.queue(
QueueName.SecretSync,
QueueJobs.SecretSync,
{
...dto,
_deDupeQueue: deDupeQueue,
_deDupeReplicationQueue: deDupeReplicationQueue,
_depth: depth
} as TSyncSecretsDTO,
{
removeOnFail: true,
removeOnComplete: true,
delay: 1000,
attempts: 5,
backoff: {
type: "exponential",
delay: 3000
}
}
);
};
|
||||
|
||||
queueService.start(QueueName.SecretSync, async (job) => {
|
||||
const {
|
||||
_deDupeQueue: deDupeQueue,
|
||||
_deDupeReplicationQueue: deDupeReplicationQueue,
|
||||
_depth: depth,
|
||||
secretPath,
|
||||
projectId,
|
||||
environmentSlug: environment,
|
||||
excludeReplication,
|
||||
actorId,
|
||||
actor
|
||||
} = job.data;
|
||||
|
||||
await queueService.queue(
|
||||
QueueName.SecretWebhook,
|
||||
QueueJobs.SecWebhook,
|
||||
{ environment, projectId, secretPath },
|
||||
{
|
||||
jobId: `secret-webhook-${environment}-${projectId}-${secretPath}`,
|
||||
removeOnFail: { count: 5 },
|
||||
removeOnComplete: true,
|
||||
delay: 1000,
|
||||
attempts: 5,
|
||||
backoff: {
|
||||
type: "exponential",
|
||||
delay: 3000
|
||||
}
|
||||
}
|
||||
);
|
||||
await syncIntegrations({ secretPath, projectId, environment, deDupeQueue });
|
||||
if (!excludeReplication) {
|
||||
await replicateSecrets({
|
||||
_deDupeReplicationQueue: deDupeReplicationQueue,
|
||||
_depth: depth,
|
||||
projectId,
|
||||
secretPath,
|
||||
actorId,
|
||||
actor,
|
||||
excludeReplication,
|
||||
environmentSlug: environment
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
queueService.start(QueueName.IntegrationSync, async (job) => {
|
||||
const { environment, projectId, secretPath, depth = 1, deDupeQueue = {} } = job.data;
|
||||
if (depth > MAX_SYNC_SECRET_DEPTH) return;
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
|
||||
if (!folder) {
|
||||
@@ -348,7 +426,8 @@ export const secretQueueFactory = ({
|
||||
const linkSourceDto = {
|
||||
projectId,
|
||||
importEnv: folder.environment.id,
|
||||
importPath: secretPath
|
||||
importPath: secretPath,
|
||||
isReplication: false
|
||||
};
|
||||
const imports = await secretImportDAL.find(linkSourceDto);
|
||||
|
||||
@@ -356,30 +435,31 @@ export const secretQueueFactory = ({
|
||||
// keep calling sync secret for all the imports made
|
||||
const importedFolderIds = unique(imports, (i) => i.folderId).map(({ folderId }) => folderId);
|
||||
const importedFolders = await folderDAL.findSecretPathByFolderIds(projectId, importedFolderIds);
|
||||
const foldersGroupedById = groupBy(importedFolders, (i) => i.child || i.id);
|
||||
const foldersGroupedById = groupBy(importedFolders.filter(Boolean), (i) => i?.id as string);
|
||||
logger.info(
|
||||
`getIntegrationSecrets: Syncing secret due to link change [jobId=${job.id}] [projectId=${job.data.projectId}] [environment=${job.data.environment}] [secretPath=${job.data.secretPath}] [depth=${depth}]`
|
||||
);
|
||||
await Promise.all(
|
||||
imports
|
||||
.filter(({ folderId }) => Boolean(foldersGroupedById[folderId][0].path))
|
||||
.filter(({ folderId }) => Boolean(foldersGroupedById[folderId][0]?.path as string))
|
||||
// filter out already synced ones
|
||||
.filter(
|
||||
({ folderId }) =>
|
||||
!deDupeQueue[
|
||||
uniqueIntegrationKey(
|
||||
foldersGroupedById[folderId][0].environmentSlug,
|
||||
foldersGroupedById[folderId][0].path
|
||||
uniqueSecretQueueKey(
|
||||
foldersGroupedById[folderId][0]?.environmentSlug as string,
|
||||
foldersGroupedById[folderId][0]?.path as string
|
||||
)
|
||||
]
|
||||
)
|
||||
.map(({ folderId }) =>
|
||||
syncSecrets({
|
||||
depth: depth + 1,
|
||||
projectId,
|
||||
secretPath: foldersGroupedById[folderId][0].path,
|
||||
environment: foldersGroupedById[folderId][0].environmentSlug,
|
||||
deDupeQueue
|
||||
secretPath: foldersGroupedById[folderId][0]?.path as string,
|
||||
environmentSlug: foldersGroupedById[folderId][0]?.environmentSlug as string,
|
||||
_deDupeQueue: deDupeQueue,
|
||||
_depth: depth + 1,
|
||||
excludeReplication: true
|
||||
})
|
||||
)
|
||||
);
|
||||
@@ -393,30 +473,31 @@ export const secretQueueFactory = ({
|
||||
if (secretReferences.length) {
|
||||
const referencedFolderIds = unique(secretReferences, (i) => i.folderId).map(({ folderId }) => folderId);
|
||||
const referencedFolders = await folderDAL.findSecretPathByFolderIds(projectId, referencedFolderIds);
|
||||
const referencedFoldersGroupedById = groupBy(referencedFolders, (i) => i.child || i.id);
|
||||
const referencedFoldersGroupedById = groupBy(referencedFolders.filter(Boolean), (i) => i?.id as string);
|
||||
logger.info(
|
||||
`getIntegrationSecrets: Syncing secret due to reference change [jobId=${job.id}] [projectId=${job.data.projectId}] [environment=${job.data.environment}] [secretPath=${job.data.secretPath}] [depth=${depth}]`
|
||||
);
|
||||
await Promise.all(
|
||||
secretReferences
|
||||
.filter(({ folderId }) => Boolean(referencedFoldersGroupedById[folderId][0].path))
|
||||
.filter(({ folderId }) => Boolean(referencedFoldersGroupedById[folderId][0]?.path))
|
||||
// filter out already synced ones
|
||||
.filter(
|
||||
({ folderId }) =>
|
||||
!deDupeQueue[
|
||||
uniqueIntegrationKey(
|
||||
referencedFoldersGroupedById[folderId][0].environmentSlug,
|
||||
referencedFoldersGroupedById[folderId][0].path
|
||||
uniqueSecretQueueKey(
|
||||
referencedFoldersGroupedById[folderId][0]?.environmentSlug as string,
|
||||
referencedFoldersGroupedById[folderId][0]?.path as string
|
||||
)
|
||||
]
|
||||
)
|
||||
.map(({ folderId }) =>
|
||||
syncSecrets({
|
||||
depth: depth + 1,
|
||||
projectId,
|
||||
secretPath: referencedFoldersGroupedById[folderId][0].path,
|
||||
environment: referencedFoldersGroupedById[folderId][0].environmentSlug,
|
||||
deDupeQueue
|
||||
secretPath: referencedFoldersGroupedById[folderId][0]?.path as string,
|
||||
environmentSlug: referencedFoldersGroupedById[folderId][0]?.environmentSlug as string,
|
||||
_deDupeQueue: deDupeQueue,
|
||||
_depth: depth + 1,
|
||||
excludeReplication: true
|
||||
})
|
||||
)
|
||||
);
|
||||
@@ -546,10 +627,11 @@ export const secretQueueFactory = ({
|
||||
|
||||
return {
|
||||
// depth is internal only field thus no need to make it available outside
|
||||
syncSecrets: (dto: TGetSecrets) => syncSecrets(dto),
|
||||
syncSecrets,
|
||||
syncIntegrations,
|
||||
addSecretReminder,
|
||||
removeSecretReminder,
|
||||
handleSecretReminder
|
||||
handleSecretReminder,
|
||||
replicateSecrets
|
||||
};
|
||||
};
|
||||
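One detail in the hunks above is worth making explicit: the per-(environment, path) de-dupe key plus the `_depth` cap is what stops recursive import syncing from looping. A minimal, self-contained sketch of that termination argument (the import graph and `MAX_DEPTH` value are illustrative; only the key format and the guard shape mirror the code above):

```ts
// Illustrative only: two folders that import each other would recurse forever
// without the visited map; marking each (env, path) key breaks the cycle.
const MAX_DEPTH = 5; // stand-in for MAX_SYNC_SECRET_DEPTH
const deDupeQueue: Record<string, boolean> = {};
const uniqueSecretQueueKey = (env: string, path: string) => `secret-queue-dedupe-${env}-${path}`;

type ImportGraph = Record<string, Array<[string, string]>>;

function sync(env: string, path: string, imports: ImportGraph, depth = 0): void {
  const key = uniqueSecretQueueKey(env, path);
  if (deDupeQueue[key] || depth > MAX_DEPTH) return; // same guard shape as syncSecrets
  deDupeQueue[key] = true;
  for (const [e, p] of imports[`${env}:${path}`] ?? []) sync(e, p, imports, depth + 1);
}

// A two-node import cycle terminates after each side is visited once:
sync("dev", "/a", { "dev:/a": [["dev", "/b"]], "dev:/b": [["dev", "/a"]] });
```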
@@ -35,6 +35,7 @@ import { TSecretDALFactory } from "./secret-dal";
import {
  decryptSecretRaw,
  fnSecretBlindIndexCheck,
  fnSecretBulkDelete,
  fnSecretBulkInsert,
  fnSecretBulkUpdate,
  getAllNestedSecretReferences,
@@ -53,8 +54,6 @@ import {
  TDeleteManySecretRawDTO,
  TDeleteSecretDTO,
  TDeleteSecretRawDTO,
  TFnSecretBlindIndexCheckV2,
  TFnSecretBulkDelete,
  TGetASecretDTO,
  TGetASecretRawDTO,
  TGetSecretsDTO,
@@ -139,53 +138,6 @@ export const secretServiceFactory = ({
    return secretBlindIndex;
  };

  const fnSecretBulkDelete = async ({ folderId, inputSecrets, tx, actorId }: TFnSecretBulkDelete) => {
    const deletedSecrets = await secretDAL.deleteMany(
      inputSecrets.map(({ type, secretBlindIndex }) => ({
        blindIndex: secretBlindIndex,
        type
      })),
      folderId,
      actorId,
      tx
    );

    for (const s of deletedSecrets) {
      if (s.secretReminderRepeatDays) {
        // eslint-disable-next-line no-await-in-loop
        await secretQueueService
          .removeSecretReminder({
            secretId: s.id,
            repeatDays: s.secretReminderRepeatDays
          })
          .catch((err) => {
            logger.error(err, `Failed to delete secret reminder for secret with ID ${s?.id}`);
          });
      }
    }

    return deletedSecrets;
  };

  // this is used when secret blind index already exist
  // mainly for secret approval
  const fnSecretBlindIndexCheckV2 = async ({ inputSecrets, folderId, userId }: TFnSecretBlindIndexCheckV2) => {
    if (inputSecrets.some(({ type }) => type === SecretType.Personal) && !userId) {
      throw new BadRequestError({ message: "Missing user id for personal secret" });
    }
    const secrets = await secretDAL.findByBlindIndexes(
      folderId,
      inputSecrets.map(({ secretBlindIndex, type }) => ({
        blindIndex: secretBlindIndex,
        type: type || SecretType.Shared
      })),
      userId
    );
    const secsGroupedByBlindIndex = groupBy(secrets, (i) => i.secretBlindIndex as string);

    return { secsGroupedByBlindIndex, secrets };
  };

  const createSecret = async ({
    path,
    actor,
@@ -283,8 +235,13 @@ export const secretServiceFactory = ({
    );

    await snapshotService.performSnapshot(folderId);
    await secretQueueService.syncSecrets({ secretPath: path, projectId, environment });
    // TODO(akhilmhdh-pg): licence check, posthog service and snapshot
    await secretQueueService.syncSecrets({
      secretPath: path,
      actorId,
      actor,
      projectId,
      environmentSlug: folder.environment.slug
    });
    return { ...secret[0], environment, workspace: projectId, tags, secretPath: path };
  };

@@ -413,8 +370,13 @@ export const secretServiceFactory = ({
    );

    await snapshotService.performSnapshot(folderId);
    await secretQueueService.syncSecrets({ secretPath: path, projectId, environment });
    // TODO(akhilmhdh-pg): licence check, posthog service and snapshot
    await secretQueueService.syncSecrets({
      actor,
      actorId,
      secretPath: path,
      projectId,
      environmentSlug: folder.environment.slug
    });
    return { ...updatedSecret[0], workspace: projectId, environment, secretPath: path };
  };

@@ -470,6 +432,8 @@ export const secretServiceFactory = ({
      projectId,
      folderId,
      actorId,
      secretDAL,
      secretQueueService,
      inputSecrets: [
        {
          type: inputSecret.type as SecretType,
@@ -481,8 +445,13 @@ export const secretServiceFactory = ({
    );

    await snapshotService.performSnapshot(folderId);
    await secretQueueService.syncSecrets({ secretPath: path, projectId, environment });

    await secretQueueService.syncSecrets({
      actor,
      actorId,
      secretPath: path,
      projectId,
      environmentSlug: folder.environment.slug
    });
    // TODO(akhilmhdh-pg): licence check, posthog service and snapshot
    return { ...deletedSecret[0], _id: deletedSecret[0].id, workspace: projectId, environment, secretPath: path };
  };
@@ -551,7 +520,8 @@ export const secretServiceFactory = ({

    if (includeImports) {
      const secretImports = await secretImportDAL.findByFolderIds(paths.map((p) => p.folderId));
      const allowedImports = secretImports.filter(({ importEnv, importPath }) =>
      const allowedImports = secretImports.filter(({ importEnv, importPath, isReplication }) =>
        !isReplication &&
        // if its service token allow full access over imported one
        actor === ActorType.SERVICE
          ? true
@@ -656,7 +626,7 @@ export const secretServiceFactory = ({
    // then search for imported secrets
    // here we consider the import order also thus starting from bottom
    if (!secret && includeImports) {
      const secretImports = await secretImportDAL.find({ folderId });
      const secretImports = await secretImportDAL.find({ folderId, isReplication: false });
      const allowedImports = secretImports.filter(({ importEnv, importPath }) =>
        // if its service token allow full access over imported one
        actor === ActorType.SERVICE
@@ -767,7 +737,13 @@ export const secretServiceFactory = ({
    );

    await snapshotService.performSnapshot(folderId);
    await secretQueueService.syncSecrets({ secretPath: path, projectId, environment });
    await secretQueueService.syncSecrets({
      actor,
      actorId,
      secretPath: path,
      projectId,
      environmentSlug: folder.environment.slug
    });

    return newSecrets;
  };
@@ -867,7 +843,13 @@ export const secretServiceFactory = ({
    );

    await snapshotService.performSnapshot(folderId);
    await secretQueueService.syncSecrets({ secretPath: path, projectId, environment });
    await secretQueueService.syncSecrets({
      actor,
      actorId,
      secretPath: path,
      projectId,
      environmentSlug: folder.environment.slug
    });

    return secrets;
  };
@@ -917,6 +899,8 @@ export const secretServiceFactory = ({

    const secretsDeleted = await secretDAL.transaction(async (tx) =>
      fnSecretBulkDelete({
        secretDAL,
        secretQueueService,
        inputSecrets: inputSecrets.map(({ type, secretName }) => ({
          secretBlindIndex: keyName2BlindIndex[secretName],
          type
@@ -929,7 +913,13 @@ export const secretServiceFactory = ({
    );

    await snapshotService.performSnapshot(folderId);
    await secretQueueService.syncSecrets({ secretPath: path, projectId, environment });
    await secretQueueService.syncSecrets({
      actor,
      actorId,
      secretPath: path,
      projectId,
      environmentSlug: folder.environment.slug
    });

    return secretsDeleted;
  };
@@ -1109,9 +1099,6 @@ export const secretServiceFactory = ({
      skipMultilineEncoding
    });

    await snapshotService.performSnapshot(secret.folderId);
    await secretQueueService.syncSecrets({ secretPath, projectId, environment });

    return decryptSecretRaw(secret, botKey);
  };

@@ -1150,8 +1137,6 @@ export const secretServiceFactory = ({
    });

    await snapshotService.performSnapshot(secret.folderId);
    await secretQueueService.syncSecrets({ secretPath, projectId, environment });

    return decryptSecretRaw(secret, botKey);
  };

@@ -1181,9 +1166,6 @@ export const secretServiceFactory = ({
      actorAuthMethod
    });

    await snapshotService.performSnapshot(secret.folderId);
    await secretQueueService.syncSecrets({ secretPath, projectId, environment });

    return decryptSecretRaw(secret, botKey);
  };

@@ -1232,9 +1214,6 @@ export const secretServiceFactory = ({
      })
    });

    await snapshotService.performSnapshot(secrets[0].folderId);
    await secretQueueService.syncSecrets({ secretPath, projectId, environment });

    return secrets.map((secret) =>
      decryptSecretRaw({ ...secret, workspace: projectId, environment, secretPath }, botKey)
    );
@@ -1286,9 +1265,6 @@ export const secretServiceFactory = ({
      })
    });

    await snapshotService.performSnapshot(secrets[0].folderId);
    await secretQueueService.syncSecrets({ secretPath, projectId, environment });

    return secrets.map((secret) =>
      decryptSecretRaw({ ...secret, workspace: projectId, environment, secretPath }, botKey)
    );
@@ -1322,9 +1298,6 @@ export const secretServiceFactory = ({
      secrets: inputSecrets.map(({ secretKey }) => ({ secretName: secretKey, type: SecretType.Shared }))
    });

    await snapshotService.performSnapshot(secrets[0].folderId);
    await secretQueueService.syncSecrets({ secretPath, projectId, environment });

    return secrets.map((secret) =>
      decryptSecretRaw({ ...secret, workspace: projectId, environment, secretPath }, botKey)
    );
@@ -1448,7 +1421,12 @@ export const secretServiceFactory = ({
    );

    await snapshotService.performSnapshot(folder.id);
    await secretQueueService.syncSecrets({ secretPath, projectId: project.id, environment });
    await secretQueueService.syncSecrets({
      secretPath,
      projectId: project.id,
      environmentSlug: environment,
      excludeReplication: true
    });

    return {
      ...updatedSecret[0],
@@ -1550,7 +1528,12 @@ export const secretServiceFactory = ({
    );

    await snapshotService.performSnapshot(folder.id);
    await secretQueueService.syncSecrets({ secretPath, projectId: project.id, environment });
    await secretQueueService.syncSecrets({
      secretPath,
      projectId: project.id,
      environmentSlug: environment,
      excludeReplication: true
    });

    return {
      ...updatedSecret[0],
@@ -1624,12 +1607,6 @@ export const secretServiceFactory = ({
    updateManySecretsRaw,
    deleteManySecretsRaw,
    getSecretVersions,
    backfillSecretReferences,
    // external services function
    fnSecretBulkDelete,
    fnSecretBulkUpdate,
    fnSecretBlindIndexCheck,
    fnSecretBulkInsert,
    fnSecretBlindIndexCheckV2
    backfillSecretReferences
  };
};
@@ -11,6 +11,8 @@ import { TSecretBlindIndexDALFactory } from "@app/services/secret-blind-index/se
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { TSecretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal";

import { ActorType } from "../auth/auth-type";

type TPartialSecret = Pick<TSecrets, "id" | "secretReminderRepeatDays" | "secretReminderNote">;

type TPartialInputSecret = Pick<TSecrets, "type" | "secretReminderNote" | "secretReminderRepeatDays" | "id">;
@@ -264,6 +266,10 @@ export type TFnSecretBulkDelete = {
  inputSecrets: Array<{ type: SecretType; secretBlindIndex: string }>;
  actorId: string;
  tx?: Knex;
  secretDAL: Pick<TSecretDALFactory, "deleteMany">;
  secretQueueService: {
    removeSecretReminder: (data: TRemoveSecretReminderDTO) => Promise<void>;
  };
};

export type TFnSecretBlindIndexCheck = {
@@ -277,6 +283,7 @@ export type TFnSecretBlindIndexCheck = {

// when blind index is already present
export type TFnSecretBlindIndexCheckV2 = {
  secretDAL: Pick<TSecretDALFactory, "findByBlindIndexes">;
  folderId: string;
  userId?: string;
  inputSecrets: Array<{ secretBlindIndex: string; type?: SecretType }>;
@@ -363,3 +370,27 @@ export type TUpdateManySecretsRawFn = {
  }[];
  userId?: string;
};

export enum SecretOperations {
  Create = "create",
  Update = "update",
  Delete = "delete"
}

export type TSyncSecretsDTO<T extends boolean = false> = {
  _deDupeQueue?: Record<string, boolean>;
  _deDupeReplicationQueue?: Record<string, boolean>;
  _depth?: number;
  secretPath: string;
  projectId: string;
  environmentSlug: string;
  // cases for just doing sync integration and webhook
  excludeReplication?: T;
} & (T extends true
  ? object
  : {
      actor: ActorType;
      actorId: string;
      // used for import creation to trigger replication
      pickOnlyImportIds?: string[];
    });
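The conditional type above is the interesting part: `excludeReplication` drives whether the actor fields are required at the call site. A small sketch of the inference behavior, assuming the type exactly as defined above:

```ts
// T is inferred from excludeReplication. When it is true, the intersection adds
// nothing extra; otherwise actor/actorId become mandatory.
declare function syncSecrets<T extends boolean = false>(dto: TSyncSecretsDTO<T>): Promise<void>;

// Compiles: replication excluded, so no actor fields are needed.
void syncSecrets({ projectId: "p1", environmentSlug: "dev", secretPath: "/", excludeReplication: true });

// Without excludeReplication, T defaults to false and the compiler demands the actor fields.
void syncSecrets({ projectId: "p1", environmentSlug: "dev", secretPath: "/", actor: ActorType.SERVICE, actorId: "svc-1" });
```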
@@ -89,6 +89,7 @@ export const secretVersionDALFactory = (db: TDbClient) => {

  const findLatestVersionMany = async (folderId: string, secretIds: string[], tx?: Knex) => {
    try {
      if (!secretIds.length) return {};
      const docs: Array<TSecretVersions & { max: number }> = await (tx || db)(TableName.SecretVersion)
        .where("folderId", folderId)
        .whereIn(`${TableName.SecretVersion}.secretId`, secretIds)
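The hunk cuts off before the interesting part of this query, but the `max` field in the row type hints at a latest-version-per-secret lookup. A hedged sketch of one way to express that shape in Knex (column names and the join are assumptions, not taken from this diff):

```ts
// Join each version row against the per-secret MAX(version) so that only the
// newest row per secretId survives; "version" is an assumed column name.
const maxVersions = db(TableName.SecretVersion)
  .select("secretId")
  .max("version as max")
  .where("folderId", folderId)
  .whereIn("secretId", secretIds)
  .groupBy("secretId")
  .as("latest");

const latestDocs = await db(TableName.SecretVersion)
  .where("folderId", folderId)
  .join(maxVersions, function joinLatest() {
    this.on(`${TableName.SecretVersion}.secretId`, "=", "latest.secretId").andOn(
      `${TableName.SecretVersion}.version`,
      "=",
      "latest.max"
    );
  });
```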
@@ -9,14 +9,6 @@ description: "Learn the fundamentals of secret referencing and importing in Infi
Infisical's secret referencing functionality makes it possible to reference the value of a "base" secret when defining the value of another secret.
This means that updating the value of a base secret propagates directly to other secrets whose values depend on the base secret.

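Because references are resolved on the client, a toy expansion routine makes the propagation concrete. The `${...}` grammar below is illustrative only, not Infisical's exact reference syntax:

```ts
// Recursively inline ${NAME} references from a fetched secret map.
// No cycle guard, for brevity; unknown names are left untouched.
const expandReferences = (secrets: Record<string, string>, value: string): string =>
  value.replace(/\$\{([A-Z0-9_]+)\}/g, (match, name: string) =>
    name in secrets ? expandReferences(secrets, secrets[name]) : match
  );

// Updating DB_HOST propagates into DB_URL the next time a client expands it:
expandReferences(
  { DB_HOST: "db.internal", DB_URL: "postgres://user:pw@${DB_HOST}:5432/app" },
  "${DB_URL}"
); // -> "postgres://user:pw@db.internal:5432/app"
```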
<Note>
  Currently, the secret referencing feature is only supported by the
  [Infisical CLI](/cli/overview), [native integrations](/integrations/overview) and [Infisical Agent](/infisical-agent/overview).

  We intend to add support for it to the [Node SDK](https://infisical.com/docs/sdks/languages/node),
  [Python SDK](https://infisical.com/docs/sdks/languages/python), and [Java SDK](https://infisical.com/docs/sdks/languages/java) this quarter.
</Note>

![secret referencing](../../images/platform/secret-references-imports/secret-reference-example.png)

Since secret referencing works by reconstructing values back on the client side, the client, be it a user, service token, or a machine identity, fetching back secrets
@@ -1,37 +1,36 @@
---
title: "Secret Sharing"
sidebarTitle: "Secret Sharing"
description: "Learn how to share time-bound secrets securely with anyone on the internet."
description: "Learn how to share time- and view-count-bound secrets securely with anyone on the internet."
---

Developers frequently need to share secrets with team members, contractors, or other third parties, which can be risky due to potential leaks or misuse.
Infisical offers a secure solution for sharing secrets over the internet in a time-bound manner.
Infisical offers a secure solution for sharing secrets over the internet in a time- and view-count-bound manner.

With its zero-knowledge architecture, secrets shared via Infisical remain unreadable even to Infisical itself.

## Share a Secret

1. Navigate to the **Projects** page.
1. Navigate to the **Organization** page.
2. Click on the **Secret Sharing** tab from the sidebar.

![secret sharing](../../images/platform/secret-sharing/secret-sharing.png)

3. Click on the **Share Secret** button.

<Note>
  Infisical does not have access to the shared secrets. This is a part of our zero
  knowledge architecture.
  Infisical does not have access to the shared secrets. This is a part of our
  zero-knowledge architecture.
</Note>

4. Enter the secret you want to share and set the expiration time. Click on the **Share Secret** button.
3. Click on the **Share Secret** button. Set the secret, its expiration time, and the number of views allowed; the secret expires as soon as either condition is met.

![share secret](../../images/platform/secret-sharing/share-secret.png)
![create new secret](../../images/platform/secret-sharing/create-new-secret.png)

<Note>
  Once set, a secret cannot be changed. This ensures that the secret is not
  tampered with.
</Note>

5. Copy the link and share it with the intended recipient. Anyone with the link can access the secret before its expiration time. Hence, it is recommended to share the link only with the intended recipient.
5. Copy the link and share it with the intended recipient. Anyone with the link can access the secret until it expires. Hence, it is recommended to share the link only with the intended recipient.

![copy secret link](../../images/platform/secret-sharing/copy-secret-link.png)
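The zero-knowledge claim in the page above can be delivered with plain browser primitives. A minimal sketch of the pattern, assuming the key travels in the URL fragment (which browsers never send to the server); this illustrates the idea rather than Infisical's exact scheme:

```ts
// Encrypt in the browser; the server stores only ciphertext + IV alongside the
// expiry/view-count limits, while the AES key lives after the '#' in the link.
async function createShareLink(plaintext: string) {
  const key = await crypto.subtle.generateKey({ name: "AES-GCM", length: 256 }, true, ["encrypt", "decrypt"]);
  const iv = crypto.getRandomValues(new Uint8Array(12));
  const ciphertext = await crypto.subtle.encrypt({ name: "AES-GCM", iv }, key, new TextEncoder().encode(plaintext));
  const rawKey = new Uint8Array(await crypto.subtle.exportKey("raw", key));
  const fragment = btoa(String.fromCharCode(...rawKey));
  // "share.example.com/s/abc123" is a placeholder for the server-issued identifier.
  return { ciphertext, iv, link: `https://share.example.com/s/abc123#${fragment}` };
}
```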
BIN
docs/images/integrations/rundeck/integrations-rundeck-auth.png
Normal file
After Width: | Height: | Size: 521 KiB |
BIN
docs/images/integrations/rundeck/integrations-rundeck-create.png
Normal file
After Width: | Height: | Size: 529 KiB |
BIN
docs/images/integrations/rundeck/integrations-rundeck-token.png
Normal file
After Width: | Height: | Size: 308 KiB |
BIN
docs/images/integrations/rundeck/integrations-rundeck.png
Normal file
After Width: | Height: | Size: 707 KiB |
BIN
docs/images/platform/secret-sharing/create-new-secret.png
Normal file
After Width: | Height: | Size: 106 KiB |
Before Width: | Height: | Size: 45 KiB |
Before Width: | Height: | Size: 157 KiB After Width: | Height: | Size: 542 KiB |
Before Width: | Height: | Size: 221 KiB After Width: | Height: | Size: 467 KiB |
39
docs/integrations/cicd/rundeck.mdx
Normal file
@@ -0,0 +1,39 @@
---
title: "Rundeck"
description: "How to sync secrets from Infisical to Rundeck"
---

Prerequisites:

- Set up and add envars to [Infisical Cloud](https://app.infisical.com)

<Steps>
  <Step title="Authorize Infisical for Rundeck">
    Obtain a User API Token in the Profile settings of Rundeck.

    ![integrations rundeck token](../../images/integrations/rundeck/integrations-rundeck-token.png)

    Navigate to your project's integrations tab in Infisical.

    ![integrations](../../images/integrations.png)

    Press on the Rundeck tile and input your Rundeck instance Base URL and User API Token to grant Infisical access to manage Rundeck keys.

    ![integrations rundeck auth](../../images/integrations/rundeck/integrations-rundeck-auth.png)

    <Info>
      If this is your project's first cloud integration, then you'll have to grant
      Infisical access to your project's environment variables. Although this step
      breaks E2EE, it's necessary for Infisical to sync the environment variables to
      the cloud platform.
    </Info>

  </Step>
  <Step title="Start integration">
    Select which Infisical environment secrets you want to sync to a Rundeck Key Storage Path and press **Create Integration** to start syncing secrets to Rundeck.

    ![integrations rundeck create](../../images/integrations/rundeck/integrations-rundeck-create.png)
    ![integrations rundeck](../../images/integrations/rundeck/integrations-rundeck.png)

  </Step>
</Steps>
@@ -28,6 +28,7 @@ Prerequisites:
      "Action": [
        "ssm:PutParameter",
        "ssm:DeleteParameter",
        "ssm:GetParameters",
        "ssm:GetParametersByPath",
        "ssm:DeleteParameters",
        "ssm:AddTagsToResource", // if you need to add tags to secrets
@@ -26,14 +26,14 @@ Missing an integration? [Throw in a request](https://github.com/Infisical/infisi
| [Supabase](/integrations/cloud/supabase) | Cloud | Available |
| [Northflank](/integrations/cloud/northflank) | Cloud | Available |
| [Cloudflare Pages](/integrations/cloud/cloudflare-pages) | Cloud | Available |
| [Cloudflare Workers](/integrations/cloud/cloudflare-workers) | Cloud | Available |
| [Cloudflare Workers](/integrations/cloud/cloudflare-workers) | Cloud | Available |
| [Checkly](/integrations/cloud/checkly) | Cloud | Available |
| [Qovery](/integrations/cloud/qovery) | Cloud | Available |
| [Qovery](/integrations/cloud/qovery) | Cloud | Available |
| [HashiCorp Vault](/integrations/cloud/hashicorp-vault) | Cloud | Available |
| [AWS Parameter Store](/integrations/cloud/aws-parameter-store) | Cloud | Available |
| [AWS Secrets Manager](/integrations/cloud/aws-secret-manager) | Cloud | Available |
| [AWS Secrets Manager](/integrations/cloud/aws-secret-manager) | Cloud | Available |
| [Azure Key Vault](/integrations/cloud/azure-key-vault) | Cloud | Available |
| [GCP Secret Manager](/integrations/cloud/gcp-secret-manager) | Cloud | Available |
| [GCP Secret Manager](/integrations/cloud/gcp-secret-manager) | Cloud | Available |
| [Windmill](/integrations/cloud/windmill) | Cloud | Available |
| [BitBucket](/integrations/cicd/bitbucket) | CI/CD | Available |
| [Codefresh](/integrations/cicd/codefresh) | CI/CD | Available |
@@ -41,6 +41,7 @@ Missing an integration? [Throw in a request](https://github.com/Infisical/infisi
| [GitLab](/integrations/cicd/gitlab) | CI/CD | Available |
| [CircleCI](/integrations/cicd/circleci) | CI/CD | Available |
| [Travis CI](/integrations/cicd/travisci) | CI/CD | Available |
| [Rundeck](/integrations/cicd/rundeck) | CI/CD | Available |
| [React](/integrations/frameworks/react) | Framework | Available |
| [Vue](/integrations/frameworks/vue) | Framework | Available |
| [Express](/integrations/frameworks/express) | Framework | Available |
@@ -32,10 +32,7 @@
    "thumbsRating": true
  },
  "api": {
    "baseUrl": [
      "https://app.infisical.com",
      "http://localhost:8080"
    ]
    "baseUrl": ["https://app.infisical.com", "http://localhost:8080"]
  },
  "topbarLinks": [
    {
@@ -76,9 +73,7 @@
      "documentation/getting-started/introduction",
      {
        "group": "Quickstart",
        "pages": [
          "documentation/guides/local-development"
        ]
        "pages": ["documentation/guides/local-development"]
      },
      {
        "group": "Guides",
@@ -124,7 +119,9 @@
        "documentation/platform/access-controls/temporary-access",
        "documentation/platform/access-controls/access-requests",
        "documentation/platform/pr-workflows",
        "documentation/platform/audit-logs"
        "documentation/platform/audit-logs",
        "documentation/platform/audit-log-streams",
        "documentation/platform/groups"
      ]
    },
    {
@@ -137,7 +134,6 @@
        "documentation/platform/secret-rotation/aws-iam"
      ]
    },
    "documentation/platform/secret-sharing",
    {
      "group": "Dynamic Secrets",
      "pages": [
@@ -149,8 +145,7 @@
        "documentation/platform/dynamic-secrets/aws-iam"
      ]
    },
    "documentation/platform/groups",
    "documentation/platform/audit-log-streams"
    "documentation/platform/secret-sharing"
  ]
},
{
@@ -221,9 +216,7 @@
  },
  {
    "group": "Reference architectures",
    "pages": [
      "self-hosting/reference-architectures/aws-ecs"
    ]
    "pages": ["self-hosting/reference-architectures/aws-ecs"]
  },
  "self-hosting/ee",
  "self-hosting/faq"
@@ -343,6 +336,7 @@
  "pages": [
    "integrations/cicd/circleci",
    "integrations/cicd/travisci",
    "integrations/cicd/rundeck",
    "integrations/cicd/codefresh",
    "integrations/cloud/checkly"
  ]
@@ -379,15 +373,11 @@
  },
  {
    "group": "Build Tool Integrations",
    "pages": [
      "integrations/build-tools/gradle"
    ]
    "pages": ["integrations/build-tools/gradle"]
  },
  {
    "group": "",
    "pages": [
      "sdks/overview"
    ]
    "pages": ["sdks/overview"]
  },
  {
    "group": "SDK's",
@@ -405,9 +395,7 @@
      "api-reference/overview/authentication",
      {
        "group": "Examples",
        "pages": [
          "api-reference/overview/examples/integration"
        ]
        "pages": ["api-reference/overview/examples/integration"]
      }
    ]
  },
@@ -563,15 +551,11 @@
  },
  {
    "group": "Service Tokens",
    "pages": [
      "api-reference/endpoints/service-tokens/get"
    ]
    "pages": ["api-reference/endpoints/service-tokens/get"]
  },
  {
    "group": "Audit Logs",
    "pages": [
      "api-reference/endpoints/audit-logs/export-audit-log"
    ]
    "pages": ["api-reference/endpoints/audit-logs/export-audit-log"]
  }
]
},
@@ -587,9 +571,7 @@
  },
  {
    "group": "",
    "pages": [
      "changelog/overview"
    ]
    "pages": ["changelog/overview"]
  },
  {
    "group": "Contributing",
@@ -613,9 +595,7 @@
  },
  {
    "group": "Contributing to SDK",
    "pages": [
      "contributing/sdk/developing"
    ]
    "pages": ["contributing/sdk/developing"]
  }
]
}
@@ -33,7 +33,7 @@ description: "Learn how to use Helm chart to install Infisical on your Kubernete
    pullPolicy: IfNotPresent
```
<Warning>
  Do you not use the latest docker image tag in production deployments as they can introduce unexpected changes
  Do not use the latest Docker image tag in production deployments, as it can introduce unexpected changes.
</Warning>
</Step>
@@ -32,7 +32,8 @@ const integrationSlugNameMapping: Mapping = {
  northflank: "Northflank",
  windmill: "Windmill",
  "gcp-secret-manager": "GCP Secret Manager",
  "hasura-cloud": "Hasura Cloud"
  "hasura-cloud": "Hasura Cloud",
  rundeck: "Rundeck"
};

const envMapping: Mapping = {
1
frontend/public/images/integrations/Rundeck.svg
Normal file
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="64" height="64" viewBox="45.359 114.637 60.122 58.576"><path d="M46.83 113.864l7.608 12.01H92.5l-7.543-12.01zm15.26 23.98l3.684 5.754-3.968 6.32h38.4l3.815-6.017-3.815-5.907h-38.04zm-7.826 24.13l-7.455 11.77v.24h38.148l7.564-12.012z" fill="#f91629"/></svg>
After Width: | Height: | Size: 303 B |
Before Width: | Height: | Size: 1.3 MiB After Width: | Height: | Size: 368 KiB |
Before Width: | Height: | Size: 510 KiB After Width: | Height: | Size: 212 KiB |
@@ -62,6 +62,7 @@ export const Select = forwardRef<HTMLButtonElement, SelectProps>(
        <SelectPrimitive.Content
          className={twMerge(
            "relative top-1 z-[100] overflow-hidden rounded-md border border-mineshaft-600 bg-mineshaft-900 font-inter text-bunker-100 shadow-md",
            position === "popper" && "max-h-72",
            dropdownContainerClassName
          )}
          position={position}
@@ -113,7 +114,7 @@ export const SelectItem = forwardRef<HTMLDivElement, SelectItemProps>(
        outline-none transition-all hover:bg-mineshaft-500 data-[highlighted]:bg-mineshaft-700/80`,
        isSelected && "bg-primary",
        isDisabled &&
          "cursor-not-allowed text-gray-600 hover:bg-transparent hover:text-mineshaft-600",
        "cursor-not-allowed text-gray-600 hover:bg-transparent hover:text-mineshaft-600",
        className
      )}
      ref={forwardedRef}
5
frontend/src/helpers/string.ts
Normal file
@@ -0,0 +1,5 @@
export const removeTrailingSlash = (str: string) => {
  if (str === "/") return str;

  return str.endsWith("/") ? str.slice(0, -1) : str;
};
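A quick usage note for the new helper; normalizing the Rundeck Base URL input added elsewhere in this PR is its likely consumer, though that is inferred rather than stated:

```ts
import { removeTrailingSlash } from "@app/helpers/string"; // assumed alias path

removeTrailingSlash("https://rundeck.example.com/"); // -> "https://rundeck.example.com"
removeTrailingSlash("/"); // -> "/" (the bare root is intentionally preserved)
```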
@@ -7,6 +7,7 @@ export type IntegrationAuth = {
  updatedAt: string;
  algorithm: string;
  keyEncoding: string;
  url?: string;
  teamId?: string;
};
@@ -41,6 +41,7 @@ export const useCreateIntegration = () => {
      owner,
      path,
      region,
      url,
      scope,
      secretPath,
      metadata
@@ -56,6 +57,7 @@ export const useCreateIntegration = () => {
      targetService?: string;
      targetServiceId?: string;
      owner?: string;
      url?: string;
      path?: string;
      region?: string;
      scope?: string;
@@ -71,6 +73,7 @@ export const useCreateIntegration = () => {
      }[];
      kmsKeyId?: string;
      shouldDisableDelete?: boolean;
      shouldEnableDelete?: boolean;
    };
  }) => {
    const {
@@ -85,6 +88,7 @@ export const useCreateIntegration = () => {
      targetEnvironmentId,
      targetService,
      targetServiceId,
      url,
      owner,
      path,
      scope,
@@ -220,6 +220,7 @@ export const useGetSecretApprovalRequestCount = ({
}) =>
  useQuery({
    queryKey: secretApprovalRequestKeys.count({ workspaceId }),
    refetchInterval: 5000,
    queryFn: () => fetchSecretApprovalRequestCount({ workspaceId }),
    enabled: Boolean(workspaceId) && (options?.enabled ?? true)
  });
@@ -44,6 +44,7 @@ export type TSecretApprovalSecChange = {

export type TSecretApprovalRequest<J extends unknown = EncryptedSecret> = {
  id: string;
  isReplicated?: boolean;
  slug: string;
  createdAt: string;
  committerId: string;
@@ -1,3 +1,7 @@
export enum ReservedFolders {
  SecretReplication = "__reserve_replication_"
}

export type TSecretFolder = {
  id: string;
  name: string;
@@ -1,4 +1,9 @@
export { useCreateSecretImport, useDeleteSecretImport, useUpdateSecretImport } from "./mutation";
export {
  useCreateSecretImport,
  useDeleteSecretImport,
  useResyncSecretReplication,
  useUpdateSecretImport
} from "./mutation";
export {
  useGetImportedFoldersByEnv,
  useGetImportedSecretsAllEnvs,
@@ -3,18 +3,24 @@ import { useMutation, useQueryClient } from "@tanstack/react-query";
import { apiRequest } from "@app/config/request";

import { secretImportKeys } from "./queries";
import { TCreateSecretImportDTO, TDeleteSecretImportDTO, TUpdateSecretImportDTO } from "./types";
import {
  TCreateSecretImportDTO,
  TDeleteSecretImportDTO,
  TResyncSecretReplicationDTO,
  TUpdateSecretImportDTO
} from "./types";

export const useCreateSecretImport = () => {
  const queryClient = useQueryClient();

  return useMutation<{}, {}, TCreateSecretImportDTO>({
    mutationFn: async ({ import: secretImport, environment, projectId, path }) => {
    mutationFn: async ({ import: secretImport, environment, isReplication, projectId, path }) => {
      const { data } = await apiRequest.post("/api/v1/secret-imports", {
        import: secretImport,
        environment,
        workspaceId: projectId,
        path
        path,
        isReplication
      });
      return data;
    },
@@ -53,6 +59,19 @@ export const useUpdateSecretImport = () => {
  });
};

export const useResyncSecretReplication = () => {
  return useMutation<{}, {}, TResyncSecretReplicationDTO>({
    mutationFn: async ({ environment, projectId, path, id }) => {
      const { data } = await apiRequest.post(`/api/v1/secret-imports/${id}/replication-resync`, {
        environment,
        path,
        workspaceId: projectId
      });
      return data;
    }
  });
};

export const useDeleteSecretImport = () => {
  const queryClient = useQueryClient();

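A hypothetical call site for the new `useResyncSecretReplication` hook (component name, props, and import path are illustrative, not part of this PR):

```tsx
import { useResyncSecretReplication } from "@app/hooks/api/secretImports"; // assumed path

const ResyncReplicationButton = (props: { id: string; projectId: string; environment: string }) => {
  const { mutateAsync, isLoading } = useResyncSecretReplication();
  return (
    <button
      disabled={isLoading}
      onClick={() =>
        void mutateAsync({ id: props.id, projectId: props.projectId, environment: props.environment, path: "/" })
      }
    >
      Resync replication
    </button>
  );
};
```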
@@ -10,6 +10,11 @@ export type TSecretImport = {
  position: string;
  createdAt: string;
  updatedAt: string;
  isReserved?: boolean;
  isReplication?: boolean;
  isReplicationSuccess?: boolean;
  replicationStatus?: string;
  lastReplicated?: string;
};

export type TGetImportedFoldersByEnvDTO = {
@@ -60,6 +65,7 @@ export type TCreateSecretImportDTO = {
    environment: string;
    path: string;
  };
  isReplication?: boolean;
};

export type TUpdateSecretImportDTO = {
@@ -74,6 +80,13 @@ export type TUpdateSecretImportDTO = {
  }>;
};

export type TResyncSecretReplicationDTO = {
  id: string;
  projectId: string;
  environment: string;
  path?: string;
};

export type TDeleteSecretImportDTO = {
  id: string;
  projectId: string;