Compare commits


1 Commit

Author: github-actions
SHA1: 20720a2bca
Message: chore: renamed new migration files to latest timestamp (gh-action)
Date: 2024-05-31 15:34:27 +00:00
172 changed files with 841 additions and 4818 deletions

View File

@@ -63,7 +63,3 @@ CLIENT_SECRET_GITHUB_LOGIN=
CLIENT_ID_GITLAB_LOGIN=
CLIENT_SECRET_GITLAB_LOGIN=
CAPTCHA_SECRET=
NEXT_PUBLIC_CAPTCHA_SITE_KEY=

View File

@@ -40,7 +40,6 @@ jobs:
REDIS_URL: redis://172.17.0.1:6379
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
JWT_AUTH_SECRET: something-random
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- uses: actions/setup-go@v5
with:
go-version: '1.21.5'
@@ -74,4 +73,4 @@ jobs:
run: |
docker-compose -f "docker-compose.dev.yml" down
docker stop infisical-api
docker remove infisical-api
docker remove infisical-api

View File

@@ -22,9 +22,6 @@ jobs:
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
goreleaser:
runs-on: ubuntu-20.04
@@ -59,7 +56,7 @@ jobs:
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: v1.26.2-pro
version: latest
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}

View File

@@ -20,12 +20,7 @@ on:
required: true
CLI_TESTS_ENV_SLUG:
required: true
CLI_TESTS_USER_EMAIL:
required: true
CLI_TESTS_USER_PASSWORD:
required: true
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE:
required: true
jobs:
test:
defaults:
@@ -48,8 +43,5 @@ jobs:
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
run: go test -v -count=1 ./test

View File

@@ -1,7 +1,7 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key
ARG SAML_ORG_SLUG=saml-org-slug-default
FROM node:20-alpine AS base
@@ -36,8 +36,8 @@ ARG INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ARG SAML_ORG_SLUG
ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG
# Build
RUN npm run build
@@ -113,9 +113,9 @@ ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
ARG INTERCOM_ID=intercom-id
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
ARG SAML_ORG_SLUG
ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG \
BAKED_NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG
WORKDIR /

View File

@@ -85,13 +85,13 @@ To set up and run Infisical locally, make sure you have Git and Docker installed
Linux/macOS:
```console
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker compose -f docker-compose.prod.yml up
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker-compose -f docker-compose.prod.yml up
```
Windows Command Prompt:
```console
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker compose -f docker-compose.prod.yml up
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker-compose -f docker-compose.prod.yml up
```
Create an account at `http://localhost:80`
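
As a quick sanity check after the quoted quick-start (a minimal sketch, assuming Docker Compose v2 and the default ports from `docker-compose.prod.yml`), confirm the stack is up before creating the account:

```console
# list the services started from docker-compose.prod.yml and their status
docker compose -f docker-compose.prod.yml ps
# the web UI should respond on port 80 once the frontend container is ready
curl -I http://localhost:80
```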

View File

@@ -1,5 +1,4 @@
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { Lock } from "@app/lib/red-lock";
export const mockKeyStore = (): TKeyStoreFactory => {
const store: Record<string, string | number | Buffer> = {};
@@ -26,12 +25,6 @@ export const mockKeyStore = (): TKeyStoreFactory => {
},
incrementBy: async () => {
return 1;
},
acquireLock: () => {
return Promise.resolve({
release: () => {}
}) as Promise<Lock>;
},
waitTillReady: async () => {}
}
};
};

View File

@@ -51,7 +51,7 @@
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"ms": "^2.1.3",
"mysql2": "^3.9.8",
"mysql2": "^3.9.7",
"nanoid": "^5.0.4",
"nodemailer": "^6.9.9",
"ora": "^7.0.1",
@@ -10290,10 +10290,9 @@
}
},
"node_modules/mysql2": {
"version": "3.9.8",
"resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.9.8.tgz",
"integrity": "sha512-+5JKNjPuks1FNMoy9TYpl77f+5frbTklz7eb3XDwbpsERRLEeXiW2PDEkakYF50UuKU2qwfGnyXpKYvukv8mGA==",
"license": "MIT",
"version": "3.9.7",
"resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.9.7.tgz",
"integrity": "sha512-KnJT8vYRcNAZv73uf9zpXqNbvBG7DJrs+1nACsjZP1HMJ1TgXEy8wnNilXAn/5i57JizXKtrUtwDB7HxT9DDpw==",
"dependencies": {
"denque": "^2.1.0",
"generate-function": "^2.3.1",

View File

@@ -112,7 +112,7 @@
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"ms": "^2.1.3",
"mysql2": "^3.9.8",
"mysql2": "^3.9.7",
"nanoid": "^5.0.4",
"nodemailer": "^6.9.9",
"ora": "^7.0.1",

View File

@@ -35,8 +35,6 @@ const getZodPrimitiveType = (type: string) => {
return "z.coerce.number()";
case "text":
return "z.string()";
case "bytea":
return "zodBuffer";
default:
throw new Error(`Invalid type: ${type}`);
}
@@ -98,15 +96,10 @@ const main = async () => {
const columnNames = Object.keys(columns);
let schema = "";
const zodImportSet = new Set<string>();
for (let colNum = 0; colNum < columnNames.length; colNum++) {
const columnName = columnNames[colNum];
const colInfo = columns[columnName];
let ztype = getZodPrimitiveType(colInfo.type);
if (["zodBuffer"].includes(ztype)) {
zodImportSet.add(ztype);
}
// don't put optional on id
if (colInfo.defaultValue && columnName !== "id") {
const { defaultValue } = colInfo;
@@ -128,8 +121,6 @@ const main = async () => {
.split("_")
.reduce((prev, curr) => prev + `${curr.at(0)?.toUpperCase()}${curr.slice(1).toLowerCase()}`, "");
const zodImports = Array.from(zodImportSet);
// the insert and update are changed to zod input type to use default cases
writeFileSync(
path.join(__dirname, "../src/db/schemas", `${dashcase}.ts`),
@@ -140,8 +131,6 @@ const main = async () => {
import { z } from "zod";
${zodImports.length ? `import { ${zodImports.join(",")} } from \"@app/lib/zod\";` : ""}
import { TImmutableDBKeys } from "./models";
export const ${pascalCase}Schema = z.object({${schema}});

View File

@@ -52,7 +52,6 @@ import { TSecretServiceFactory } from "@app/services/secret/secret-service";
import { TSecretBlindIndexServiceFactory } from "@app/services/secret-blind-index/secret-blind-index-service";
import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-folder-service";
import { TSecretImportServiceFactory } from "@app/services/secret-import/secret-import-service";
import { TSecretReplicationServiceFactory } from "@app/services/secret-replication/secret-replication-service";
import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
@@ -109,7 +108,6 @@ declare module "fastify" {
projectKey: TProjectKeyServiceFactory;
projectRole: TProjectRoleServiceFactory;
secret: TSecretServiceFactory;
secretReplication: TSecretReplicationServiceFactory;
secretTag: TSecretTagServiceFactory;
secretImport: TSecretImportServiceFactory;
projectBot: TProjectBotServiceFactory;

View File

@@ -98,15 +98,6 @@ import {
TIntegrations,
TIntegrationsInsert,
TIntegrationsUpdate,
TKmsKeys,
TKmsKeysInsert,
TKmsKeysUpdate,
TKmsKeyVersions,
TKmsKeyVersionsInsert,
TKmsKeyVersionsUpdate,
TKmsRootConfig,
TKmsRootConfigInsert,
TKmsRootConfigUpdate,
TLdapConfigs,
TLdapConfigsInsert,
TLdapConfigsUpdate,
@@ -185,9 +176,6 @@ import {
TSecretImports,
TSecretImportsInsert,
TSecretImportsUpdate,
TSecretReferences,
TSecretReferencesInsert,
TSecretReferencesUpdate,
TSecretRotationOutputs,
TSecretRotationOutputsInsert,
TSecretRotationOutputsUpdate,
@@ -252,6 +240,7 @@ import {
TWebhooksInsert,
TWebhooksUpdate
} from "@app/db/schemas";
import { TSecretReferences, TSecretReferencesInsert, TSecretReferencesUpdate } from "@app/db/schemas/secret-references";
declare module "knex/types/tables" {
interface Tables {
@@ -525,13 +514,5 @@ declare module "knex/types/tables" {
TSecretVersionTagJunctionInsert,
TSecretVersionTagJunctionUpdate
>;
// KMS service
[TableName.KmsServerRootConfig]: Knex.CompositeTableType<
TKmsRootConfig,
TKmsRootConfigInsert,
TKmsRootConfigUpdate
>;
[TableName.KmsKey]: Knex.CompositeTableType<TKmsKeys, TKmsKeysInsert, TKmsKeysUpdate>;
[TableName.KmsKeyVersion]: Knex.CompositeTableType<TKmsKeyVersions, TKmsKeyVersionsInsert, TKmsKeyVersionsUpdate>;
}
}

View File

@@ -1,85 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesSecretImportIsReplicationExist = await knex.schema.hasColumn(TableName.SecretImport, "isReplication");
const doesSecretImportIsReplicationSuccessExist = await knex.schema.hasColumn(
TableName.SecretImport,
"isReplicationSuccess"
);
const doesSecretImportReplicationStatusExist = await knex.schema.hasColumn(
TableName.SecretImport,
"replicationStatus"
);
const doesSecretImportLastReplicatedExist = await knex.schema.hasColumn(TableName.SecretImport, "lastReplicated");
const doesSecretImportIsReservedExist = await knex.schema.hasColumn(TableName.SecretImport, "isReserved");
if (await knex.schema.hasTable(TableName.SecretImport)) {
await knex.schema.alterTable(TableName.SecretImport, (t) => {
if (!doesSecretImportIsReplicationExist) t.boolean("isReplication").defaultTo(false);
if (!doesSecretImportIsReplicationSuccessExist) t.boolean("isReplicationSuccess").nullable();
if (!doesSecretImportReplicationStatusExist) t.text("replicationStatus").nullable();
if (!doesSecretImportLastReplicatedExist) t.datetime("lastReplicated").nullable();
if (!doesSecretImportIsReservedExist) t.boolean("isReserved").defaultTo(false);
});
}
const doesSecretFolderReservedExist = await knex.schema.hasColumn(TableName.SecretFolder, "isReserved");
if (await knex.schema.hasTable(TableName.SecretFolder)) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
if (!doesSecretFolderReservedExist) t.boolean("isReserved").defaultTo(false);
});
}
const doesSecretApprovalRequestIsReplicatedExist = await knex.schema.hasColumn(
TableName.SecretApprovalRequest,
"isReplicated"
);
if (await knex.schema.hasTable(TableName.SecretApprovalRequest)) {
await knex.schema.alterTable(TableName.SecretApprovalRequest, (t) => {
if (!doesSecretApprovalRequestIsReplicatedExist) t.boolean("isReplicated");
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesSecretImportIsReplicationExist = await knex.schema.hasColumn(TableName.SecretImport, "isReplication");
const doesSecretImportIsReplicationSuccessExist = await knex.schema.hasColumn(
TableName.SecretImport,
"isReplicationSuccess"
);
const doesSecretImportReplicationStatusExist = await knex.schema.hasColumn(
TableName.SecretImport,
"replicationStatus"
);
const doesSecretImportLastReplicatedExist = await knex.schema.hasColumn(TableName.SecretImport, "lastReplicated");
const doesSecretImportIsReservedExist = await knex.schema.hasColumn(TableName.SecretImport, "isReserved");
if (await knex.schema.hasTable(TableName.SecretImport)) {
await knex.schema.alterTable(TableName.SecretImport, (t) => {
if (doesSecretImportIsReplicationExist) t.dropColumn("isReplication");
if (doesSecretImportIsReplicationSuccessExist) t.dropColumn("isReplicationSuccess");
if (doesSecretImportReplicationStatusExist) t.dropColumn("replicationStatus");
if (doesSecretImportLastReplicatedExist) t.dropColumn("lastReplicated");
if (doesSecretImportIsReservedExist) t.dropColumn("isReserved");
});
}
const doesSecretFolderReservedExist = await knex.schema.hasColumn(TableName.SecretFolder, "isReserved");
if (await knex.schema.hasTable(TableName.SecretFolder)) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
if (doesSecretFolderReservedExist) t.dropColumn("isReserved");
});
}
const doesSecretApprovalRequestIsReplicatedExist = await knex.schema.hasColumn(
TableName.SecretApprovalRequest,
"isReplicated"
);
if (await knex.schema.hasTable(TableName.SecretApprovalRequest)) {
await knex.schema.alterTable(TableName.SecretApprovalRequest, (t) => {
if (doesSecretApprovalRequestIsReplicatedExist) t.dropColumn("isReplicated");
});
}
}

View File

@@ -1,56 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.KmsServerRootConfig))) {
await knex.schema.createTable(TableName.KmsServerRootConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.binary("encryptedRootKey").notNullable();
});
}
await createOnUpdateTrigger(knex, TableName.KmsServerRootConfig);
if (!(await knex.schema.hasTable(TableName.KmsKey))) {
await knex.schema.createTable(TableName.KmsKey, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.binary("encryptedKey").notNullable();
t.string("encryptionAlgorithm").notNullable();
t.integer("version").defaultTo(1).notNullable();
t.string("description");
t.boolean("isDisabled").defaultTo(false);
t.boolean("isReserved").defaultTo(true);
t.string("projectId");
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("orgId");
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
});
}
await createOnUpdateTrigger(knex, TableName.KmsKey);
if (!(await knex.schema.hasTable(TableName.KmsKeyVersion))) {
await knex.schema.createTable(TableName.KmsKeyVersion, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.binary("encryptedKey").notNullable();
t.integer("version").notNullable();
t.uuid("kmsKeyId").notNullable();
t.foreign("kmsKeyId").references("id").inTable(TableName.KmsKey).onDelete("CASCADE");
});
}
await createOnUpdateTrigger(knex, TableName.KmsKeyVersion);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.KmsServerRootConfig);
await dropOnUpdateTrigger(knex, TableName.KmsServerRootConfig);
await knex.schema.dropTableIfExists(TableName.KmsKeyVersion);
await dropOnUpdateTrigger(knex, TableName.KmsKeyVersion);
await knex.schema.dropTableIfExists(TableName.KmsKey);
await dropOnUpdateTrigger(knex, TableName.KmsKey);
}

View File

@@ -1,29 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasConsecutiveFailedPasswordAttempts = await knex.schema.hasColumn(
TableName.Users,
"consecutiveFailedPasswordAttempts"
);
await knex.schema.alterTable(TableName.Users, (tb) => {
if (!hasConsecutiveFailedPasswordAttempts) {
tb.integer("consecutiveFailedPasswordAttempts").defaultTo(0);
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasConsecutiveFailedPasswordAttempts = await knex.schema.hasColumn(
TableName.Users,
"consecutiveFailedPasswordAttempts"
);
await knex.schema.alterTable(TableName.Users, (tb) => {
if (hasConsecutiveFailedPasswordAttempts) {
tb.dropColumn("consecutiveFailedPasswordAttempts");
}
});
}

View File

@@ -30,9 +30,6 @@ export * from "./identity-universal-auths";
export * from "./incident-contacts";
export * from "./integration-auths";
export * from "./integrations";
export * from "./kms-key-versions";
export * from "./kms-keys";
export * from "./kms-root-config";
export * from "./ldap-configs";
export * from "./ldap-group-maps";
export * from "./models";
@@ -60,7 +57,6 @@ export * from "./secret-blind-indexes";
export * from "./secret-folder-versions";
export * from "./secret-folders";
export * from "./secret-imports";
export * from "./secret-references";
export * from "./secret-rotation-outputs";
export * from "./secret-rotations";
export * from "./secret-scanning-git-risks";

View File

@@ -1,21 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const KmsKeyVersionsSchema = z.object({
id: z.string().uuid(),
encryptedKey: zodBuffer,
version: z.number(),
kmsKeyId: z.string().uuid()
});
export type TKmsKeyVersions = z.infer<typeof KmsKeyVersionsSchema>;
export type TKmsKeyVersionsInsert = Omit<z.input<typeof KmsKeyVersionsSchema>, TImmutableDBKeys>;
export type TKmsKeyVersionsUpdate = Partial<Omit<z.input<typeof KmsKeyVersionsSchema>, TImmutableDBKeys>>;

View File

@@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const KmsKeysSchema = z.object({
id: z.string().uuid(),
encryptedKey: zodBuffer,
encryptionAlgorithm: z.string(),
version: z.number().default(1),
description: z.string().nullable().optional(),
isDisabled: z.boolean().default(false).nullable().optional(),
isReserved: z.boolean().default(true).nullable().optional(),
projectId: z.string().nullable().optional(),
orgId: z.string().uuid().nullable().optional()
});
export type TKmsKeys = z.infer<typeof KmsKeysSchema>;
export type TKmsKeysInsert = Omit<z.input<typeof KmsKeysSchema>, TImmutableDBKeys>;
export type TKmsKeysUpdate = Partial<Omit<z.input<typeof KmsKeysSchema>, TImmutableDBKeys>>;

View File

@@ -1,19 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const KmsRootConfigSchema = z.object({
id: z.string().uuid(),
encryptedRootKey: zodBuffer
});
export type TKmsRootConfig = z.infer<typeof KmsRootConfigSchema>;
export type TKmsRootConfigInsert = Omit<z.input<typeof KmsRootConfigSchema>, TImmutableDBKeys>;
export type TKmsRootConfigUpdate = Partial<Omit<z.input<typeof KmsRootConfigSchema>, TImmutableDBKeys>>;

View File

@@ -81,11 +81,7 @@ export enum TableName {
DynamicSecretLease = "dynamic_secret_leases",
// junction tables with tags
JnSecretTag = "secret_tag_junction",
SecretVersionTag = "secret_version_tag_junction",
// KMS Service
KmsServerRootConfig = "kms_root_config",
KmsKey = "kms_keys",
KmsKeyVersion = "kms_key_versions"
SecretVersionTag = "secret_version_tag_junction"
}
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";

View File

@@ -18,8 +18,7 @@ export const SecretApprovalRequestsSchema = z.object({
statusChangeBy: z.string().uuid().nullable().optional(),
committerId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
isReplicated: z.boolean().nullable().optional()
updatedAt: z.date()
});
export type TSecretApprovalRequests = z.infer<typeof SecretApprovalRequestsSchema>;

View File

@@ -14,8 +14,7 @@ export const SecretFoldersSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
envId: z.string().uuid(),
parentId: z.string().uuid().nullable().optional(),
isReserved: z.boolean().default(false).nullable().optional()
parentId: z.string().uuid().nullable().optional()
});
export type TSecretFolders = z.infer<typeof SecretFoldersSchema>;

View File

@@ -15,12 +15,7 @@ export const SecretImportsSchema = z.object({
position: z.number(),
createdAt: z.date(),
updatedAt: z.date(),
folderId: z.string().uuid(),
isReplication: z.boolean().default(false).nullable().optional(),
isReplicationSuccess: z.boolean().nullable().optional(),
replicationStatus: z.string().nullable().optional(),
lastReplicated: z.date().nullable().optional(),
isReserved: z.boolean().default(false).nullable().optional()
folderId: z.string().uuid()
});
export type TSecretImports = z.infer<typeof SecretImportsSchema>;

View File

@@ -25,8 +25,7 @@ export const UsersSchema = z.object({
isEmailVerified: z.boolean().default(false).nullable().optional(),
consecutiveFailedMfaAttempts: z.number().default(0).nullable().optional(),
isLocked: z.boolean().default(false).nullable().optional(),
temporaryLockDateEnd: z.date().nullable().optional(),
consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional()
temporaryLockDateEnd: z.date().nullable().optional()
});
export type TUsers = z.infer<typeof UsersSchema>;

View File

@@ -362,7 +362,6 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
const groups = await req.server.services.scim.listScimGroups({
orgId: req.permission.orgId,
startIndex: req.query.startIndex,
filter: req.query.filter,
limit: req.query.count
});

View File

@@ -1,7 +1,6 @@
import { nanoid } from "nanoid";
import { z } from "zod";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { sapPubSchema } from "@app/server/routes/sanitizedSchemas";
@@ -20,11 +19,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
workspaceId: z.string(),
name: z.string().optional(),
environment: z.string(),
secretPath: z
.string()
.optional()
.nullable()
.transform((val) => (val ? removeTrailingSlash(val) : val)),
secretPath: z.string().optional().nullable(),
approvers: z.string().array().min(1),
approvals: z.number().min(1).default(1)
})
@@ -68,11 +63,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
name: z.string().optional(),
approvers: z.string().array().min(1),
approvals: z.number().min(1).default(1),
secretPath: z
.string()
.optional()
.nullable()
.transform((val) => (val ? removeTrailingSlash(val) : val))
secretPath: z.string().optional().nullable()
})
.refine((data) => data.approvals <= data.approvers.length, {
path: ["approvals"],
@@ -166,7 +157,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
querystring: z.object({
workspaceId: z.string().trim(),
environment: z.string().trim(),
secretPath: z.string().trim().transform(removeTrailingSlash)
secretPath: z.string().trim()
}),
response: {
200: z.object({

View File

@@ -32,20 +32,22 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
}),
response: {
200: z.object({
approvals: SecretApprovalRequestsSchema.extend({
// secretPath: z.string(),
policy: z.object({
id: z.string(),
name: z.string(),
approvals: z.number(),
approvers: z.string().array(),
secretPath: z.string().optional().nullable()
}),
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
environment: z.string(),
reviewers: z.object({ member: z.string(), status: z.string() }).array(),
approvers: z.string().array()
}).array()
approvals: SecretApprovalRequestsSchema.merge(
z.object({
// secretPath: z.string(),
policy: z.object({
id: z.string(),
name: z.string(),
approvals: z.number(),
approvers: z.string().array(),
secretPath: z.string().optional().nullable()
}),
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
environment: z.string(),
reviewers: z.object({ member: z.string(), status: z.string() }).array(),
approvers: z.string().array()
})
).array()
})
}
},

View File

@@ -77,7 +77,7 @@ type TLdapConfigServiceFactoryDep = {
>;
userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};
export type TLdapConfigServiceFactory = ReturnType<typeof ldapConfigServiceFactory>;
@@ -510,7 +510,6 @@ export const ldapConfigServiceFactory = ({
return newUserAlias;
});
}
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
const user = await userDAL.transaction(async (tx) => {
const newUser = await userDAL.findOne({ id: userAlias.userId }, tx);

View File

@@ -50,7 +50,7 @@ type TSamlConfigServiceFactoryDep = {
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "create">;
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
smtpService: Pick<TSmtpService, "sendMail">;
};
@@ -449,7 +449,6 @@ export const samlConfigServiceFactory = ({
return newUser;
});
}
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
const isUserCompleted = Boolean(user.isAccepted);
const providerAuthToken = jwt.sign(

View File

@@ -18,20 +18,6 @@ export const buildScimUserList = ({
};
};
export const parseScimFilter = (filterToParse: string | undefined) => {
if (!filterToParse) return {};
const [parsedName, parsedValue] = filterToParse.split("eq").map((s) => s.trim());
let attributeName = parsedName;
if (parsedName === "userName") {
attributeName = "email";
} else if (parsedName === "displayName") {
attributeName = "name";
}
return { [attributeName]: parsedValue.replace(/"/g, "") };
};
export const buildScimUser = ({
orgMembershipId,
username,

View File

@@ -30,7 +30,7 @@ import { UserAliasType } from "@app/services/user-alias/user-alias-types";
import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { buildScimGroup, buildScimGroupList, buildScimUser, buildScimUserList, parseScimFilter } from "./scim-fns";
import { buildScimGroup, buildScimGroupList, buildScimUser, buildScimUserList } from "./scim-fns";
import {
TCreateScimGroupDTO,
TCreateScimTokenDTO,
@@ -184,6 +184,18 @@ export const scimServiceFactory = ({
status: 403
});
const parseFilter = (filterToParse: string | undefined) => {
if (!filterToParse) return {};
const [parsedName, parsedValue] = filterToParse.split("eq").map((s) => s.trim());
let attributeName = parsedName;
if (parsedName === "userName") {
attributeName = "email";
}
return { [attributeName]: parsedValue.replace(/"/g, "") };
};
const findOpts = {
...(startIndex && { offset: startIndex - 1 }),
...(limit && { limit })
@@ -192,7 +204,7 @@ export const scimServiceFactory = ({
const users = await orgDAL.findMembership(
{
[`${TableName.OrgMembership}.orgId` as "id"]: orgId,
...parseScimFilter(filter)
...parseFilter(filter)
},
findOpts
);
@@ -379,7 +391,7 @@ export const scimServiceFactory = ({
);
}
}
await licenseService.updateSubscriptionOrgMemberCount(org.id);
return { user, orgMembership };
});
@@ -545,7 +557,7 @@ export const scimServiceFactory = ({
return {}; // intentionally return empty object upon success
};
const listScimGroups = async ({ orgId, startIndex, limit, filter }: TListScimGroupsDTO) => {
const listScimGroups = async ({ orgId, startIndex, limit }: TListScimGroupsDTO) => {
const plan = await licenseService.getPlan(orgId);
if (!plan.groups)
throw new BadRequestError({
@@ -568,8 +580,7 @@ export const scimServiceFactory = ({
const groups = await groupDAL.findGroups(
{
orgId,
...(filter && parseScimFilter(filter))
orgId
},
{
offset: startIndex - 1,

View File

@@ -66,7 +66,6 @@ export type TDeleteScimUserDTO = {
export type TListScimGroupsDTO = {
startIndex: number;
filter?: string;
limit: number;
orgId: string;
};

View File

@@ -4,7 +4,6 @@ import picomatch from "picomatch";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
import { containsGlobPatterns } from "@app/lib/picomatch";
import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
@@ -208,8 +207,7 @@ export const secretApprovalPolicyServiceFactory = ({
return sapPolicies;
};
const getSecretApprovalPolicy = async (projectId: string, environment: string, path: string) => {
const secretPath = removeTrailingSlash(path);
const getSecretApprovalPolicy = async (projectId: string, environment: string, secretPath: string) => {
const env = await projectEnvDAL.findOne({ slug: environment, projectId });
if (!env) throw new BadRequestError({ message: "Environment not found" });

View File

@@ -15,16 +15,9 @@ import { ActorType } from "@app/services/auth/auth-type";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TSecretDALFactory } from "@app/services/secret/secret-dal";
import {
fnSecretBlindIndexCheck,
fnSecretBlindIndexCheckV2,
fnSecretBulkDelete,
fnSecretBulkInsert,
fnSecretBulkUpdate,
getAllNestedSecretReferences
} from "@app/services/secret/secret-fns";
import { getAllNestedSecretReferences } from "@app/services/secret/secret-fns";
import { TSecretQueueFactory } from "@app/services/secret/secret-queue";
import { SecretOperations } from "@app/services/secret/secret-types";
import { TSecretServiceFactory } from "@app/services/secret/secret-service";
import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal";
import { TSecretVersionTagDALFactory } from "@app/services/secret/secret-version-tag-dal";
import { TSecretBlindIndexDALFactory } from "@app/services/secret-blind-index/secret-blind-index-dal";
@@ -39,6 +32,7 @@ import { TSecretApprovalRequestReviewerDALFactory } from "./secret-approval-requ
import { TSecretApprovalRequestSecretDALFactory } from "./secret-approval-request-secret-dal";
import {
ApprovalStatus,
CommitType,
RequestState,
TApprovalRequestCountDTO,
TGenerateSecretApprovalRequestDTO,
@@ -51,11 +45,10 @@ import {
type TSecretApprovalRequestServiceFactoryDep = {
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
secretApprovalRequestDAL: TSecretApprovalRequestDALFactory;
secretApprovalRequestSecretDAL: TSecretApprovalRequestSecretDALFactory;
secretApprovalRequestReviewerDAL: TSecretApprovalRequestReviewerDALFactory;
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findSecretPathByFolderIds">;
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findById" | "findSecretPathByFolderIds">;
secretDAL: TSecretDALFactory;
secretTagDAL: Pick<TSecretTagDALFactory, "findManyTagsById" | "saveTagsToSecret" | "deleteTagsManySecret">;
secretBlindIndexDAL: Pick<TSecretBlindIndexDALFactory, "findOne">;
@@ -63,7 +56,16 @@ type TSecretApprovalRequestServiceFactoryDep = {
secretVersionDAL: Pick<TSecretVersionDALFactory, "findLatestVersionMany" | "insertMany">;
secretVersionTagDAL: Pick<TSecretVersionTagDALFactory, "insertMany">;
projectDAL: Pick<TProjectDALFactory, "checkProjectUpgradeStatus">;
secretQueueService: Pick<TSecretQueueFactory, "syncSecrets" | "removeSecretReminder">;
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
secretService: Pick<
TSecretServiceFactory,
| "fnSecretBulkInsert"
| "fnSecretBulkUpdate"
| "fnSecretBlindIndexCheck"
| "fnSecretBulkDelete"
| "fnSecretBlindIndexCheckV2"
>;
secretQueueService: Pick<TSecretQueueFactory, "syncSecrets">;
};
export type TSecretApprovalRequestServiceFactory = ReturnType<typeof secretApprovalRequestServiceFactory>;
@@ -80,6 +82,7 @@ export const secretApprovalRequestServiceFactory = ({
projectDAL,
permissionService,
snapshotService,
secretService,
secretVersionDAL,
secretQueueService,
projectBotService
@@ -299,12 +302,11 @@ export const secretApprovalRequestServiceFactory = ({
const secretApprovalSecrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id);
if (!secretApprovalSecrets) throw new BadRequestError({ message: "No secrets found" });
const conflicts: Array<{ secretId: string; op: SecretOperations }> = [];
let secretCreationCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Create);
const conflicts: Array<{ secretId: string; op: CommitType }> = [];
let secretCreationCommits = secretApprovalSecrets.filter(({ op }) => op === CommitType.Create);
if (secretCreationCommits.length) {
const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await fnSecretBlindIndexCheckV2({
const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await secretService.fnSecretBlindIndexCheckV2({
folderId,
secretDAL,
inputSecrets: secretCreationCommits.map(({ secretBlindIndex }) => {
if (!secretBlindIndex) {
throw new BadRequestError({
@@ -317,19 +319,17 @@ export const secretApprovalRequestServiceFactory = ({
secretCreationCommits
.filter(({ secretBlindIndex }) => conflictGroupByBlindIndex[secretBlindIndex || ""])
.forEach((el) => {
conflicts.push({ op: SecretOperations.Create, secretId: el.id });
conflicts.push({ op: CommitType.Create, secretId: el.id });
});
secretCreationCommits = secretCreationCommits.filter(
({ secretBlindIndex }) => !conflictGroupByBlindIndex[secretBlindIndex || ""]
);
}
let secretUpdationCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Update);
let secretUpdationCommits = secretApprovalSecrets.filter(({ op }) => op === CommitType.Update);
if (secretUpdationCommits.length) {
const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await fnSecretBlindIndexCheckV2({
const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await secretService.fnSecretBlindIndexCheckV2({
folderId,
secretDAL,
userId: "",
inputSecrets: secretUpdationCommits
.filter(({ secretBlindIndex, secret }) => secret && secret.secretBlindIndex !== secretBlindIndex)
.map(({ secretBlindIndex }) => {
@@ -347,7 +347,7 @@ export const secretApprovalRequestServiceFactory = ({
(secretBlindIndex && conflictGroupByBlindIndex[secretBlindIndex]) || !secretId
)
.forEach((el) => {
conflicts.push({ op: SecretOperations.Update, secretId: el.id });
conflicts.push({ op: CommitType.Update, secretId: el.id });
});
secretUpdationCommits = secretUpdationCommits.filter(
@@ -356,11 +356,11 @@ export const secretApprovalRequestServiceFactory = ({
);
}
const secretDeletionCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Delete);
const secretDeletionCommits = secretApprovalSecrets.filter(({ op }) => op === CommitType.Delete);
const botKey = await projectBotService.getBotKey(projectId).catch(() => null);
const mergeStatus = await secretApprovalRequestDAL.transaction(async (tx) => {
const newSecrets = secretCreationCommits.length
? await fnSecretBulkInsert({
? await secretService.fnSecretBulkInsert({
tx,
folderId,
inputSecrets: secretCreationCommits.map((el) => ({
@@ -403,7 +403,7 @@ export const secretApprovalRequestServiceFactory = ({
})
: [];
const updatedSecrets = secretUpdationCommits.length
? await fnSecretBulkUpdate({
? await secretService.fnSecretBulkUpdate({
folderId,
projectId,
tx,
@@ -449,13 +449,11 @@ export const secretApprovalRequestServiceFactory = ({
})
: [];
const deletedSecret = secretDeletionCommits.length
? await fnSecretBulkDelete({
? await secretService.fnSecretBulkDelete({
projectId,
folderId,
tx,
actorId: "",
secretDAL,
secretQueueService,
inputSecrets: secretDeletionCommits.map(({ secretBlindIndex }) => {
if (!secretBlindIndex) {
throw new BadRequestError({
@@ -482,14 +480,12 @@ export const secretApprovalRequestServiceFactory = ({
};
});
await snapshotService.performSnapshot(folderId);
const [folder] = await folderDAL.findSecretPathByFolderIds(projectId, [folderId]);
if (!folder) throw new BadRequestError({ message: "Folder not found" });
const folder = await folderDAL.findById(folderId);
// TODO(akhilmhdh-pg): change query to do secret path from folder
await secretQueueService.syncSecrets({
projectId,
secretPath: folder.path,
environmentSlug: folder.environmentSlug,
actorId,
actor
secretPath: "/",
environment: folder?.environment.envSlug as string
});
return mergeStatus;
};
@@ -537,9 +533,9 @@ export const secretApprovalRequestServiceFactory = ({
const commits: Omit<TSecretApprovalRequestsSecretsInsert, "requestId">[] = [];
const commitTagIds: Record<string, string[]> = {};
// for created secret approval change
const createdSecrets = data[SecretOperations.Create];
const createdSecrets = data[CommitType.Create];
if (createdSecrets && createdSecrets?.length) {
const { keyName2BlindIndex } = await fnSecretBlindIndexCheck({
const { keyName2BlindIndex } = await secretService.fnSecretBlindIndexCheck({
inputSecrets: createdSecrets,
folderId,
isNew: true,
@@ -550,7 +546,7 @@ export const secretApprovalRequestServiceFactory = ({
commits.push(
...createdSecrets.map(({ secretName, ...el }) => ({
...el,
op: SecretOperations.Create as const,
op: CommitType.Create as const,
version: 1,
secretBlindIndex: keyName2BlindIndex[secretName],
algorithm: SecretEncryptionAlgo.AES_256_GCM,
@@ -562,12 +558,12 @@ export const secretApprovalRequestServiceFactory = ({
});
}
// not secret approval for update operations
const updatedSecrets = data[SecretOperations.Update];
const updatedSecrets = data[CommitType.Update];
if (updatedSecrets && updatedSecrets?.length) {
// get all blind index
// Find all those secrets
// if not throw not found
const { keyName2BlindIndex, secrets: secretsToBeUpdated } = await fnSecretBlindIndexCheck({
const { keyName2BlindIndex, secrets: secretsToBeUpdated } = await secretService.fnSecretBlindIndexCheck({
inputSecrets: updatedSecrets,
folderId,
isNew: false,
@@ -578,8 +574,8 @@ export const secretApprovalRequestServiceFactory = ({
// now find any secret that needs to update its name
// same process as above
const nameUpdatedSecrets = updatedSecrets.filter(({ newSecretName }) => Boolean(newSecretName));
const { keyName2BlindIndex: newKeyName2BlindIndex } = await fnSecretBlindIndexCheck({
inputSecrets: nameUpdatedSecrets.map(({ newSecretName }) => ({ secretName: newSecretName as string })),
const { keyName2BlindIndex: newKeyName2BlindIndex } = await secretService.fnSecretBlindIndexCheck({
inputSecrets: nameUpdatedSecrets,
folderId,
isNew: true,
blindIndexCfg,
@@ -596,14 +592,14 @@ export const secretApprovalRequestServiceFactory = ({
const secretId = secsGroupedByBlindIndex[keyName2BlindIndex[secretName]][0].id;
const secretBlindIndex =
newSecretName && newKeyName2BlindIndex[newSecretName]
? newKeyName2BlindIndex?.[newSecretName]
? newKeyName2BlindIndex?.[secretName]
: keyName2BlindIndex[secretName];
// add tags
if (tagIds?.length) commitTagIds[keyName2BlindIndex[secretName]] = tagIds;
return {
...latestSecretVersions[secretId],
...el,
op: SecretOperations.Update as const,
op: CommitType.Update as const,
secret: secretId,
secretVersion: latestSecretVersions[secretId].id,
secretBlindIndex,
@@ -613,12 +609,12 @@ export const secretApprovalRequestServiceFactory = ({
);
}
// deleted secrets
const deletedSecrets = data[SecretOperations.Delete];
const deletedSecrets = data[CommitType.Delete];
if (deletedSecrets && deletedSecrets.length) {
// get all blind index
// Find all those secrets
// if not throw not found
const { keyName2BlindIndex, secrets } = await fnSecretBlindIndexCheck({
const { keyName2BlindIndex, secrets } = await secretService.fnSecretBlindIndexCheck({
inputSecrets: deletedSecrets,
folderId,
isNew: false,
@@ -639,7 +635,7 @@ export const secretApprovalRequestServiceFactory = ({
if (!latestSecretVersions[secretId].secretBlindIndex)
throw new BadRequestError({ message: "Failed to find secret blind index" });
return {
op: SecretOperations.Delete as const,
op: CommitType.Delete as const,
...latestSecretVersions[secretId],
secretBlindIndex: latestSecretVersions[secretId].secretBlindIndex as string,
secret: secretId,

View File

@@ -1,6 +1,11 @@
import { TImmutableDBKeys, TSecretApprovalPolicies, TSecretApprovalRequestsSecrets } from "@app/db/schemas";
import { TProjectPermission } from "@app/lib/types";
import { SecretOperations } from "@app/services/secret/secret-types";
export enum CommitType {
Create = "create",
Update = "update",
Delete = "delete"
}
export enum RequestState {
Open = "open",
@@ -13,14 +18,14 @@ export enum ApprovalStatus {
REJECTED = "rejected"
}
export type TApprovalCreateSecret = Omit<
type TApprovalCreateSecret = Omit<
TSecretApprovalRequestsSecrets,
TImmutableDBKeys | "version" | "algorithm" | "keyEncoding" | "requestId" | "op" | "secretVersion" | "secretBlindIndex"
> & {
secretName: string;
tagIds?: string[];
};
export type TApprovalUpdateSecret = Partial<TApprovalCreateSecret> & {
type TApprovalUpdateSecret = Partial<TApprovalCreateSecret> & {
secretName: string;
newSecretName?: string;
tagIds?: string[];
@@ -31,9 +36,9 @@ export type TGenerateSecretApprovalRequestDTO = {
secretPath: string;
policy: TSecretApprovalPolicies;
data: {
[SecretOperations.Create]?: TApprovalCreateSecret[];
[SecretOperations.Update]?: TApprovalUpdateSecret[];
[SecretOperations.Delete]?: { secretName: string }[];
[CommitType.Create]?: TApprovalCreateSecret[];
[CommitType.Update]?: TApprovalUpdateSecret[];
[CommitType.Delete]?: { secretName: string }[];
};
} & TProjectPermission;

View File

@@ -1 +0,0 @@
export const MAX_REPLICATION_DEPTH = 5;

View File

@@ -1,10 +0,0 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TSecretReplicationDALFactory = ReturnType<typeof secretReplicationDALFactory>;
export const secretReplicationDALFactory = (db: TDbClient) => {
const orm = ormify(db, TableName.SecretVersion);
return orm;
};

View File

@@ -1,485 +0,0 @@
import { SecretType, TSecrets } from "@app/db/schemas";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
import { TSecretApprovalRequestDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-dal";
import { TSecretApprovalRequestSecretDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-secret-dal";
import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { BadRequestError } from "@app/lib/errors";
import { groupBy, unique } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { QueueName, TQueueServiceFactory } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { TSecretDALFactory } from "@app/services/secret/secret-dal";
import { fnSecretBulkInsert, fnSecretBulkUpdate } from "@app/services/secret/secret-fns";
import { TSecretQueueFactory, uniqueSecretQueueKey } from "@app/services/secret/secret-queue";
import { SecretOperations } from "@app/services/secret/secret-types";
import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal";
import { TSecretVersionTagDALFactory } from "@app/services/secret/secret-version-tag-dal";
import { TSecretBlindIndexDALFactory } from "@app/services/secret-blind-index/secret-blind-index-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { ReservedFolders } from "@app/services/secret-folder/secret-folder-types";
import { TSecretImportDALFactory } from "@app/services/secret-import/secret-import-dal";
import { fnSecretsFromImports } from "@app/services/secret-import/secret-import-fns";
import { TSecretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal";
import { MAX_REPLICATION_DEPTH } from "./secret-replication-constants";
type TSecretReplicationServiceFactoryDep = {
secretDAL: Pick<
TSecretDALFactory,
"find" | "findByBlindIndexes" | "insertMany" | "bulkUpdate" | "delete" | "upsertSecretReferences" | "transaction"
>;
secretVersionDAL: Pick<TSecretVersionDALFactory, "find" | "insertMany" | "update" | "findLatestVersionMany">;
secretImportDAL: Pick<TSecretImportDALFactory, "find" | "updateById" | "findByFolderIds">;
folderDAL: Pick<
TSecretFolderDALFactory,
"findSecretPathByFolderIds" | "findBySecretPath" | "create" | "findOne" | "findByManySecretPath"
>;
secretVersionTagDAL: Pick<TSecretVersionTagDALFactory, "find" | "insertMany">;
secretQueueService: Pick<TSecretQueueFactory, "syncSecrets" | "replicateSecrets">;
queueService: Pick<TQueueServiceFactory, "start" | "listen" | "queue" | "stopJobById">;
secretApprovalPolicyService: Pick<TSecretApprovalPolicyServiceFactory, "getSecretApprovalPolicy">;
keyStore: Pick<TKeyStoreFactory, "acquireLock" | "setItemWithExpiry" | "getItem">;
secretBlindIndexDAL: Pick<TSecretBlindIndexDALFactory, "findOne">;
secretTagDAL: Pick<TSecretTagDALFactory, "findManyTagsById" | "saveTagsToSecret" | "deleteTagsManySecret" | "find">;
secretApprovalRequestDAL: Pick<TSecretApprovalRequestDALFactory, "create" | "transaction">;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findOne">;
secretApprovalRequestSecretDAL: Pick<
TSecretApprovalRequestSecretDALFactory,
"insertMany" | "insertApprovalSecretTags"
>;
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
};
export type TSecretReplicationServiceFactory = ReturnType<typeof secretReplicationServiceFactory>;
const SECRET_IMPORT_SUCCESS_LOCK = 10;
const keystoreReplicationSuccessKey = (jobId: string, secretImportId: string) => `${jobId}-${secretImportId}`;
const getReplicationKeyLockPrefix = (projectId: string, environmentSlug: string, secretPath: string) =>
`REPLICATION_SECRET_${projectId}-${environmentSlug}-${secretPath}`;
export const getReplicationFolderName = (importId: string) => `${ReservedFolders.SecretReplication}${importId}`;
const getDecryptedKeyValue = (key: string, secret: TSecrets) => {
const secretKey = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretKeyCiphertext,
iv: secret.secretKeyIV,
tag: secret.secretKeyTag,
key
});
const secretValue = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretValueCiphertext,
iv: secret.secretValueIV,
tag: secret.secretValueTag,
key
});
return { key: secretKey, value: secretValue };
};
export const secretReplicationServiceFactory = ({
secretDAL,
queueService,
secretVersionDAL,
secretImportDAL,
keyStore,
secretVersionTagDAL,
secretTagDAL,
folderDAL,
secretApprovalPolicyService,
secretApprovalRequestSecretDAL,
secretApprovalRequestDAL,
secretQueueService,
projectMembershipDAL,
projectBotService
}: TSecretReplicationServiceFactoryDep) => {
const getReplicatedSecrets = (
botKey: string,
localSecrets: TSecrets[],
importedSecrets: { secrets: TSecrets[] }[]
) => {
const deDupe = new Set<string>();
const secrets = localSecrets
.filter(({ secretBlindIndex }) => Boolean(secretBlindIndex))
.map((el) => {
const decryptedSecret = getDecryptedKeyValue(botKey, el);
deDupe.add(decryptedSecret.key);
return { ...el, secretKey: decryptedSecret.key, secretValue: decryptedSecret.value };
});
for (let i = importedSecrets.length - 1; i >= 0; i = -1) {
importedSecrets[i].secrets.forEach((el) => {
const decryptedSecret = getDecryptedKeyValue(botKey, el);
if (deDupe.has(decryptedSecret.key) || !el.secretBlindIndex) {
return;
}
deDupe.add(decryptedSecret.key);
secrets.push({ ...el, secretKey: decryptedSecret.key, secretValue: decryptedSecret.value });
});
}
return secrets;
};
// IMPORTANT NOTE BEFORE READING THE FUNCTION
// SOURCE - Where secrets are copied from
// DESTINATION - Where the replicated imports that point to SOURCE live
queueService.start(QueueName.SecretReplication, async (job) => {
logger.info(job.data, "Replication started");
const {
secretPath,
environmentSlug,
projectId,
actorId,
actor,
pickOnlyImportIds,
_deDupeReplicationQueue: deDupeReplicationQueue,
_deDupeQueue: deDupeQueue,
_depth: depth = 0
} = job.data;
if (depth > MAX_REPLICATION_DEPTH) return;
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, secretPath);
if (!folder) return;
// the replicated imports made to the source. These are the destinations
const destinationSecretImports = await secretImportDAL.find({
importPath: secretPath,
importEnv: folder.envId
});
// CASE: normal mode <- link import <- replicated import
const nonReplicatedDestinationImports = destinationSecretImports.filter(({ isReplication }) => !isReplication);
if (nonReplicatedDestinationImports.length) {
// keep calling sync secret for all the imports made
const importedFolderIds = unique(nonReplicatedDestinationImports, (i) => i.folderId).map(
({ folderId }) => folderId
);
const importedFolders = await folderDAL.findSecretPathByFolderIds(projectId, importedFolderIds);
const foldersGroupedById = groupBy(importedFolders.filter(Boolean), (i) => i?.id as string);
await Promise.all(
nonReplicatedDestinationImports
.filter(({ folderId }) => Boolean(foldersGroupedById[folderId][0]?.path as string))
// filter out already synced ones
.filter(
({ folderId }) =>
!deDupeQueue?.[
uniqueSecretQueueKey(
foldersGroupedById[folderId][0]?.environmentSlug as string,
foldersGroupedById[folderId][0]?.path as string
)
]
)
.map(({ folderId }) =>
secretQueueService.replicateSecrets({
projectId,
secretPath: foldersGroupedById[folderId][0]?.path as string,
environmentSlug: foldersGroupedById[folderId][0]?.environmentSlug as string,
actorId,
actor,
_depth: depth + 1,
_deDupeReplicationQueue: deDupeReplicationQueue,
_deDupeQueue: deDupeQueue
})
)
);
}
let destinationReplicatedSecretImports = destinationSecretImports.filter(({ isReplication }) =>
Boolean(isReplication)
);
destinationReplicatedSecretImports = pickOnlyImportIds
? destinationReplicatedSecretImports.filter(({ id }) => pickOnlyImportIds?.includes(id))
: destinationReplicatedSecretImports;
if (!destinationReplicatedSecretImports.length) return;
const botKey = await projectBotService.getBotKey(projectId);
// these are the secrets to be added in replicated folders
const sourceLocalSecrets = await secretDAL.find({ folderId: folder.id, type: SecretType.Shared });
const sourceSecretImports = await secretImportDAL.find({ folderId: folder.id });
const sourceImportedSecrets = await fnSecretsFromImports({
allowedImports: sourceSecretImports,
secretDAL,
folderDAL,
secretImportDAL
});
// secrets that gets replicated across imports
const sourceSecrets = getReplicatedSecrets(botKey, sourceLocalSecrets, sourceImportedSecrets);
const sourceSecretsGroupByBlindIndex = groupBy(sourceSecrets, (i) => i.secretBlindIndex as string);
const lock = await keyStore.acquireLock(
[getReplicationKeyLockPrefix(projectId, environmentSlug, secretPath)],
5000
);
try {
/* eslint-disable no-await-in-loop */
for (const destinationSecretImport of destinationReplicatedSecretImports) {
try {
const hasJobCompleted = await keyStore.getItem(
keystoreReplicationSuccessKey(job.id as string, destinationSecretImport.id),
KeyStorePrefixes.SecretReplication
);
if (hasJobCompleted) {
logger.info(
{ jobId: job.id, importId: destinationSecretImport.id },
"Skipping this job as this has been successfully replicated."
);
// eslint-disable-next-line
continue;
}
const [destinationFolder] = await folderDAL.findSecretPathByFolderIds(projectId, [
destinationSecretImport.folderId
]);
if (!destinationFolder) throw new BadRequestError({ message: "Imported folder not found" });
let destinationReplicationFolder = await folderDAL.findOne({
parentId: destinationFolder.id,
name: getReplicationFolderName(destinationSecretImport.id),
isReserved: true
});
if (!destinationReplicationFolder) {
destinationReplicationFolder = await folderDAL.create({
parentId: destinationFolder.id,
name: getReplicationFolderName(destinationSecretImport.id),
envId: destinationFolder.envId,
isReserved: true
});
}
const destinationReplicationFolderId = destinationReplicationFolder.id;
const destinationLocalSecretsFromDB = await secretDAL.find({
folderId: destinationReplicationFolderId
});
const destinationLocalSecrets = destinationLocalSecretsFromDB.map((el) => {
const decryptedSecret = getDecryptedKeyValue(botKey, el);
return { ...el, secretKey: decryptedSecret.key, secretValue: decryptedSecret.value };
});
const destinationLocalSecretsGroupedByBlindIndex = groupBy(
destinationLocalSecrets.filter(({ secretBlindIndex }) => Boolean(secretBlindIndex)),
(i) => i.secretBlindIndex as string
);
const locallyCreatedSecrets = sourceSecrets
.filter(
({ secretBlindIndex }) => !destinationLocalSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0]
)
.map((el) => ({ ...el, operation: SecretOperations.Create })); // rewrite update ops to create
const locallyUpdatedSecrets = sourceSecrets
.filter(
({ secretBlindIndex, secretKey, secretValue }) =>
destinationLocalSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0] &&
// if key or value changed
(destinationLocalSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0]?.secretKey !== secretKey ||
destinationLocalSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0]?.secretValue !==
secretValue)
)
.map((el) => ({ ...el, operation: SecretOperations.Update })); // rewrite update ops to create
const locallyDeletedSecrets = destinationLocalSecrets
.filter(({ secretBlindIndex }) => !sourceSecretsGroupByBlindIndex[secretBlindIndex as string]?.[0])
.map((el) => ({ ...el, operation: SecretOperations.Delete }));
const isEmtpy =
locallyCreatedSecrets.length + locallyUpdatedSecrets.length + locallyDeletedSecrets.length === 0;
// eslint-disable-next-line
if (isEmtpy) continue;
const policy = await secretApprovalPolicyService.getSecretApprovalPolicy(
projectId,
destinationFolder.environmentSlug,
destinationFolder.path
);
// this means it should be an approval request rather than direct replication
if (policy && actor === ActorType.USER) {
const membership = await projectMembershipDAL.findOne({ projectId, userId: actorId });
if (!membership) {
logger.error("Project membership not found in %s for user %s", projectId, actorId);
return;
}
const localSecretsLatestVersions = destinationLocalSecrets.map(({ id }) => id);
const latestSecretVersions = await secretVersionDAL.findLatestVersionMany(
destinationReplicationFolderId,
localSecretsLatestVersions
);
await secretApprovalRequestDAL.transaction(async (tx) => {
const approvalRequestDoc = await secretApprovalRequestDAL.create(
{
folderId: destinationReplicationFolderId,
slug: alphaNumericNanoId(),
policyId: policy.id,
status: "open",
hasMerged: false,
committerId: membership.id,
isReplicated: true
},
tx
);
const commits = locallyCreatedSecrets
.concat(locallyUpdatedSecrets)
.concat(locallyDeletedSecrets)
.map((doc) => {
const { operation } = doc;
const localSecret = destinationLocalSecretsGroupedByBlindIndex[doc.secretBlindIndex as string]?.[0];
return {
op: operation,
keyEncoding: doc.keyEncoding,
algorithm: doc.algorithm,
requestId: approvalRequestDoc.id,
metadata: doc.metadata,
secretKeyIV: doc.secretKeyIV,
secretKeyTag: doc.secretKeyTag,
secretKeyCiphertext: doc.secretKeyCiphertext,
secretValueIV: doc.secretValueIV,
secretValueTag: doc.secretValueTag,
secretValueCiphertext: doc.secretValueCiphertext,
secretBlindIndex: doc.secretBlindIndex,
secretCommentIV: doc.secretCommentIV,
secretCommentTag: doc.secretCommentTag,
secretCommentCiphertext: doc.secretCommentCiphertext,
skipMultilineEncoding: doc.skipMultilineEncoding,
// except create operation other two needs the secret id and version id
...(operation !== SecretOperations.Create
? { secretId: localSecret.id, secretVersion: latestSecretVersions[localSecret.id].id }
: {})
};
});
const approvalCommits = await secretApprovalRequestSecretDAL.insertMany(commits, tx);
return { ...approvalRequestDoc, commits: approvalCommits };
});
} else {
await secretDAL.transaction(async (tx) => {
if (locallyCreatedSecrets.length) {
await fnSecretBulkInsert({
folderId: destinationReplicationFolderId,
secretVersionDAL,
secretDAL,
tx,
secretTagDAL,
secretVersionTagDAL,
inputSecrets: locallyCreatedSecrets.map((doc) => {
return {
keyEncoding: doc.keyEncoding,
algorithm: doc.algorithm,
type: doc.type,
metadata: doc.metadata,
secretKeyIV: doc.secretKeyIV,
secretKeyTag: doc.secretKeyTag,
secretKeyCiphertext: doc.secretKeyCiphertext,
secretValueIV: doc.secretValueIV,
secretValueTag: doc.secretValueTag,
secretValueCiphertext: doc.secretValueCiphertext,
secretBlindIndex: doc.secretBlindIndex,
secretCommentIV: doc.secretCommentIV,
secretCommentTag: doc.secretCommentTag,
secretCommentCiphertext: doc.secretCommentCiphertext,
skipMultilineEncoding: doc.skipMultilineEncoding
};
})
});
}
if (locallyUpdatedSecrets.length) {
await fnSecretBulkUpdate({
projectId,
folderId: destinationReplicationFolderId,
secretVersionDAL,
secretDAL,
tx,
secretTagDAL,
secretVersionTagDAL,
inputSecrets: locallyUpdatedSecrets.map((doc) => {
return {
filter: {
folderId: destinationReplicationFolderId,
id: destinationLocalSecretsGroupedByBlindIndex[doc.secretBlindIndex as string][0].id
},
data: {
keyEncoding: doc.keyEncoding,
algorithm: doc.algorithm,
type: doc.type,
metadata: doc.metadata,
secretKeyIV: doc.secretKeyIV,
secretKeyTag: doc.secretKeyTag,
secretKeyCiphertext: doc.secretKeyCiphertext,
secretValueIV: doc.secretValueIV,
secretValueTag: doc.secretValueTag,
secretValueCiphertext: doc.secretValueCiphertext,
secretBlindIndex: doc.secretBlindIndex,
secretCommentIV: doc.secretCommentIV,
secretCommentTag: doc.secretCommentTag,
secretCommentCiphertext: doc.secretCommentCiphertext,
skipMultilineEncoding: doc.skipMultilineEncoding
}
};
})
});
}
if (locallyDeletedSecrets.length) {
await secretDAL.delete(
{
$in: {
id: locallyDeletedSecrets.map(({ id }) => id)
},
folderId: destinationReplicationFolderId
},
tx
);
}
});
await secretQueueService.syncSecrets({
projectId,
secretPath: destinationFolder.path,
environmentSlug: destinationFolder.environmentSlug,
actorId,
actor,
_depth: depth + 1,
_deDupeReplicationQueue: deDupeReplicationQueue,
_deDupeQueue: deDupeQueue
});
}
// this prevents a failed job from generating the secret approval request multiple times on retry
await keyStore.setItemWithExpiry(
keystoreReplicationSuccessKey(job.id as string, destinationSecretImport.id),
SECRET_IMPORT_SUCCESS_LOCK,
1,
KeyStorePrefixes.SecretReplication
);
await secretImportDAL.updateById(destinationSecretImport.id, {
lastReplicated: new Date(),
replicationStatus: null,
isReplicationSuccess: true
});
} catch (err) {
logger.error(
err,
`Failed to replicate secret with import id=[${destinationSecretImport.id}] env=[${destinationSecretImport.importEnv.slug}] path=[${destinationSecretImport.importPath}]`
);
await secretImportDAL.updateById(destinationSecretImport.id, {
lastReplicated: new Date(),
replicationStatus: (err as Error)?.message.slice(0, 500),
isReplicationSuccess: false
});
}
}
/* eslint-enable no-await-in-loop */
} finally {
await lock.release();
logger.info(job.data, "Replication finished");
}
});
queueService.listen(QueueName.SecretReplication, "failed", (job, err) => {
logger.error(err, "Failed to replicate secret", job?.data);
});
};
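A minimal sketch of the blind-index diffing used above, assuming simplified secret shapes rather than the actual Infisical DTOs; it shows how source and destination secrets are grouped by blind index and split into create, update, and delete sets.
type SketchSecret = { secretBlindIndex: string; secretKey: string; secretValue: string };
const groupByBlindIndex = (secrets: SketchSecret[]) =>
secrets.reduce<Record<string, SketchSecret[]>>((acc, s) => {
acc[s.secretBlindIndex] = acc[s.secretBlindIndex] || [];
acc[s.secretBlindIndex].push(s);
return acc;
}, {});
const diffSecrets = (source: SketchSecret[], destination: SketchSecret[]) => {
const destByIndex = groupByBlindIndex(destination);
const srcByIndex = groupByBlindIndex(source);
return {
// present in source but not destination -> create
toCreate: source.filter((s) => !destByIndex[s.secretBlindIndex]?.[0]),
// present on both sides but key or value changed -> update
toUpdate: source.filter((s) => {
const dest = destByIndex[s.secretBlindIndex]?.[0];
if (!dest) return false;
return dest.secretKey !== s.secretKey || dest.secretValue !== s.secretValue;
}),
// present in destination but gone from source -> delete
toDelete: destination.filter((s) => !srcByIndex[s.secretBlindIndex]?.[0])
};
};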

View File

@@ -1,3 +0,0 @@
export type TSyncSecretReplicationDTO = {
id: string;
};

View File

@@ -220,7 +220,7 @@ export const secretSnapshotServiceFactory = ({
const deletedTopLevelSecsGroupById = groupBy(deletedTopLevelSecs, (item) => item.id);
// this will remove all secrets and folders on the child
// due to SQL foreign keys and the linked-list connection, removing the folders removes everything below them too
const deletedFolders = await folderDAL.delete({ parentId: snapshot.folderId, isReserved: false }, tx);
const deletedFolders = await folderDAL.delete({ parentId: snapshot.folderId }, tx);
const deletedTopLevelFolders = groupBy(
deletedFolders.filter(({ parentId }) => parentId === snapshot.folderId),
(item) => item.id

View File

@@ -1,75 +1,20 @@
import { Redis } from "ioredis";
import { Redlock, Settings } from "@app/lib/red-lock";
export type TKeyStoreFactory = ReturnType<typeof keyStoreFactory>;
// all the key prefixes used must be set here to avoid conflicts
export enum KeyStorePrefixes {
SecretReplication = "secret-replication-import-lock"
}
type TWaitTillReady = {
key: string;
waitingCb?: () => void;
keyCheckCb: (val: string | null) => boolean;
waitIteration?: number;
delay?: number;
jitter?: number;
};
export const keyStoreFactory = (redisUrl: string) => {
const redis = new Redis(redisUrl);
const redisLock = new Redlock([redis], { retryCount: 2, retryDelay: 200 });
const setItem = async (key: string, value: string | number | Buffer, prefix?: string) =>
redis.set(prefix ? `${prefix}:${key}` : key, value);
const setItem = async (key: string, value: string | number | Buffer) => redis.set(key, value);
const getItem = async (key: string, prefix?: string) => redis.get(prefix ? `${prefix}:${key}` : key);
const getItem = async (key: string) => redis.get(key);
const setItemWithExpiry = async (
key: string,
exp: number | string,
value: string | number | Buffer,
prefix?: string
) => redis.setex(prefix ? `${prefix}:${key}` : key, exp, value);
const setItemWithExpiry = async (key: string, exp: number | string, value: string | number | Buffer) =>
redis.setex(key, exp, value);
const deleteItem = async (key: string) => redis.del(key);
const incrementBy = async (key: string, value: number) => redis.incrby(key, value);
const waitTillReady = async ({
key,
waitingCb,
keyCheckCb,
waitIteration = 10,
delay = 1000,
jitter = 200
}: TWaitTillReady) => {
let attempts = 0;
let isReady = keyCheckCb(await getItem(key));
while (!isReady) {
if (attempts > waitIteration) return;
// eslint-disable-next-line
await new Promise((resolve) => {
waitingCb?.();
setTimeout(resolve, Math.max(0, delay + Math.floor((Math.random() * 2 - 1) * jitter)));
});
attempts += 1;
// eslint-disable-next-line
isReady = keyCheckCb(await getItem(key, "wait_till_ready"));
}
};
return {
setItem,
getItem,
setItemWithExpiry,
deleteItem,
incrementBy,
acquireLock(resources: string[], duration: number, settings?: Partial<Settings>) {
return redisLock.acquire(resources, duration, settings);
},
waitTillReady
};
return { setItem, getItem, setItemWithExpiry, deleteItem, incrementBy };
};
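An illustrative usage sketch of the key store above; the Redis URL, key names, and TTL are placeholders and error handling is omitted.
const keyStore = keyStoreFactory("redis://localhost:6379");
const replicateOnce = async (importId: string) => {
// serialize work per import id via the vendored Redlock wrapper
const lock = await keyStore.acquireLock([`${KeyStorePrefixes.SecretReplication}:${importId}`], 5000);
try {
// mark this import as replicated for 60 seconds, namespaced under the replication prefix
await keyStore.setItemWithExpiry(`replication-success-${importId}`, 60, 1, KeyStorePrefixes.SecretReplication);
} finally {
await lock.release();
}
};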

View File

@@ -386,8 +386,6 @@ export const SECRET_IMPORTS = {
environment: "The slug of the environment to import into.",
path: "The path to import into.",
workspaceId: "The ID of the project you are working in.",
isReplication:
"When true, secrets from the source will be automatically sent to the destination. If approval policies exist at the destination, the secrets will be sent as approval requests instead of being applied immediately.",
import: {
environment: "The slug of the environment to import from.",
path: "The path to import from."
@@ -663,7 +661,6 @@ export const INTEGRATION = {
targetServiceId:
"The service based grouping identifier ID of the external provider. Used in Terraform cloud, Checkly, Railway and NorthFlank",
owner: "External integration providers service entity owner. Used in Github.",
url: "The self-hosted URL of the platform to integrate with",
path: "Path to save the synced secrets. Used by Gitlab, AWS Parameter Store, Vault",
region: "AWS region to sync secrets to.",
scope: "Scope of the provider. Used by Github, Qovery",
@@ -676,10 +673,7 @@ export const INTEGRATION = {
secretGCPLabel: "The label for GCP secrets.",
secretAWSTag: "The tags for AWS secrets.",
kmsKeyId: "The ID of the encryption key from AWS KMS.",
shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store.",
shouldMaskSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Masked'.",
shouldProtectSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Protected'.",
shouldEnableDelete: "The flag to enable deletion of secrets"
shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store."
}
},
UPDATE: {

View File

@@ -39,9 +39,7 @@ const envSchema = z
HTTPS_ENABLED: zodStrBool,
// smtp options
SMTP_HOST: zpStr(z.string().optional()),
SMTP_IGNORE_TLS: zodStrBool.default("false"),
SMTP_REQUIRE_TLS: zodStrBool.default("true"),
SMTP_TLS_REJECT_UNAUTHORIZED: zodStrBool.default("true"),
SMTP_SECURE: zodStrBool,
SMTP_PORT: z.coerce.number().default(587),
SMTP_USERNAME: zpStr(z.string().optional()),
SMTP_PASSWORD: zpStr(z.string().optional()),
@@ -77,7 +75,6 @@ const envSchema = z
.optional()
.default(process.env.URL_GITLAB_LOGIN ?? GITLAB_URL)
), // fallback since URL_GITLAB_LOGIN has been renamed
DEFAULT_SAML_ORG_SLUG: zpStr(z.string().optional()).default(process.env.NEXT_PUBLIC_SAML_ORG_SLUG),
// integration client secrets
// heroku
CLIENT_ID_HEROKU: zpStr(z.string().optional()),
@@ -122,8 +119,7 @@ const envSchema = z
.transform((val) => val === "true")
.optional(),
INFISICAL_CLOUD: zodStrBool.default("false"),
MAINTENANCE_MODE: zodStrBool.default("false"),
CAPTCHA_SECRET: zpStr(z.string().optional())
MAINTENANCE_MODE: zodStrBool.default("false")
})
.transform((data) => ({
...data,
@@ -135,8 +131,7 @@ const envSchema = z
isSecretScanningConfigured:
Boolean(data.SECRET_SCANNING_GIT_APP_ID) &&
Boolean(data.SECRET_SCANNING_PRIVATE_KEY) &&
Boolean(data.SECRET_SCANNING_WEBHOOK_SECRET),
samlDefaultOrgSlug: data.DEFAULT_SAML_ORG_SLUG
Boolean(data.SECRET_SCANNING_WEBHOOK_SECRET)
}));
let envCfg: Readonly<z.infer<typeof envSchema>>;
@@ -155,20 +150,13 @@ export const initEnvConfig = (logger: Logger) => {
return envCfg;
};
export const formatSmtpConfig = () => {
return {
host: envCfg.SMTP_HOST,
port: envCfg.SMTP_PORT,
auth:
envCfg.SMTP_USERNAME && envCfg.SMTP_PASSWORD
? { user: envCfg.SMTP_USERNAME, pass: envCfg.SMTP_PASSWORD }
: undefined,
secure: envCfg.SMTP_PORT === 465,
from: `"${envCfg.SMTP_FROM_NAME}" <${envCfg.SMTP_FROM_ADDRESS}>`,
ignoreTLS: envCfg.SMTP_IGNORE_TLS,
requireTLS: envCfg.SMTP_REQUIRE_TLS,
tls: {
rejectUnauthorized: envCfg.SMTP_TLS_REJECT_UNAUTHORIZED
}
};
};
export const formatSmtpConfig = () => ({
host: envCfg.SMTP_HOST,
port: envCfg.SMTP_PORT,
auth:
envCfg.SMTP_USERNAME && envCfg.SMTP_PASSWORD
? { user: envCfg.SMTP_USERNAME, pass: envCfg.SMTP_PASSWORD }
: undefined,
secure: envCfg.SMTP_SECURE,
from: `"${envCfg.SMTP_FROM_NAME}" <${envCfg.SMTP_FROM_ADDRESS}>`
});
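A hedged sketch of how the returned config is typically handed to nodemailer; the recipient address is a placeholder and the call assumes SMTP credentials are present in the environment.
import { createTransport } from "nodemailer";
const sendTestMail = async () => {
const smtpCfg = formatSmtpConfig();
const transporter = createTransport(smtpCfg);
await transporter.verify(); // throws if the SMTP connection or auth fails
await transporter.sendMail({
from: smtpCfg.from,
to: "user@example.com",
subject: "SMTP connectivity check",
text: "If you can read this, SMTP is configured correctly."
});
};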

View File

@@ -1,49 +0,0 @@
import crypto from "crypto";
import { SymmetricEncryption, TSymmetricEncryptionFns } from "./types";
const getIvLength = () => {
return 12;
};
const getTagLength = () => {
return 16;
};
export const symmetricCipherService = (type: SymmetricEncryption): TSymmetricEncryptionFns => {
const IV_LENGTH = getIvLength();
const TAG_LENGTH = getTagLength();
const encrypt = (text: Buffer, key: Buffer) => {
const iv = crypto.randomBytes(IV_LENGTH);
const cipher = crypto.createCipheriv(type, key, iv);
let encrypted = cipher.update(text);
encrypted = Buffer.concat([encrypted, cipher.final()]);
// Get the authentication tag
const tag = cipher.getAuthTag();
// Concatenate IV, encrypted text, and tag into a single buffer
const ciphertextBlob = Buffer.concat([iv, encrypted, tag]);
return ciphertextBlob;
};
const decrypt = (ciphertextBlob: Buffer, key: Buffer) => {
// Extract the IV, encrypted text, and tag from the buffer
const iv = ciphertextBlob.subarray(0, IV_LENGTH);
const tag = ciphertextBlob.subarray(-TAG_LENGTH);
const encrypted = ciphertextBlob.subarray(IV_LENGTH, -TAG_LENGTH);
const decipher = crypto.createDecipheriv(type, key, iv);
decipher.setAuthTag(tag);
const decrypted = Buffer.concat([decipher.update(encrypted), decipher.final()]);
return decrypted;
};
return {
encrypt,
decrypt
};
};
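A roundtrip sketch for the cipher above; the key is generated on the spot purely for illustration (aes-256-gcm expects a 32-byte key).
import crypto from "crypto";
const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
const key = crypto.randomBytes(32); // 256-bit key
const blob = cipher.encrypt(Buffer.from("super-secret"), key); // iv | ciphertext | auth tag
const plaintext = cipher.decrypt(blob, key);
// plaintext.toString("utf8") === "super-secret"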

View File

@@ -1,2 +0,0 @@
export { symmetricCipherService } from "./cipher";
export { SymmetricEncryption } from "./types";

View File

@@ -1,9 +0,0 @@
export enum SymmetricEncryption {
AES_GCM_256 = "aes-256-gcm",
AES_GCM_128 = "aes-128-gcm"
}
export type TSymmetricEncryptionFns = {
encrypt: (text: Buffer, key: Buffer) => Buffer;
decrypt: (blob: Buffer, key: Buffer) => Buffer;
};

View File

@@ -11,8 +11,6 @@ import { getConfig } from "../config/env";
export const decodeBase64 = (s: string) => naclUtils.decodeBase64(s);
export const encodeBase64 = (u: Uint8Array) => naclUtils.encodeBase64(u);
export const randomSecureBytes = (length = 32) => crypto.randomBytes(length);
export type TDecryptSymmetricInput = {
ciphertext: string;
iv: string;

View File

@@ -9,8 +9,7 @@ export {
encryptAsymmetric,
encryptSymmetric,
encryptSymmetric128BitHexKeyUTF8,
generateAsymmetricKeyPair,
randomSecureBytes
generateAsymmetricKeyPair
} from "./encryption";
export {
decryptIntegrationAuths,

View File

@@ -1,682 +0,0 @@
/* eslint-disable */
// Source code credits: https://github.com/mike-marcacci/node-redlock
// Taken to avoid external dependency
import { randomBytes, createHash } from "crypto";
import { EventEmitter } from "events";
// AbortController became available as a global in node version 16. Once version
// 14 reaches its end-of-life, this can be removed.
import { Redis as IORedisClient, Cluster as IORedisCluster } from "ioredis";
type Client = IORedisClient | IORedisCluster;
// Define script constants.
const ACQUIRE_SCRIPT = `
-- Return 0 if an entry already exists.
for i, key in ipairs(KEYS) do
if redis.call("exists", key) == 1 then
return 0
end
end
-- Create an entry for each provided key.
for i, key in ipairs(KEYS) do
redis.call("set", key, ARGV[1], "PX", ARGV[2])
end
-- Return the number of entries added.
return #KEYS
`;
const EXTEND_SCRIPT = `
-- Return 0 if an entry exists with a *different* lock value.
for i, key in ipairs(KEYS) do
if redis.call("get", key) ~= ARGV[1] then
return 0
end
end
-- Update the entry for each provided key.
for i, key in ipairs(KEYS) do
redis.call("set", key, ARGV[1], "PX", ARGV[2])
end
-- Return the number of entries updated.
return #KEYS
`;
const RELEASE_SCRIPT = `
local count = 0
for i, key in ipairs(KEYS) do
-- Only remove entries for *this* lock value.
if redis.call("get", key) == ARGV[1] then
redis.pcall("del", key)
count = count + 1
end
end
-- Return the number of entries removed.
return count
`;
export type ClientExecutionResult =
| {
client: Client;
vote: "for";
value: number;
}
| {
client: Client;
vote: "against";
error: Error;
};
/*
* This object contains a summary of results.
*/
export type ExecutionStats = {
readonly membershipSize: number;
readonly quorumSize: number;
readonly votesFor: Set<Client>;
readonly votesAgainst: Map<Client, Error>;
};
/*
* This object contains a summary of results. Because the result of an attempt
* can sometimes be determined before all requests are finished, each attempt
* contains a Promise that will resolve ExecutionStats once all requests are
* finished. A rejection of these promises should be considered undefined
* behavior and should cause a crash.
*/
export type ExecutionResult = {
attempts: ReadonlyArray<Promise<ExecutionStats>>;
start: number;
};
/**
*
*/
export interface Settings {
readonly driftFactor: number;
readonly retryCount: number;
readonly retryDelay: number;
readonly retryJitter: number;
readonly automaticExtensionThreshold: number;
}
// Define default settings.
const defaultSettings: Readonly<Settings> = {
driftFactor: 0.01,
retryCount: 10,
retryDelay: 200,
retryJitter: 100,
automaticExtensionThreshold: 500
};
// Modifying this object is forbidden.
Object.freeze(defaultSettings);
/*
* This error indicates a failure due to the existence of another lock for one
* or more of the requested resources.
*/
export class ResourceLockedError extends Error {
constructor(public readonly message: string) {
super();
this.name = "ResourceLockedError";
}
}
/*
* This error indicates a failure of an operation to pass with a quorum.
*/
export class ExecutionError extends Error {
constructor(
public readonly message: string,
public readonly attempts: ReadonlyArray<Promise<ExecutionStats>>
) {
super();
this.name = "ExecutionError";
}
}
/*
* An object of this type is returned when a resource is successfully locked. It
* contains convenience methods `release` and `extend` which perform the
* associated Redlock method on itself.
*/
export class Lock {
constructor(
public readonly redlock: Redlock,
public readonly resources: string[],
public readonly value: string,
public readonly attempts: ReadonlyArray<Promise<ExecutionStats>>,
public expiration: number
) {}
async release(): Promise<ExecutionResult> {
return this.redlock.release(this);
}
async extend(duration: number): Promise<Lock> {
return this.redlock.extend(this, duration);
}
}
export type RedlockAbortSignal = AbortSignal & { error?: Error };
/**
* A redlock object is instantiated with an array of at least one redis client
* and an optional `options` object. Properties of the Redlock object should NOT
* be changed after it is first used, as doing so could have unintended
* consequences for live locks.
*/
export class Redlock extends EventEmitter {
public readonly clients: Set<Client>;
public readonly settings: Settings;
public readonly scripts: {
readonly acquireScript: { value: string; hash: string };
readonly extendScript: { value: string; hash: string };
readonly releaseScript: { value: string; hash: string };
};
public constructor(
clients: Iterable<Client>,
settings: Partial<Settings> = {},
scripts: {
readonly acquireScript?: string | ((script: string) => string);
readonly extendScript?: string | ((script: string) => string);
readonly releaseScript?: string | ((script: string) => string);
} = {}
) {
super();
// Prevent crashes on error events.
this.on("error", () => {
// Because redlock is designed for high availability, it does not care if
// a minority of redis instances/clusters fail at an operation.
//
// However, it can be helpful to monitor and log such cases. Redlock emits
// an "error" event whenever it encounters an error, even if the error is
// ignored in its normal operation.
//
// This function serves to prevent node's default behavior of crashing
// when an "error" event is emitted in the absence of listeners.
});
// Copy the clients into a new set to ensure no accidental mutation.
this.clients = new Set(clients);
if (this.clients.size === 0) {
throw new Error("Redlock must be instantiated with at least one redis client.");
}
// Customize the settings for this instance.
this.settings = {
driftFactor: typeof settings.driftFactor === "number" ? settings.driftFactor : defaultSettings.driftFactor,
retryCount: typeof settings.retryCount === "number" ? settings.retryCount : defaultSettings.retryCount,
retryDelay: typeof settings.retryDelay === "number" ? settings.retryDelay : defaultSettings.retryDelay,
retryJitter: typeof settings.retryJitter === "number" ? settings.retryJitter : defaultSettings.retryJitter,
automaticExtensionThreshold:
typeof settings.automaticExtensionThreshold === "number"
? settings.automaticExtensionThreshold
: defaultSettings.automaticExtensionThreshold
};
// Use custom scripts and script modifiers.
const acquireScript =
typeof scripts.acquireScript === "function" ? scripts.acquireScript(ACQUIRE_SCRIPT) : ACQUIRE_SCRIPT;
const extendScript =
typeof scripts.extendScript === "function" ? scripts.extendScript(EXTEND_SCRIPT) : EXTEND_SCRIPT;
const releaseScript =
typeof scripts.releaseScript === "function" ? scripts.releaseScript(RELEASE_SCRIPT) : RELEASE_SCRIPT;
this.scripts = {
acquireScript: {
value: acquireScript,
hash: this._hash(acquireScript)
},
extendScript: {
value: extendScript,
hash: this._hash(extendScript)
},
releaseScript: {
value: releaseScript,
hash: this._hash(releaseScript)
}
};
}
/**
* Generate a sha1 hash compatible with redis evalsha.
*/
private _hash(value: string): string {
return createHash("sha1").update(value).digest("hex");
}
/**
* Generate a cryptographically random string.
*/
private _random(): string {
return randomBytes(16).toString("hex");
}
/**
* This method runs `.quit()` on all client connections.
*/
public async quit(): Promise<void> {
const results = [];
for (const client of this.clients) {
results.push(client.quit());
}
await Promise.all(results);
}
/**
* This method acquires a lock on the resources for the duration specified by
* the `duration`.
*/
public async acquire(resources: string[], duration: number, settings?: Partial<Settings>): Promise<Lock> {
if (Math.floor(duration) !== duration) {
throw new Error("Duration must be an integer value in milliseconds.");
}
const value = this._random();
try {
const { attempts, start } = await this._execute(
this.scripts.acquireScript,
resources,
[value, duration],
settings
);
// Add 2 milliseconds to the drift to account for Redis expires precision,
// which is 1 ms, plus the configured allowable drift factor.
const drift = Math.round((settings?.driftFactor ?? this.settings.driftFactor) * duration) + 2;
return new Lock(this, resources, value, attempts, start + duration - drift);
} catch (error) {
// If there was an error acquiring the lock, release any partial lock
// state that may exist on a minority of clients.
await this._execute(this.scripts.releaseScript, resources, [value], {
retryCount: 0
}).catch(() => {
// Any error here will be ignored.
});
throw error;
}
}
/**
* This method unlocks the provided lock from all servers still persisting it.
* It will fail with an error if it is unable to release the lock on a quorum
* of nodes, but will make no attempt to restore the lock in the case of a
* failure to release. It is safe to re-attempt a release or to ignore the
* error, as the lock will automatically expire after its timeout.
*/
public async release(lock: Lock, settings?: Partial<Settings>): Promise<ExecutionResult> {
// Immediately invalidate the lock.
lock.expiration = 0;
// Attempt to release the lock.
return this._execute(this.scripts.releaseScript, lock.resources, [lock.value], settings);
}
/**
* This method extends a valid lock by the provided `duration`.
*/
public async extend(existing: Lock, duration: number, settings?: Partial<Settings>): Promise<Lock> {
if (Math.floor(duration) !== duration) {
throw new Error("Duration must be an integer value in milliseconds.");
}
// The lock has already expired.
if (existing.expiration < Date.now()) {
throw new ExecutionError("Cannot extend an already-expired lock.", []);
}
const { attempts, start } = await this._execute(
this.scripts.extendScript,
existing.resources,
[existing.value, duration],
settings
);
// Invalidate the existing lock.
existing.expiration = 0;
// Add 2 milliseconds to the drift to account for Redis expires precision,
// which is 1 ms, plus the configured allowable drift factor.
const drift = Math.round((settings?.driftFactor ?? this.settings.driftFactor) * duration) + 2;
const replacement = new Lock(this, existing.resources, existing.value, attempts, start + duration - drift);
return replacement;
}
/**
* Execute a script on all clients. The resulting promise is resolved or
* rejected as soon as a quorum is reached; the resolution or rejection
* will contain a `stats` property that is resolved once all votes are in.
*/
private async _execute(
script: { value: string; hash: string },
keys: string[],
args: (string | number)[],
_settings?: Partial<Settings>
): Promise<ExecutionResult> {
const settings = _settings
? {
...this.settings,
..._settings
}
: this.settings;
// For the purpose of easy config serialization, we treat a retryCount of
// -1 as equivalent to Infinity.
const maxAttempts = settings.retryCount === -1 ? Infinity : settings.retryCount + 1;
const attempts: Promise<ExecutionStats>[] = [];
while (true) {
const { vote, stats, start } = await this._attemptOperation(script, keys, args);
attempts.push(stats);
// The operation achieved a quorum in favor.
if (vote === "for") {
return { attempts, start };
}
// Wait before reattempting.
if (attempts.length < maxAttempts) {
await new Promise((resolve) => {
setTimeout(
resolve,
Math.max(0, settings.retryDelay + Math.floor((Math.random() * 2 - 1) * settings.retryJitter)),
undefined
);
});
} else {
throw new ExecutionError("The operation was unable to achieve a quorum during its retry window.", attempts);
}
}
}
private async _attemptOperation(
script: { value: string; hash: string },
keys: string[],
args: (string | number)[]
): Promise<
| { vote: "for"; stats: Promise<ExecutionStats>; start: number }
| { vote: "against"; stats: Promise<ExecutionStats>; start: number }
> {
const start = Date.now();
return await new Promise((resolve) => {
const clientResults = [];
for (const client of this.clients) {
clientResults.push(this._attemptOperationOnClient(client, script, keys, args));
}
const stats: ExecutionStats = {
membershipSize: clientResults.length,
quorumSize: Math.floor(clientResults.length / 2) + 1,
votesFor: new Set<Client>(),
votesAgainst: new Map<Client, Error>()
};
let done: () => void;
const statsPromise = new Promise<typeof stats>((resolve) => {
done = () => resolve(stats);
});
// This is the expected flow for all successful and unsuccessful requests.
const onResultResolve = (clientResult: ClientExecutionResult): void => {
switch (clientResult.vote) {
case "for":
stats.votesFor.add(clientResult.client);
break;
case "against":
stats.votesAgainst.set(clientResult.client, clientResult.error);
break;
}
// A quorum has determined a success.
if (stats.votesFor.size === stats.quorumSize) {
resolve({
vote: "for",
stats: statsPromise,
start
});
}
// A quorum has determined a failure.
if (stats.votesAgainst.size === stats.quorumSize) {
resolve({
vote: "against",
stats: statsPromise,
start
});
}
// All votes are in.
if (stats.votesFor.size + stats.votesAgainst.size === stats.membershipSize) {
done();
}
};
// This is unexpected and should crash to prevent undefined behavior.
const onResultReject = (error: Error): void => {
throw error;
};
for (const result of clientResults) {
result.then(onResultResolve, onResultReject);
}
});
}
private async _attemptOperationOnClient(
client: Client,
script: { value: string; hash: string },
keys: string[],
args: (string | number)[]
): Promise<ClientExecutionResult> {
try {
let result: number;
try {
// Attempt to evaluate the script by its hash.
// @ts-expect-error
const shaResult = (await client.evalsha(script.hash, keys.length, [...keys, ...args])) as unknown;
if (typeof shaResult !== "number") {
throw new Error(`Unexpected result of type ${typeof shaResult} returned from redis.`);
}
result = shaResult;
} catch (error) {
// If the redis server does not already have the script cached,
// reattempt the request with the script's raw text.
if (!(error instanceof Error) || !error.message.startsWith("NOSCRIPT")) {
throw error;
}
// @ts-expect-error
const rawResult = (await client.eval(script.value, keys.length, [...keys, ...args])) as unknown;
if (typeof rawResult !== "number") {
throw new Error(`Unexpected result of type ${typeof rawResult} returned from redis.`);
}
result = rawResult;
}
// One or more of the resources was already locked.
if (result !== keys.length) {
throw new ResourceLockedError(
`The operation was applied to: ${result} of the ${keys.length} requested resources.`
);
}
return {
vote: "for",
client,
value: result
};
} catch (error) {
if (!(error instanceof Error)) {
throw new Error(`Unexpected type ${typeof error} thrown with value: ${error}`);
}
// Emit the error on the redlock instance for observability.
this.emit("error", error);
return {
vote: "against",
client,
error
};
}
}
/**
* Wrap and execute a routine in the context of an auto-extending lock,
* returning a promise of the routine's value. In the case that auto-extension
* fails, an AbortSignal will be updated to indicate that abortion of the
* routine is in order, and to pass along the encountered error.
*
* @example
* ```ts
* await redlock.using([senderId, recipientId], 5000, { retryCount: 5 }, async (signal) => {
* const senderBalance = await getBalance(senderId);
* const recipientBalance = await getBalance(recipientId);
*
* if (senderBalance < amountToSend) {
* throw new Error("Insufficient balance.");
* }
*
* // The abort signal will be true if:
* // 1. the above took long enough that the lock needed to be extended
* // 2. redlock was unable to extend the lock
* //
* // In such a case, exclusivity can no longer be guaranteed for further
* // operations, and should be handled as an exceptional case.
* if (signal.aborted) {
* throw signal.error;
* }
*
* await setBalances([
* {id: senderId, balance: senderBalance - amountToSend},
* {id: recipientId, balance: recipientBalance + amountToSend},
* ]);
* });
* ```
*/
public async using<T>(
resources: string[],
duration: number,
settings: Partial<Settings>,
routine?: (signal: RedlockAbortSignal) => Promise<T>
): Promise<T>;
public async using<T>(
resources: string[],
duration: number,
routine: (signal: RedlockAbortSignal) => Promise<T>
): Promise<T>;
public async using<T>(
resources: string[],
duration: number,
settingsOrRoutine: undefined | Partial<Settings> | ((signal: RedlockAbortSignal) => Promise<T>),
optionalRoutine?: (signal: RedlockAbortSignal) => Promise<T>
): Promise<T> {
if (Math.floor(duration) !== duration) {
throw new Error("Duration must be an integer value in milliseconds.");
}
const settings =
settingsOrRoutine && typeof settingsOrRoutine !== "function"
? {
...this.settings,
...settingsOrRoutine
}
: this.settings;
const routine = optionalRoutine ?? settingsOrRoutine;
if (typeof routine !== "function") {
throw new Error("INVARIANT: routine is not a function.");
}
if (settings.automaticExtensionThreshold > duration - 100) {
throw new Error(
"A lock `duration` must be at least 100ms greater than the `automaticExtensionThreshold` setting."
);
}
// The AbortController/AbortSignal pattern allows the routine to be notified
// of a failure to extend the lock, and subsequent expiration. In the event
// of an abort, the error object will be made available at `signal.error`.
const controller = new AbortController();
const signal = controller.signal as RedlockAbortSignal;
function queue(): void {
timeout = setTimeout(
() => (extension = extend()),
lock.expiration - Date.now() - settings.automaticExtensionThreshold
);
}
async function extend(): Promise<void> {
timeout = undefined;
try {
lock = await lock.extend(duration);
queue();
} catch (error) {
if (!(error instanceof Error)) {
throw new Error(`Unexpected thrown ${typeof error}: ${error}.`);
}
if (lock.expiration > Date.now()) {
return (extension = extend());
}
signal.error = error instanceof Error ? error : new Error(`${error}`);
controller.abort();
}
}
let timeout: undefined | NodeJS.Timeout;
let extension: undefined | Promise<void>;
let lock = await this.acquire(resources, duration, settings);
queue();
try {
return await routine(signal);
} finally {
// Clean up the timer.
if (timeout) {
clearTimeout(timeout);
timeout = undefined;
}
// Wait for an in-flight extension to finish.
if (extension) {
await extension.catch(() => {
// An error here doesn't matter at all, because the routine has
// already completed, and a release will be attempted regardless. The
// only reason for waiting here is to prevent possible contention
// between the extension and release.
});
}
await lock.release();
}
}
}
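A minimal acquire/release sketch for the vendored Redlock above, assuming a single local Redis instance; resource names and durations are placeholders.
import { Redis } from "ioredis";
const redlock = new Redlock([new Redis("redis://localhost:6379")], { retryCount: 3, retryDelay: 200 });
const withProjectLock = async (projectId: string, fn: () => Promise<void>) => {
const lock = await redlock.acquire([`locks:project:${projectId}`], 2000);
try {
await fn();
} finally {
await lock.release();
}
};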

View File

@@ -7,7 +7,3 @@ export const zpStr = <T extends ZodTypeAny>(schema: T, opt: { stripNull: boolean
if (typeof val !== "string") return val;
return val.trim() || undefined;
}, schema);
export const zodBuffer = z.custom<Buffer>((data) => Buffer.isBuffer(data) || data instanceof Uint8Array, {
message: "Expected binary data (Buffer Or Uint8Array)"
});
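A behavior sketch for the two helpers above; the values are illustrative only.
import { z } from "zod";
const smtpHost = zpStr(z.string().optional());
smtpHost.parse("  smtp.example.com  "); // -> "smtp.example.com" (trimmed)
smtpHost.parse("");                     // -> undefined (empty strings are stripped)
zodBuffer.parse(Buffer.from([1, 2, 3]));  // passes
// zodBuffer.parse("not-binary");         // would throw: Expected binary data (Buffer Or Uint8Array)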

View File

@@ -7,7 +7,6 @@ import {
TScanFullRepoEventPayload,
TScanPushEventPayload
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { TSyncSecretsDTO } from "@app/services/secret/secret-types";
export enum QueueName {
SecretRotation = "secret-rotation",
@@ -22,9 +21,7 @@ export enum QueueName {
SecretFullRepoScan = "secret-full-repo-scan",
SecretPushEventScan = "secret-push-event-scan",
UpgradeProjectToGhost = "upgrade-project-to-ghost",
DynamicSecretRevocation = "dynamic-secret-revocation",
SecretReplication = "secret-replication",
SecretSync = "secret-sync" // parent queue to push integration sync, webhook, and secret replication
DynamicSecretRevocation = "dynamic-secret-revocation"
}
export enum QueueJobs {
@@ -40,9 +37,7 @@ export enum QueueJobs {
SecretScan = "secret-scan",
UpgradeProjectToGhost = "upgrade-project-to-ghost-job",
DynamicSecretRevocation = "dynamic-secret-revocation",
DynamicSecretPruning = "dynamic-secret-pruning",
SecretReplication = "secret-replication",
SecretSync = "secret-sync" // parent queue to push integration sync, webhook, and secret replication
DynamicSecretPruning = "dynamic-secret-pruning"
}
export type TQueueJobTypes = {
@@ -121,14 +116,6 @@ export type TQueueJobTypes = {
dynamicSecretCfgId: string;
};
};
[QueueName.SecretReplication]: {
name: QueueJobs.SecretReplication;
payload: TSyncSecretsDTO;
};
[QueueName.SecretSync]: {
name: QueueJobs.SecretSync;
payload: TSyncSecretsDTO;
};
};
export type TQueueServiceFactory = ReturnType<typeof queueServiceFactory>;
@@ -145,7 +132,7 @@ export const queueServiceFactory = (redisUrl: string) => {
const start = <T extends QueueName>(
name: T,
jobFn: (job: Job<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>, token?: string) => Promise<void>,
jobFn: (job: Job<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>) => Promise<void>,
queueSettings: Omit<QueueOptions, "connection"> = {}
) => {
if (queueContainer[name]) {
@@ -179,7 +166,7 @@ export const queueServiceFactory = (redisUrl: string) => {
name: T,
job: TQueueJobTypes[T]["name"],
data: TQueueJobTypes[T]["payload"],
opts?: JobsOptions & { jobId?: string }
opts: JobsOptions & { jobId?: string }
) => {
const q = queueContainer[name];

View File

@@ -5,6 +5,7 @@ import { createTransport } from "nodemailer";
import { formatSmtpConfig, getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { getTlsOption } from "@app/services/smtp/smtp-service";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
type BootstrapOpt = {
@@ -43,7 +44,7 @@ export const bootstrapCheck = async ({ db }: BootstrapOpt) => {
console.info("Testing smtp connection");
const smtpCfg = formatSmtpConfig();
await createTransport(smtpCfg)
await createTransport({ ...smtpCfg, ...getTlsOption(smtpCfg.host, smtpCfg.secure) })
.verify()
.then(async () => {
console.info("SMTP successfully connected");

View File

@@ -28,7 +28,7 @@ export const readLimit: RateLimitOptions = {
// POST, PATCH, PUT, DELETE endpoints
export const writeLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
max: 200, // previous limit of 50 was too low and FA was having issues, so increasing it - maidul
max: 50,
keyGenerator: (req) => req.realIp
};

View File

@@ -44,7 +44,6 @@ import { secretApprovalRequestDALFactory } from "@app/ee/services/secret-approva
import { secretApprovalRequestReviewerDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-reviewer-dal";
import { secretApprovalRequestSecretDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-secret-dal";
import { secretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service";
import { secretReplicationServiceFactory } from "@app/ee/services/secret-replication/secret-replication-service";
import { secretRotationDALFactory } from "@app/ee/services/secret-rotation/secret-rotation-dal";
import { secretRotationQueueFactory } from "@app/ee/services/secret-rotation/secret-rotation-queue";
import { secretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
@@ -97,9 +96,6 @@ import { integrationDALFactory } from "@app/services/integration/integration-dal
import { integrationServiceFactory } from "@app/services/integration/integration-service";
import { integrationAuthDALFactory } from "@app/services/integration-auth/integration-auth-dal";
import { integrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service";
import { kmsDALFactory } from "@app/services/kms/kms-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { incidentContactDALFactory } from "@app/services/org/incident-contacts-dal";
import { orgBotDALFactory } from "@app/services/org/org-bot-dal";
import { orgDALFactory } from "@app/services/org/org-dal";
@@ -244,8 +240,8 @@ export const registerRoutes = async (
const sapApproverDAL = secretApprovalPolicyApproverDALFactory(db);
const secretApprovalPolicyDAL = secretApprovalPolicyDALFactory(db);
const secretApprovalRequestDAL = secretApprovalRequestDALFactory(db);
const secretApprovalRequestReviewerDAL = secretApprovalRequestReviewerDALFactory(db);
const secretApprovalRequestSecretDAL = secretApprovalRequestSecretDALFactory(db);
const sarReviewerDAL = secretApprovalRequestReviewerDALFactory(db);
const sarSecretDAL = secretApprovalRequestSecretDALFactory(db);
const secretRotationDAL = secretRotationDALFactory(db);
const snapshotDAL = snapshotDALFactory(db);
@@ -264,9 +260,6 @@ export const registerRoutes = async (
const dynamicSecretDAL = dynamicSecretDALFactory(db);
const dynamicSecretLeaseDAL = dynamicSecretLeaseDALFactory(db);
const kmsDAL = kmsDALFactory(db);
const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
const permissionService = permissionServiceFactory({
permissionDAL,
orgRoleDAL,
@@ -275,12 +268,6 @@ export const registerRoutes = async (
projectDAL
});
const licenseService = licenseServiceFactory({ permissionService, orgDAL, licenseDAL, keyStore });
const kmsService = kmsServiceFactory({
kmsRootConfigDAL,
keyStore,
kmsDAL
});
const trustedIpService = trustedIpServiceFactory({
licenseService,
projectDAL,
@@ -301,7 +288,7 @@ export const registerRoutes = async (
permissionService,
auditLogStreamDAL
});
const secretApprovalPolicyService = secretApprovalPolicyServiceFactory({
const sapService = secretApprovalPolicyServiceFactory({
projectMembershipDAL,
projectEnvDAL,
secretApprovalPolicyApproverDAL: sapApproverDAL,
@@ -502,7 +489,7 @@ export const registerRoutes = async (
projectBotDAL,
projectMembershipDAL,
secretApprovalRequestDAL,
secretApprovalSecretDAL: secretApprovalRequestSecretDAL,
secretApprovalSecretDAL: sarSecretDAL,
projectUserMembershipRoleDAL
});
@@ -600,7 +587,6 @@ export const registerRoutes = async (
secretVersionTagDAL
});
const secretImportService = secretImportServiceFactory({
licenseService,
projectEnvDAL,
folderDAL,
permissionService,
@@ -635,18 +621,19 @@ export const registerRoutes = async (
secretSharingDAL
});
const secretApprovalRequestService = secretApprovalRequestServiceFactory({
const sarService = secretApprovalRequestServiceFactory({
permissionService,
projectBotService,
folderDAL,
secretDAL,
secretTagDAL,
secretApprovalRequestSecretDAL,
secretApprovalRequestReviewerDAL,
secretApprovalRequestSecretDAL: sarSecretDAL,
secretApprovalRequestReviewerDAL: sarReviewerDAL,
projectDAL,
secretVersionDAL,
secretBlindIndexDAL,
secretApprovalRequestDAL,
secretService,
snapshotService,
secretVersionTagDAL,
secretQueueService
@@ -675,23 +662,6 @@ export const registerRoutes = async (
accessApprovalPolicyApproverDAL
});
const secretReplicationService = secretReplicationServiceFactory({
secretTagDAL,
secretVersionTagDAL,
secretDAL,
secretVersionDAL,
secretImportDAL,
keyStore,
queueService,
folderDAL,
secretApprovalPolicyService,
secretBlindIndexDAL,
secretApprovalRequestDAL,
secretApprovalRequestSecretDAL,
secretQueueService,
projectMembershipDAL,
projectBotService
});
const secretRotationQueue = secretRotationQueueFactory({
telemetryService,
secretRotationDAL,
@@ -835,7 +805,6 @@ export const registerRoutes = async (
await telemetryQueue.startTelemetryCheck();
await dailyResourceCleanUp.startCleanUp();
await kmsService.startService();
// inject all services
server.decorate<FastifyZodProvider["services"]>("services", {
@@ -857,7 +826,6 @@ export const registerRoutes = async (
projectEnv: projectEnvService,
projectRole: projectRoleService,
secret: secretService,
secretReplication: secretReplicationService,
secretTag: secretTagService,
folder: folderService,
secretImport: secretImportService,
@@ -874,10 +842,10 @@ export const registerRoutes = async (
identityGcpAuth: identityGcpAuthService,
identityAwsAuth: identityAwsAuthService,
identityAzureAuth: identityAzureAuthService,
secretApprovalPolicy: sapService,
accessApprovalPolicy: accessApprovalPolicyService,
accessApprovalRequest: accessApprovalRequestService,
secretApprovalPolicy: secretApprovalPolicyService,
secretApprovalRequest: secretApprovalRequestService,
secretApprovalRequest: sarService,
secretRotation: secretRotationService,
dynamicSecret: dynamicSecretService,
dynamicSecretLease: dynamicSecretLeaseService,
@@ -919,8 +887,7 @@ export const registerRoutes = async (
emailConfigured: z.boolean().optional(),
inviteOnlySignup: z.boolean().optional(),
redisConfigured: z.boolean().optional(),
secretScanningConfigured: z.boolean().optional(),
samlDefaultOrgSlug: z.string().optional()
secretScanningConfigured: z.boolean().optional()
})
}
},
@@ -933,8 +900,7 @@ export const registerRoutes = async (
emailConfigured: cfg.isSmtpConfigured,
inviteOnlySignup: Boolean(serverCfg.allowSignUp),
redisConfigured: cfg.isRedisConfigured,
secretScanningConfigured: cfg.isSecretScanningConfigured,
samlDefaultOrgSlug: cfg.samlDefaultOrgSlug
secretScanningConfigured: cfg.isSecretScanningConfigured
};
}
});

View File

@@ -8,7 +8,7 @@ import { writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { IntegrationMetadataSchema } from "@app/services/integration/integration-schema";
import { IntegrationMappingBehavior } from "@app/services/integration-auth/integration-list";
import { PostHogEventTypes, TIntegrationCreatedEvent } from "@app/services/telemetry/telemetry-types";
export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
@@ -42,11 +42,39 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
targetService: z.string().trim().optional().describe(INTEGRATION.CREATE.targetService),
targetServiceId: z.string().trim().optional().describe(INTEGRATION.CREATE.targetServiceId),
owner: z.string().trim().optional().describe(INTEGRATION.CREATE.owner),
url: z.string().trim().optional().describe(INTEGRATION.CREATE.url),
path: z.string().trim().optional().describe(INTEGRATION.CREATE.path),
region: z.string().trim().optional().describe(INTEGRATION.CREATE.region),
scope: z.string().trim().optional().describe(INTEGRATION.CREATE.scope),
metadata: IntegrationMetadataSchema.default({})
metadata: z
.object({
secretPrefix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretPrefix),
secretSuffix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretSuffix),
initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir),
mappingBehavior: z
.nativeEnum(IntegrationMappingBehavior)
.optional()
.describe(INTEGRATION.CREATE.metadata.mappingBehavior),
shouldAutoRedeploy: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldAutoRedeploy),
secretGCPLabel: z
.object({
labelName: z.string(),
labelValue: z.string()
})
.optional()
.describe(INTEGRATION.CREATE.metadata.secretGCPLabel),
secretAWSTag: z
.array(
z.object({
key: z.string(),
value: z.string()
})
)
.optional()
.describe(INTEGRATION.CREATE.metadata.secretAWSTag),
kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId),
shouldDisableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete)
})
.default({})
}),
response: {
200: z.object({
@@ -132,7 +160,33 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
targetEnvironment: z.string().trim().describe(INTEGRATION.UPDATE.targetEnvironment),
owner: z.string().trim().describe(INTEGRATION.UPDATE.owner),
environment: z.string().trim().describe(INTEGRATION.UPDATE.environment),
metadata: IntegrationMetadataSchema.optional()
metadata: z
.object({
secretPrefix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretPrefix),
secretSuffix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretSuffix),
initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir),
mappingBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.mappingBehavior),
shouldAutoRedeploy: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldAutoRedeploy),
secretGCPLabel: z
.object({
labelName: z.string(),
labelValue: z.string()
})
.optional()
.describe(INTEGRATION.CREATE.metadata.secretGCPLabel),
secretAWSTag: z
.array(
z.object({
key: z.string(),
value: z.string()
})
)
.optional()
.describe(INTEGRATION.CREATE.metadata.secretAWSTag),
kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId),
shouldDisableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete)
})
.optional()
}),
response: {
200: z.object({

View File

@@ -29,8 +29,7 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) =>
import: z.object({
environment: z.string().trim().describe(SECRET_IMPORTS.CREATE.import.environment),
path: z.string().trim().transform(removeTrailingSlash).describe(SECRET_IMPORTS.CREATE.import.path)
}),
isReplication: z.boolean().default(false).describe(SECRET_IMPORTS.CREATE.isReplication)
})
}),
response: {
200: z.object({
@@ -211,49 +210,6 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) =>
}
});
server.route({
method: "POST",
url: "/:secretImportId/replication-resync",
config: {
rateLimit: secretsLimit
},
schema: {
description: "Resync secret replication of secret imports",
security: [
{
bearerAuth: []
}
],
params: z.object({
secretImportId: z.string().trim().describe(SECRET_IMPORTS.UPDATE.secretImportId)
}),
body: z.object({
workspaceId: z.string().trim().describe(SECRET_IMPORTS.UPDATE.workspaceId),
environment: z.string().trim().describe(SECRET_IMPORTS.UPDATE.environment),
path: z.string().trim().default("/").transform(removeTrailingSlash).describe(SECRET_IMPORTS.UPDATE.path)
}),
response: {
200: z.object({
message: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { message } = await server.services.secretImport.resyncSecretImportReplication({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.params.secretImportId,
...req.body,
projectId: req.body.workspaceId
});
return { message };
}
});
server.route({
method: "GET",
url: "/",
@@ -276,9 +232,11 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) =>
200: z.object({
message: z.string(),
secretImports: SecretImportsSchema.omit({ importEnv: true })
.extend({
importEnv: z.object({ name: z.string(), slug: z.string(), id: z.string() })
})
.merge(
z.object({
importEnv: z.object({ name: z.string(), slug: z.string(), id: z.string() })
})
)
.array()
})
}

View File

@@ -80,8 +80,7 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
body: z.object({
email: z.string().trim(),
providerAuthToken: z.string().trim().optional(),
clientProof: z.string().trim(),
captchaToken: z.string().trim().optional()
clientProof: z.string().trim()
}),
response: {
200: z.discriminatedUnion("mfaEnabled", [
@@ -107,7 +106,6 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
const data = await server.services.login.loginExchangeClientProof({
captchaToken: req.body.captchaToken,
email: req.body.email,
ip: req.realIp,
userAgent,

View File

@@ -9,6 +9,7 @@ import {
ServiceTokenScopes
} from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { CommitType } from "@app/ee/services/secret-approval-request/secret-approval-request-types";
import { RAW_SECRETS, SECRETS } from "@app/lib/api-docs";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
@@ -18,7 +19,6 @@ import { getUserAgentType } from "@app/server/plugins/audit-log";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
import { ProjectFilterType } from "@app/services/project/project-types";
import { SecretOperations } from "@app/services/secret/secret-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
import { secretRawSchema } from "../sanitizedSchemas";
@@ -902,7 +902,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
projectId,
policy,
data: {
[SecretOperations.Create]: [
[CommitType.Create]: [
{
secretName: req.params.secretName,
secretValueCiphertext,
@@ -1084,7 +1084,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
projectId,
policy,
data: {
[SecretOperations.Update]: [
[CommitType.Update]: [
{
secretName: req.params.secretName,
newSecretName,
@@ -1234,7 +1234,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
projectId,
policy,
data: {
[SecretOperations.Delete]: [
[CommitType.Delete]: [
{
secretName: req.params.secretName
}
@@ -1364,7 +1364,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
projectId,
policy,
data: {
[SecretOperations.Create]: inputSecrets
[CommitType.Create]: inputSecrets
}
});
@@ -1491,7 +1491,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
projectId,
policy,
data: {
[SecretOperations.Update]: inputSecrets.filter(({ type }) => type === "shared")
[CommitType.Update]: inputSecrets.filter(({ type }) => type === "shared")
}
});
@@ -1606,7 +1606,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
projectId,
policy,
data: {
[SecretOperations.Delete]: inputSecrets.filter(({ type }) => type === "shared")
[CommitType.Delete]: inputSecrets.filter(({ type }) => type === "shared")
}
});
await server.services.auditLog.createAuditLog({

View File

@@ -3,7 +3,6 @@ import jwt from "jsonwebtoken";
import { TUsers, UserDeviceSchema } from "@app/db/schemas";
import { isAuthMethodSaml } from "@app/ee/services/permission/permission-fns";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { generateSrpServerKey, srpCheckClientProof } from "@app/lib/crypto";
import { BadRequestError, DatabaseError, UnauthorizedError } from "@app/lib/errors";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
@@ -177,16 +176,12 @@ export const authLoginServiceFactory = ({
clientProof,
ip,
userAgent,
providerAuthToken,
captchaToken
providerAuthToken
}: TLoginClientProofDTO) => {
const appCfg = getConfig();
const userEnc = await userDAL.findUserEncKeyByUsername({
username: email
});
if (!userEnc) throw new Error("Failed to find user");
const user = await userDAL.findById(userEnc.userId);
const cfg = getConfig();
let authMethod = AuthMethod.EMAIL;
@@ -201,31 +196,6 @@ export const authLoginServiceFactory = ({
}
}
if (
user.consecutiveFailedPasswordAttempts &&
user.consecutiveFailedPasswordAttempts >= 10 &&
Boolean(appCfg.CAPTCHA_SECRET)
) {
if (!captchaToken) {
throw new BadRequestError({
name: "Captcha Required",
message: "Accomplish the required captcha by logging in via Web"
});
}
// validate captcha token
const response = await request.postForm<{ success: boolean }>("https://api.hcaptcha.com/siteverify", {
response: captchaToken,
secret: appCfg.CAPTCHA_SECRET
});
if (!response.data.success) {
throw new BadRequestError({
name: "Invalid Captcha"
});
}
}
if (!userEnc.serverPrivateKey || !userEnc.clientPublicKey) throw new Error("Failed to authenticate. Try again?");
const isValidClientProof = await srpCheckClientProof(
userEnc.salt,
@@ -234,31 +204,15 @@ export const authLoginServiceFactory = ({
userEnc.clientPublicKey,
clientProof
);
if (!isValidClientProof) {
await userDAL.update(
{ id: userEnc.userId },
{
$incr: {
consecutiveFailedPasswordAttempts: 1
}
}
);
throw new Error("Failed to authenticate. Try again?");
}
if (!isValidClientProof) throw new Error("Failed to authenticate. Try again?");
await userDAL.updateUserEncryptionByUserId(userEnc.userId, {
serverPrivateKey: null,
clientPublicKey: null
});
await userDAL.updateById(userEnc.userId, {
consecutiveFailedPasswordAttempts: 0
});
// send multi-factor auth token if they have it enabled
if (userEnc.isMfaEnabled && userEnc.email) {
const user = await userDAL.findById(userEnc.userId);
enforceUserLockStatus(Boolean(user.isLocked), user.temporaryLockDateEnd);
const mfaToken = jwt.sign(

View File

@@ -12,7 +12,6 @@ export type TLoginClientProofDTO = {
providerAuthToken?: string;
ip: string;
userAgent: string;
captchaToken?: string;
};
export type TVerifyMfaTokenDTO = {

View File

@@ -231,7 +231,7 @@ export const authSignupServiceFactory = ({
const accessToken = jwt.sign(
{
authMethod: authMethod || AuthMethod.EMAIL,
authMethod: AuthMethod.EMAIL,
authTokenType: AuthTokenType.ACCESS_TOKEN,
userId: updateduser.info.id,
tokenVersionId: tokenSession.id,
@@ -244,7 +244,7 @@ export const authSignupServiceFactory = ({
const refreshToken = jwt.sign(
{
authMethod: authMethod || AuthMethod.EMAIL,
authMethod: AuthMethod.EMAIL,
authTokenType: AuthTokenType.REFRESH_TOKEN,
userId: updateduser.info.id,
tokenVersionId: tokenSession.id,

View File

@@ -199,7 +199,6 @@ export const integrationAuthServiceFactory = ({
projectId,
namespace,
integration,
url,
algorithm: SecretEncryptionAlgo.AES_256_GCM,
keyEncoding: SecretKeyEncoding.UTF8,
...(integration === Integrations.GCP_SECRET_MANAGER

View File

@@ -30,8 +30,7 @@ export enum Integrations {
DIGITAL_OCEAN_APP_PLATFORM = "digital-ocean-app-platform",
CLOUD_66 = "cloud-66",
NORTHFLANK = "northflank",
HASURA_CLOUD = "hasura-cloud",
RUNDECK = "rundeck"
HASURA_CLOUD = "hasura-cloud"
}
export enum IntegrationType {
@@ -369,15 +368,6 @@ export const getIntegrationOptions = async () => {
type: "pat",
clientId: "",
docsLink: ""
},
{
name: "Rundeck",
slug: "rundeck",
image: "Rundeck.svg",
isAvailable: true,
type: "pat",
clientId: "",
docsLink: ""
}
];

View File

@@ -27,11 +27,9 @@ import { z } from "zod";
import { SecretType, TIntegrationAuths, TIntegrations, TSecrets } from "@app/db/schemas";
import { request } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TCreateManySecretsRawFn, TUpdateManySecretsRawFn } from "@app/services/secret/secret-types";
import { TIntegrationDALFactory } from "../integration/integration-dal";
import { IntegrationMetadataSchema } from "../integration/integration-schema";
import {
IntegrationInitialSyncBehavior,
IntegrationMappingBehavior,
@@ -523,42 +521,18 @@ const syncSecretsAWSParameterStore = async ({
.promise();
}
// case: secret exists in AWS parameter store
} else {
} else if (awsParameterStoreSecretsObj[key].Value !== secrets[key].value) {
// case: secret value doesn't match one in AWS parameter store
// -> update secret
if (awsParameterStoreSecretsObj[key].Value !== secrets[key].value) {
await ssm
.putParameter({
Name: `${integration.path}${key}`,
Type: "SecureString",
Value: secrets[key].value,
Overwrite: true
})
.promise();
}
if (awsParameterStoreSecretsObj[key].Name) {
try {
await ssm
.addTagsToResource({
ResourceType: "Parameter",
ResourceId: awsParameterStoreSecretsObj[key].Name as string,
Tags: metadata.secretAWSTag
? metadata.secretAWSTag.map((tag: { key: string; value: string }) => ({
Key: tag.key,
Value: tag.value
}))
: []
})
.promise();
} catch (err) {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
if ((err as any).code === "AccessDeniedException") {
logger.error(
`AWS Parameter Store Error [integration=${integration.id}]: double check AWS account permissions (refer to the Infisical docs)`
);
}
}
}
await ssm
.putParameter({
Name: `${integration.path}${key}`,
Type: "SecureString",
Value: secrets[key].value,
Overwrite: true
// Tags: metadata.secretAWSTag ? [{ Key: metadata.secretAWSTag.key, Value: metadata.secretAWSTag.value }] : []
})
.promise();
}
await new Promise((resolve) => {
@@ -1364,41 +1338,38 @@ const syncSecretsGitHub = async ({
}
}
const metadata = IntegrationMetadataSchema.parse(integration.metadata);
if (metadata.shouldEnableDelete) {
for await (const encryptedSecret of encryptedSecrets) {
if (
!(encryptedSecret.name in secrets) &&
!(appendices?.prefix !== undefined && !encryptedSecret.name.startsWith(appendices?.prefix)) &&
!(appendices?.suffix !== undefined && !encryptedSecret.name.endsWith(appendices?.suffix))
) {
switch (integration.scope) {
case GithubScope.Org: {
await octokit.request("DELETE /orgs/{org}/actions/secrets/{secret_name}", {
org: integration.owner as string,
for await (const encryptedSecret of encryptedSecrets) {
if (
!(encryptedSecret.name in secrets) &&
!(appendices?.prefix !== undefined && !encryptedSecret.name.startsWith(appendices?.prefix)) &&
!(appendices?.suffix !== undefined && !encryptedSecret.name.endsWith(appendices?.suffix))
) {
switch (integration.scope) {
case GithubScope.Org: {
await octokit.request("DELETE /orgs/{org}/actions/secrets/{secret_name}", {
org: integration.owner as string,
secret_name: encryptedSecret.name
});
break;
}
case GithubScope.Env: {
await octokit.request(
"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}",
{
repository_id: Number(integration.appId),
environment_name: integration.targetEnvironmentId as string,
secret_name: encryptedSecret.name
});
break;
}
case GithubScope.Env: {
await octokit.request(
"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}",
{
repository_id: Number(integration.appId),
environment_name: integration.targetEnvironmentId as string,
secret_name: encryptedSecret.name
}
);
break;
}
default: {
await octokit.request("DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", {
owner: integration.owner as string,
repo: integration.app as string,
secret_name: encryptedSecret.name
});
break;
}
}
);
break;
}
default: {
await octokit.request("DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", {
owner: integration.owner as string,
repo: integration.app as string,
secret_name: encryptedSecret.name
});
break;
}
}
}
@@ -1921,13 +1892,13 @@ const syncSecretsGitLab = async ({
return allEnvVariables;
};
const metadata = IntegrationMetadataSchema.parse(integration.metadata);
const allEnvVariables = await getAllEnvVariables(integration?.appId as string, accessToken);
const getSecretsRes: GitLabSecret[] = allEnvVariables
.filter((secret: GitLabSecret) => secret.environment_scope === integration.targetEnvironment)
.filter((gitLabSecret) => {
let isValid = true;
const metadata = z.record(z.any()).parse(integration.metadata);
if (metadata.secretPrefix && !gitLabSecret.key.startsWith(metadata.secretPrefix)) {
isValid = false;
}
@@ -1947,8 +1918,8 @@ const syncSecretsGitLab = async ({
{
key,
value: secrets[key].value,
protected: Boolean(metadata.shouldProtectSecrets),
masked: Boolean(metadata.shouldMaskSecrets),
protected: false,
masked: false,
raw: false,
environment_scope: integration.targetEnvironment
},
@@ -1965,9 +1936,7 @@ const syncSecretsGitLab = async ({
`${gitLabApiUrl}/v4/projects/${integration?.appId}/variables/${existingSecret.key}?filter[environment_scope]=${integration.targetEnvironment}`,
{
...existingSecret,
value: secrets[existingSecret.key].value,
protected: Boolean(metadata.shouldProtectSecrets),
masked: Boolean(metadata.shouldMaskSecrets)
value: secrets[existingSecret.key].value
},
{
headers: {
@@ -2756,20 +2725,6 @@ const syncSecretsCloudflarePages = async ({
}
}
);
const metadata = z.record(z.any()).parse(integration.metadata);
if (metadata.shouldAutoRedeploy) {
await request.post(
`${IntegrationUrls.CLOUDFLARE_PAGES_API_URL}/client/v4/accounts/${accessId}/pages/projects/${integration.app}/deployments`,
{},
{
headers: {
Authorization: `Bearer ${accessToken}`,
Accept: "application/json"
}
}
);
}
};
/**
@@ -3375,82 +3330,6 @@ const syncSecretsHasuraCloud = async ({
}
};
/** Sync/push [secrets] to Rundeck
* @param {Object} obj
* @param {TIntegrations} obj.integration - integration details
* @param {Object} obj.secrets - secrets to push to integration (object where keys are secret keys and values are secret values)
* @param {String} obj.accessToken - access token for Rundeck integration
*/
const syncSecretsRundeck = async ({
integration,
secrets,
accessToken
}: {
integration: TIntegrations;
secrets: Record<string, { value: string; comment?: string }>;
accessToken: string;
}) => {
interface RundeckSecretResource {
name: string;
}
interface RundeckSecretsGetRes {
resources: RundeckSecretResource[];
}
let existingRundeckSecrets: string[] = [];
try {
const listResult = await request.get<RundeckSecretsGetRes>(
`${integration.url}/api/44/storage/${integration.path}`,
{
headers: {
"X-Rundeck-Auth-Token": accessToken
}
}
);
existingRundeckSecrets = listResult.data.resources.map((res) => res.name);
} catch (err) {
logger.info("No existing rundeck secrets");
}
try {
for await (const [key, value] of Object.entries(secrets)) {
if (existingRundeckSecrets.includes(key)) {
await request.put(`${integration.url}/api/44/storage/${integration.path}/${key}`, value.value, {
headers: {
"X-Rundeck-Auth-Token": accessToken,
"Content-Type": "application/x-rundeck-data-password"
}
});
} else {
await request.post(`${integration.url}/api/44/storage/${integration.path}/${key}`, value.value, {
headers: {
"X-Rundeck-Auth-Token": accessToken,
"Content-Type": "application/x-rundeck-data-password"
}
});
}
}
for await (const existingSecret of existingRundeckSecrets) {
if (!(existingSecret in secrets)) {
await request.delete(`${integration.url}/api/44/storage/${integration.path}/${existingSecret}`, {
headers: {
"X-Rundeck-Auth-Token": accessToken
}
});
}
}
} catch (err: unknown) {
throw new Error(
`Ensure that the provided Rundeck URL is accessible by Infisical and that the linked API token has sufficient permissions.\n\n${
(err as Error).message
}`
);
}
};
/**
* Sync/push [secrets] to [app] in integration named [integration]
*
@@ -3717,13 +3596,6 @@ export const syncIntegrationSecrets = async ({
accessToken
});
break;
case Integrations.RUNDECK:
await syncSecretsRundeck({
integration,
secrets,
accessToken
});
break;
default:
throw new BadRequestError({ message: "Invalid integration" });
}
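For reference, the update-only-when-changed pattern in the AWS Parameter Store hunk above can be sketched as follows. This is a minimal illustration using the aws-sdk v2 SSM client; the helper name and the existing/tags parameters are assumptions for the sketch, not the project's actual code.
import AWS from "aws-sdk";
// Minimal sketch: push a key/value into SSM Parameter Store, overwrite only
// when the stored value differs, then apply resource tags on a best-effort basis.
const upsertParameter = async (
  ssm: AWS.SSM,
  pathPrefix: string,
  key: string,
  value: string,
  existing?: { Value?: string; Name?: string },
  tags: { Key: string; Value: string }[] = []
) => {
  if (!existing || existing.Value !== value) {
    // parameter is missing or has drifted -> (re)write it
    await ssm
      .putParameter({ Name: `${pathPrefix}${key}`, Type: "SecureString", Value: value, Overwrite: true })
      .promise();
  }
  if (existing?.Name && tags.length) {
    // tagging can fail under restricted IAM policies; treat it as non-fatal
    await ssm
      .addTagsToResource({ ResourceType: "Parameter", ResourceId: existing.Name, Tags: tags })
      .promise()
      .catch(() => undefined);
  }
};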

View File

@@ -1,37 +0,0 @@
import { z } from "zod";
import { INTEGRATION } from "@app/lib/api-docs";
import { IntegrationMappingBehavior } from "../integration-auth/integration-list";
export const IntegrationMetadataSchema = z.object({
secretPrefix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretPrefix),
secretSuffix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretSuffix),
initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir),
mappingBehavior: z
.nativeEnum(IntegrationMappingBehavior)
.optional()
.describe(INTEGRATION.CREATE.metadata.mappingBehavior),
shouldAutoRedeploy: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldAutoRedeploy),
secretGCPLabel: z
.object({
labelName: z.string(),
labelValue: z.string()
})
.optional()
.describe(INTEGRATION.CREATE.metadata.secretGCPLabel),
secretAWSTag: z
.array(
z.object({
key: z.string(),
value: z.string()
})
)
.optional()
.describe(INTEGRATION.CREATE.metadata.secretAWSTag),
kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId),
shouldDisableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete),
shouldEnableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldEnableDelete),
shouldMaskSecrets: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldMaskSecrets),
shouldProtectSecrets: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldProtectSecrets)
});

View File

@@ -43,7 +43,6 @@ export const integrationServiceFactory = ({
scope,
actorId,
region,
url,
isActive,
metadata,
secretPath,
@@ -88,7 +87,6 @@ export const integrationServiceFactory = ({
region,
scope,
owner,
url,
appId,
path,
app,

View File

@@ -12,7 +12,6 @@ export type TCreateIntegrationDTO = {
targetService?: string;
targetServiceId?: string;
owner?: string;
url?: string;
path?: string;
region?: string;
scope?: string;
@@ -29,9 +28,6 @@ export type TCreateIntegrationDTO = {
}[];
kmsKeyId?: string;
shouldDisableDelete?: boolean;
shouldMaskSecrets?: boolean;
shouldProtectSecrets?: boolean;
shouldEnableDelete?: boolean;
};
} & Omit<TProjectPermission, "projectId">;
@@ -57,7 +53,6 @@ export type TUpdateIntegrationDTO = {
}[];
kmsKeyId?: string;
shouldDisableDelete?: boolean;
shouldEnableDelete?: boolean;
};
} & Omit<TProjectPermission, "projectId">;

View File

@@ -1,10 +0,0 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TKmsDALFactory = ReturnType<typeof kmsDALFactory>;
export const kmsDALFactory = (db: TDbClient) => {
const kmsOrm = ormify(db, TableName.KmsKey);
return kmsOrm;
};

View File

@@ -1,10 +0,0 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TKmsRootConfigDALFactory = ReturnType<typeof kmsRootConfigDALFactory>;
export const kmsRootConfigDALFactory = (db: TDbClient) => {
const kmsOrm = ormify(db, TableName.KmsServerRootConfig);
return kmsOrm;
};

View File

@@ -1,126 +0,0 @@
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { randomSecureBytes } from "@app/lib/crypto";
import { symmetricCipherService, SymmetricEncryption } from "@app/lib/crypto/cipher";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TKmsDALFactory } from "./kms-dal";
import { TKmsRootConfigDALFactory } from "./kms-root-config-dal";
import { TDecryptWithKmsDTO, TEncryptWithKmsDTO, TGenerateKMSDTO } from "./kms-types";
type TKmsServiceFactoryDep = {
kmsDAL: TKmsDALFactory;
kmsRootConfigDAL: Pick<TKmsRootConfigDALFactory, "findById" | "create">;
keyStore: Pick<TKeyStoreFactory, "acquireLock" | "waitTillReady" | "setItemWithExpiry">;
};
export type TKmsServiceFactory = ReturnType<typeof kmsServiceFactory>;
const KMS_ROOT_CONFIG_UUID = "00000000-0000-0000-0000-000000000000";
const KMS_ROOT_CREATION_WAIT_KEY = "wait_till_ready_kms_root_key";
const KMS_ROOT_CREATION_WAIT_TIME = 10;
// akhilmhdh: Don't edit this value. This is measured for blob concatenation in kms
const KMS_VERSION = "v01";
const KMS_VERSION_BLOB_LENGTH = 3;
export const kmsServiceFactory = ({ kmsDAL, kmsRootConfigDAL, keyStore }: TKmsServiceFactoryDep) => {
let ROOT_ENCRYPTION_KEY = Buffer.alloc(0);
// this is used for symmetric encryption
const generateKmsKey = async ({ scopeId, scopeType, isReserved = true }: TGenerateKMSDTO) => {
const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
const kmsKeyMaterial = randomSecureBytes(32);
const encryptedKeyMaterial = cipher.encrypt(kmsKeyMaterial, ROOT_ENCRYPTION_KEY);
const { encryptedKey, ...doc } = await kmsDAL.create({
version: 1,
encryptedKey: encryptedKeyMaterial,
encryptionAlgorithm: SymmetricEncryption.AES_GCM_256,
isReserved,
orgId: scopeType === "org" ? scopeId : undefined,
projectId: scopeType === "project" ? scopeId : undefined
});
return doc;
};
const encrypt = async ({ kmsId, plainText }: TEncryptWithKmsDTO) => {
const kmsDoc = await kmsDAL.findById(kmsId);
if (!kmsDoc) throw new BadRequestError({ message: "KMS ID not found" });
// akhilmhdh: as more encryption algorithms are added, do a check here on kmsDoc.encryptionAlgorithm
const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
const kmsKey = cipher.decrypt(kmsDoc.encryptedKey, ROOT_ENCRYPTION_KEY);
const encryptedPlainTextBlob = cipher.encrypt(plainText, kmsKey);
// Buffer#1 encrypted text + Buffer#2 version number
const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
const cipherTextBlob = Buffer.concat([encryptedPlainTextBlob, versionBlob]);
return { cipherTextBlob };
};
const decrypt = async ({ cipherTextBlob: versionedCipherTextBlob, kmsId }: TDecryptWithKmsDTO) => {
const kmsDoc = await kmsDAL.findById(kmsId);
if (!kmsDoc) throw new BadRequestError({ message: "KMS ID not found" });
// akhilmhdh: as more encryption algorithms are added, do a check here on kmsDoc.encryptionAlgorithm
const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
const kmsKey = cipher.decrypt(kmsDoc.encryptedKey, ROOT_ENCRYPTION_KEY);
const cipherTextBlob = versionedCipherTextBlob.subarray(0, -KMS_VERSION_BLOB_LENGTH);
const decryptedBlob = cipher.decrypt(cipherTextBlob, kmsKey);
return decryptedBlob;
};
const startService = async () => {
const appCfg = getConfig();
// This will switch to a seal process and HSM flow in the future
const encryptionKey = appCfg.ENCRYPTION_KEY || appCfg.ROOT_ENCRYPTION_KEY;
// if it is the root key, it's base64 encoded
const isBase64 = !appCfg.ENCRYPTION_KEY;
if (!encryptionKey) throw new Error("Root encryption key not found for KMS service.");
const encryptionKeyBuffer = Buffer.from(encryptionKey, isBase64 ? "base64" : "utf8");
const lock = await keyStore.acquireLock([`KMS_ROOT_CFG_LOCK`], 3000, { retryCount: 3 }).catch(() => null);
if (!lock) {
await keyStore.waitTillReady({
key: KMS_ROOT_CREATION_WAIT_KEY,
keyCheckCb: (val) => val === "true",
waitingCb: () => logger.info("KMS. Waiting for leader to finish creation of KMS Root Key")
});
}
// check if KMS root key was already generated and saved in DB
const kmsRootConfig = await kmsRootConfigDAL.findById(KMS_ROOT_CONFIG_UUID);
const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
if (kmsRootConfig) {
if (lock) await lock.release();
logger.info("KMS: Encrypted ROOT Key found from DB. Decrypting.");
const decryptedRootKey = cipher.decrypt(kmsRootConfig.encryptedRootKey, encryptionKeyBuffer);
// set the flag so that other instance nodes can start
await keyStore.setItemWithExpiry(KMS_ROOT_CREATION_WAIT_KEY, KMS_ROOT_CREATION_WAIT_TIME, "true");
logger.info("KMS: Loading ROOT Key into Memory.");
ROOT_ENCRYPTION_KEY = decryptedRootKey;
return;
}
logger.info("KMS: Generating ROOT Key");
const newRootKey = randomSecureBytes(32);
const encryptedRootKey = cipher.encrypt(newRootKey, encryptionKeyBuffer);
// @ts-expect-error id is kept as fixed for idempotence and to avoid race condition
await kmsRootConfigDAL.create({ encryptedRootKey, id: KMS_ROOT_CONFIG_UUID });
// set the flag so that other instance nodes can start
await keyStore.setItemWithExpiry(KMS_ROOT_CREATION_WAIT_KEY, KMS_ROOT_CREATION_WAIT_TIME, "true");
logger.info("KMS: Saved and loaded ROOT Key into memory");
if (lock) await lock.release();
ROOT_ENCRYPTION_KEY = newRootKey;
};
return {
startService,
generateKmsKey,
encrypt,
decrypt
};
};
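As an aside, the versioned ciphertext layout used by the encrypt/decrypt functions above (encrypted blob followed by a 3-byte version marker) can be shown with a minimal sketch; this is an illustration of the Buffer layout only, not the deleted module itself.
// Minimal sketch of the versioned blob layout: ciphertext + "v01" suffix.
const KMS_VERSION = "v01";
const KMS_VERSION_BLOB_LENGTH = 3;
const packCipherText = (encrypted: Buffer): Buffer =>
  // Buffer#1 encrypted text + Buffer#2 version number (3 bytes)
  Buffer.concat([encrypted, Buffer.from(KMS_VERSION, "utf8")]);
const unpackCipherText = (versioned: Buffer): { encrypted: Buffer; version: string } => ({
  // strip the trailing version marker before handing the blob to the cipher
  encrypted: versioned.subarray(0, -KMS_VERSION_BLOB_LENGTH),
  version: versioned.subarray(-KMS_VERSION_BLOB_LENGTH).toString("utf8")
});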

View File

@@ -1,15 +0,0 @@
export type TGenerateKMSDTO = {
scopeType: "project" | "org";
scopeId: string;
isReserved?: boolean;
};
export type TEncryptWithKmsDTO = {
kmsId: string;
plainText: Buffer;
};
export type TDecryptWithKmsDTO = {
kmsId: string;
cipherTextBlob: Buffer;
};

View File

@@ -336,7 +336,6 @@ export const orgServiceFactory = ({
return org;
});
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
return organization;
};

View File

@@ -169,7 +169,6 @@ const sqlFindSecretPathByFolderId = (db: Knex, projectId: string, folderIds: str
// this is for the root condition
// if the given folder id is the root folder id, then the initial path is set as / instead of /root
// if it is not the root folder, the path here will be /<folder name>
depth: 1,
path: db.raw(`CONCAT('/', (CASE WHEN "parentId" is NULL THEN '' ELSE ${TableName.SecretFolder}.name END))`),
child: db.raw("NULL::uuid"),
environmentSlug: `${TableName.Environment}.slug`
@@ -186,7 +185,6 @@ const sqlFindSecretPathByFolderId = (db: Knex, projectId: string, folderIds: str
.select({
// then we join this folder name behind the previous one as we are going from child to parent
// the root folder check is used to avoid the trailing / and also the root name in folders
depth: db.raw("parent.depth + 1"),
path: db.raw(
`CONCAT( CASE
WHEN ${TableName.SecretFolder}."parentId" is NULL THEN ''
@@ -201,7 +199,7 @@ const sqlFindSecretPathByFolderId = (db: Knex, projectId: string, folderIds: str
);
})
.select("*")
.from<TSecretFolders & { child: string | null; path: string; environmentSlug: string; depth: number }>("parent");
.from<TSecretFolders & { child: string | null; path: string; environmentSlug: string }>("parent");
export type TSecretFolderDALFactory = ReturnType<typeof secretFolderDALFactory>;
// never change this. If you do, write a migration for it
@@ -262,23 +260,12 @@ export const secretFolderDALFactory = (db: TDbClient) => {
try {
const folders = await sqlFindSecretPathByFolderId(tx || db, projectId, folderIds);
// travelling all the way from the leaf node to the root yields the real path
const rootFolders = groupBy(
folders.filter(({ parentId }) => parentId === null),
(i) => i.child || i.id // in the root condition, child and parent will be null
);
const actualFolders = groupBy(
folders.filter(({ depth }) => depth === 1),
(i) => i.id // in the root condition, child and parent will be null
);
return folderIds.map((folderId) => {
if (!rootFolders[folderId]?.[0]) return;
const actualId = rootFolders[folderId][0].child || rootFolders[folderId][0].id;
const folder = actualFolders[actualId][0];
return { ...folder, path: rootFolders[folderId]?.[0].path };
});
return folderIds.map((folderId) => rootFolders[folderId]?.[0]);
} catch (error) {
throw new DatabaseError({ error, name: "Find by secret path" });
}
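The intent of the depth bookkeeping above is easier to see outside SQL. The following is a purely illustrative in-memory walk from a leaf folder up to the root, under the assumption of a simple parent-link table; it is not the DAL's recursive query.
type FolderRow = { id: string; name: string; parentId: string | null };
// Minimal sketch: resolve "/a/b/c"-style paths by walking parent links,
// mirroring what the recursive SQL assembles from child to parent.
const resolveFolderPath = (folders: FolderRow[], folderId: string): string => {
  const byId = new Map(folders.map((f) => [f.id, f]));
  const segments: string[] = [];
  let current = byId.get(folderId);
  while (current) {
    // the root folder contributes only "/", so skip its name
    if (current.parentId !== null) segments.unshift(current.name);
    current = current.parentId ? byId.get(current.parentId) : undefined;
  }
  return `/${segments.join("/")}`;
};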

View File

@@ -253,7 +253,7 @@ export const secretFolderServiceFactory = ({
const env = await projectEnvDAL.findOne({ projectId, slug: environment });
if (!env) throw new BadRequestError({ message: "Environment not found", name: "Update folder" });
const folder = await folderDAL
.findOne({ envId: env.id, id, parentId: parentFolder.id, isReserved: false })
.findOne({ envId: env.id, id, parentId: parentFolder.id })
// now the folder api accepts id-based changes
// this is for cli backward compatibility; when the cli removes this, we will remove this logic
.catch(() => folderDAL.findOne({ envId: env.id, name: id, parentId: parentFolder.id }));
@@ -276,11 +276,7 @@ export const secretFolderServiceFactory = ({
}
const newFolder = await folderDAL.transaction(async (tx) => {
const [doc] = await folderDAL.update(
{ envId: env.id, id: folder.id, parentId: parentFolder.id, isReserved: false },
{ name },
tx
);
const [doc] = await folderDAL.update({ envId: env.id, id: folder.id, parentId: parentFolder.id }, { name }, tx);
await folderVersionDAL.create(
{
name: doc.name,
@@ -328,12 +324,7 @@ export const secretFolderServiceFactory = ({
if (!parentFolder) throw new BadRequestError({ message: "Secret path not found" });
const [doc] = await folderDAL.delete(
{
envId: env.id,
[uuidValidate(idOrName) ? "id" : "name"]: idOrName,
parentId: parentFolder.id,
isReserved: false
},
{ envId: env.id, [uuidValidate(idOrName) ? "id" : "name"]: idOrName, parentId: parentFolder.id },
tx
);
if (!doc) throw new BadRequestError({ message: "Folder not found", name: "Delete folder" });
@@ -363,7 +354,7 @@ export const secretFolderServiceFactory = ({
const parentFolder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
if (!parentFolder) return [];
const folders = await folderDAL.find({ envId: env.id, parentId: parentFolder.id, isReserved: false });
const folders = await folderDAL.find({ envId: env.id, parentId: parentFolder.id });
return folders;
};

View File

@@ -1,9 +1,5 @@
import { TProjectPermission } from "@app/lib/types";
export enum ReservedFolders {
SecretReplication = "__reserve_replication_"
}
export type TCreateFolderDTO = {
environment: string;
path: string;

View File

@@ -15,7 +15,7 @@ export const secretFolderVersionDALFactory = (db: TDbClient) => {
try {
const docs = await (tx || db)(TableName.SecretFolderVersion)
.join(TableName.SecretFolder, `${TableName.SecretFolderVersion}.folderId`, `${TableName.SecretFolder}.id`)
.where({ parentId: folderId, isReserved: false })
.where({ parentId: folderId })
.join<TSecretFolderVersions>(
(tx || db)(TableName.SecretFolderVersion)
.groupBy("envId", "folderId")

View File

@@ -20,14 +20,14 @@ export const secretImportDALFactory = (db: TDbClient) => {
return lastPos?.position || 0;
};
const updateAllPosition = async (folderId: string, pos: number, targetPos: number, positionInc = 1, tx?: Knex) => {
const updateAllPosition = async (folderId: string, pos: number, targetPos: number, tx?: Knex) => {
try {
if (targetPos === -1) {
// this means delete
await (tx || db)(TableName.SecretImport)
.where({ folderId })
.andWhere("position", ">", pos)
.decrement("position", positionInc);
.decrement("position", 1);
return;
}
@@ -36,13 +36,13 @@ export const secretImportDALFactory = (db: TDbClient) => {
.where({ folderId })
.where("position", "<=", targetPos)
.andWhere("position", ">", pos)
.decrement("position", positionInc);
.decrement("position", 1);
} else {
await (tx || db)(TableName.SecretImport)
.where({ folderId })
.where("position", ">=", targetPos)
.andWhere("position", "<", pos)
.increment("position", positionInc);
.increment("position", 1);
}
} catch (error) {
throw new DatabaseError({ error, name: "Update position" });
@@ -74,7 +74,6 @@ export const secretImportDALFactory = (db: TDbClient) => {
try {
const docs = await (tx || db)(TableName.SecretImport)
.whereIn("folderId", folderIds)
.where("isReplication", false)
.join(TableName.Environment, `${TableName.SecretImport}.importEnv`, `${TableName.Environment}.id`)
.select(
db.ref("*").withSchema(TableName.SecretImport) as unknown as keyof TSecretImports,
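The position bookkeeping in updateAllPosition above is easier to follow against a plain array. A rough in-memory equivalent (illustrative only, ignoring transactions and the database layer) looks like this:
type Positioned = { id: string; position: number };
// Minimal sketch of the shift rules: targetPos === -1 means "deleted";
// otherwise shift the rows between the old and new positions so the moved row can slot in.
const shiftPositions = (rows: Positioned[], pos: number, targetPos: number, positionInc = 1) => {
  for (const row of rows) {
    if (targetPos === -1) {
      // a row was deleted at `pos` -> close the gap
      if (row.position > pos) row.position -= positionInc;
    } else if (targetPos > pos) {
      // moving down -> rows in (pos, targetPos] move up
      if (row.position > pos && row.position <= targetPos) row.position -= positionInc;
    } else {
      // moving up -> rows in [targetPos, pos) move down
      if (row.position >= targetPos && row.position < pos) row.position += positionInc;
    }
  }
};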

View File

@@ -79,7 +79,7 @@ export const fnSecretsFromImports = async ({
let secretsFromDeeperImports: TSecretImportSecrets[] = [];
if (deeperImports.length) {
secretsFromDeeperImports = await fnSecretsFromImports({
allowedImports: deeperImports.filter(({ isReplication }) => !isReplication),
allowedImports: deeperImports,
secretImportDAL,
folderDAL,
secretDAL,

View File

@@ -1,12 +1,7 @@
import path from "node:path";
import { ForbiddenError, subject } from "@casl/ability";
import { TableName } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { getReplicationFolderName } from "@app/ee/services/secret-replication/secret-replication-service";
import { BadRequestError } from "@app/lib/errors";
import { TProjectDALFactory } from "../project/project-dal";
@@ -21,7 +16,6 @@ import {
TDeleteSecretImportDTO,
TGetSecretImportsDTO,
TGetSecretsFromImportDTO,
TResyncSecretImportReplicationDTO,
TUpdateSecretImportDTO
} from "./secret-import-types";
@@ -32,8 +26,7 @@ type TSecretImportServiceFactoryDep = {
projectDAL: Pick<TProjectDALFactory, "checkProjectUpgradeStatus">;
projectEnvDAL: TProjectEnvDALFactory;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
secretQueueService: Pick<TSecretQueueFactory, "syncSecrets" | "replicateSecrets">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
secretQueueService: Pick<TSecretQueueFactory, "syncSecrets">;
};
const ERR_SEC_IMP_NOT_FOUND = new BadRequestError({ message: "Secret import not found" });
@@ -47,8 +40,7 @@ export const secretImportServiceFactory = ({
folderDAL,
projectDAL,
secretDAL,
secretQueueService,
licenseService
secretQueueService
}: TSecretImportServiceFactoryDep) => {
const createImport = async ({
environment,
@@ -58,8 +50,7 @@ export const secretImportServiceFactory = ({
actorOrgId,
actorAuthMethod,
projectId,
isReplication,
path: secretPath
path
}: TCreateSecretImportDTO) => {
const { permission } = await permissionService.getProjectPermission(
actor,
@@ -72,7 +63,7 @@ export const secretImportServiceFactory = ({
// check if user has permission to import into destination path
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
);
// check if user has permission to import from target path
@@ -83,18 +74,10 @@ export const secretImportServiceFactory = ({
secretPath: data.path
})
);
if (isReplication) {
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.secretApproval) {
throw new BadRequestError({
message: "Failed to create secret replication due to plan restriction. Upgrade plan to create replication."
});
}
}
await projectDAL.checkProjectUpgradeStatus(projectId);
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Create import" });
const [importEnv] = await projectEnvDAL.findBySlugs(projectId, [data.environment]);
@@ -105,62 +88,35 @@ export const secretImportServiceFactory = ({
const existingImport = await secretImportDAL.findOne({
folderId: sourceFolder.id,
importEnv: folder.environment.id,
importPath: secretPath
importPath: path
});
if (existingImport) throw new BadRequestError({ message: "Cyclic import not allowed" });
}
const secImport = await secretImportDAL.transaction(async (tx) => {
const lastPos = await secretImportDAL.findLastImportPosition(folder.id, tx);
const doc = await secretImportDAL.create(
return secretImportDAL.create(
{
folderId: folder.id,
position: lastPos + 1,
importEnv: importEnv.id,
importPath: data.path,
isReplication
importPath: data.path
},
tx
);
if (doc.isReplication) {
await secretImportDAL.create(
{
folderId: folder.id,
position: lastPos + 2,
isReserved: true,
importEnv: folder.environment.id,
importPath: path.join(secretPath, getReplicationFolderName(doc.id))
},
tx
);
}
return doc;
});
if (secImport.isReplication && sourceFolder) {
await secretQueueService.replicateSecrets({
secretPath: secImport.importPath,
projectId,
environmentSlug: importEnv.slug,
pickOnlyImportIds: [secImport.id],
actorId,
actor
});
} else {
await secretQueueService.syncSecrets({
secretPath,
projectId,
environmentSlug: environment,
actorId,
actor
});
}
await secretQueueService.syncSecrets({
secretPath: secImport.importPath,
projectId,
environment: importEnv.slug
});
return { ...secImport, importEnv };
};
const updateImport = async ({
path: secretPath,
path,
environment,
projectId,
actor,
@@ -179,10 +135,10 @@ export const secretImportServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
);
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Update import" });
const secImpDoc = await secretImportDAL.findOne({ folderId: folder.id, id });
@@ -202,7 +158,7 @@ export const secretImportServiceFactory = ({
const existingImport = await secretImportDAL.findOne({
folderId: sourceFolder.id,
importEnv: folder.environment.id,
importPath: secretPath
importPath: path
});
if (existingImport) throw new BadRequestError({ message: "Cyclic import not allowed" });
}
@@ -211,31 +167,12 @@ export const secretImportServiceFactory = ({
const secImp = await secretImportDAL.findOne({ folderId: folder.id, id });
if (!secImp) throw ERR_SEC_IMP_NOT_FOUND;
if (data.position) {
if (secImp.isReplication) {
await secretImportDAL.updateAllPosition(folder.id, secImp.position, data.position, 2, tx);
} else {
await secretImportDAL.updateAllPosition(folder.id, secImp.position, data.position, 1, tx);
}
}
if (secImp.isReplication) {
const replicationFolderPath = path.join(secretPath, getReplicationFolderName(secImp.id));
await secretImportDAL.update(
{
folderId: folder.id,
importEnv: folder.environment.id,
importPath: replicationFolderPath,
isReserved: true
},
{ position: data?.position ? data.position + 1 : undefined },
tx
);
await secretImportDAL.updateAllPosition(folder.id, secImp.position, data.position, tx);
}
const [doc] = await secretImportDAL.update(
{ id, folderId: folder.id },
{
// when moving a replicated import, the position is meant for the reserved import
// the replicated one should always be behind the reserved import
position: data.position,
position: data?.position,
importEnv: data?.environment ? importedEnv.id : undefined,
importPath: data?.path
},
@@ -247,7 +184,7 @@ export const secretImportServiceFactory = ({
};
const deleteImport = async ({
path: secretPath,
path,
environment,
projectId,
actor,
@@ -265,34 +202,16 @@ export const secretImportServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Delete,
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
);
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Delete import" });
const secImport = await secretImportDAL.transaction(async (tx) => {
const [doc] = await secretImportDAL.delete({ folderId: folder.id, id }, tx);
if (!doc) throw new BadRequestError({ name: "Sec imp del", message: "Secret import doc not found" });
if (doc.isReplication) {
const replicationFolderPath = path.join(secretPath, getReplicationFolderName(doc.id));
const replicatedFolder = await folderDAL.findBySecretPath(projectId, environment, replicationFolderPath, tx);
if (replicatedFolder) {
await secretImportDAL.delete(
{
folderId: folder.id,
importEnv: folder.environment.id,
importPath: replicationFolderPath,
isReserved: true
},
tx
);
await folderDAL.deleteById(replicatedFolder.id, tx);
}
await secretImportDAL.updateAllPosition(folder.id, doc.position, -1, 2, tx);
} else {
await secretImportDAL.updateAllPosition(folder.id, doc.position, -1, 1, tx);
}
await secretImportDAL.updateAllPosition(folder.id, doc.position, -1, tx);
const importEnv = await projectEnvDAL.findById(doc.importEnv);
if (!importEnv) throw new BadRequestError({ error: "Imported env not found", name: "Create import" });
@@ -300,91 +219,16 @@ export const secretImportServiceFactory = ({
});
await secretQueueService.syncSecrets({
secretPath,
secretPath: path,
projectId,
environmentSlug: environment,
actor,
actorId
environment
});
return secImport;
};
const resyncSecretImportReplication = async ({
environment,
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
path: secretPath,
id: secretImportDocId
}: TResyncSecretImportReplicationDTO) => {
const { permission, membership } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
// check if user has permission to import into destination path
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
);
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.secretApproval) {
throw new BadRequestError({
message: "Failed to create secret replication due to plan restriction. Upgrade plan to create replication."
});
}
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Update import" });
const [secretImportDoc] = await secretImportDAL.find({
folderId: folder.id,
[`${TableName.SecretImport}.id` as "id"]: secretImportDocId
});
if (!secretImportDoc) throw new BadRequestError({ message: "Failed to find secret import" });
if (!secretImportDoc.isReplication) throw new BadRequestError({ message: "Import is not in replication mode" });
// check if user has permission to import from target path
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
subject(ProjectPermissionSub.Secrets, {
environment: secretImportDoc.importEnv.slug,
secretPath: secretImportDoc.importPath
})
);
await projectDAL.checkProjectUpgradeStatus(projectId);
const sourceFolder = await folderDAL.findBySecretPath(
projectId,
secretImportDoc.importEnv.slug,
secretImportDoc.importPath
);
if (membership && sourceFolder) {
await secretQueueService.replicateSecrets({
secretPath: secretImportDoc.importPath,
projectId,
environmentSlug: secretImportDoc.importEnv.slug,
pickOnlyImportIds: [secretImportDoc.id],
actorId,
actor
});
}
return { message: "replication started" };
};
const getImports = async ({
path: secretPath,
path,
environment,
projectId,
actor,
@@ -401,10 +245,10 @@ export const secretImportServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
);
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Get imports" });
const secImports = await secretImportDAL.find({ folderId: folder.id });
@@ -412,7 +256,7 @@ export const secretImportServiceFactory = ({
};
const getSecretsFromImports = async ({
path: secretPath,
path,
environment,
projectId,
actor,
@@ -429,13 +273,13 @@ export const secretImportServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
);
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
if (!folder) return [];
// this will already be ordered by position
// so anything based on this order will also be in the right position
const secretImports = await secretImportDAL.find({ folderId: folder.id, isReplication: false });
const secretImports = await secretImportDAL.find({ folderId: folder.id });
const allowedImports = secretImports.filter(({ importEnv, importPath }) =>
permission.can(
@@ -455,7 +299,6 @@ export const secretImportServiceFactory = ({
deleteImport,
getImports,
getSecretsFromImports,
resyncSecretImportReplication,
fnSecretsFromImports
};
};
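For orientation, the reserved companion import removed above lives in a folder whose name is derived from the import id. A hedged sketch of how such a name could be composed follows; the actual helper lives in the secret-replication service and may differ, so treat the prefix wiring as an assumption grounded only in the ReservedFolders enum shown earlier in this diff.
import path from "node:path";
// Assumed prefix, matching ReservedFolders.SecretReplication shown earlier.
const RESERVED_REPLICATION_PREFIX = "__reserve_replication_";
// Hypothetical helper: derive the reserved replication folder for an import.
const getReplicationFolderName = (importId: string) => `${RESERVED_REPLICATION_PREFIX}${importId}`;
// e.g. the reserved companion import points at
//   /some/secret/path/__reserve_replication_<importId>
const reservedImportPath = (secretPath: string, importId: string) =>
  path.join(secretPath, getReplicationFolderName(importId));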

View File

@@ -7,7 +7,6 @@ export type TCreateSecretImportDTO = {
environment: string;
path: string;
};
isReplication?: boolean;
} & TProjectPermission;
export type TUpdateSecretImportDTO = {
@@ -17,12 +16,6 @@ export type TUpdateSecretImportDTO = {
data: Partial<{ environment: string; path: string; position: number }>;
} & TProjectPermission;
export type TResyncSecretImportReplicationDTO = {
environment: string;
path: string;
id: string;
} & TProjectPermission;
export type TDeleteSecretImportDTO = {
environment: string;
path: string;

View File

@@ -32,8 +32,6 @@ import {
TCreateManySecretsRawFn,
TCreateManySecretsRawFnFactory,
TFnSecretBlindIndexCheck,
TFnSecretBlindIndexCheckV2,
TFnSecretBulkDelete,
TFnSecretBulkInsert,
TFnSecretBulkUpdate,
TUpdateManySecretsRawFn,
@@ -151,8 +149,7 @@ export const recursivelyGetSecretPaths = ({
// Fetch all folders in env once with a single query
const folders = await folderDAL.find({
envId: env.id,
isReserved: false
envId: env.id
});
// Build the folder hierarchy map
@@ -309,7 +306,7 @@ export const interpolateSecrets = ({ projectId, secretEncKey, secretDAL, folderD
};
const expandSecrets = async (
secrets: Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean | null }>
secrets: Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean }>
) => {
const expandedSec: Record<string, string> = {};
const interpolatedSec: Record<string, string> = {};
@@ -329,8 +326,8 @@ export const interpolateSecrets = ({ projectId, secretEncKey, secretDAL, folderD
// should not do multi-line encoding if the user has set it to skip
// eslint-disable-next-line
secrets[key].value = secrets[key].skipMultilineEncoding
? formatMultiValueEnv(expandedSec[key])
: expandedSec[key];
? expandedSec[key]
: formatMultiValueEnv(expandedSec[key]);
// eslint-disable-next-line
continue;
}
@@ -347,7 +344,7 @@ export const interpolateSecrets = ({ projectId, secretEncKey, secretDAL, folderD
);
// eslint-disable-next-line
secrets[key].value = secrets[key].skipMultilineEncoding ? formatMultiValueEnv(expandedVal) : expandedVal;
secrets[key].value = secrets[key].skipMultilineEncoding ? expandedVal : formatMultiValueEnv(expandedVal);
}
return secrets;
@@ -395,35 +392,10 @@ export const decryptSecretRaw = (
type: secret.type,
_id: secret.id,
id: secret.id,
user: secret.userId,
skipMultilineEncoding: secret.skipMultilineEncoding
user: secret.userId
};
};
// this is used when secret blind index already exist
// mainly for secret approval
export const fnSecretBlindIndexCheckV2 = async ({
inputSecrets,
folderId,
userId,
secretDAL
}: TFnSecretBlindIndexCheckV2) => {
if (inputSecrets.some(({ type }) => type === SecretType.Personal) && !userId) {
throw new BadRequestError({ message: "Missing user id for personal secret" });
}
const secrets = await secretDAL.findByBlindIndexes(
folderId,
inputSecrets.map(({ secretBlindIndex, type }) => ({
blindIndex: secretBlindIndex,
type: type || SecretType.Shared
})),
userId
);
const secsGroupedByBlindIndex = groupBy(secrets, (i) => i.secretBlindIndex as string);
return { secsGroupedByBlindIndex, secrets };
};
/**
* Grabs and processes nested secret references from a string
*
@@ -626,35 +598,6 @@ export const fnSecretBulkUpdate = async ({
return newSecrets.map((secret) => ({ ...secret, _id: secret.id }));
};
export const fnSecretBulkDelete = async ({
folderId,
inputSecrets,
tx,
actorId,
secretDAL,
secretQueueService
}: TFnSecretBulkDelete) => {
const deletedSecrets = await secretDAL.deleteMany(
inputSecrets.map(({ type, secretBlindIndex }) => ({
blindIndex: secretBlindIndex,
type
})),
folderId,
actorId,
tx
);
await Promise.allSettled(
deletedSecrets
.filter(({ secretReminderRepeatDays }) => Boolean(secretReminderRepeatDays))
.map(({ id, secretReminderRepeatDays }) =>
secretQueueService.removeSecretReminder({ secretId: id, repeatDays: secretReminderRepeatDays as number })
)
);
return deletedSecrets;
};
export const createManySecretsRawFnFactory = ({
projectDAL,
projectBotDAL,
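Since the hunk above flips how skipMultilineEncoding is honoured, a short sketch of what a multi-line encoder plausibly does to a value may help; the helper body below is an assumption kept deliberately simple, not a quotation of the project's formatMultiValueEnv.
// Hedged sketch of a multi-line encoder: wrap values containing newlines in
// quotes and escape the newlines so they survive as a single env entry.
const formatMultiValueEnv = (val?: string): string => {
  if (!val) return "";
  if (!val.includes("\n")) return val;
  return `"${val.replace(/\n/g, "\\n")}"`;
};
// Per the comment above, skipMultilineEncoding === true should leave the expanded
// value untouched; otherwise the value goes through the encoder before syncing.
const encodeForSync = (value: string, skipMultilineEncoding?: boolean | null) =>
  skipMultilineEncoding ? value : formatMultiValueEnv(value);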

View File

@@ -1,6 +1,4 @@
/* eslint-disable no-await-in-loop */
import { AxiosError } from "axios";
import { getConfig } from "@app/lib/config/env";
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { daysToMillisecond, secondsToMillis } from "@app/lib/dates";
@@ -30,12 +28,7 @@ import { TWebhookDALFactory } from "../webhook/webhook-dal";
import { fnTriggerWebhook } from "../webhook/webhook-fns";
import { TSecretDALFactory } from "./secret-dal";
import { interpolateSecrets } from "./secret-fns";
import {
TCreateSecretReminderDTO,
THandleReminderDTO,
TRemoveSecretReminderDTO,
TSyncSecretsDTO
} from "./secret-types";
import { TCreateSecretReminderDTO, THandleReminderDTO, TRemoveSecretReminderDTO } from "./secret-types";
export type TSecretQueueFactory = ReturnType<typeof secretQueueFactory>;
type TSecretQueueFactoryDep = {
@@ -66,13 +59,8 @@ export type TGetSecrets = {
};
const MAX_SYNC_SECRET_DEPTH = 5;
export const uniqueSecretQueueKey = (environment: string, secretPath: string) =>
`secret-queue-dedupe-${environment}-${secretPath}`;
const uniqueIntegrationKey = (environment: string, secretPath: string) => `integration-${environment}-${secretPath}`;
type TIntegrationSecret = Record<
string,
{ value: string; comment?: string; skipMultilineEncoding?: boolean | null | undefined }
>;
export const secretQueueFactory = ({
queueService,
integrationDAL,
@@ -93,6 +81,68 @@ export const secretQueueFactory = ({
secretTagDAL,
secretVersionTagDAL
}: TSecretQueueFactoryDep) => {
const createManySecretsRawFn = createManySecretsRawFnFactory({
projectDAL,
projectBotDAL,
secretDAL,
secretVersionDAL,
secretBlindIndexDAL,
secretTagDAL,
secretVersionTagDAL,
folderDAL
});
const updateManySecretsRawFn = updateManySecretsRawFnFactory({
projectDAL,
projectBotDAL,
secretDAL,
secretVersionDAL,
secretBlindIndexDAL,
secretTagDAL,
secretVersionTagDAL,
folderDAL
});
const syncIntegrations = async (dto: TGetSecrets & { deDupeQueue?: Record<string, boolean> }) => {
await queueService.queue(QueueName.IntegrationSync, QueueJobs.IntegrationSync, dto, {
attempts: 3,
delay: 1000,
backoff: {
type: "exponential",
delay: 3000
},
removeOnComplete: true,
removeOnFail: true
});
};
const syncSecrets = async ({
deDupeQueue = {},
...dto
}: TGetSecrets & { depth?: number; deDupeQueue?: Record<string, boolean> }) => {
const deDuplicationKey = uniqueIntegrationKey(dto.environment, dto.secretPath);
if (deDupeQueue?.[deDuplicationKey]) {
return;
}
// eslint-disable-next-line
deDupeQueue[deDuplicationKey] = true;
logger.info(
`syncSecrets: syncing project secrets where [projectId=${dto.projectId}] [environment=${dto.environment}] [path=${dto.secretPath}]`
);
await queueService.queue(QueueName.SecretWebhook, QueueJobs.SecWebhook, dto, {
jobId: `secret-webhook-${dto.environment}-${dto.projectId}-${dto.secretPath}`,
removeOnFail: true,
removeOnComplete: true,
delay: 1000,
attempts: 5,
backoff: {
type: "exponential",
delay: 3000
}
});
await syncIntegrations({ ...dto, deDupeQueue });
};
const removeSecretReminder = async (dto: TRemoveSecretReminderDTO) => {
const appCfg = getConfig();
await queueService.stopRepeatableJob(
@@ -187,27 +237,8 @@ export const secretQueueFactory = ({
}
}
};
const createManySecretsRawFn = createManySecretsRawFnFactory({
projectDAL,
projectBotDAL,
secretDAL,
secretVersionDAL,
secretBlindIndexDAL,
secretTagDAL,
secretVersionTagDAL,
folderDAL
});
const updateManySecretsRawFn = updateManySecretsRawFnFactory({
projectDAL,
projectBotDAL,
secretDAL,
secretVersionDAL,
secretBlindIndexDAL,
secretTagDAL,
secretVersionTagDAL,
folderDAL
});
type Content = Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean }>;
/**
* Return the secrets in a given [folderId] including secrets from
@@ -220,7 +251,7 @@ export const secretQueueFactory = ({
key: string;
depth: number;
}) => {
let content: TIntegrationSecret = {};
let content: Content = {};
if (dto.depth > MAX_SYNC_SECRET_DEPTH) {
logger.info(
`getIntegrationSecrets: secret depth exceeded for [projectId=${dto.projectId}] [folderId=${dto.folderId}] [depth=${dto.depth}]`
@@ -270,7 +301,7 @@ export const secretQueueFactory = ({
await expandSecrets(content);
// check if current folder has any imports from other folders
const secretImport = await secretImportDAL.find({ folderId: dto.folderId, isReplication: false });
const secretImport = await secretImportDAL.find({ folderId: dto.folderId });
// if no imports then return secrets in the current folder
if (!secretImport) return content;
@@ -302,122 +333,8 @@ export const secretQueueFactory = ({
return content;
};
const syncIntegrations = async (dto: TGetSecrets & { deDupeQueue?: Record<string, boolean> }) => {
await queueService.queue(QueueName.IntegrationSync, QueueJobs.IntegrationSync, dto, {
attempts: 3,
delay: 1000,
backoff: {
type: "exponential",
delay: 3000
},
removeOnComplete: true,
removeOnFail: true
});
};
const replicateSecrets = async (dto: Omit<TSyncSecretsDTO, "deDupeQueue">) => {
await queueService.queue(QueueName.SecretReplication, QueueJobs.SecretReplication, dto, {
attempts: 3,
backoff: {
type: "exponential",
delay: 2000
},
removeOnComplete: true,
removeOnFail: true
});
};
const syncSecrets = async <T extends boolean = false>({
// separate de-dupe queues for integration sync and replication sync
_deDupeQueue: deDupeQueue = {},
_depth: depth = 0,
_deDupeReplicationQueue: deDupeReplicationQueue = {},
...dto
}: TSyncSecretsDTO<T>) => {
logger.info(
`syncSecrets: syncing project secrets where [projectId=${dto.projectId}] [environment=${dto.environmentSlug}] [path=${dto.secretPath}]`
);
const deDuplicationKey = uniqueSecretQueueKey(dto.environmentSlug, dto.secretPath);
if (
!dto.excludeReplication
? deDupeReplicationQueue?.[deDuplicationKey]
: deDupeQueue?.[deDuplicationKey] || depth > MAX_SYNC_SECRET_DEPTH
) {
return;
}
// eslint-disable-next-line
deDupeQueue[deDuplicationKey] = true;
// eslint-disable-next-line
deDupeReplicationQueue[deDuplicationKey] = true;
await queueService.queue(
QueueName.SecretSync,
QueueJobs.SecretSync,
{
...dto,
_deDupeQueue: deDupeQueue,
_deDupeReplicationQueue: deDupeReplicationQueue,
_depth: depth
} as TSyncSecretsDTO,
{
removeOnFail: true,
removeOnComplete: true,
delay: 1000,
attempts: 5,
backoff: {
type: "exponential",
delay: 3000
}
}
);
};
queueService.start(QueueName.SecretSync, async (job) => {
const {
_deDupeQueue: deDupeQueue,
_deDupeReplicationQueue: deDupeReplicationQueue,
_depth: depth,
secretPath,
projectId,
environmentSlug: environment,
excludeReplication,
actorId,
actor
} = job.data;
await queueService.queue(
QueueName.SecretWebhook,
QueueJobs.SecWebhook,
{ environment, projectId, secretPath },
{
jobId: `secret-webhook-${environment}-${projectId}-${secretPath}`,
removeOnFail: { count: 5 },
removeOnComplete: true,
delay: 1000,
attempts: 5,
backoff: {
type: "exponential",
delay: 3000
}
}
);
await syncIntegrations({ secretPath, projectId, environment, deDupeQueue });
if (!excludeReplication) {
await replicateSecrets({
_deDupeReplicationQueue: deDupeReplicationQueue,
_depth: depth,
projectId,
secretPath,
actorId,
actor,
excludeReplication,
environmentSlug: environment
});
}
});
queueService.start(QueueName.IntegrationSync, async (job) => {
const { environment, projectId, secretPath, depth = 1, deDupeQueue = {} } = job.data;
if (depth > MAX_SYNC_SECRET_DEPTH) return;
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
if (!folder) {
@@ -431,8 +348,7 @@ export const secretQueueFactory = ({
const linkSourceDto = {
projectId,
importEnv: folder.environment.id,
importPath: secretPath,
isReplication: false
importPath: secretPath
};
const imports = await secretImportDAL.find(linkSourceDto);
@@ -440,31 +356,30 @@ export const secretQueueFactory = ({
// keep calling sync secret for all the imports made
const importedFolderIds = unique(imports, (i) => i.folderId).map(({ folderId }) => folderId);
const importedFolders = await folderDAL.findSecretPathByFolderIds(projectId, importedFolderIds);
const foldersGroupedById = groupBy(importedFolders.filter(Boolean), (i) => i?.id as string);
const foldersGroupedById = groupBy(importedFolders, (i) => i.child || i.id);
logger.info(
`getIntegrationSecrets: Syncing secret due to link change [jobId=${job.id}] [projectId=${job.data.projectId}] [environment=${job.data.environment}] [secretPath=${job.data.secretPath}] [depth=${depth}]`
);
await Promise.all(
imports
.filter(({ folderId }) => Boolean(foldersGroupedById[folderId][0]?.path as string))
.filter(({ folderId }) => Boolean(foldersGroupedById[folderId][0].path))
// filter out already synced ones
.filter(
({ folderId }) =>
!deDupeQueue[
uniqueSecretQueueKey(
foldersGroupedById[folderId][0]?.environmentSlug as string,
foldersGroupedById[folderId][0]?.path as string
uniqueIntegrationKey(
foldersGroupedById[folderId][0].environmentSlug,
foldersGroupedById[folderId][0].path
)
]
)
.map(({ folderId }) =>
syncSecrets({
depth: depth + 1,
projectId,
secretPath: foldersGroupedById[folderId][0]?.path as string,
environmentSlug: foldersGroupedById[folderId][0]?.environmentSlug as string,
_deDupeQueue: deDupeQueue,
_depth: depth + 1,
excludeReplication: true
secretPath: foldersGroupedById[folderId][0].path,
environment: foldersGroupedById[folderId][0].environmentSlug,
deDupeQueue
})
)
);
@@ -478,31 +393,30 @@ export const secretQueueFactory = ({
if (secretReferences.length) {
const referencedFolderIds = unique(secretReferences, (i) => i.folderId).map(({ folderId }) => folderId);
const referencedFolders = await folderDAL.findSecretPathByFolderIds(projectId, referencedFolderIds);
const referencedFoldersGroupedById = groupBy(referencedFolders.filter(Boolean), (i) => i?.id as string);
const referencedFoldersGroupedById = groupBy(referencedFolders, (i) => i.child || i.id);
logger.info(
`getIntegrationSecrets: Syncing secret due to reference change [jobId=${job.id}] [projectId=${job.data.projectId}] [environment=${job.data.environment}] [secretPath=${job.data.secretPath}] [depth=${depth}]`
);
await Promise.all(
secretReferences
.filter(({ folderId }) => Boolean(referencedFoldersGroupedById[folderId][0]?.path))
.filter(({ folderId }) => Boolean(referencedFoldersGroupedById[folderId][0].path))
// filter out already synced ones
.filter(
({ folderId }) =>
!deDupeQueue[
uniqueSecretQueueKey(
referencedFoldersGroupedById[folderId][0]?.environmentSlug as string,
referencedFoldersGroupedById[folderId][0]?.path as string
uniqueIntegrationKey(
referencedFoldersGroupedById[folderId][0].environmentSlug,
referencedFoldersGroupedById[folderId][0].path
)
]
)
.map(({ folderId }) =>
syncSecrets({
depth: depth + 1,
projectId,
secretPath: referencedFoldersGroupedById[folderId][0]?.path as string,
environmentSlug: referencedFoldersGroupedById[folderId][0]?.environmentSlug as string,
_deDupeQueue: deDupeQueue,
_depth: depth + 1,
excludeReplication: true
secretPath: referencedFoldersGroupedById[folderId][0].path,
environment: referencedFoldersGroupedById[folderId][0].environmentSlug,
deDupeQueue
})
)
);
@@ -572,14 +486,11 @@ export const secretQueueFactory = ({
isSynced: true
});
} catch (err: unknown) {
logger.info("Secret integration sync error: %o", err);
const message =
err instanceof AxiosError ? JSON.stringify((err as AxiosError)?.response?.data) : (err as Error)?.message;
logger.info("Secret integration sync error:", err);
await integrationDAL.updateById(integration.id, {
lastSyncJobId: job.id,
lastUsed: new Date(),
syncMessage: message,
syncMessage: (err as Error)?.message,
isSynced: false
});
}
@@ -635,11 +546,10 @@ export const secretQueueFactory = ({
return {
// depth is an internal-only field, thus no need to make it available outside
syncSecrets,
syncSecrets: (dto: TGetSecrets) => syncSecrets(dto),
syncIntegrations,
addSecretReminder,
removeSecretReminder,
handleSecretReminder,
replicateSecrets
handleSecretReminder
};
};
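Finally, the de-duplication keys above exist to stop import and reference fan-out from re-queuing the same folder indefinitely. A compact sketch of that guard (illustrative only, with a hypothetical enqueue callback) is:
const MAX_SYNC_SECRET_DEPTH = 5;
// Key format mirrors uniqueSecretQueueKey above: one entry per env + path pair.
const dedupeKey = (environment: string, secretPath: string) =>
  `secret-queue-dedupe-${environment}-${secretPath}`;
// Minimal sketch: skip folders already queued in this cascade, and stop
// entirely once the recursion gets deeper than MAX_SYNC_SECRET_DEPTH.
const maybeQueueSync = (
  deDupeQueue: Record<string, boolean>,
  depth: number,
  environment: string,
  secretPath: string,
  enqueue: (environment: string, secretPath: string) => void
) => {
  const key = dedupeKey(environment, secretPath);
  if (deDupeQueue[key] || depth > MAX_SYNC_SECRET_DEPTH) return;
  deDupeQueue[key] = true;
  enqueue(environment, secretPath);
};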

View File

@@ -35,7 +35,6 @@ import { TSecretDALFactory } from "./secret-dal";
import {
decryptSecretRaw,
fnSecretBlindIndexCheck,
fnSecretBulkDelete,
fnSecretBulkInsert,
fnSecretBulkUpdate,
getAllNestedSecretReferences,
@@ -54,6 +53,8 @@ import {
TDeleteManySecretRawDTO,
TDeleteSecretDTO,
TDeleteSecretRawDTO,
TFnSecretBlindIndexCheckV2,
TFnSecretBulkDelete,
TGetASecretDTO,
TGetASecretRawDTO,
TGetSecretsDTO,
@@ -138,6 +139,53 @@ export const secretServiceFactory = ({
return secretBlindIndex;
};
const fnSecretBulkDelete = async ({ folderId, inputSecrets, tx, actorId }: TFnSecretBulkDelete) => {
const deletedSecrets = await secretDAL.deleteMany(
inputSecrets.map(({ type, secretBlindIndex }) => ({
blindIndex: secretBlindIndex,
type
})),
folderId,
actorId,
tx
);
for (const s of deletedSecrets) {
if (s.secretReminderRepeatDays) {
// eslint-disable-next-line no-await-in-loop
await secretQueueService
.removeSecretReminder({
secretId: s.id,
repeatDays: s.secretReminderRepeatDays
})
.catch((err) => {
logger.error(err, `Failed to delete secret reminder for secret with ID ${s?.id}`);
});
}
}
return deletedSecrets;
};
// this is used when secret blind index already exist
// mainly for secret approval
const fnSecretBlindIndexCheckV2 = async ({ inputSecrets, folderId, userId }: TFnSecretBlindIndexCheckV2) => {
if (inputSecrets.some(({ type }) => type === SecretType.Personal) && !userId) {
throw new BadRequestError({ message: "Missing user id for personal secret" });
}
const secrets = await secretDAL.findByBlindIndexes(
folderId,
inputSecrets.map(({ secretBlindIndex, type }) => ({
blindIndex: secretBlindIndex,
type: type || SecretType.Shared
})),
userId
);
const secsGroupedByBlindIndex = groupBy(secrets, (i) => i.secretBlindIndex as string);
return { secsGroupedByBlindIndex, secrets };
};
const createSecret = async ({
path,
actor,
@@ -235,13 +283,8 @@ export const secretServiceFactory = ({
);
await snapshotService.performSnapshot(folderId);
await secretQueueService.syncSecrets({
secretPath: path,
actorId,
actor,
projectId,
environmentSlug: folder.environment.slug
});
await secretQueueService.syncSecrets({ secretPath: path, projectId, environment });
// TODO(akhilmhdh-pg): licence check, posthog service and snapshot
return { ...secret[0], environment, workspace: projectId, tags, secretPath: path };
};
@@ -370,13 +413,8 @@ export const secretServiceFactory = ({
);
await snapshotService.performSnapshot(folderId);
await secretQueueService.syncSecrets({
actor,
actorId,
secretPath: path,
projectId,
environmentSlug: folder.environment.slug
});
await secretQueueService.syncSecrets({ secretPath: path, projectId, environment });
// TODO(akhilmhdh-pg): licence check, posthog service and snapshot
return { ...updatedSecret[0], workspace: projectId, environment, secretPath: path };
};
@@ -432,8 +470,6 @@ export const secretServiceFactory = ({
projectId,
folderId,
actorId,
secretDAL,
secretQueueService,
inputSecrets: [
{
type: inputSecret.type as SecretType,
@@ -445,13 +481,8 @@ export const secretServiceFactory = ({
);
await snapshotService.performSnapshot(folderId);
await secretQueueService.syncSecrets({
actor,
actorId,
secretPath: path,
projectId,
environmentSlug: folder.environment.slug
});
await secretQueueService.syncSecrets({ secretPath: path, projectId, environment });
// TODO(akhilmhdh-pg): licence check, posthog service and snapshot
return { ...deletedSecret[0], _id: deletedSecret[0].id, workspace: projectId, environment, secretPath: path };
};
@@ -520,8 +551,7 @@ export const secretServiceFactory = ({
if (includeImports) {
const secretImports = await secretImportDAL.findByFolderIds(paths.map((p) => p.folderId));
const allowedImports = secretImports.filter(({ importEnv, importPath, isReplication }) =>
!isReplication &&
const allowedImports = secretImports.filter(({ importEnv, importPath }) =>
// if its service token allow full access over imported one
actor === ActorType.SERVICE
? true
@@ -626,7 +656,7 @@ export const secretServiceFactory = ({
// then search for imported secrets
// here we consider the import order also thus starting from bottom
if (!secret && includeImports) {
const secretImports = await secretImportDAL.find({ folderId, isReplication: false });
const secretImports = await secretImportDAL.find({ folderId });
const allowedImports = secretImports.filter(({ importEnv, importPath }) =>
// if its service token allow full access over imported one
actor === ActorType.SERVICE
@@ -737,13 +767,7 @@ export const secretServiceFactory = ({
);
await snapshotService.performSnapshot(folderId);
await secretQueueService.syncSecrets({
actor,
actorId,
secretPath: path,
projectId,
environmentSlug: folder.environment.slug
});
await secretQueueService.syncSecrets({ secretPath: path, projectId, environment });
return newSecrets;
};
@@ -843,13 +867,7 @@ export const secretServiceFactory = ({
);
await snapshotService.performSnapshot(folderId);
await secretQueueService.syncSecrets({
actor,
actorId,
secretPath: path,
projectId,
environmentSlug: folder.environment.slug
});
await secretQueueService.syncSecrets({ secretPath: path, projectId, environment });
return secrets;
};
@@ -899,8 +917,6 @@ export const secretServiceFactory = ({
const secretsDeleted = await secretDAL.transaction(async (tx) =>
fnSecretBulkDelete({
secretDAL,
secretQueueService,
inputSecrets: inputSecrets.map(({ type, secretName }) => ({
secretBlindIndex: keyName2BlindIndex[secretName],
type
@@ -913,13 +929,7 @@ export const secretServiceFactory = ({
);
await snapshotService.performSnapshot(folderId);
await secretQueueService.syncSecrets({
actor,
actorId,
secretPath: path,
projectId,
environmentSlug: folder.environment.slug
});
await secretQueueService.syncSecrets({ secretPath: path, projectId, environment });
return secretsDeleted;
};
@@ -952,49 +962,15 @@ export const secretServiceFactory = ({
});
const decryptedSecrets = secrets.map((el) => decryptSecretRaw(el, botKey));
const processedImports = (imports || [])?.map(({ secrets: importedSecrets, ...el }) => {
const decryptedImportSecrets = importedSecrets.map((sec) =>
const decryptedImports = (imports || [])?.map(({ secrets: importedSecrets, ...el }) => ({
...el,
secrets: importedSecrets.map((sec) =>
decryptSecretRaw(
{ ...sec, environment: el.environment, workspace: projectId, secretPath: el.secretPath },
botKey
)
);
// secret-override to handle duplicate keys from different import levels
// this prioritizes secret values from direct imports
const importedKeys = new Set<string>();
const importedEntries = decryptedImportSecrets.reduce(
(
accum: {
secretKey: string;
secretPath: string;
workspace: string;
environment: string;
secretValue: string;
secretComment: string;
version: number;
type: string;
_id: string;
id: string;
user: string | null | undefined;
skipMultilineEncoding: boolean | null | undefined;
}[],
sec
) => {
if (!importedKeys.has(sec.secretKey)) {
importedKeys.add(sec.secretKey);
return [...accum, sec];
}
return accum;
},
[]
);
return {
...el,
secrets: importedEntries
};
});
)
}));
if (expandSecretReferences) {
const expandSecrets = interpolateSecrets({
@@ -1005,24 +981,10 @@ export const secretServiceFactory = ({
});
const batchSecretsExpand = async (
secretBatch: {
secretKey: string;
secretValue: string;
secretComment?: string;
secretPath: string;
skipMultilineEncoding: boolean | null | undefined;
}[]
secretBatch: { secretKey: string; secretValue: string; secretComment?: string; secretPath: string }[]
) => {
// Group secrets by secretPath
const secretsByPath: Record<
string,
{
secretKey: string;
secretValue: string;
secretComment?: string;
skipMultilineEncoding: boolean | null | undefined;
}[]
> = {};
const secretsByPath: Record<string, { secretKey: string; secretValue: string; secretComment?: string }[]> = {};
secretBatch.forEach((secret) => {
if (!secretsByPath[secret.secretPath]) {
@@ -1038,15 +1000,11 @@ export const secretServiceFactory = ({
continue;
}
const secretRecord: Record<
string,
{ value: string; comment?: string; skipMultilineEncoding: boolean | null | undefined }
> = {};
const secretRecord: Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean }> = {};
secretsByPath[secPath].forEach((decryptedSecret) => {
secretRecord[decryptedSecret.secretKey] = {
value: decryptedSecret.secretValue,
comment: decryptedSecret.secretComment,
skipMultilineEncoding: decryptedSecret.skipMultilineEncoding
comment: decryptedSecret.secretComment
};
});
@@ -1063,12 +1021,12 @@ export const secretServiceFactory = ({
await batchSecretsExpand(decryptedSecrets);
// expand imports by batch
await Promise.all(processedImports.map((processedImport) => batchSecretsExpand(processedImport.secrets)));
await Promise.all(decryptedImports.map((decryptedImport) => batchSecretsExpand(decryptedImport.secrets)));
}
return {
secrets: decryptedSecrets,
imports: processedImports
imports: decryptedImports
};
};
@@ -1151,6 +1109,9 @@ export const secretServiceFactory = ({
skipMultilineEncoding
});
await snapshotService.performSnapshot(secret.folderId);
await secretQueueService.syncSecrets({ secretPath, projectId, environment });
return decryptSecretRaw(secret, botKey);
};
@@ -1189,6 +1150,8 @@ export const secretServiceFactory = ({
});
await snapshotService.performSnapshot(secret.folderId);
await secretQueueService.syncSecrets({ secretPath, projectId, environment });
return decryptSecretRaw(secret, botKey);
};
@@ -1218,6 +1181,9 @@ export const secretServiceFactory = ({
actorAuthMethod
});
await snapshotService.performSnapshot(secret.folderId);
await secretQueueService.syncSecrets({ secretPath, projectId, environment });
return decryptSecretRaw(secret, botKey);
};
@@ -1266,6 +1232,9 @@ export const secretServiceFactory = ({
})
});
await snapshotService.performSnapshot(secrets[0].folderId);
await secretQueueService.syncSecrets({ secretPath, projectId, environment });
return secrets.map((secret) =>
decryptSecretRaw({ ...secret, workspace: projectId, environment, secretPath }, botKey)
);
@@ -1317,6 +1286,9 @@ export const secretServiceFactory = ({
})
});
await snapshotService.performSnapshot(secrets[0].folderId);
await secretQueueService.syncSecrets({ secretPath, projectId, environment });
return secrets.map((secret) =>
decryptSecretRaw({ ...secret, workspace: projectId, environment, secretPath }, botKey)
);
@@ -1350,6 +1322,9 @@ export const secretServiceFactory = ({
secrets: inputSecrets.map(({ secretKey }) => ({ secretName: secretKey, type: SecretType.Shared }))
});
await snapshotService.performSnapshot(secrets[0].folderId);
await secretQueueService.syncSecrets({ secretPath, projectId, environment });
return secrets.map((secret) =>
decryptSecretRaw({ ...secret, workspace: projectId, environment, secretPath }, botKey)
);
@@ -1473,12 +1448,7 @@ export const secretServiceFactory = ({
);
await snapshotService.performSnapshot(folder.id);
await secretQueueService.syncSecrets({
secretPath,
projectId: project.id,
environmentSlug: environment,
excludeReplication: true
});
await secretQueueService.syncSecrets({ secretPath, projectId: project.id, environment });
return {
...updatedSecret[0],
@@ -1580,12 +1550,7 @@ export const secretServiceFactory = ({
);
await snapshotService.performSnapshot(folder.id);
await secretQueueService.syncSecrets({
secretPath,
projectId: project.id,
environmentSlug: environment,
excludeReplication: true
});
await secretQueueService.syncSecrets({ secretPath, projectId: project.id, environment });
return {
...updatedSecret[0],
@@ -1659,6 +1624,12 @@ export const secretServiceFactory = ({
updateManySecretsRaw,
deleteManySecretsRaw,
getSecretVersions,
backfillSecretReferences
backfillSecretReferences,
// external services function
fnSecretBulkDelete,
fnSecretBulkUpdate,
fnSecretBlindIndexCheck,
fnSecretBulkInsert,
fnSecretBlindIndexCheckV2
};
};
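For readers following the import handling above: the longer variant of the processed-imports mapping de-duplicates secrets coming from different import levels, keeping the first occurrence of each key so values from more direct imports win. A minimal sketch of that pattern, with simplified types and hypothetical names (not the service code itself):

```ts
type ImportedSecret = { secretKey: string; secretValue: string };

// Keep only the first occurrence of each secretKey; earlier entries
// (more direct imports) take precedence over later (deeper) ones.
const dedupeByFirstOccurrence = (secrets: ImportedSecret[]): ImportedSecret[] => {
  const seen = new Set<string>();
  return secrets.filter((sec) => {
    if (seen.has(sec.secretKey)) return false;
    seen.add(sec.secretKey);
    return true;
  });
};

// dedupeByFirstOccurrence([
//   { secretKey: "DB_URL", secretValue: "direct" },
//   { secretKey: "DB_URL", secretValue: "deep" }
// ]) => [{ secretKey: "DB_URL", secretValue: "direct" }]
```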

View File

@@ -11,8 +11,6 @@ import { TSecretBlindIndexDALFactory } from "@app/services/secret-blind-index/se
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { TSecretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal";
import { ActorType } from "../auth/auth-type";
type TPartialSecret = Pick<TSecrets, "id" | "secretReminderRepeatDays" | "secretReminderNote">;
type TPartialInputSecret = Pick<TSecrets, "type" | "secretReminderNote" | "secretReminderRepeatDays" | "id">;
@@ -266,10 +264,6 @@ export type TFnSecretBulkDelete = {
inputSecrets: Array<{ type: SecretType; secretBlindIndex: string }>;
actorId: string;
tx?: Knex;
secretDAL: Pick<TSecretDALFactory, "deleteMany">;
secretQueueService: {
removeSecretReminder: (data: TRemoveSecretReminderDTO) => Promise<void>;
};
};
export type TFnSecretBlindIndexCheck = {
@@ -283,7 +277,6 @@ export type TFnSecretBlindIndexCheck = {
// when blind index is already present
export type TFnSecretBlindIndexCheckV2 = {
secretDAL: Pick<TSecretDALFactory, "findByBlindIndexes">;
folderId: string;
userId?: string;
inputSecrets: Array<{ secretBlindIndex: string; type?: SecretType }>;
@@ -370,27 +363,3 @@ export type TUpdateManySecretsRawFn = {
}[];
userId?: string;
};
export enum SecretOperations {
Create = "create",
Update = "update",
Delete = "delete"
}
export type TSyncSecretsDTO<T extends boolean = false> = {
_deDupeQueue?: Record<string, boolean>;
_deDupeReplicationQueue?: Record<string, boolean>;
_depth?: number;
secretPath: string;
projectId: string;
environmentSlug: string;
// cases for just doing sync integration and webhook
excludeReplication?: T;
} & (T extends true
? object
: {
actor: ActorType;
actorId: string;
// used for import creation to trigger replication
pickOnlyImportIds?: string[];
});
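To make the conditional shape of TSyncSecretsDTO above concrete, here is an illustrative usage with placeholder values, assuming the type and the ActorType import from this file are in scope: when excludeReplication is true the actor fields are not required, otherwise they are.

```ts
// Placeholder values for illustration only.
const integrationOnlySync: TSyncSecretsDTO<true> = {
  secretPath: "/",
  projectId: "project-id",
  environmentSlug: "dev",
  excludeReplication: true
};

const replicatedSync: TSyncSecretsDTO = {
  secretPath: "/",
  projectId: "project-id",
  environmentSlug: "dev",
  actor: ActorType.SERVICE, // ActorType comes from ../auth/auth-type in this file
  actorId: "actor-id"
};
```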

View File

@@ -89,7 +89,6 @@ export const secretVersionDALFactory = (db: TDbClient) => {
const findLatestVersionMany = async (folderId: string, secretIds: string[], tx?: Knex) => {
try {
if (!secretIds.length) return {};
const docs: Array<TSecretVersions & { max: number }> = await (tx || db)(TableName.SecretVersion)
.where("folderId", folderId)
.whereIn(`${TableName.SecretVersion}.secretId`, secretIds)

View File

@@ -41,8 +41,21 @@ export enum SmtpHost {
Office365 = "smtp.office365.com"
}
export const getTlsOption = (host?: SmtpHost | string, secure?: boolean) => {
if (!secure) return { secure: false };
if (!host) return { secure: true };
if ((host as SmtpHost) === SmtpHost.Sendgrid) {
return { secure: true, port: 465 }; // more details here https://nodemailer.com/smtp/
}
if (host.includes("amazonaws.com")) {
return { tls: { ciphers: "TLSv1.2" } };
}
return { requireTLS: true, tls: { ciphers: "TLSv1.2" } };
};
export const smtpServiceFactory = (cfg: TSmtpConfig) => {
const smtp = createTransport(cfg);
const smtp = createTransport({ ...cfg, ...getTlsOption(cfg.host, cfg.secure) });
const isSmtpOn = Boolean(cfg.host);
const sendMail = async ({ substitutions, recipients, template, subjectLine }: TSmtpSendMail) => {

View File

@@ -21,7 +21,6 @@ type TUserServiceFactoryDep = {
| "findOneUserAction"
| "createUserAction"
| "findUserEncKeyByUserId"
| "delete"
>;
userAliasDAL: Pick<TUserAliasDALFactory, "find" | "insertMany">;
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "find" | "insertMany">;
@@ -86,7 +85,7 @@ export const userServiceFactory = ({
tx
);
// check if there are verified users with the same email.
// check if there are users with the same email.
const users = await userDAL.find(
{
email,
@@ -135,15 +134,6 @@ export const userServiceFactory = ({
);
}
} else {
await userDAL.delete(
{
email,
isAccepted: false,
isEmailVerified: false
},
tx
);
// update current user's username to [email]
await userDAL.updateById(
user.id,

cli/.gitignore
View File

@@ -1,4 +1,3 @@
.infisical.json
dist/
agent-config.test.yaml
.test.env

View File

@@ -3,9 +3,7 @@ module github.com/Infisical/infisical-merge
go 1.21
require (
github.com/bradleyjkemp/cupaloy/v2 v2.8.0
github.com/charmbracelet/lipgloss v0.5.0
github.com/creack/pty v1.1.21
github.com/denisbrodbeck/machineid v1.0.1
github.com/fatih/semgroup v1.2.0
github.com/gitleaks/go-gitdiff v0.8.0
@@ -31,6 +29,7 @@ require (
require (
github.com/alessio/shellescape v1.4.1 // indirect
github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef // indirect
github.com/bradleyjkemp/cupaloy/v2 v2.8.0 // indirect
github.com/chzyer/readline v1.5.1 // indirect
github.com/danieljoos/wincred v1.2.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect

View File

@@ -74,8 +74,6 @@ github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSV
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/creack/pty v1.1.21 h1:1/QdRyBaHHJP61QkWMXlOIBfsgdDeeKfK8SYVUWJKf0=
github.com/creack/pty v1.1.21/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4=
github.com/danieljoos/wincred v1.2.0 h1:ozqKHaLK0W/ii4KVbbvluM91W2H3Sh0BncbUNPS7jLE=
github.com/danieljoos/wincred v1.2.0/go.mod h1:FzQLLMKBFdvu+osBrnFODiv32YGwCfx0SkRa/eYHgec=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=

View File

@@ -4,8 +4,6 @@ import (
"fmt"
"net/http"
"os"
"github.com/Infisical/infisical-merge/packages/config"
)
func GetHomeDir() (string, error) {
@@ -23,7 +21,7 @@ func WriteToFile(fileName string, dataToWrite []byte, filePerm os.FileMode) erro
return nil
}
func ValidateInfisicalAPIConnection() (ok bool) {
_, err := http.Get(fmt.Sprintf("%v/status", config.INFISICAL_URL))
func CheckIsConnectedToInternet() (ok bool) {
_, err := http.Get("http://clients3.google.com/generate_204")
return err == nil
}

View File

@@ -307,33 +307,32 @@ func FilterSecretsByTag(plainTextSecrets []models.SingleEnvironmentVariable, tag
}
func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectConfigFilePath string) ([]models.SingleEnvironmentVariable, error) {
isConnected := CheckIsConnectedToInternet()
var secretsToReturn []models.SingleEnvironmentVariable
// var serviceTokenDetails api.GetServiceTokenDetailsResponse
var errorToReturn error
if params.InfisicalToken == "" && params.UniversalAuthAccessToken == "" {
if projectConfigFilePath == "" {
RequireLocalWorkspaceFile()
} else {
ValidateWorkspaceFile(projectConfigFilePath)
}
if isConnected {
log.Debug().Msg("GetAllEnvironmentVariables: Connected to internet, checking logged in creds")
RequireLogin()
if projectConfigFilePath == "" {
RequireLocalWorkspaceFile()
} else {
ValidateWorkspaceFile(projectConfigFilePath)
}
RequireLogin()
}
log.Debug().Msg("GetAllEnvironmentVariables: Trying to fetch secrets using logged in details")
loggedInUserDetails, err := GetCurrentLoggedInUserDetails()
isConnected := ValidateInfisicalAPIConnection()
if isConnected {
log.Debug().Msg("GetAllEnvironmentVariables: Connected to Infisical instance, checking logged in creds")
}
if err != nil {
return nil, err
}
if isConnected && loggedInUserDetails.LoginExpired {
if loggedInUserDetails.LoginExpired {
PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again")
}
@@ -365,12 +364,12 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectCo
backupSecretsEncryptionKey := []byte(loggedInUserDetails.UserCredentials.PrivateKey)[0:32]
if errorToReturn == nil {
WriteBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, params.SecretsPath, backupSecretsEncryptionKey, secretsToReturn)
WriteBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, backupSecretsEncryptionKey, secretsToReturn)
}
// only attempt to serve cached secrets if no internet connection and if at least one secret cached
if !isConnected {
backedSecrets, err := ReadBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, params.SecretsPath, backupSecretsEncryptionKey)
backedSecrets, err := ReadBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, backupSecretsEncryptionKey)
if len(backedSecrets) > 0 {
PrintWarning("Unable to fetch latest secret(s) due to connection error, serving secrets from last successful fetch. For more info, run with --debug")
secretsToReturn = backedSecrets
@@ -635,9 +634,8 @@ func GetPlainTextSecrets(key []byte, encryptedSecrets []api.EncryptedSecretV3) (
return plainTextSecrets, nil
}
func WriteBackupSecrets(workspace string, environment string, secretsPath string, encryptionKey []byte, secrets []models.SingleEnvironmentVariable) error {
formattedPath := strings.ReplaceAll(secretsPath, "/", "-")
fileName := fmt.Sprintf("secrets_%s_%s_%s", workspace, environment, formattedPath)
func WriteBackupSecrets(workspace string, environment string, encryptionKey []byte, secrets []models.SingleEnvironmentVariable) error {
fileName := fmt.Sprintf("secrets_%s_%s", workspace, environment)
secrets_backup_folder_name := "secrets-backup"
_, fullConfigFileDirPath, err := GetFullConfigFilePath()
@@ -674,9 +672,8 @@ func WriteBackupSecrets(workspace string, environment string, secretsPath string
return nil
}
func ReadBackupSecrets(workspace string, environment string, secretsPath string, encryptionKey []byte) ([]models.SingleEnvironmentVariable, error) {
formattedPath := strings.ReplaceAll(secretsPath, "/", "-")
fileName := fmt.Sprintf("secrets_%s_%s_%s", workspace, environment, formattedPath)
func ReadBackupSecrets(workspace string, environment string, encryptionKey []byte) ([]models.SingleEnvironmentVariable, error) {
fileName := fmt.Sprintf("secrets_%s_%s", workspace, environment)
secrets_backup_folder_name := "secrets-backup"
_, fullConfigFileDirPath, err := GetFullConfigFilePath()

View File

@@ -1,23 +0,0 @@
#!/bin/bash
TEST_ENV_FILE=".test.env"
# Check if the .env file exists
if [ ! -f "$TEST_ENV_FILE" ]; then
echo "$TEST_ENV_FILE does not exist."
exit 1
fi
# Export the variables
while IFS= read -r line
do
# Skip empty lines and lines starting with #
if [[ -z "$line" || "$line" =~ ^\# ]]; then
continue
fi
# Read the key-value pair
IFS='=' read -r key value <<< "$line"
eval export $key=\$value
done < "$TEST_ENV_FILE"
echo "Test environment variables set."

View File

@@ -1,7 +0,0 @@
┌───────────────┬──────────────┬─────────────┐
│ SECRET NAME │ SECRET VALUE │ SECRET TYPE │
├───────────────┼──────────────┼─────────────┤
│ TEST-SECRET-1 │ test-value-1 │ shared │
│ TEST-SECRET-2 │ test-value-2 │ shared │
│ TEST-SECRET-3 │ test-value-3 │ shared │
└───────────────┴──────────────┴─────────────┘

View File

@@ -1,8 +0,0 @@
Warning: Unable to fetch latest secret(s) due to connection error, serving secrets from last successful fetch. For more info, run with --debug
┌───────────────┬──────────────┬─────────────┐
│ SECRET NAME │ SECRET VALUE │ SECRET TYPE │
├───────────────┼──────────────┼─────────────┤
│ TEST-SECRET-1 │ test-value-1 │ shared │
│ TEST-SECRET-2 │ test-value-2 │ shared │
│ TEST-SECRET-3 │ test-value-3 │ shared │
└───────────────┴──────────────┴─────────────┘

View File

@@ -8,6 +8,7 @@ import (
func TestUniversalAuth_ExportSecretsWithImports(t *testing.T) {
MachineIdentityLoginCmd(t)
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "export", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent")
@@ -23,6 +24,8 @@ func TestUniversalAuth_ExportSecretsWithImports(t *testing.T) {
}
func TestServiceToken_ExportSecretsWithImports(t *testing.T) {
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "export", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent")
if err != nil {
@@ -38,6 +41,8 @@ func TestServiceToken_ExportSecretsWithImports(t *testing.T) {
func TestUniversalAuth_ExportSecretsWithoutImports(t *testing.T) {
MachineIdentityLoginCmd(t)
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "export", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--include-imports=false")
if err != nil {
@@ -52,6 +57,8 @@ func TestUniversalAuth_ExportSecretsWithoutImports(t *testing.T) {
}
func TestServiceToken_ExportSecretsWithoutImports(t *testing.T) {
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "export", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--include-imports=false")
if err != nil {

View File

@@ -2,10 +2,10 @@ package tests
import (
"fmt"
"log"
"os"
"os/exec"
"strings"
"testing"
)
const (
@@ -23,8 +23,6 @@ type Credentials struct {
ServiceToken string
ProjectID string
EnvSlug string
UserEmail string
UserPassword string
}
var creds = Credentials{
@@ -34,21 +32,18 @@ var creds = Credentials{
ServiceToken: os.Getenv("CLI_TESTS_SERVICE_TOKEN"),
ProjectID: os.Getenv("CLI_TESTS_PROJECT_ID"),
EnvSlug: os.Getenv("CLI_TESTS_ENV_SLUG"),
UserEmail: os.Getenv("CLI_TESTS_USER_EMAIL"),
UserPassword: os.Getenv("CLI_TESTS_USER_PASSWORD"),
}
func ExecuteCliCommand(command string, args ...string) (string, error) {
cmd := exec.Command(command, args...)
output, err := cmd.CombinedOutput()
if err != nil {
fmt.Println(fmt.Sprint(err) + ": " + string(output))
return strings.TrimSpace(string(output)), err
}
return strings.TrimSpace(string(output)), nil
}
func SetupCli() {
func SetupCli(t *testing.T) {
if creds.ClientID == "" || creds.ClientSecret == "" || creds.ServiceToken == "" || creds.ProjectID == "" || creds.EnvSlug == "" {
panic("Missing required environment variables")
@@ -62,7 +57,7 @@ func SetupCli() {
if !alreadyBuilt {
if err := exec.Command("go", "build", "../.").Run(); err != nil {
log.Fatal(err)
t.Fatal(err)
}
}

View File

@@ -1,124 +1,14 @@
package tests
import (
"log"
"os/exec"
"strings"
"testing"
"github.com/creack/pty"
"github.com/stretchr/testify/assert"
)
func UserInitCmd() {
c := exec.Command(FORMATTED_CLI_NAME, "init")
ptmx, err := pty.Start(c)
if err != nil {
log.Fatalf("error running CLI command: %v", err)
}
defer func() { _ = ptmx.Close() }()
stepChan := make(chan int, 10)
go func() {
buf := make([]byte, 1024)
step := -1
for {
n, err := ptmx.Read(buf)
if n > 0 {
terminalOut := string(buf)
if strings.Contains(terminalOut, "Which Infisical organization would you like to select a project from?") && step < 0 {
step += 1
stepChan <- step
} else if strings.Contains(terminalOut, "Which of your Infisical projects would you like to connect this project to?") && step < 1 {
step += 1;
stepChan <- step
}
}
if err != nil {
close(stepChan)
return
}
}
}()
for i := range stepChan {
switch i {
case 0:
ptmx.Write([]byte("\n"))
case 1:
ptmx.Write([]byte("\n"))
}
}
}
func UserLoginCmd() {
// set vault to file because CI has no keyring
vaultCmd := exec.Command(FORMATTED_CLI_NAME, "vault", "set", "file")
_, err := vaultCmd.Output()
if err != nil {
log.Fatalf("error setting vault: %v", err)
}
// Start programmatic interaction with CLI
c := exec.Command(FORMATTED_CLI_NAME, "login", "--interactive")
ptmx, err := pty.Start(c)
if err != nil {
log.Fatalf("error running CLI command: %v", err)
}
defer func() { _ = ptmx.Close() }()
stepChan := make(chan int, 10)
go func() {
buf := make([]byte, 1024)
step := -1
for {
n, err := ptmx.Read(buf)
if n > 0 {
terminalOut := string(buf)
if strings.Contains(terminalOut, "Infisical Cloud") && step < 0 {
step += 1;
stepChan <- step
} else if strings.Contains(terminalOut, "Email") && step < 1 {
step += 1;
stepChan <- step
} else if strings.Contains(terminalOut, "Password") && step < 2 {
step += 1;
stepChan <- step
} else if strings.Contains(terminalOut, "Infisical organization") && step < 3 {
step += 1;
stepChan <- step
} else if strings.Contains(terminalOut, "Enter passphrase") && step < 4 {
step += 1;
stepChan <- step
}
}
if err != nil {
close(stepChan)
return
}
}
}()
for i := range stepChan {
switch i {
case 0:
ptmx.Write([]byte("\n"))
case 1:
ptmx.Write([]byte(creds.UserEmail))
ptmx.Write([]byte("\n"))
case 2:
ptmx.Write([]byte(creds.UserPassword))
ptmx.Write([]byte("\n"))
case 3:
ptmx.Write([]byte("\n"))
}
}
}
func MachineIdentityLoginCmd(t *testing.T) {
SetupCli(t)
if creds.UAAccessToken != "" {
return
}

View File

@@ -1,23 +0,0 @@
package tests
import (
"fmt"
"os"
"testing"
)
func TestMain(m *testing.M) {
// Setup
fmt.Println("Setting up CLI...")
SetupCli()
fmt.Println("Performing user login...")
UserLoginCmd()
fmt.Println("Performing infisical init...")
UserInitCmd()
// Run the tests
code := m.Run()
// Exit
os.Exit(code)
}

Some files were not shown because too many files have changed in this diff.