Mirror of https://github.com/Infisical/infisical.git (synced 2025-09-04 07:35:30 +00:00)

Compare commits: misc/add-o... → misc/remov... (87 commits)
Commits:
98371f99e7 ddfc645cdd f4d9c61404 5342c85696 b05f3e0f1f 9a2645b511
cb664bb042 07db1d826b 74db1b75b4 d7023881e5 b74595cf35 a45453629c
f7626d03bf bc14153bb3 935a3cb036 148a29db19 b12de3e4f5 661e5ec462
5cca51d711 9e9b9a7b94 df1ffcf934 0ef7eacd0e 776822d7d5 fe9af20d8c
398a8f363d ce5dbca6e2 ed5a7d72ab 3ac6b7be65 10601b5afd 8eec08356b
0b4d4c008a ae953add3d 5960a899ba ea98a0096d b8f65fc91a 06a4e68ac1
9cbf9a675a 178ddf1fb9 030d4fe152 46abda9041 c976a5ccba 1eb9ea9c74
7d7612aaf4 f570b3b2ee 0b8f6878fe 758a9211ab 0bb2b2887b eeb0111bbe
d12c538511 6f67346b2a a93db44bbd 1ddacfda62 5a1e43be44 04f54479cd
351d0d0662 5a01edae7a 506e86d666 11d9166684 1859557f90 59fc34412d
1b2a1f2339 15b4c397ab fc27ad4575 b7467a83ab 3baf434230 b2d6563994
cfba8f53e3 3537a5eb9b d5b17a8f24 d6881e2e68 92a663a17d b3463e0d0f
c460f22665 db39d03713 9daa5badec e1ed37c713 8eea82a1a0 694d0e3ed3
58f6c6b409 98a15a901e 1c2698f533 5d59fe8810 90eed8d39b f5974ce9ad
c6b51af4b1 c13c37fc77 259c01c110
@@ -105,6 +105,13 @@ jobs:
     environment:
       name: Production
     steps:
+      - uses: twingate/github-action@v1
+        with:
+          # The Twingate Service Key used to connect Twingate to the proper service
+          # Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
+          #
+          # Required
+          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
       - name: Checkout code
         uses: actions/checkout@v2
       - name: Setup Node.js environment
.github/workflows/check-migration-file-edited.yml (vendored, new file, 25 lines)
@@ -0,0 +1,25 @@
+name: Check migration file edited
+
+on:
+  pull_request:
+    types: [opened, synchronize]
+    paths:
+      - 'backend/src/db/migrations/**'
+
+jobs:
+  rename:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check out repository
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - name: Check any migration files are modified, renamed or duplicated.
+        run: |
+          git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^M\|^R\|^C' || true | cut -f2 | xargs -r -n1 basename > edited_files.txt
+          if [ -s edited_files.txt ]; then
+            echo "Exiting migration files cannot be modified."
+            cat edited_files.txt
+            exit 1
+          fi
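For local testing, a rough TypeScript equivalent of the check above (a sketch, not part of the repository; assumes git is on PATH and the working directory is the repo root). One caveat worth noting: in POSIX shells `||` binds looser than `|`, so the scripted `grep ... || true | cut ...` actually groups as `grep ... || (true | cut ...)`; the sketch applies the intended filter explicitly.

// Hypothetical local equivalent of the migration-edit check (illustrative only).
import { execSync } from "child_process";
import path from "path";

const diff = execSync("git diff --name-status HEAD^ HEAD -- backend/src/db/migrations", {
  encoding: "utf8"
});

// Mirror grep '^M\|^R\|^C': keep modified, renamed and copied entries.
const edited = diff
  .split("\n")
  .filter((line) => /^[MRC]/.test(line))
  .map((line) => path.basename(line.split("\t")[1] ?? ""));

if (edited.length > 0) {
  console.error("Migration files cannot be modified:");
  for (const file of edited) console.error(file);
  process.exit(1);
}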
@@ -19,18 +19,16 @@ jobs:

       - name: Get list of newly added files in migration folder
         run: |
-          git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^A' | cut -f2 | xargs -n1 basename > added_files.txt
+          git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^A' || true | cut -f2 | xargs -r -n1 basename > added_files.txt
           if [ ! -s added_files.txt ]; then
             echo "No new files added. Skipping"
             echo "SKIP_RENAME=true" >> $GITHUB_ENV
             exit 0
           fi

       - name: Script to rename migrations
         if: env.SKIP_RENAME != 'true'
         run: python .github/resources/rename_migration_files.py

       - name: Commit and push changes
         if: env.SKIP_RENAME != 'true'
         run: |
           git config user.name github-actions
           git config user.email github-actions@github.com
@@ -5,3 +5,4 @@ frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/M
 frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:292
 docs/self-hosting/configuration/envars.mdx:generic-api-key:106
 frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
+docs/mint.json:generic-api-key:651
@@ -3,7 +3,6 @@ import "ts-node/register";
 
 import dotenv from "dotenv";
 import jwt from "jsonwebtoken";
-import knex from "knex";
 import path from "path";
 
 import { seedData1 } from "@app/db/seed-data";
@@ -15,6 +14,7 @@ import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
 import { mockQueue } from "./mocks/queue";
 import { mockSmtpServer } from "./mocks/smtp";
 import { mockKeyStore } from "./mocks/keystore";
+import { initDbConnection } from "@app/db";
 
 dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
 export default {
@@ -23,23 +23,21 @@ export default {
   async setup() {
     const logger = await initLogger();
     const cfg = initEnvConfig(logger);
-    const db = knex({
-      client: "pg",
-      connection: cfg.DB_CONNECTION_URI,
-      migrations: {
-        directory: path.join(__dirname, "../src/db/migrations"),
-        extension: "ts",
-        tableName: "infisical_migrations"
-      },
-      seeds: {
-        directory: path.join(__dirname, "../src/db/seeds"),
-        extension: "ts"
-      }
-    });
+    const db = initDbConnection({
+      dbConnectionUri: cfg.DB_CONNECTION_URI,
+      dbRootCert: cfg.DB_ROOT_CERT
+    });
 
     try {
-      await db.migrate.latest();
-      await db.seed.run();
+      await db.migrate.latest({
+        directory: path.join(__dirname, "../src/db/migrations"),
+        extension: "ts",
+        tableName: "infisical_migrations"
+      });
+      await db.seed.run({
+        directory: path.join(__dirname, "../src/db/seeds"),
+        extension: "ts"
+      });
       const smtp = mockSmtpServer();
       const queue = mockQueue();
       const keyStore = mockKeyStore();
@@ -74,7 +72,14 @@ export default {
       // @ts-expect-error type
       delete globalThis.jwtToken;
       // called after all tests with this env have been run
-      await db.migrate.rollback({}, true);
+      await db.migrate.rollback(
+        {
+          directory: path.join(__dirname, "../src/db/migrations"),
+          extension: "ts",
+          tableName: "infisical_migrations"
+        },
+        true
+      );
       await db.destroy();
     }
   };
backend/package-lock.json (generated, 1571 lines changed; diff suppressed because it is too large)
@@ -72,6 +72,7 @@
   "dependencies": {
     "@aws-sdk/client-iam": "^3.525.0",
     "@aws-sdk/client-secrets-manager": "^3.504.0",
+    "@aws-sdk/client-sts": "^3.600.0",
     "@casl/ability": "^6.5.0",
     "@fastify/cookie": "^9.3.1",
     "@fastify/cors": "^8.5.0",
@@ -133,6 +134,7 @@
     "posthog-node": "^3.6.2",
     "probot": "^13.0.0",
     "smee-client": "^2.0.0",
+    "tedious": "^18.2.1",
     "tweetnacl": "^1.0.3",
     "tweetnacl-util": "^0.15.1",
     "uuid": "^9.0.1",
@@ -2,13 +2,14 @@
 import { execSync } from "child_process";
 import path from "path";
 import promptSync from "prompt-sync";
+import slugify from "@sindresorhus/slugify"
 
 const prompt = promptSync({ sigint: true });
 
 const migrationName = prompt("Enter name for migration: ");
 
 // Remove spaces from migration name and replace with hyphens
-const formattedMigrationName = migrationName.replace(/\s+/g, "-");
+const formattedMigrationName = slugify(migrationName);
 
 execSync(
   `npx knex migrate:make --knexfile ${path.join(__dirname, "../src/db/knexfile.ts")} -x ts ${formattedMigrationName}`,
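A quick illustration of what the switch buys, assuming @sindresorhus/slugify's default options: the old regex only swapped whitespace for hyphens, while slugify also lowercases and strips punctuation, so migration filenames come out uniformly shaped.

import slugify from "@sindresorhus/slugify";

const name = "Add AWS  Assume-Role columns!";

console.log(name.replace(/\s+/g, "-")); // "Add-AWS-Assume-Role-columns!"
console.log(slugify(name));             // "add-aws-assume-role-columns"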
backend/src/@types/knex.d.ts (vendored, 239 lines changed)
@@ -1,4 +1,4 @@
-import { Knex } from "knex";
+import { Knex as KnexOriginal } from "knex";
 
 import {
   TableName,
@@ -280,318 +280,371 @@ import {
   TWebhooksUpdate
 } from "@app/db/schemas";
 
+declare module "knex" {
+  namespace Knex {
+    interface QueryInterface {
+      primaryNode(): KnexOriginal;
+      replicaNode(): KnexOriginal;
+    }
+  }
+}
+
 declare module "knex/types/tables" {
   interface Tables {
-    [TableName.Users]: Knex.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
-    [TableName.Groups]: Knex.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
-    [TableName.CertificateAuthority]: Knex.CompositeTableType<
+    [TableName.Users]: KnexOriginal.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
+    [TableName.Groups]: KnexOriginal.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
+    [TableName.CertificateAuthority]: KnexOriginal.CompositeTableType<
       TCertificateAuthorities,
       TCertificateAuthoritiesInsert,
       TCertificateAuthoritiesUpdate
     >;
-    [TableName.CertificateAuthorityCert]: Knex.CompositeTableType<
+    [TableName.CertificateAuthorityCert]: KnexOriginal.CompositeTableType<
       TCertificateAuthorityCerts,
      TCertificateAuthorityCertsInsert,
       TCertificateAuthorityCertsUpdate
     >;
-    [TableName.CertificateAuthoritySecret]: Knex.CompositeTableType<
+    [TableName.CertificateAuthoritySecret]: KnexOriginal.CompositeTableType<
       TCertificateAuthoritySecret,
       TCertificateAuthoritySecretInsert,
       TCertificateAuthoritySecretUpdate
     >;
-    [TableName.CertificateAuthorityCrl]: Knex.CompositeTableType<
+    [TableName.CertificateAuthorityCrl]: KnexOriginal.CompositeTableType<
      TCertificateAuthorityCrl,
       TCertificateAuthorityCrlInsert,
       TCertificateAuthorityCrlUpdate
     >;
-    [TableName.Certificate]: Knex.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
-    [TableName.CertificateBody]: Knex.CompositeTableType<
+    [TableName.Certificate]: KnexOriginal.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
+    [TableName.CertificateBody]: KnexOriginal.CompositeTableType<
       TCertificateBodies,
       TCertificateBodiesInsert,
       TCertificateBodiesUpdate
     >;
-    [TableName.CertificateSecret]: Knex.CompositeTableType<
+    [TableName.CertificateSecret]: KnexOriginal.CompositeTableType<
       TCertificateSecrets,
       TCertificateSecretsInsert,
       TCertificateSecretsUpdate
     >;
-    [TableName.UserGroupMembership]: Knex.CompositeTableType<
+    [TableName.UserGroupMembership]: KnexOriginal.CompositeTableType<
       TUserGroupMembership,
       TUserGroupMembershipInsert,
       TUserGroupMembershipUpdate
     >;
-    [TableName.GroupProjectMembership]: Knex.CompositeTableType<
+    [TableName.GroupProjectMembership]: KnexOriginal.CompositeTableType<
       TGroupProjectMemberships,
       TGroupProjectMembershipsInsert,
       TGroupProjectMembershipsUpdate
     >;
-    [TableName.GroupProjectMembershipRole]: Knex.CompositeTableType<
+    [TableName.GroupProjectMembershipRole]: KnexOriginal.CompositeTableType<
       TGroupProjectMembershipRoles,
       TGroupProjectMembershipRolesInsert,
       TGroupProjectMembershipRolesUpdate
     >;
-    [TableName.UserAliases]: Knex.CompositeTableType<TUserAliases, TUserAliasesInsert, TUserAliasesUpdate>;
-    [TableName.UserEncryptionKey]: Knex.CompositeTableType<
+    [TableName.UserAliases]: KnexOriginal.CompositeTableType<TUserAliases, TUserAliasesInsert, TUserAliasesUpdate>;
+    [TableName.UserEncryptionKey]: KnexOriginal.CompositeTableType<
       TUserEncryptionKeys,
       TUserEncryptionKeysInsert,
       TUserEncryptionKeysUpdate
     >;
-    [TableName.AuthTokens]: Knex.CompositeTableType<TAuthTokens, TAuthTokensInsert, TAuthTokensUpdate>;
-    [TableName.AuthTokenSession]: Knex.CompositeTableType<
+    [TableName.AuthTokens]: KnexOriginal.CompositeTableType<TAuthTokens, TAuthTokensInsert, TAuthTokensUpdate>;
+    [TableName.AuthTokenSession]: KnexOriginal.CompositeTableType<
       TAuthTokenSessions,
       TAuthTokenSessionsInsert,
       TAuthTokenSessionsUpdate
     >;
-    [TableName.BackupPrivateKey]: Knex.CompositeTableType<
+    [TableName.BackupPrivateKey]: KnexOriginal.CompositeTableType<
       TBackupPrivateKey,
       TBackupPrivateKeyInsert,
       TBackupPrivateKeyUpdate
     >;
-    [TableName.Organization]: Knex.CompositeTableType<TOrganizations, TOrganizationsInsert, TOrganizationsUpdate>;
-    [TableName.OrgMembership]: Knex.CompositeTableType<TOrgMemberships, TOrgMembershipsInsert, TOrgMembershipsUpdate>;
-    [TableName.OrgRoles]: Knex.CompositeTableType<TOrgRoles, TOrgRolesInsert, TOrgRolesUpdate>;
-    [TableName.IncidentContact]: Knex.CompositeTableType<
+    [TableName.Organization]: KnexOriginal.CompositeTableType<
+      TOrganizations,
+      TOrganizationsInsert,
+      TOrganizationsUpdate
+    >;
+    [TableName.OrgMembership]: KnexOriginal.CompositeTableType<
+      TOrgMemberships,
+      TOrgMembershipsInsert,
+      TOrgMembershipsUpdate
+    >;
+    [TableName.OrgRoles]: KnexOriginal.CompositeTableType<TOrgRoles, TOrgRolesInsert, TOrgRolesUpdate>;
+    [TableName.IncidentContact]: KnexOriginal.CompositeTableType<
      TIncidentContacts,
       TIncidentContactsInsert,
       TIncidentContactsUpdate
     >;
-    [TableName.UserAction]: Knex.CompositeTableType<TUserActions, TUserActionsInsert, TUserActionsUpdate>;
-    [TableName.SuperAdmin]: Knex.CompositeTableType<TSuperAdmin, TSuperAdminInsert, TSuperAdminUpdate>;
-    [TableName.ApiKey]: Knex.CompositeTableType<TApiKeys, TApiKeysInsert, TApiKeysUpdate>;
-    [TableName.Project]: Knex.CompositeTableType<TProjects, TProjectsInsert, TProjectsUpdate>;
-    [TableName.ProjectMembership]: Knex.CompositeTableType<
+    [TableName.UserAction]: KnexOriginal.CompositeTableType<TUserActions, TUserActionsInsert, TUserActionsUpdate>;
+    [TableName.SuperAdmin]: KnexOriginal.CompositeTableType<TSuperAdmin, TSuperAdminInsert, TSuperAdminUpdate>;
+    [TableName.ApiKey]: KnexOriginal.CompositeTableType<TApiKeys, TApiKeysInsert, TApiKeysUpdate>;
+    [TableName.Project]: KnexOriginal.CompositeTableType<TProjects, TProjectsInsert, TProjectsUpdate>;
+    [TableName.ProjectMembership]: KnexOriginal.CompositeTableType<
       TProjectMemberships,
       TProjectMembershipsInsert,
       TProjectMembershipsUpdate
     >;
-    [TableName.Environment]: Knex.CompositeTableType<
+    [TableName.Environment]: KnexOriginal.CompositeTableType<
       TProjectEnvironments,
       TProjectEnvironmentsInsert,
       TProjectEnvironmentsUpdate
     >;
-    [TableName.ProjectBot]: Knex.CompositeTableType<TProjectBots, TProjectBotsInsert, TProjectBotsUpdate>;
-    [TableName.ProjectUserMembershipRole]: Knex.CompositeTableType<
+    [TableName.ProjectBot]: KnexOriginal.CompositeTableType<TProjectBots, TProjectBotsInsert, TProjectBotsUpdate>;
+    [TableName.ProjectUserMembershipRole]: KnexOriginal.CompositeTableType<
       TProjectUserMembershipRoles,
       TProjectUserMembershipRolesInsert,
       TProjectUserMembershipRolesUpdate
     >;
-    [TableName.ProjectRoles]: Knex.CompositeTableType<TProjectRoles, TProjectRolesInsert, TProjectRolesUpdate>;
-    [TableName.ProjectUserAdditionalPrivilege]: Knex.CompositeTableType<
+    [TableName.ProjectRoles]: KnexOriginal.CompositeTableType<TProjectRoles, TProjectRolesInsert, TProjectRolesUpdate>;
+    [TableName.ProjectUserAdditionalPrivilege]: KnexOriginal.CompositeTableType<
       TProjectUserAdditionalPrivilege,
       TProjectUserAdditionalPrivilegeInsert,
       TProjectUserAdditionalPrivilegeUpdate
     >;
-    [TableName.ProjectKeys]: Knex.CompositeTableType<TProjectKeys, TProjectKeysInsert, TProjectKeysUpdate>;
-    [TableName.Secret]: Knex.CompositeTableType<TSecrets, TSecretsInsert, TSecretsUpdate>;
-    [TableName.SecretReference]: Knex.CompositeTableType<
+    [TableName.ProjectKeys]: KnexOriginal.CompositeTableType<TProjectKeys, TProjectKeysInsert, TProjectKeysUpdate>;
+    [TableName.Secret]: KnexOriginal.CompositeTableType<TSecrets, TSecretsInsert, TSecretsUpdate>;
+    [TableName.SecretReference]: KnexOriginal.CompositeTableType<
       TSecretReferences,
       TSecretReferencesInsert,
       TSecretReferencesUpdate
     >;
-    [TableName.SecretBlindIndex]: Knex.CompositeTableType<
+    [TableName.SecretBlindIndex]: KnexOriginal.CompositeTableType<
       TSecretBlindIndexes,
       TSecretBlindIndexesInsert,
       TSecretBlindIndexesUpdate
     >;
-    [TableName.SecretVersion]: Knex.CompositeTableType<TSecretVersions, TSecretVersionsInsert, TSecretVersionsUpdate>;
-    [TableName.SecretFolder]: Knex.CompositeTableType<TSecretFolders, TSecretFoldersInsert, TSecretFoldersUpdate>;
-    [TableName.SecretFolderVersion]: Knex.CompositeTableType<
+    [TableName.SecretVersion]: KnexOriginal.CompositeTableType<
+      TSecretVersions,
+      TSecretVersionsInsert,
+      TSecretVersionsUpdate
+    >;
+    [TableName.SecretFolder]: KnexOriginal.CompositeTableType<
+      TSecretFolders,
+      TSecretFoldersInsert,
+      TSecretFoldersUpdate
+    >;
+    [TableName.SecretFolderVersion]: KnexOriginal.CompositeTableType<
       TSecretFolderVersions,
       TSecretFolderVersionsInsert,
       TSecretFolderVersionsUpdate
     >;
-    [TableName.SecretSharing]: Knex.CompositeTableType<TSecretSharing, TSecretSharingInsert, TSecretSharingUpdate>;
-    [TableName.RateLimit]: Knex.CompositeTableType<TRateLimit, TRateLimitInsert, TRateLimitUpdate>;
-    [TableName.SecretTag]: Knex.CompositeTableType<TSecretTags, TSecretTagsInsert, TSecretTagsUpdate>;
-    [TableName.SecretImport]: Knex.CompositeTableType<TSecretImports, TSecretImportsInsert, TSecretImportsUpdate>;
-    [TableName.Integration]: Knex.CompositeTableType<TIntegrations, TIntegrationsInsert, TIntegrationsUpdate>;
-    [TableName.Webhook]: Knex.CompositeTableType<TWebhooks, TWebhooksInsert, TWebhooksUpdate>;
-    [TableName.ServiceToken]: Knex.CompositeTableType<TServiceTokens, TServiceTokensInsert, TServiceTokensUpdate>;
-    [TableName.IntegrationAuth]: Knex.CompositeTableType<
+    [TableName.SecretSharing]: KnexOriginal.CompositeTableType<
+      TSecretSharing,
+      TSecretSharingInsert,
+      TSecretSharingUpdate
+    >;
+    [TableName.RateLimit]: KnexOriginal.CompositeTableType<TRateLimit, TRateLimitInsert, TRateLimitUpdate>;
+    [TableName.SecretTag]: KnexOriginal.CompositeTableType<TSecretTags, TSecretTagsInsert, TSecretTagsUpdate>;
+    [TableName.SecretImport]: KnexOriginal.CompositeTableType<
+      TSecretImports,
+      TSecretImportsInsert,
+      TSecretImportsUpdate
+    >;
+    [TableName.Integration]: KnexOriginal.CompositeTableType<TIntegrations, TIntegrationsInsert, TIntegrationsUpdate>;
+    [TableName.Webhook]: KnexOriginal.CompositeTableType<TWebhooks, TWebhooksInsert, TWebhooksUpdate>;
+    [TableName.ServiceToken]: KnexOriginal.CompositeTableType<
+      TServiceTokens,
+      TServiceTokensInsert,
+      TServiceTokensUpdate
+    >;
+    [TableName.IntegrationAuth]: KnexOriginal.CompositeTableType<
       TIntegrationAuths,
       TIntegrationAuthsInsert,
       TIntegrationAuthsUpdate
     >;
-    [TableName.Identity]: Knex.CompositeTableType<TIdentities, TIdentitiesInsert, TIdentitiesUpdate>;
-    [TableName.IdentityUniversalAuth]: Knex.CompositeTableType<
+    [TableName.Identity]: KnexOriginal.CompositeTableType<TIdentities, TIdentitiesInsert, TIdentitiesUpdate>;
+    [TableName.IdentityUniversalAuth]: KnexOriginal.CompositeTableType<
       TIdentityUniversalAuths,
       TIdentityUniversalAuthsInsert,
       TIdentityUniversalAuthsUpdate
     >;
-    [TableName.IdentityKubernetesAuth]: Knex.CompositeTableType<
+    [TableName.IdentityKubernetesAuth]: KnexOriginal.CompositeTableType<
       TIdentityKubernetesAuths,
       TIdentityKubernetesAuthsInsert,
       TIdentityKubernetesAuthsUpdate
     >;
-    [TableName.IdentityGcpAuth]: Knex.CompositeTableType<
+    [TableName.IdentityGcpAuth]: KnexOriginal.CompositeTableType<
       TIdentityGcpAuths,
       TIdentityGcpAuthsInsert,
       TIdentityGcpAuthsUpdate
     >;
-    [TableName.IdentityAwsAuth]: Knex.CompositeTableType<
+    [TableName.IdentityAwsAuth]: KnexOriginal.CompositeTableType<
       TIdentityAwsAuths,
       TIdentityAwsAuthsInsert,
       TIdentityAwsAuthsUpdate
     >;
-    [TableName.IdentityAzureAuth]: Knex.CompositeTableType<
+    [TableName.IdentityAzureAuth]: KnexOriginal.CompositeTableType<
       TIdentityAzureAuths,
       TIdentityAzureAuthsInsert,
       TIdentityAzureAuthsUpdate
     >;
-    [TableName.IdentityUaClientSecret]: Knex.CompositeTableType<
+    [TableName.IdentityUaClientSecret]: KnexOriginal.CompositeTableType<
       TIdentityUaClientSecrets,
       TIdentityUaClientSecretsInsert,
       TIdentityUaClientSecretsUpdate
     >;
-    [TableName.IdentityAccessToken]: Knex.CompositeTableType<
+    [TableName.IdentityAccessToken]: KnexOriginal.CompositeTableType<
       TIdentityAccessTokens,
       TIdentityAccessTokensInsert,
       TIdentityAccessTokensUpdate
     >;
-    [TableName.IdentityOrgMembership]: Knex.CompositeTableType<
+    [TableName.IdentityOrgMembership]: KnexOriginal.CompositeTableType<
       TIdentityOrgMemberships,
       TIdentityOrgMembershipsInsert,
       TIdentityOrgMembershipsUpdate
     >;
-    [TableName.IdentityProjectMembership]: Knex.CompositeTableType<
+    [TableName.IdentityProjectMembership]: KnexOriginal.CompositeTableType<
       TIdentityProjectMemberships,
       TIdentityProjectMembershipsInsert,
       TIdentityProjectMembershipsUpdate
     >;
-    [TableName.IdentityProjectMembershipRole]: Knex.CompositeTableType<
+    [TableName.IdentityProjectMembershipRole]: KnexOriginal.CompositeTableType<
       TIdentityProjectMembershipRole,
       TIdentityProjectMembershipRoleInsert,
       TIdentityProjectMembershipRoleUpdate
     >;
-    [TableName.IdentityProjectAdditionalPrivilege]: Knex.CompositeTableType<
+    [TableName.IdentityProjectAdditionalPrivilege]: KnexOriginal.CompositeTableType<
       TIdentityProjectAdditionalPrivilege,
       TIdentityProjectAdditionalPrivilegeInsert,
       TIdentityProjectAdditionalPrivilegeUpdate
     >;
 
-    [TableName.AccessApprovalPolicy]: Knex.CompositeTableType<
+    [TableName.AccessApprovalPolicy]: KnexOriginal.CompositeTableType<
       TAccessApprovalPolicies,
       TAccessApprovalPoliciesInsert,
       TAccessApprovalPoliciesUpdate
     >;
 
-    [TableName.AccessApprovalPolicyApprover]: Knex.CompositeTableType<
+    [TableName.AccessApprovalPolicyApprover]: KnexOriginal.CompositeTableType<
       TAccessApprovalPoliciesApprovers,
       TAccessApprovalPoliciesApproversInsert,
       TAccessApprovalPoliciesApproversUpdate
     >;
 
-    [TableName.AccessApprovalRequest]: Knex.CompositeTableType<
+    [TableName.AccessApprovalRequest]: KnexOriginal.CompositeTableType<
       TAccessApprovalRequests,
       TAccessApprovalRequestsInsert,
       TAccessApprovalRequestsUpdate
     >;
 
-    [TableName.AccessApprovalRequestReviewer]: Knex.CompositeTableType<
+    [TableName.AccessApprovalRequestReviewer]: KnexOriginal.CompositeTableType<
       TAccessApprovalRequestsReviewers,
       TAccessApprovalRequestsReviewersInsert,
       TAccessApprovalRequestsReviewersUpdate
     >;
 
-    [TableName.ScimToken]: Knex.CompositeTableType<TScimTokens, TScimTokensInsert, TScimTokensUpdate>;
-    [TableName.SecretApprovalPolicy]: Knex.CompositeTableType<
+    [TableName.ScimToken]: KnexOriginal.CompositeTableType<TScimTokens, TScimTokensInsert, TScimTokensUpdate>;
+    [TableName.SecretApprovalPolicy]: KnexOriginal.CompositeTableType<
       TSecretApprovalPolicies,
       TSecretApprovalPoliciesInsert,
       TSecretApprovalPoliciesUpdate
     >;
-    [TableName.SecretApprovalPolicyApprover]: Knex.CompositeTableType<
+    [TableName.SecretApprovalPolicyApprover]: KnexOriginal.CompositeTableType<
       TSecretApprovalPoliciesApprovers,
       TSecretApprovalPoliciesApproversInsert,
       TSecretApprovalPoliciesApproversUpdate
     >;
-    [TableName.SecretApprovalRequest]: Knex.CompositeTableType<
+    [TableName.SecretApprovalRequest]: KnexOriginal.CompositeTableType<
       TSecretApprovalRequests,
       TSecretApprovalRequestsInsert,
       TSecretApprovalRequestsUpdate
     >;
-    [TableName.SecretApprovalRequestReviewer]: Knex.CompositeTableType<
+    [TableName.SecretApprovalRequestReviewer]: KnexOriginal.CompositeTableType<
       TSecretApprovalRequestsReviewers,
       TSecretApprovalRequestsReviewersInsert,
       TSecretApprovalRequestsReviewersUpdate
     >;
-    [TableName.SecretApprovalRequestSecret]: Knex.CompositeTableType<
+    [TableName.SecretApprovalRequestSecret]: KnexOriginal.CompositeTableType<
       TSecretApprovalRequestsSecrets,
       TSecretApprovalRequestsSecretsInsert,
       TSecretApprovalRequestsSecretsUpdate
     >;
-    [TableName.SecretApprovalRequestSecretTag]: Knex.CompositeTableType<
+    [TableName.SecretApprovalRequestSecretTag]: KnexOriginal.CompositeTableType<
       TSecretApprovalRequestSecretTags,
       TSecretApprovalRequestSecretTagsInsert,
       TSecretApprovalRequestSecretTagsUpdate
     >;
-    [TableName.SecretRotation]: Knex.CompositeTableType<
+    [TableName.SecretRotation]: KnexOriginal.CompositeTableType<
       TSecretRotations,
       TSecretRotationsInsert,
       TSecretRotationsUpdate
     >;
-    [TableName.SecretRotationOutput]: Knex.CompositeTableType<
+    [TableName.SecretRotationOutput]: KnexOriginal.CompositeTableType<
       TSecretRotationOutputs,
       TSecretRotationOutputsInsert,
       TSecretRotationOutputsUpdate
     >;
-    [TableName.Snapshot]: Knex.CompositeTableType<TSecretSnapshots, TSecretSnapshotsInsert, TSecretSnapshotsUpdate>;
-    [TableName.SnapshotSecret]: Knex.CompositeTableType<
+    [TableName.Snapshot]: KnexOriginal.CompositeTableType<
+      TSecretSnapshots,
+      TSecretSnapshotsInsert,
+      TSecretSnapshotsUpdate
+    >;
+    [TableName.SnapshotSecret]: KnexOriginal.CompositeTableType<
       TSecretSnapshotSecrets,
       TSecretSnapshotSecretsInsert,
       TSecretSnapshotSecretsUpdate
     >;
-    [TableName.SnapshotFolder]: Knex.CompositeTableType<
+    [TableName.SnapshotFolder]: KnexOriginal.CompositeTableType<
       TSecretSnapshotFolders,
       TSecretSnapshotFoldersInsert,
       TSecretSnapshotFoldersUpdate
     >;
-    [TableName.DynamicSecret]: Knex.CompositeTableType<TDynamicSecrets, TDynamicSecretsInsert, TDynamicSecretsUpdate>;
-    [TableName.DynamicSecretLease]: Knex.CompositeTableType<
+    [TableName.DynamicSecret]: KnexOriginal.CompositeTableType<
+      TDynamicSecrets,
+      TDynamicSecretsInsert,
+      TDynamicSecretsUpdate
+    >;
+    [TableName.DynamicSecretLease]: KnexOriginal.CompositeTableType<
       TDynamicSecretLeases,
       TDynamicSecretLeasesInsert,
       TDynamicSecretLeasesUpdate
     >;
-    [TableName.SamlConfig]: Knex.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
-    [TableName.OidcConfig]: Knex.CompositeTableType<TOidcConfigs, TOidcConfigsInsert, TOidcConfigsUpdate>;
-    [TableName.LdapConfig]: Knex.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
-    [TableName.LdapGroupMap]: Knex.CompositeTableType<TLdapGroupMaps, TLdapGroupMapsInsert, TLdapGroupMapsUpdate>;
-    [TableName.OrgBot]: Knex.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
-    [TableName.AuditLog]: Knex.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
-    [TableName.AuditLogStream]: Knex.CompositeTableType<
+    [TableName.SamlConfig]: KnexOriginal.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
+    [TableName.OidcConfig]: KnexOriginal.CompositeTableType<TOidcConfigs, TOidcConfigsInsert, TOidcConfigsUpdate>;
+    [TableName.LdapConfig]: KnexOriginal.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
+    [TableName.LdapGroupMap]: KnexOriginal.CompositeTableType<
+      TLdapGroupMaps,
+      TLdapGroupMapsInsert,
+      TLdapGroupMapsUpdate
+    >;
+    [TableName.OrgBot]: KnexOriginal.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
+    [TableName.AuditLog]: KnexOriginal.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
+    [TableName.AuditLogStream]: KnexOriginal.CompositeTableType<
       TAuditLogStreams,
       TAuditLogStreamsInsert,
       TAuditLogStreamsUpdate
     >;
-    [TableName.GitAppInstallSession]: Knex.CompositeTableType<
+    [TableName.GitAppInstallSession]: KnexOriginal.CompositeTableType<
       TGitAppInstallSessions,
       TGitAppInstallSessionsInsert,
       TGitAppInstallSessionsUpdate
     >;
-    [TableName.GitAppOrg]: Knex.CompositeTableType<TGitAppOrg, TGitAppOrgInsert, TGitAppOrgUpdate>;
-    [TableName.SecretScanningGitRisk]: Knex.CompositeTableType<
+    [TableName.GitAppOrg]: KnexOriginal.CompositeTableType<TGitAppOrg, TGitAppOrgInsert, TGitAppOrgUpdate>;
+    [TableName.SecretScanningGitRisk]: KnexOriginal.CompositeTableType<
       TSecretScanningGitRisks,
       TSecretScanningGitRisksInsert,
       TSecretScanningGitRisksUpdate
     >;
-    [TableName.TrustedIps]: Knex.CompositeTableType<TTrustedIps, TTrustedIpsInsert, TTrustedIpsUpdate>;
+    [TableName.TrustedIps]: KnexOriginal.CompositeTableType<TTrustedIps, TTrustedIpsInsert, TTrustedIpsUpdate>;
     // Junction tables
-    [TableName.JnSecretTag]: Knex.CompositeTableType<
+    [TableName.JnSecretTag]: KnexOriginal.CompositeTableType<
       TSecretTagJunction,
       TSecretTagJunctionInsert,
       TSecretTagJunctionUpdate
     >;
-    [TableName.SecretVersionTag]: Knex.CompositeTableType<
+    [TableName.SecretVersionTag]: KnexOriginal.CompositeTableType<
       TSecretVersionTagJunction,
       TSecretVersionTagJunctionInsert,
       TSecretVersionTagJunctionUpdate
     >;
     // KMS service
-    [TableName.KmsServerRootConfig]: Knex.CompositeTableType<
+    [TableName.KmsServerRootConfig]: KnexOriginal.CompositeTableType<
       TKmsRootConfig,
       TKmsRootConfigInsert,
       TKmsRootConfigUpdate
     >;
-    [TableName.KmsKey]: Knex.CompositeTableType<TKmsKeys, TKmsKeysInsert, TKmsKeysUpdate>;
-    [TableName.KmsKeyVersion]: Knex.CompositeTableType<TKmsKeyVersions, TKmsKeyVersionsInsert, TKmsKeyVersionsUpdate>;
+    [TableName.KmsKey]: KnexOriginal.CompositeTableType<TKmsKeys, TKmsKeysInsert, TKmsKeysUpdate>;
+    [TableName.KmsKeyVersion]: KnexOriginal.CompositeTableType<
+      TKmsKeyVersions,
+      TKmsKeyVersionsInsert,
+      TKmsKeyVersionsUpdate
+    >;
   }
 }
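The renamed import is what makes the new `declare module "knex"` block work: inside that augmentation, the identifier `Knex` refers to the namespace being re-opened, so the original exported types need a second name. A minimal standalone sketch of the same pattern (the `example_table` entry is hypothetical, for illustration only):

import { Knex as KnexOriginal } from "knex";

// Re-open the "knex" module to add custom query-builder methods.
declare module "knex" {
  namespace Knex {
    interface QueryInterface {
      primaryNode(): KnexOriginal;
      replicaNode(): KnexOriginal;
    }
  }
}

// Table typings in the same file must now reference the alias as well.
declare module "knex/types/tables" {
  interface Tables {
    example_table: KnexOriginal.CompositeTableType<
      { id: string },  // row type for selects
      { id?: string }, // insert type
      { id?: string }  // update type
    >;
  }
}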
@@ -1,8 +1,38 @@
-import knex from "knex";
+import knex, { Knex } from "knex";
 
 export type TDbClient = ReturnType<typeof initDbConnection>;
-export const initDbConnection = ({ dbConnectionUri, dbRootCert }: { dbConnectionUri: string; dbRootCert?: string }) => {
-  const db = knex({
+export const initDbConnection = ({
+  dbConnectionUri,
+  dbRootCert,
+  readReplicas = []
+}: {
+  dbConnectionUri: string;
+  dbRootCert?: string;
+  readReplicas?: {
+    dbConnectionUri: string;
+    dbRootCert?: string;
+  }[];
+}) => {
+  // akhilmhdh: the default Knex is knex.Knex<any, any[]>. but when assigned with knex({<config>}) the value is knex.Knex<any, unknown[]>
+  // this was causing issue with files like `snapshot-dal` `findRecursivelySnapshots` this i am explicitly putting the any and unknown[]
+  // eslint-disable-next-line
+  let db: Knex<any, unknown[]>;
+  // eslint-disable-next-line
+  let readReplicaDbs: Knex<any, unknown[]>[];
+  // @ts-expect-error the querybuilder type is expected but our intension is to return a knex instance
+  knex.QueryBuilder.extend("primaryNode", () => {
+    return db;
+  });
+
+  // @ts-expect-error the querybuilder type is expected but our intension is to return a knex instance
+  knex.QueryBuilder.extend("replicaNode", () => {
+    if (!readReplicaDbs.length) return db;
+
+    const selectedReplica = readReplicaDbs[Math.floor(Math.random() * readReplicaDbs.length)];
+    return selectedReplica;
+  });
+
+  db = knex({
     client: "pg",
     connection: {
       connectionString: dbConnectionUri,
@@ -22,5 +52,21 @@ export const initDbConnection = ({ dbConnectionUri, dbRootCert }: { dbConnection
     }
   });
 
+  readReplicaDbs = readReplicas.map((el) => {
+    const replicaDbCertificate = el.dbRootCert || dbRootCert;
+    return knex({
+      client: "pg",
+      connection: {
+        connectionString: el.dbConnectionUri,
+        ssl: replicaDbCertificate
+          ? {
+              rejectUnauthorized: true,
+              ca: Buffer.from(replicaDbCertificate, "base64").toString("ascii")
+            }
+          : false
+      }
+    });
+  });
+
   return db;
 };
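A sketch of how the replica support might be consumed (illustrative wiring; the environment variable names and the table are placeholders, not Infisical's actual config keys):

// Illustrative only.
const db = initDbConnection({
  dbConnectionUri: process.env.DB_CONNECTION_URI as string,
  dbRootCert: process.env.DB_ROOT_CERT, // optional base64-encoded CA
  readReplicas: [
    // dbRootCert omitted here, so this replica falls back to the primary's cert
    { dbConnectionUri: process.env.DB_READ_REPLICA_URI as string }
  ]
});

// Reads can target a randomly selected replica...
const rows = await db.replicaNode()("users").select("*");

// ...while writes keep using the primary instance.
await db("users").insert({ id: "..." });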
@@ -0,0 +1,35 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  const hasAwsAssumeRoleCipherText = await knex.schema.hasColumn(
+    TableName.IntegrationAuth,
+    "awsAssumeIamRoleArnCipherText"
+  );
+  const hasAwsAssumeRoleIV = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnIV");
+  const hasAwsAssumeRoleTag = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnTag");
+  if (await knex.schema.hasTable(TableName.IntegrationAuth)) {
+    await knex.schema.alterTable(TableName.IntegrationAuth, (t) => {
+      if (!hasAwsAssumeRoleCipherText) t.text("awsAssumeIamRoleArnCipherText");
+      if (!hasAwsAssumeRoleIV) t.text("awsAssumeIamRoleArnIV");
+      if (!hasAwsAssumeRoleTag) t.text("awsAssumeIamRoleArnTag");
+    });
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  const hasAwsAssumeRoleCipherText = await knex.schema.hasColumn(
+    TableName.IntegrationAuth,
+    "awsAssumeIamRoleArnCipherText"
+  );
+  const hasAwsAssumeRoleIV = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnIV");
+  const hasAwsAssumeRoleTag = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnTag");
+  if (await knex.schema.hasTable(TableName.IntegrationAuth)) {
+    await knex.schema.alterTable(TableName.IntegrationAuth, (t) => {
+      if (hasAwsAssumeRoleCipherText) t.dropColumn("awsAssumeIamRoleArnCipherText");
+      if (hasAwsAssumeRoleIV) t.dropColumn("awsAssumeIamRoleArnIV");
+      if (hasAwsAssumeRoleTag) t.dropColumn("awsAssumeIamRoleArnTag");
+    });
+  }
+}
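Each migration in this batch follows the same guard pattern: probe with hasTable/hasColumn first so the migration is idempotent and safe to re-run. The shape, condensed with hypothetical table and column names:

import { Knex } from "knex";

// Hypothetical table and column, to show the pattern only.
export async function up(knex: Knex): Promise<void> {
  const hasColumn = await knex.schema.hasColumn("my_table", "my_column");
  if (await knex.schema.hasTable("my_table")) {
    await knex.schema.alterTable("my_table", (t) => {
      if (!hasColumn) t.text("my_column"); // add only when missing
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasColumn = await knex.schema.hasColumn("my_table", "my_column");
  if (await knex.schema.hasTable("my_table")) {
    await knex.schema.alterTable("my_table", (t) => {
      if (hasColumn) t.dropColumn("my_column"); // drop only when present
    });
  }
}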
@@ -0,0 +1,19 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  if (!(await knex.schema.hasColumn(TableName.SuperAdmin, "enabledLoginMethods"))) {
+    await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
+      tb.specificType("enabledLoginMethods", "text[]");
+    });
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  if (await knex.schema.hasColumn(TableName.SuperAdmin, "enabledLoginMethods")) {
+    await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
+      t.dropColumn("enabledLoginMethods");
+    });
+  }
+}
@@ -0,0 +1,19 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  if (!(await knex.schema.hasColumn(TableName.OrgMembership, "projectFavorites"))) {
+    await knex.schema.alterTable(TableName.OrgMembership, (tb) => {
+      tb.specificType("projectFavorites", "text[]");
+    });
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  if (await knex.schema.hasColumn(TableName.OrgMembership, "projectFavorites")) {
+    await knex.schema.alterTable(TableName.OrgMembership, (t) => {
+      t.dropColumn("projectFavorites");
+    });
+  }
+}
@@ -0,0 +1,53 @@
+import { Knex } from "knex";
+
+import { WebhookType } from "@app/services/webhook/webhook-types";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  const hasUrlCipherText = await knex.schema.hasColumn(TableName.Webhook, "urlCipherText");
+  const hasUrlIV = await knex.schema.hasColumn(TableName.Webhook, "urlIV");
+  const hasUrlTag = await knex.schema.hasColumn(TableName.Webhook, "urlTag");
+  const hasType = await knex.schema.hasColumn(TableName.Webhook, "type");
+
+  if (await knex.schema.hasTable(TableName.Webhook)) {
+    await knex.schema.alterTable(TableName.Webhook, (tb) => {
+      if (!hasUrlCipherText) {
+        tb.text("urlCipherText");
+      }
+      if (!hasUrlIV) {
+        tb.string("urlIV");
+      }
+      if (!hasUrlTag) {
+        tb.string("urlTag");
+      }
+      if (!hasType) {
+        tb.string("type").defaultTo(WebhookType.GENERAL);
+      }
+    });
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  const hasUrlCipherText = await knex.schema.hasColumn(TableName.Webhook, "urlCipherText");
+  const hasUrlIV = await knex.schema.hasColumn(TableName.Webhook, "urlIV");
+  const hasUrlTag = await knex.schema.hasColumn(TableName.Webhook, "urlTag");
+  const hasType = await knex.schema.hasColumn(TableName.Webhook, "type");
+
+  if (await knex.schema.hasTable(TableName.Webhook)) {
+    await knex.schema.alterTable(TableName.Webhook, (t) => {
+      if (hasUrlCipherText) {
+        t.dropColumn("urlCipherText");
+      }
+      if (hasUrlIV) {
+        t.dropColumn("urlIV");
+      }
+      if (hasUrlTag) {
+        t.dropColumn("urlTag");
+      }
+      if (hasType) {
+        t.dropColumn("type");
+      }
+    });
+  }
+}
@@ -29,7 +29,10 @@ export const IntegrationAuthsSchema = z.object({
   keyEncoding: z.string(),
   projectId: z.string(),
   createdAt: z.date(),
-  updatedAt: z.date()
+  updatedAt: z.date(),
+  awsAssumeIamRoleArnCipherText: z.string().nullable().optional(),
+  awsAssumeIamRoleArnIV: z.string().nullable().optional(),
+  awsAssumeIamRoleArnTag: z.string().nullable().optional()
 });
 
 export type TIntegrationAuths = z.infer<typeof IntegrationAuthsSchema>;
@@ -16,7 +16,8 @@ export const OrgMembershipsSchema = z.object({
   updatedAt: z.date(),
   userId: z.string().uuid().nullable().optional(),
   orgId: z.string().uuid(),
-  roleId: z.string().uuid().nullable().optional()
+  roleId: z.string().uuid().nullable().optional(),
+  projectFavorites: z.string().array().nullable().optional()
 });
 
 export type TOrgMemberships = z.infer<typeof OrgMembershipsSchema>;
@@ -18,7 +18,8 @@ export const SuperAdminSchema = z.object({
   trustSamlEmails: z.boolean().default(false).nullable().optional(),
   trustLdapEmails: z.boolean().default(false).nullable().optional(),
   trustOidcEmails: z.boolean().default(false).nullable().optional(),
-  defaultAuthOrgId: z.string().uuid().nullable().optional()
+  defaultAuthOrgId: z.string().uuid().nullable().optional(),
+  enabledLoginMethods: z.string().array().nullable().optional()
 });
 
 export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;
@@ -21,7 +21,11 @@ export const WebhooksSchema = z.object({
   keyEncoding: z.string().nullable().optional(),
   createdAt: z.date(),
   updatedAt: z.date(),
-  envId: z.string().uuid()
+  envId: z.string().uuid(),
+  urlCipherText: z.string().nullable().optional(),
+  urlIV: z.string().nullable().optional(),
+  urlTag: z.string().nullable().optional(),
+  type: z.string().default("general").nullable().optional()
 });
 
 export type TWebhooks = z.infer<typeof WebhooksSchema>;
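All four schema updates declare the new fields as `.nullable().optional()`, which accepts rows where the column is NULL as well as objects read before the migration added the key at all. A tiny illustration with a hypothetical field:

import { z } from "zod";

const Row = z.object({
  id: z.string(),
  // nullable: the database may return NULL; optional: the key may be absent
  newColumn: z.string().nullable().optional() // hypothetical field
});

Row.parse({ id: "1" });                  // ok: key absent
Row.parse({ id: "1", newColumn: null }); // ok: NULL from the database
Row.parse({ id: "1", newColumn: "x" });  // ok: populated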
@@ -32,7 +32,7 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
 
   const findById = async (id: string, tx?: Knex) => {
     try {
-      const doc = await accessApprovalPolicyFindQuery(tx || db, {
+      const doc = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), {
         [`${TableName.AccessApprovalPolicy}.id` as "id"]: id
       });
       const formatedDoc = mergeOneToManyRelation(
@@ -54,7 +54,7 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
 
   const find = async (filter: TFindFilter<TAccessApprovalPolicies & { projectId: string }>, tx?: Knex) => {
     try {
-      const docs = await accessApprovalPolicyFindQuery(tx || db, filter);
+      const docs = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), filter);
       const formatedDoc = mergeOneToManyRelation(
         docs,
         "id",
@@ -14,7 +14,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
 
   const findRequestsWithPrivilegeByPolicyIds = async (policyIds: string[]) => {
     try {
-      const docs = await db(TableName.AccessApprovalRequest)
+      const docs = await db
+        .replicaNode()(TableName.AccessApprovalRequest)
         .whereIn(`${TableName.AccessApprovalRequest}.policyId`, policyIds)
 
         .leftJoin(
@@ -170,7 +171,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
 
   const findById = async (id: string, tx?: Knex) => {
     try {
-      const sql = findQuery({ [`${TableName.AccessApprovalRequest}.id` as "id"]: id }, tx || db);
+      const sql = findQuery({ [`${TableName.AccessApprovalRequest}.id` as "id"]: id }, tx || db.replicaNode());
       const docs = await sql;
       const formatedDoc = sqlNestRelationships({
         data: docs,
@@ -207,7 +208,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
 
   const getCount = async ({ projectId }: { projectId: string }) => {
     try {
-      const accessRequests = await db(TableName.AccessApprovalRequest)
+      const accessRequests = await db
+        .replicaNode()(TableName.AccessApprovalRequest)
         .leftJoin(
           TableName.AccessApprovalPolicy,
           `${TableName.AccessApprovalRequest}.policyId`,
@@ -4,6 +4,7 @@
 import { TDbClient } from "@app/db";
 import { TableName } from "@app/db/schemas";
 import { DatabaseError } from "@app/lib/errors";
 import { ormify, stripUndefinedInWhere } from "@app/lib/knex";
+import { logger } from "@app/lib/logger";
 
 export type TAuditLogDALFactory = ReturnType<typeof auditLogDALFactory>;
 
@@ -27,7 +28,7 @@ export const auditLogDALFactory = (db: TDbClient) => {
     tx?: Knex
   ) => {
     try {
-      const sqlQuery = (tx || db)(TableName.AuditLog)
+      const sqlQuery = (tx || db.replicaNode())(TableName.AuditLog)
         .where(
           stripUndefinedInWhere({
             projectId,
@@ -55,13 +56,34 @@ export const auditLogDALFactory = (db: TDbClient) => {
 
   // delete all audit log that have expired
   const pruneAuditLog = async (tx?: Knex) => {
-    try {
-      const today = new Date();
-      const docs = await (tx || db)(TableName.AuditLog).where("expiresAt", "<", today).del();
-      return docs;
-    } catch (error) {
-      throw new DatabaseError({ error, name: "PruneAuditLog" });
-    }
+    const AUDIT_LOG_PRUNE_BATCH_SIZE = 10000;
+    const MAX_RETRY_ON_FAILURE = 3;
+
+    const today = new Date();
+    let deletedAuditLogIds: { id: string }[] = [];
+    let numberOfRetryOnFailure = 0;
+
+    do {
+      try {
+        const findExpiredLogSubQuery = (tx || db)(TableName.AuditLog)
+          .where("expiresAt", "<", today)
+          .select("id")
+          .limit(AUDIT_LOG_PRUNE_BATCH_SIZE);
+        // eslint-disable-next-line no-await-in-loop
+        deletedAuditLogIds = await (tx || db)(TableName.AuditLog)
+          .whereIn("id", findExpiredLogSubQuery)
+          .del()
+          .returning("id");
+        numberOfRetryOnFailure = 0; // reset
+        // eslint-disable-next-line no-await-in-loop
+        await new Promise((resolve) => {
+          setTimeout(resolve, 100); // time to breathe for db
+        });
+      } catch (error) {
+        numberOfRetryOnFailure += 1;
+        logger.error(error, "Failed to delete audit log on pruning");
+      }
+    } while (deletedAuditLogIds.length > 0 && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
   };
 
   return { ...auditLogOrm, pruneAuditLog, find };
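The rewritten pruner deletes in bounded batches (a `DELETE ... WHERE id IN (SELECT id ... LIMIT 10000)` per pass), pauses between batches, and tolerates up to three consecutive failures. The same idea as a generic helper (a sketch, not the repository's code; assumes an `expiresAt` column):

import { Knex } from "knex";

// Illustrative batched delete of expired rows.
async function batchedDelete(db: Knex, table: string, cutoff: Date, batchSize = 10000): Promise<number> {
  let total = 0;
  let deleted: { id: string }[] = [];
  do {
    const expiredIds = db(table).where("expiresAt", "<", cutoff).select("id").limit(batchSize);
    // One small statement per batch keeps lock time and WAL pressure low.
    // eslint-disable-next-line no-await-in-loop
    deleted = await db(table).whereIn("id", expiredIds).del().returning("id");
    total += deleted.length;
  } while (deleted.length > 0);
  return total;
}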
@@ -771,7 +771,6 @@ interface CreateWebhookEvent {
     webhookId: string;
     environment: string;
     secretPath: string;
-    webhookUrl: string;
     isDisabled: boolean;
   };
 }
@@ -782,7 +781,6 @@ interface UpdateWebhookStatusEvent {
     webhookId: string;
     environment: string;
     secretPath: string;
-    webhookUrl: string;
     isDisabled: boolean;
   };
 }
@@ -793,7 +791,6 @@ interface DeleteWebhookEvent {
     webhookId: string;
     environment: string;
    secretPath: string;
-    webhookUrl: string;
     isDisabled: boolean;
   };
 }
@@ -12,7 +12,10 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
 
   const countLeasesForDynamicSecret = async (dynamicSecretId: string, tx?: Knex) => {
     try {
-      const doc = await (tx || db)(TableName.DynamicSecretLease).count("*").where({ dynamicSecretId }).first();
+      const doc = await (tx || db.replicaNode())(TableName.DynamicSecretLease)
+        .count("*")
+        .where({ dynamicSecretId })
+        .first();
       return parseInt(doc || "0", 10);
     } catch (error) {
       throw new DatabaseError({ error, name: "DynamicSecretCountLeases" });
@@ -21,7 +24,7 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
 
   const findById = async (id: string, tx?: Knex) => {
     try {
-      const doc = await (tx || db)(TableName.DynamicSecretLease)
+      const doc = await (tx || db.replicaNode())(TableName.DynamicSecretLease)
         .where({ [`${TableName.DynamicSecretLease}.id` as "id"]: id })
         .first()
         .join(
@@ -3,7 +3,8 @@ import { z } from "zod";
 export enum SqlProviders {
   Postgres = "postgres",
   MySQL = "mysql2",
-  Oracle = "oracledb"
+  Oracle = "oracledb",
+  MsSQL = "mssql"
 }
 
 export const DynamicSecretSqlDBSchema = z.object({
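The new enum member lines up with the `tedious` entry added to package.json: knex's `mssql` client is implemented on top of the tedious driver. A connection sketch with placeholder credentials:

import knex from "knex";

// Illustrative only; server and credentials are placeholders.
const mssqlDb = knex({
  client: "mssql",
  connection: {
    server: "127.0.0.1",
    port: 1433,
    user: "app_user",
    password: "<password>",
    database: "master"
  }
});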
@@ -12,7 +12,7 @@ export const groupDALFactory = (db: TDbClient) => {
 
   const findGroups = async (filter: TFindFilter<TGroups>, { offset, limit, sort, tx }: TFindOpt<TGroups> = {}) => {
     try {
-      const query = (tx || db)(TableName.Groups)
+      const query = (tx || db.replicaNode())(TableName.Groups)
         // eslint-disable-next-line
         .where(buildFindFilter(filter))
         .select(selectAllTableCols(TableName.Groups));
@@ -32,7 +32,7 @@ export const groupDALFactory = (db: TDbClient) => {
 
   const findByOrgId = async (orgId: string, tx?: Knex) => {
     try {
-      const docs = await (tx || db)(TableName.Groups)
+      const docs = await (tx || db.replicaNode())(TableName.Groups)
         .where(`${TableName.Groups}.orgId`, orgId)
         .leftJoin(TableName.OrgRoles, `${TableName.Groups}.roleId`, `${TableName.OrgRoles}.id`)
         .select(selectAllTableCols(TableName.Groups))
@@ -74,11 +74,12 @@ export const groupDALFactory = (db: TDbClient) => {
     username?: string;
   }) => {
     try {
-      let query = db(TableName.OrgMembership)
+      let query = db
+        .replicaNode()(TableName.OrgMembership)
         .where(`${TableName.OrgMembership}.orgId`, orgId)
         .join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
-        .leftJoin(TableName.UserGroupMembership, function () {
-          this.on(`${TableName.UserGroupMembership}.userId`, "=", `${TableName.Users}.id`).andOn(
+        .leftJoin(TableName.UserGroupMembership, (bd) => {
+          bd.on(`${TableName.UserGroupMembership}.userId`, "=", `${TableName.Users}.id`).andOn(
             `${TableName.UserGroupMembership}.groupId`,
             "=",
             db.raw("?", [groupId])
@@ -18,7 +18,7 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
    */
   const filterProjectsByUserMembership = async (userId: string, groupId: string, projectIds: string[], tx?: Knex) => {
     try {
-      const userProjectMemberships: string[] = await (tx || db)(TableName.ProjectMembership)
+      const userProjectMemberships: string[] = await (tx || db.replicaNode())(TableName.ProjectMembership)
         .where(`${TableName.ProjectMembership}.userId`, userId)
         .whereIn(`${TableName.ProjectMembership}.projectId`, projectIds)
         .pluck(`${TableName.ProjectMembership}.projectId`);
@@ -43,7 +43,8 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
   // special query
   const findUserGroupMembershipsInProject = async (usernames: string[], projectId: string) => {
     try {
-      const usernameDocs: string[] = await db(TableName.UserGroupMembership)
+      const usernameDocs: string[] = await db
+        .replicaNode()(TableName.UserGroupMembership)
         .join(
           TableName.GroupProjectMembership,
           `${TableName.UserGroupMembership}.groupId`,
@@ -73,7 +74,7 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
     try {
       // get list of groups in the project with id [projectId]
       // that that are not the group with id [groupId]
-      const groups: string[] = await (tx || db)(TableName.GroupProjectMembership)
+      const groups: string[] = await (tx || db.replicaNode())(TableName.GroupProjectMembership)
         .where(`${TableName.GroupProjectMembership}.projectId`, projectId)
         .whereNot(`${TableName.GroupProjectMembership}.groupId`, groupId)
         .pluck(`${TableName.GroupProjectMembership}.groupId`);
@@ -83,8 +84,8 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
         .where(`${TableName.UserGroupMembership}.groupId`, groupId)
         .where(`${TableName.UserGroupMembership}.isPending`, false)
         .join(TableName.Users, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)
-        .leftJoin(TableName.ProjectMembership, function () {
-          this.on(`${TableName.Users}.id`, "=", `${TableName.ProjectMembership}.userId`).andOn(
+        .leftJoin(TableName.ProjectMembership, (bd) => {
+          bd.on(`${TableName.Users}.id`, "=", `${TableName.ProjectMembership}.userId`).andOn(
             `${TableName.ProjectMembership}.projectId`,
             "=",
             db.raw("?", [projectId])
@@ -107,9 +108,9 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
           db.ref("publicKey").withSchema(TableName.UserEncryptionKey)
         )
         .where({ isGhost: false }) // MAKE SURE USER IS NOT A GHOST USER
-        .whereNotIn(`${TableName.UserGroupMembership}.userId`, function () {
+        .whereNotIn(`${TableName.UserGroupMembership}.userId`, (bd) => {
           // eslint-disable-next-line @typescript-eslint/no-floating-promises
-          this.select(`${TableName.UserGroupMembership}.userId`)
+          bd.select(`${TableName.UserGroupMembership}.userId`)
             .from(TableName.UserGroupMembership)
             .whereIn(`${TableName.UserGroupMembership}.groupId`, groups);
         });
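The join and subquery callbacks here switch from `function () { this.on(...) }` to `(bd) => { bd.on(...) }`: knex hands the clause builder to the callback both as `this` and as the first argument, and arrow functions (which do not rebind `this`) must use the parameter. A minimal sketch, with `db` and `orgId` standing in for the surrounding DAL's values:

// Both forms build the same JOIN; the arrow form avoids depending on `this`.
db("users").leftJoin("memberships", (join) => {
  join
    .on("memberships.userId", "=", "users.id")
    .andOn("memberships.orgId", "=", db.raw("?", [orgId]));
});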
@@ -34,6 +34,7 @@ import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal
 import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
 import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
 import { getServerCfg } from "@app/services/super-admin/super-admin-service";
+import { LoginMethod } from "@app/services/super-admin/super-admin-types";
 import { TUserDALFactory } from "@app/services/user/user-dal";
 import { normalizeUsername } from "@app/services/user/user-fns";
 import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
@@ -53,7 +54,7 @@ import {
   TTestLdapConnectionDTO,
   TUpdateLdapCfgDTO
 } from "./ldap-config-types";
-import { testLDAPConfig } from "./ldap-fns";
+import { searchGroups, testLDAPConfig } from "./ldap-fns";
 import { TLdapGroupMapDALFactory } from "./ldap-group-map-dal";
 
 type TLdapConfigServiceFactoryDep = {
@@ -286,7 +287,7 @@ export const ldapConfigServiceFactory = ({
     return ldapConfig;
   };
 
-  const getLdapCfg = async (filter: { orgId: string; isActive?: boolean }) => {
+  const getLdapCfg = async (filter: { orgId: string; isActive?: boolean; id?: string }) => {
     const ldapConfig = await ldapConfigDAL.findOne(filter);
     if (!ldapConfig) throw new BadRequestError({ message: "Failed to find organization LDAP data" });
 
@@ -417,6 +418,13 @@ export const ldapConfigServiceFactory = ({
   }: TLdapLoginDTO) => {
     const appCfg = getConfig();
     const serverCfg = await getServerCfg();
+
+    if (serverCfg.enabledLoginMethods && !serverCfg.enabledLoginMethods.includes(LoginMethod.LDAP)) {
+      throw new BadRequestError({
+        message: "Login with LDAP is disabled by administrator."
+      });
+    }
+
     let userAlias = await userAliasDAL.findOne({
       externalId,
       orgId,
@@ -456,6 +464,21 @@ export const ldapConfigServiceFactory = ({
         }
       });
     } else {
+      const plan = await licenseService.getPlan(orgId);
+      if (plan?.memberLimit && plan.membersUsed >= plan.memberLimit) {
+        // limit imposed on number of members allowed / number of members used exceeds the number of members allowed
+        throw new BadRequestError({
+          message: "Failed to create new member via LDAP due to member limit reached. Upgrade plan to add more members."
+        });
+      }
+
+      if (plan?.identityLimit && plan.identitiesUsed >= plan.identityLimit) {
+        // limit imposed on number of identities allowed / number of identities used exceeds the number of identities allowed
+        throw new BadRequestError({
+          message: "Failed to create new member via LDAP due to member limit reached. Upgrade plan to add more members."
+        });
+      }
+
       userAlias = await userDAL.transaction(async (tx) => {
         let newUser: TUsers | undefined;
         if (serverCfg.trustSamlEmails) {
@@ -701,11 +724,25 @@ export const ldapConfigServiceFactory = ({
       message: "Failed to create LDAP group map due to plan restriction. Upgrade plan to create LDAP group map."
     });
 
-    const ldapConfig = await ldapConfigDAL.findOne({
-      id: ldapConfigId,
-      orgId
+    const ldapConfig = await getLdapCfg({
+      orgId,
+      id: ldapConfigId
     });
-    if (!ldapConfig) throw new BadRequestError({ message: "Failed to find organization LDAP data" });
+
+    if (!ldapConfig.groupSearchBase) {
+      throw new BadRequestError({
+        message: "Configure a group search base in your LDAP configuration in order to proceed."
+      });
+    }
+
+    const groupSearchFilter = `(cn=${ldapGroupCN})`;
+    const groups = await searchGroups(ldapConfig, groupSearchFilter, ldapConfig.groupSearchBase);
+
+    if (!groups.some((g) => g.cn === ldapGroupCN)) {
+      throw new BadRequestError({
+        message: "Failed to find LDAP Group CN"
+      });
+    }
 
     const group = await groupDAL.findOne({ slug: groupSlug, orgId });
     if (!group) throw new BadRequestError({ message: "Failed to find group" });
@@ -10,7 +10,8 @@ export const ldapGroupMapDALFactory = (db: TDbClient) => {
|
||||
|
||||
const findLdapGroupMapsByLdapConfigId = async (ldapConfigId: string) => {
|
||||
try {
|
||||
const docs = await db(TableName.LdapGroupMap)
|
||||
const docs = await db
|
||||
.replicaNode()(TableName.LdapGroupMap)
|
||||
.where(`${TableName.LdapGroupMap}.ldapConfigId`, ldapConfigId)
|
||||
.join(TableName.Groups, `${TableName.LdapGroupMap}.groupId`, `${TableName.Groups}.id`)
|
||||
.select(selectAllTableCols(TableName.LdapGroupMap))
|
||||
|
@@ -7,6 +7,8 @@ export const getDefaultOnPremFeatures = () => {
|
||||
workspacesUsed: 0,
|
||||
memberLimit: null,
|
||||
membersUsed: 0,
|
||||
identityLimit: null,
|
||||
identitiesUsed: 0,
|
||||
environmentLimit: null,
|
||||
environmentsUsed: 0,
|
||||
secretVersioning: true,
|
||||
|
@@ -15,6 +15,8 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
|
||||
membersUsed: 0,
|
||||
environmentLimit: null,
|
||||
environmentsUsed: 0,
|
||||
identityLimit: null,
|
||||
identitiesUsed: 0,
|
||||
dynamicSecret: false,
|
||||
secretVersioning: true,
|
||||
pitRecovery: false,
|
||||
|
@@ -9,7 +9,7 @@ export type TLicenseDALFactory = ReturnType<typeof licenseDALFactory>;
|
||||
export const licenseDALFactory = (db: TDbClient) => {
|
||||
const countOfOrgMembers = async (orgId: string | null, tx?: Knex) => {
|
||||
try {
|
||||
const doc = await (tx || db)(TableName.OrgMembership)
|
||||
const doc = await (tx || db.replicaNode())(TableName.OrgMembership)
|
||||
.where({ status: OrgMembershipStatus.Accepted })
|
||||
.andWhere((bd) => {
|
||||
if (orgId) {
|
||||
@@ -19,11 +19,44 @@ export const licenseDALFactory = (db: TDbClient) => {
|
||||
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
|
||||
.where(`${TableName.Users}.isGhost`, false)
|
||||
.count();
|
||||
return doc?.[0].count;
|
||||
return Number(doc?.[0].count);
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Count of Org Members" });
|
||||
}
|
||||
};
|
||||
|
||||
return { countOfOrgMembers };
|
||||
const countOrgUsersAndIdentities = async (orgId: string | null, tx?: Knex) => {
|
||||
try {
|
||||
// count org users
|
||||
const userDoc = await (tx || db)(TableName.OrgMembership)
|
||||
.where({ status: OrgMembershipStatus.Accepted })
|
||||
.andWhere((bd) => {
|
||||
if (orgId) {
|
||||
void bd.where({ orgId });
|
||||
}
|
||||
})
|
||||
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
|
||||
.where(`${TableName.Users}.isGhost`, false)
|
||||
.count();
|
||||
|
||||
const userCount = Number(userDoc?.[0].count);
|
||||
|
||||
// count org identities
|
||||
const identityDoc = await (tx || db)(TableName.IdentityOrgMembership)
|
||||
.where((bd) => {
|
||||
if (orgId) {
|
||||
void bd.where({ orgId });
|
||||
}
|
||||
})
|
||||
.count();
|
||||
|
||||
const identityCount = Number(identityDoc?.[0].count);
|
||||
|
||||
return userCount + identityCount;
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Count of Org Users + Identities" });
|
||||
}
|
||||
};
|
||||
|
||||
return { countOfOrgMembers, countOrgUsersAndIdentities };
|
||||
};
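
Note: the `Number(...)` wrapping above matters because, with the node-postgres driver, knex returns bigint aggregates such as `count()` as strings. A minimal sketch of the pitfall (the table name below is illustrative, not from the diff):

    import { Knex } from "knex";
    declare const db: Knex; // a configured knex/pg instance

    // knex + pg returns the count as a string, e.g. [{ count: "42" }]
    const doc = await db("org_memberships").count();
    const total = Number(doc?.[0].count); // 42 as a number; NaN if no row came back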
@@ -5,6 +5,7 @@
// TODO(akhilmhdh): With tony find out the api structure and fill it here

import { ForbiddenError } from "@casl/ability";
+import { Knex } from "knex";

import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";

@@ -155,6 +156,7 @@ export const licenseServiceFactory = ({
LICENSE_SERVER_CLOUD_PLAN_TTL,
JSON.stringify(currentPlan)
);
+
return currentPlan;
}
} catch (error) {

@@ -199,21 +201,27 @@ export const licenseServiceFactory = ({
await licenseServerCloudApi.request.delete(`/api/license-server/v1/customers/${customerId}`);
};

-const updateSubscriptionOrgMemberCount = async (orgId: string) => {
+const updateSubscriptionOrgMemberCount = async (orgId: string, tx?: Knex) => {
if (instanceType === InstanceType.Cloud) {
const org = await orgDAL.findOrgById(orgId);
if (!org) throw new BadRequestError({ message: "Org not found" });

-const count = await licenseDAL.countOfOrgMembers(orgId);
+const quantity = await licenseDAL.countOfOrgMembers(orgId, tx);
+const quantityIdentities = await licenseDAL.countOrgUsersAndIdentities(orgId, tx);
if (org?.customerId) {
await licenseServerCloudApi.request.patch(`/api/license-server/v1/customers/${org.customerId}/cloud-plan`, {
-quantity: count
+quantity,
+quantityIdentities
});
}
await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId));
} else if (instanceType === InstanceType.EnterpriseOnPrem) {
-const usedSeats = await licenseDAL.countOfOrgMembers(null);
-await licenseServerOnPremApi.request.patch(`/api/license/v1/license`, { usedSeats });
+const usedSeats = await licenseDAL.countOfOrgMembers(null, tx);
+const usedIdentitySeats = await licenseDAL.countOrgUsersAndIdentities(null, tx);
+await licenseServerOnPremApi.request.patch(`/api/license/v1/license`, {
+usedSeats,
+usedIdentitySeats
+});
}
await refreshPlan(orgId);
};

@@ -31,6 +31,8 @@ export type TFeatureSet = {
dynamicSecret: false;
memberLimit: null;
membersUsed: 0;
+identityLimit: null;
+identitiesUsed: 0;
environmentLimit: null;
environmentsUsed: 0;
secretVersioning: true;

@@ -26,6 +26,7 @@ import { TOrgDALFactory } from "@app/services/org/org-dal";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
+import { LoginMethod } from "@app/services/super-admin/super-admin-types";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { normalizeUsername } from "@app/services/user/user-fns";
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";

@@ -157,6 +158,13 @@ export const oidcConfigServiceFactory = ({

const oidcLogin = async ({ externalId, email, firstName, lastName, orgId, callbackPort }: TOidcLoginDTO) => {
+const serverCfg = await getServerCfg();
+
+if (serverCfg.enabledLoginMethods && !serverCfg.enabledLoginMethods.includes(LoginMethod.OIDC)) {
+throw new BadRequestError({
+message: "Login with OIDC is disabled by administrator."
+});
+}

const appCfg = getConfig();
const userAlias = await userAliasDAL.findOne({
externalId,

@@ -10,7 +10,8 @@ export type TPermissionDALFactory = ReturnType<typeof permissionDALFactory>;
export const permissionDALFactory = (db: TDbClient) => {
const getOrgPermission = async (userId: string, orgId: string) => {
try {
-const membership = await db(TableName.OrgMembership)
+const membership = await db
+.replicaNode()(TableName.OrgMembership)
.leftJoin(TableName.OrgRoles, `${TableName.OrgMembership}.roleId`, `${TableName.OrgRoles}.id`)
.join(TableName.Organization, `${TableName.OrgMembership}.orgId`, `${TableName.Organization}.id`)
.where("userId", userId)

@@ -28,7 +29,8 @@ export const permissionDALFactory = (db: TDbClient) => {

const getOrgIdentityPermission = async (identityId: string, orgId: string) => {
try {
-const membership = await db(TableName.IdentityOrgMembership)
+const membership = await db
+.replicaNode()(TableName.IdentityOrgMembership)
.leftJoin(TableName.OrgRoles, `${TableName.IdentityOrgMembership}.roleId`, `${TableName.OrgRoles}.id`)
.join(TableName.Organization, `${TableName.IdentityOrgMembership}.orgId`, `${TableName.Organization}.id`)
.where("identityId", identityId)

@@ -45,11 +47,13 @@ export const permissionDALFactory = (db: TDbClient) => {

const getProjectPermission = async (userId: string, projectId: string) => {
try {
-const groups: string[] = await db(TableName.GroupProjectMembership)
+const groups: string[] = await db
+.replicaNode()(TableName.GroupProjectMembership)
.where(`${TableName.GroupProjectMembership}.projectId`, projectId)
.pluck(`${TableName.GroupProjectMembership}.groupId`);

-const groupDocs = await db(TableName.UserGroupMembership)
+const groupDocs = await db
+.replicaNode()(TableName.UserGroupMembership)
.where(`${TableName.UserGroupMembership}.userId`, userId)
.whereIn(`${TableName.UserGroupMembership}.groupId`, groups)
.join(

@@ -231,7 +235,8 @@ export const permissionDALFactory = (db: TDbClient) => {

const getProjectIdentityPermission = async (identityId: string, projectId: string) => {
try {
-const docs = await db(TableName.IdentityProjectMembership)
+const docs = await db
+.replicaNode()(TableName.IdentityProjectMembership)
.join(
TableName.IdentityProjectMembershipRole,
`${TableName.IdentityProjectMembershipRole}.projectMembershipId`,

@@ -10,7 +10,8 @@ export const samlConfigDALFactory = (db: TDbClient) => {

const findEnforceableSamlCfg = async (orgId: string) => {
try {
-const samlCfg = await db(TableName.SamlConfig)
+const samlCfg = await db
+.replicaNode()(TableName.SamlConfig)
.where({
orgId,
isActive: true

@@ -28,6 +28,7 @@ import { TOrgDALFactory } from "@app/services/org/org-dal";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
+import { LoginMethod } from "@app/services/super-admin/super-admin-types";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { normalizeUsername } from "@app/services/user/user-fns";
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";

@@ -335,6 +336,13 @@ export const samlConfigServiceFactory = ({
}: TSamlLoginDTO) => {
const appCfg = getConfig();
+const serverCfg = await getServerCfg();
+
+if (serverCfg.enabledLoginMethods && !serverCfg.enabledLoginMethods.includes(LoginMethod.SAML)) {
+throw new BadRequestError({
+message: "Login with SAML is disabled by administrator."
+});
+}

const userAlias = await userAliasDAL.findOne({
externalId,
orgId,

@@ -380,6 +388,21 @@ export const samlConfigServiceFactory = ({
return foundUser;
});
} else {
+const plan = await licenseService.getPlan(orgId);
+if (plan?.memberLimit && plan.membersUsed >= plan.memberLimit) {
+// limit imposed on number of members allowed / number of members used exceeds the number of members allowed
+throw new BadRequestError({
+message: "Failed to create new member via SAML due to member limit reached. Upgrade plan to add more members."
+});
+}
+
+if (plan?.identityLimit && plan.identitiesUsed >= plan.identityLimit) {
+// limit imposed on number of identities allowed / number of identities used exceeds the number of identities allowed
+throw new BadRequestError({
+message: "Failed to create new member via SAML due to member limit reached. Upgrade plan to add more members."
+});
+}
+
user = await userDAL.transaction(async (tx) => {
let newUser: TUsers | undefined;
if (serverCfg.trustSamlEmails) {

@@ -30,7 +30,7 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {

const findById = async (id: string, tx?: Knex) => {
try {
-const doc = await sapFindQuery(tx || db, {
+const doc = await sapFindQuery(tx || db.replicaNode(), {
[`${TableName.SecretApprovalPolicy}.id` as "id"]: id
});
const formatedDoc = mergeOneToManyRelation(

@@ -52,7 +52,7 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {

const find = async (filter: TFindFilter<TSecretApprovalPolicies & { projectId: string }>, tx?: Knex) => {
try {
-const docs = await sapFindQuery(tx || db, filter);
+const docs = await sapFindQuery(tx || db.replicaNode(), filter);
const formatedDoc = mergeOneToManyRelation(
docs,
"id",

@@ -62,7 +62,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {

const findById = async (id: string, tx?: Knex) => {
try {
-const sql = findQuery({ [`${TableName.SecretApprovalRequest}.id` as "id"]: id }, tx || db);
+const sql = findQuery({ [`${TableName.SecretApprovalRequest}.id` as "id"]: id }, tx || db.replicaNode());
const docs = await sql;
const formatedDoc = sqlNestRelationships({
data: docs,

@@ -102,7 +102,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
const docs = await (tx || db)
.with(
"temp",
-(tx || db)(TableName.SecretApprovalRequest)
+(tx || db.replicaNode())(TableName.SecretApprovalRequest)
.join(TableName.SecretFolder, `${TableName.SecretApprovalRequest}.folderId`, `${TableName.SecretFolder}.id`)
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.join(

@@ -148,7 +148,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
try {
// akhilmhdh: If ever u wanted a 1 to so many relationship connected with pagination
// this is the place u wanna look at.
-const query = (tx || db)(TableName.SecretApprovalRequest)
+const query = (tx || db.replicaNode())(TableName.SecretApprovalRequest)
.join(TableName.SecretFolder, `${TableName.SecretApprovalRequest}.folderId`, `${TableName.SecretFolder}.id`)
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.join(

@@ -47,7 +47,7 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {

const findByRequestId = async (requestId: string, tx?: Knex) => {
try {
-const doc = await (tx || db)({
+const doc = await (tx || db.replicaNode())({
secVerTag: TableName.SecretTag
})
.from(TableName.SecretApprovalRequestSecret)

@@ -41,7 +41,7 @@ export const secretRotationDALFactory = (db: TDbClient) => {

const find = async (filter: TFindFilter<TSecretRotations & { projectId: string }>, tx?: Knex) => {
try {
-const data = await findQuery(filter, tx || db);
+const data = await findQuery(filter, tx || db.replicaNode());
return sqlNestRelationships({
data,
key: "id",

@@ -93,7 +93,7 @@ export const secretRotationDALFactory = (db: TDbClient) => {

const findById = async (id: string, tx?: Knex) => {
try {
-const doc = await (tx || db)(TableName.SecretRotation)
+const doc = await (tx || db.replicaNode())(TableName.SecretRotation)
.join(TableName.Environment, `${TableName.SecretRotation}.envId`, `${TableName.Environment}.id`)
.where({ [`${TableName.SecretRotation}.id` as "id"]: id })
.select(selectAllTableCols(TableName.SecretRotation))

@@ -331,7 +331,7 @@ export const secretRotationQueueFactory = ({

logger.info("Finished rotating: rotation id: ", rotationId);
} catch (error) {
-logger.error(error);
+logger.error(error, "Failed to execute secret rotation");
if (error instanceof DisableRotationErrors) {
if (job.id) {
await queue.stopRepeatableJobByJobId(QueueName.SecretRotation, job.id);

@@ -133,7 +133,7 @@ export const secretRotationServiceFactory = ({
creds: []
};
const encData = infisicalSymmetricEncypt(JSON.stringify(unencryptedData));
-const secretRotation = secretRotationDAL.transaction(async (tx) => {
+const secretRotation = await secretRotationDAL.transaction(async (tx) => {
const doc = await secretRotationDAL.create(
{
provider,

@@ -148,13 +148,13 @@ export const secretRotationServiceFactory = ({
},
tx
);
-await secretRotationQueue.addToQueue(doc.id, doc.interval);
const outputSecretMapping = await secretRotationDAL.secretOutputInsertMany(
Object.entries(outputs).map(([key, secretId]) => ({ key, secretId, rotationId: doc.id })),
tx
);
return { ...doc, outputs: outputSecretMapping, environment: folder.environment };
});
+await secretRotationQueue.addToQueue(secretRotation.id, secretRotation.interval);
return secretRotation;
};

@@ -212,9 +212,9 @@ export const secretRotationServiceFactory = ({
);
const deletedDoc = await secretRotationDAL.transaction(async (tx) => {
const strat = await secretRotationDAL.deleteById(rotationId, tx);
-await secretRotationQueue.removeFromQueue(strat.id, strat.interval);
return strat;
});
+await secretRotationQueue.removeFromQueue(deletedDoc.id, deletedDoc.interval);
return { ...doc, ...deletedDoc };
};
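
Note: besides adding the missing `await` on the transaction, the two hunks above move the queue enqueue/dequeue calls outside the Knex transactions. Enqueueing while the transaction is still open can publish a job that references a row which is not yet committed (or which a rollback later removes). A sketch of the corrected ordering, with illustrative names:

    // Sketch of the pattern applied above (dal/queue are stand-ins, not the real factories):
    const doc = await dal.transaction(async (tx) => {
      const created = await dal.create({ /* ...fields... */ }, tx);
      return created; // only DB work happens inside the transaction
    });
    // enqueue after commit, so the worker always sees the committed row
    await queue.addToQueue(doc.id, doc.interval);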
@@ -21,7 +21,7 @@ export const snapshotDALFactory = (db: TDbClient) => {

const findById = async (id: string, tx?: Knex) => {
try {
-const data = await (tx || db)(TableName.Snapshot)
+const data = await (tx || db.replicaNode())(TableName.Snapshot)
.where(`${TableName.Snapshot}.id`, id)
.join(TableName.Environment, `${TableName.Snapshot}.envId`, `${TableName.Environment}.id`)
.select(selectAllTableCols(TableName.Snapshot))

@@ -43,7 +43,7 @@ export const snapshotDALFactory = (db: TDbClient) => {

const countOfSnapshotsByFolderId = async (folderId: string, tx?: Knex) => {
try {
-const doc = await (tx || db)(TableName.Snapshot)
+const doc = await (tx || db.replicaNode())(TableName.Snapshot)
.where({ folderId })
.groupBy(["folderId"])
.count("folderId")

@@ -56,7 +56,7 @@ export const snapshotDALFactory = (db: TDbClient) => {

const findSecretSnapshotDataById = async (snapshotId: string, tx?: Knex) => {
try {
-const data = await (tx || db)(TableName.Snapshot)
+const data = await (tx || db.replicaNode())(TableName.Snapshot)
.where(`${TableName.Snapshot}.id`, snapshotId)
.join(TableName.Environment, `${TableName.Snapshot}.envId`, `${TableName.Environment}.id`)
.leftJoin(TableName.SnapshotSecret, `${TableName.Snapshot}.id`, `${TableName.SnapshotSecret}.snapshotId`)

@@ -309,7 +309,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
// when we need to rollback we will pull from these snapshots
const findLatestSnapshotByFolderId = async (folderId: string, tx?: Knex) => {
try {
-const docs = await (tx || db)(TableName.Snapshot)
+const docs = await (tx || db.replicaNode())(TableName.Snapshot)
.where(`${TableName.Snapshot}.folderId`, folderId)
.join<TSecretSnapshots>(
(tx || db)(TableName.Snapshot).groupBy("folderId").max("createdAt").select("folderId").as("latestVersion"),

@@ -692,6 +692,7 @@ export const INTEGRATION_AUTH = {
integration: "The slug of integration for the auth object.",
accessId: "The unique authorized access id of the external integration provider.",
accessToken: "The unique authorized access token of the external integration provider.",
+awsAssumeIamRoleArn: "The AWS IAM Role to be assumed by Infisical",
url: "",
namespace: "",
refreshToken: "The refresh token for integration authorization."

@@ -10,6 +14,14 @@ const zodStrBool
.optional()
.transform((val) => val === "true");

+const databaseReadReplicaSchema = z
+.object({
+DB_CONNECTION_URI: z.string().describe("Postgres read replica database connection string"),
+DB_ROOT_CERT: zpStr(z.string().optional().describe("Postgres read replica database certificate string"))
+})
+.array()
+.optional();
+
const envSchema = z
.object({
PORT: z.coerce.number().default(4000),

@@ -29,6 +37,7 @@ const envSchema
DB_USER: zpStr(z.string().describe("Postgres database username").optional()),
DB_PASSWORD: zpStr(z.string().describe("Postgres database password").optional()),
DB_NAME: zpStr(z.string().describe("Postgres database name").optional()),
+DB_READ_REPLICAS: zpStr(z.string().describe("Postgres read replicas").optional()),
BCRYPT_SALT_ROUND: z.number().default(12),
NODE_ENV: z.enum(["development", "test", "production"]).default("production"),
SALT_ROUNDS: z.coerce.number().default(10),

@@ -101,6 +110,9 @@ const envSchema
// azure
CLIENT_ID_AZURE: zpStr(z.string().optional()),
CLIENT_SECRET_AZURE: zpStr(z.string().optional()),
+// aws
+CLIENT_ID_AWS_INTEGRATION: zpStr(z.string().optional()),
+CLIENT_SECRET_AWS_INTEGRATION: zpStr(z.string().optional()),
// gitlab
CLIENT_ID_GITLAB: zpStr(z.string().optional()),
CLIENT_SECRET_GITLAB: zpStr(z.string().optional()),

@@ -127,6 +139,9 @@ const envSchema
})
.transform((data) => ({
...data,
+DB_READ_REPLICAS: data.DB_READ_REPLICAS
+? databaseReadReplicaSchema.parse(JSON.parse(data.DB_READ_REPLICAS))
+: undefined,
isCloud: Boolean(data.LICENSE_SERVER_KEY),
isSmtpConfigured: Boolean(data.SMTP_HOST),
isRedisConfigured: Boolean(data.REDIS_URL),
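
Note: per the schema above, `DB_READ_REPLICAS` is read as a plain string and then JSON-parsed into an array of `{ DB_CONNECTION_URI, DB_ROOT_CERT? }` objects, so a deployment would set it to a JSON array. The connection strings below are placeholders:

    // Example value matching databaseReadReplicaSchema (placeholder URIs):
    process.env.DB_READ_REPLICAS = JSON.stringify([
      { DB_CONNECTION_URI: "postgres://user:pass@replica-1:5432/infisical" },
      { DB_CONNECTION_URI: "postgres://user:pass@replica-2:5432/infisical" }
    ]);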
@@ -50,7 +50,7 @@ export const ormify = <DbOps extends object, Tname extends keyof Tables>(db: Kne
}),
findById: async (id: string, tx?: Knex) => {
try {
-const result = await (tx || db)(tableName)
+const result = await (tx || db.replicaNode())(tableName)
.where({ id } as never)
.first("*");
return result;

@@ -60,7 +60,7 @@ export const ormify = <DbOps extends object, Tname extends keyof Tables>(db: Kne
},
findOne: async (filter: Partial<Tables[Tname]["base"]>, tx?: Knex) => {
try {
-const res = await (tx || db)(tableName).where(filter).first("*");
+const res = await (tx || db.replicaNode())(tableName).where(filter).first("*");
return res;
} catch (error) {
throw new DatabaseError({ error, name: "Find one" });

@@ -71,7 +71,7 @@ export const ormify = <DbOps extends object, Tname extends keyof Tables>(db: Kne
{ offset, limit, sort, tx }: TFindOpt<Tables[Tname]["base"]> = {}
) => {
try {
-const query = (tx || db)(tableName).where(buildFindFilter(filter));
+const query = (tx || db.replicaNode())(tableName).where(buildFindFilter(filter));
if (limit) void query.limit(limit);
if (offset) void query.offset(offset);
if (sort) {
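
Note: `replicaNode()` is not stock Knex; judging from the call sites in this diff, it is a helper attached to the Knex instance that hands back a connection suitable for reads (a replica when one is configured, otherwise the primary). A minimal sketch of that idea under a round-robin assumption; the actual implementation in the repo may differ:

    import { knex, Knex } from "knex";

    // Hypothetical shape inferred from the call sites above, not the real code.
    const primary = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });
    const replicas: Knex[] = []; // one knex instance per DB_READ_REPLICAS entry
    let rr = 0;

    (primary as Knex & { replicaNode?: () => Knex }).replicaNode = () => {
      if (replicas.length === 0) return primary; // fall back to the primary
      rr = (rr + 1) % replicas.length; // naive round-robin selection
      return replicas[rr];
    };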
@@ -58,7 +58,8 @@ const redactedKeys = [
"decryptedSecret",
"secrets",
"key",
-"password"
+"password",
+"config"
];

export const initLogger = async () => {

@@ -15,7 +15,11 @@ const run = async () => {
const appCfg = initEnvConfig(logger);
const db = initDbConnection({
dbConnectionUri: appCfg.DB_CONNECTION_URI,
-dbRootCert: appCfg.DB_ROOT_CERT
+dbRootCert: appCfg.DB_ROOT_CERT,
+readReplicas: appCfg.DB_READ_REPLICAS?.map((el) => ({
+dbRootCert: el.DB_ROOT_CERT,
+dbConnectionUri: el.DB_CONNECTION_URI
+}))
});

const smtp = smtpServiceFactory(formatSmtpConfig());

@@ -415,8 +415,10 @@ export const registerRoutes = async (
userAliasDAL,
orgMembershipDAL,
tokenService,
-smtpService
+smtpService,
+projectMembershipDAL
});

const loginService = authLoginServiceFactory({ userDAL, smtpService, tokenService, orgDAL, tokenDAL: authTokenDAL });
const passwordService = authPaswordServiceFactory({
tokenService,

@@ -806,7 +808,8 @@ export const registerRoutes = async (
const identityService = identityServiceFactory({
permissionService,
identityDAL,
-identityOrgMembershipDAL
+identityOrgMembershipDAL,
+licenseService
});
const identityAccessTokenService = identityAccessTokenServiceFactory({
identityAccessTokenDAL,

@@ -8,6 +8,7 @@ import { verifySuperAdmin } from "@app/server/plugins/auth/superAdmin";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
+import { LoginMethod } from "@app/services/super-admin/super-admin-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";

export const registerAdminRouter = async (server: FastifyZodProvider) => {

@@ -54,7 +55,14 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
trustSamlEmails: z.boolean().optional(),
trustLdapEmails: z.boolean().optional(),
trustOidcEmails: z.boolean().optional(),
-defaultAuthOrgId: z.string().optional().nullable()
+defaultAuthOrgId: z.string().optional().nullable(),
+enabledLoginMethods: z
+.nativeEnum(LoginMethod)
+.array()
+.optional()
+.refine((methods) => !methods || methods.length > 0, {
+message: "At least one login method should be enabled."
+})
}),
response: {
200: z.object({

@@ -70,7 +78,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
});
},
handler: async (req) => {
-const config = await server.services.superAdmin.updateServerCfg(req.body);
+const config = await server.services.superAdmin.updateServerCfg(req.body, req.permission.id);
return { config };
}
});
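
Note: with the schema above, an instance admin can restrict which login methods the server accepts via the server-config PATCH route. A request body would look roughly like the following; the exact string values of the `LoginMethod` enum are not shown in this diff, so the ones below are assumptions:

    // Hypothetical PATCH body for the server-config route above:
    const body = {
      enabledLoginMethods: ["email", "google", "saml"] // must be non-empty when present
    };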
@@ -240,6 +240,12 @@ export const registerIntegrationAuthRouter = async (server: FastifyZodProvider)
integration: z.string().trim().describe(INTEGRATION_AUTH.CREATE_ACCESS_TOKEN.integration),
accessId: z.string().trim().optional().describe(INTEGRATION_AUTH.CREATE_ACCESS_TOKEN.accessId),
accessToken: z.string().trim().optional().describe(INTEGRATION_AUTH.CREATE_ACCESS_TOKEN.accessToken),
+awsAssumeIamRoleArn: z
+.string()
+.url()
+.trim()
+.optional()
+.describe(INTEGRATION_AUTH.CREATE_ACCESS_TOKEN.awsAssumeIamRoleArn),
url: z.string().url().trim().optional().describe(INTEGRATION_AUTH.CREATE_ACCESS_TOKEN.url),
namespace: z.string().trim().optional().describe(INTEGRATION_AUTH.CREATE_ACCESS_TOKEN.namespace),
refreshToken: z.string().trim().optional().describe(INTEGRATION_AUTH.CREATE_ACCESS_TOKEN.refreshToken)

@@ -3,7 +3,7 @@ import { z } from "zod";
import { UserEncryptionKeysSchema, UsersSchema } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
-import { authRateLimit, readLimit } from "@app/server/config/rateLimiter";
+import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

@@ -90,4 +90,48 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
return res.redirect(`${appCfg.SITE_URL}/login`);
}
});
+
+server.route({
+method: "GET",
+url: "/me/project-favorites",
+config: {
+rateLimit: readLimit
+},
+schema: {
+querystring: z.object({
+orgId: z.string().trim()
+}),
+response: {
+200: z.object({
+projectFavorites: z.string().array()
+})
+}
+},
+onRequest: verifyAuth([AuthMode.JWT]),
+handler: async (req) => {
+return server.services.user.getUserProjectFavorites(req.permission.id, req.query.orgId);
+}
+});
+
+server.route({
+method: "PUT",
+url: "/me/project-favorites",
+config: {
+rateLimit: writeLimit
+},
+schema: {
+body: z.object({
+orgId: z.string().trim(),
+projectFavorites: z.string().array()
+})
+},
+onRequest: verifyAuth([AuthMode.JWT]),
+handler: async (req) => {
+return server.services.user.updateUserProjectFavorites(
+req.permission.id,
+req.body.orgId,
+req.body.projectFavorites
+);
+}
+});
};
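
Note: the two routes above give an authenticated user a per-org list of favorited project IDs. A client-side usage sketch; the host, route prefix, and the `jwt`/`orgId`/`newProjectId` values are placeholders, not from the diff:

    declare const jwt: string;
    declare const orgId: string;
    declare const newProjectId: string;

    // Placeholder host and prefix; the router's mount point is not shown in this diff.
    const base = "https://app.infisical.example/api/v1/user/me/project-favorites";
    const headers = { Authorization: `Bearer ${jwt}`, "Content-Type": "application/json" };

    // Read the current favorites, then replace the list with one more project added.
    const { projectFavorites } = await (await fetch(`${base}?orgId=${orgId}`, { headers })).json();
    await fetch(base, {
      method: "PUT",
      headers,
      body: JSON.stringify({ orgId, projectFavorites: [...projectFavorites, newProjectId] })
    });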
@@ -6,13 +6,17 @@ import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
+import { WebhookType } from "@app/services/webhook/webhook-types";

export const sanitizedWebhookSchema = WebhooksSchema.omit({
encryptedSecretKey: true,
iv: true,
tag: true,
algorithm: true,
-keyEncoding: true
+keyEncoding: true,
+urlCipherText: true,
+urlIV: true,
+urlTag: true
}).merge(
z.object({
projectId: z.string(),

@@ -33,13 +37,24 @@ export const registerWebhookRouter = async (server: FastifyZodProvider) => {
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
-body: z.object({
-workspaceId: z.string().trim(),
-environment: z.string().trim(),
-webhookUrl: z.string().url().trim(),
-webhookSecretKey: z.string().trim().optional(),
-secretPath: z.string().trim().default("/").transform(removeTrailingSlash)
-}),
+body: z
+.object({
+type: z.nativeEnum(WebhookType).default(WebhookType.GENERAL),
+workspaceId: z.string().trim(),
+environment: z.string().trim(),
+webhookUrl: z.string().url().trim(),
+webhookSecretKey: z.string().trim().optional(),
+secretPath: z.string().trim().default("/").transform(removeTrailingSlash)
+})
+.superRefine((data, ctx) => {
+if (data.type === WebhookType.SLACK && !data.webhookUrl.includes("hooks.slack.com")) {
+ctx.addIssue({
+code: z.ZodIssueCode.custom,
+message: "Incoming Webhook URL is invalid.",
+path: ["webhookUrl"]
+});
+}
+}),
response: {
200: z.object({
message: z.string(),
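
Note: given the refined body schema above, creating a Slack-type webhook requires a `hooks.slack.com` URL or the `superRefine` check rejects the request. A payload that would pass; IDs are placeholders, and the string value of `WebhookType.SLACK` is an assumption since the enum values are not shown in this diff:

    // Example body accepted by the create-webhook route above:
    const body = {
      type: "slack", // assumed enum value of WebhookType.SLACK
      workspaceId: "project-id-placeholder",
      environment: "dev",
      webhookUrl: "https://hooks.slack.com/services/T000/B000/XXXX",
      secretPath: "/"
    };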
@@ -66,8 +81,7 @@ export const registerWebhookRouter = async (server: FastifyZodProvider) => {
environment: webhook.environment.slug,
webhookId: webhook.id,
isDisabled: webhook.isDisabled,
-secretPath: webhook.secretPath,
-webhookUrl: webhook.url
+secretPath: webhook.secretPath
}
}
});

@@ -116,8 +130,7 @@ export const registerWebhookRouter = async (server: FastifyZodProvider) => {
environment: webhook.environment.slug,
webhookId: webhook.id,
isDisabled: webhook.isDisabled,
-secretPath: webhook.secretPath,
-webhookUrl: webhook.url
+secretPath: webhook.secretPath
}
}
});

@@ -156,8 +169,7 @@ export const registerWebhookRouter = async (server: FastifyZodProvider) => {
environment: webhook.environment.slug,
webhookId: webhook.id,
isDisabled: webhook.isDisabled,
-secretPath: webhook.secretPath,
-webhookUrl: webhook.url
+secretPath: webhook.secretPath
}
}
});

@@ -14,7 +14,7 @@ export const tokenDALFactory = (db: TDbClient) => {

const findOneTokenSession = async (filter: Partial<TAuthTokenSessions>): Promise<TAuthTokenSessions | undefined> => {
try {
-const doc = await db(TableName.AuthTokenSession).where(filter).first();
+const doc = await db.replicaNode()(TableName.AuthTokenSession).where(filter).first();
return doc;
} catch (error) {
throw new DatabaseError({ error, name: "FindOneTokenSession" });

@@ -44,7 +44,7 @@ export const tokenDALFactory = (db: TDbClient) => {

const findTokenSessions = async (filter: Partial<TAuthTokenSessions>, tx?: Knex) => {
try {
-const sessions = await (tx || db)(TableName.AuthTokenSession).where(filter);
+const sessions = await (tx || db.replicaNode())(TableName.AuthTokenSession).where(filter);
return sessions;
} catch (error) {
throw new DatabaseError({ name: "Find all token session", error });

@@ -17,6 +17,7 @@ import { TAuthTokenServiceFactory } from "../auth-token/auth-token-service";
import { TokenType } from "../auth-token/auth-token-types";
import { TOrgDALFactory } from "../org/org-dal";
import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
+import { LoginMethod } from "../super-admin/super-admin-types";
import { TUserDALFactory } from "../user/user-dal";
import { enforceUserLockStatus, validateProviderAuthToken } from "./auth-fns";
import {

@@ -158,9 +159,22 @@ export const authLoginServiceFactory = ({
const userEnc = await userDAL.findUserEncKeyByUsername({
username: email
});
+const serverCfg = await getServerCfg();
+
+if (
+serverCfg.enabledLoginMethods &&
+!serverCfg.enabledLoginMethods.includes(LoginMethod.EMAIL) &&
+!providerAuthToken
+) {
+throw new BadRequestError({
+message: "Login with email is disabled by administrator."
+});
+}
+
if (!userEnc || (userEnc && !userEnc.isAccepted)) {
throw new Error("Failed to find user");
}

if (!userEnc.authMethods?.includes(AuthMethod.EMAIL)) {
validateProviderAuthToken(providerAuthToken as string, email);
}

@@ -507,6 +521,40 @@ export const authLoginServiceFactory = ({
let user = await userDAL.findUserByUsername(email);
const serverCfg = await getServerCfg();

+if (serverCfg.enabledLoginMethods) {
+switch (authMethod) {
+case AuthMethod.GITHUB: {
+if (!serverCfg.enabledLoginMethods.includes(LoginMethod.GITHUB)) {
+throw new BadRequestError({
+message: "Login with Github is disabled by administrator.",
+name: "Oauth 2 login"
+});
+}
+break;
+}
+case AuthMethod.GOOGLE: {
+if (!serverCfg.enabledLoginMethods.includes(LoginMethod.GOOGLE)) {
+throw new BadRequestError({
+message: "Login with Google is disabled by administrator.",
+name: "Oauth 2 login"
+});
+}
+break;
+}
+case AuthMethod.GITLAB: {
+if (!serverCfg.enabledLoginMethods.includes(LoginMethod.GITLAB)) {
+throw new BadRequestError({
+message: "Login with Gitlab is disabled by administrator.",
+name: "Oauth 2 login"
+});
+}
+break;
+}
+default:
+break;
+}
+}
+
const appCfg = getConfig();

if (!user) {

@@ -364,7 +364,7 @@ export const authSignupServiceFactory = ({
tx
);
const uniqueOrgId = [...new Set(updatedMembersips.map(({ orgId }) => orgId))];
-await Promise.allSettled(uniqueOrgId.map((orgId) => licenseService.updateSubscriptionOrgMemberCount(orgId)));
+await Promise.allSettled(uniqueOrgId.map((orgId) => licenseService.updateSubscriptionOrgMemberCount(orgId, tx)));

await convertPendingGroupAdditionsToGroupMemberships({
userIds: [user.id],

@@ -16,6 +16,7 @@ export const certificateAuthorityDALFactory = (db: TDbClient) => {
parentCaId?: string;
encryptedCertificate: Buffer;
}[] = await db
+.replicaNode()
.withRecursive("cte", (cte) => {
void cte
.select("ca.id as caId", "ca.parentCaId", "cert.encryptedCertificate")

@@ -14,7 +14,8 @@ export const certificateDALFactory = (db: TDbClient) => {
count: string;
}

-const count = await db(TableName.Certificate)
+const count = await db
+.replicaNode()(TableName.Certificate)
.join(TableName.CertificateAuthority, `${TableName.Certificate}.caId`, `${TableName.CertificateAuthority}.id`)
.join(TableName.Project, `${TableName.CertificateAuthority}.projectId`, `${TableName.Project}.id`)
.where(`${TableName.Project}.id`, projectId)

@@ -12,7 +12,7 @@ export const groupProjectDALFactory = (db: TDbClient) => {

const findByProjectId = async (projectId: string, tx?: Knex) => {
try {
-const docs = await (tx || db)(TableName.GroupProjectMembership)
+const docs = await (tx || db.replicaNode())(TableName.GroupProjectMembership)
.where(`${TableName.GroupProjectMembership}.projectId`, projectId)
.join(TableName.Groups, `${TableName.GroupProjectMembership}.groupId`, `${TableName.Groups}.id`)
.join(

@@ -12,7 +12,7 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {

const findOne = async (filter: Partial<TIdentityAccessTokens>, tx?: Knex) => {
try {
-const doc = await (tx || db)(TableName.IdentityAccessToken)
+const doc = await (tx || db.replicaNode())(TableName.IdentityAccessToken)
.where(filter)
.join(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityAccessToken}.identityId`)
.leftJoin(TableName.IdentityUaClientSecret, (qb) => {

@@ -12,7 +12,7 @@ export const identityProjectDALFactory = (db: TDbClient) => {

const findByProjectId = async (projectId: string, filter: { identityId?: string } = {}, tx?: Knex) => {
try {
-const docs = await (tx || db)(TableName.IdentityProjectMembership)
+const docs = await (tx || db.replicaNode())(TableName.IdentityProjectMembership)
.where(`${TableName.IdentityProjectMembership}.projectId`, projectId)
.join(TableName.Identity, `${TableName.IdentityProjectMembership}.identityId`, `${TableName.Identity}.id`)
.where((qb) => {

@@ -12,7 +12,7 @@ export const identityOrgDALFactory = (db: TDbClient) => {

const findOne = async (filter: Partial<TIdentityOrgMemberships>, tx?: Knex) => {
try {
-const [data] = await (tx || db)(TableName.IdentityOrgMembership)
+const [data] = await (tx || db.replicaNode())(TableName.IdentityOrgMembership)
.where(filter)
.join(TableName.Identity, `${TableName.IdentityOrgMembership}.identityId`, `${TableName.Identity}.id`)
.select(selectAllTableCols(TableName.IdentityOrgMembership))

@@ -29,7 +29,7 @@ export const identityOrgDALFactory = (db: TDbClient) => {

const find = async (filter: Partial<TIdentityOrgMemberships>, tx?: Knex) => {
try {
-const docs = await (tx || db)(TableName.IdentityOrgMembership)
+const docs = await (tx || db.replicaNode())(TableName.IdentityOrgMembership)
.where(filter)
.join(TableName.Identity, `${TableName.IdentityOrgMembership}.identityId`, `${TableName.Identity}.id`)
.leftJoin(TableName.OrgRoles, `${TableName.IdentityOrgMembership}.roleId`, `${TableName.OrgRoles}.id`)

@@ -1,6 +1,7 @@
import { ForbiddenError } from "@casl/ability";

import { OrgMembershipRole, TableName, TOrgRoles } from "@app/db/schemas";
+import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { isAtLeastAsPrivileged } from "@app/lib/casl";

@@ -16,6 +17,7 @@ type TIdentityServiceFactoryDep = {
identityDAL: TIdentityDALFactory;
identityOrgMembershipDAL: TIdentityOrgDALFactory;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission" | "getOrgPermissionByRole">;
+licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
};

export type TIdentityServiceFactory = ReturnType<typeof identityServiceFactory>;

@@ -23,7 +25,8 @@ export type TIdentityServiceFactory = ReturnType<typeof identityServiceFactory>;
export const identityServiceFactory = ({
identityDAL,
identityOrgMembershipDAL,
-permissionService
+permissionService,
+licenseService
}: TIdentityServiceFactoryDep) => {
const createIdentity = async ({
name,

@@ -45,6 +48,14 @@ export const identityServiceFactory = ({
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasRequiredPriviledges) throw new BadRequestError({ message: "Failed to create a more privileged identity" });

+const plan = await licenseService.getPlan(orgId);
+if (plan?.identityLimit && plan.identitiesUsed >= plan.identityLimit) {
+// limit imposed on number of identities allowed / number of identities used exceeds the number of identities allowed
+throw new BadRequestError({
+message: "Failed to create identity due to identity limit reached. Upgrade plan to create more identities."
+});
+}
+
const identity = await identityDAL.transaction(async (tx) => {
const newIdentity = await identityDAL.create({ name }, tx);
await identityOrgMembershipDAL.create(

@@ -58,6 +69,7 @@ export const identityServiceFactory = ({
);
return newIdentity;
});
+await licenseService.updateSubscriptionOrgMemberCount(orgId);

return identity;
};

@@ -115,7 +127,7 @@ export const identityServiceFactory = ({
{ identityId: id },
{
role: customRole ? OrgMembershipRole.Custom : role,
-roleId: customRole?.id
+roleId: customRole?.id || null
},
tx
);

@@ -168,6 +180,9 @@ export const identityServiceFactory = ({
throw new ForbiddenRequestError({ message: "Failed to delete more privileged identity" });

const deletedIdentity = await identityDAL.deleteById(id);
+
+await licenseService.updateSubscriptionOrgMemberCount(identityOrgMembership.orgId);
+
return { ...deletedIdentity, orgId: identityOrgMembership.orgId };
};

@@ -178,7 +178,8 @@ export const integrationAuthServiceFactory = ({
actorAuthMethod,
accessId,
namespace,
-accessToken
+accessToken,
+awsAssumeIamRoleArn
}: TSaveIntegrationAccessTokenDTO) => {
if (!Object.values(Integrations).includes(integration as Integrations))
throw new BadRequestError({ message: "Invalid integration" });

@@ -230,7 +231,7 @@ export const integrationAuthServiceFactory = ({
updateDoc.accessExpiresAt = tokenDetails.accessExpiresAt;
}

-if (!refreshToken && (accessId || accessToken)) {
+if (!refreshToken && (accessId || accessToken || awsAssumeIamRoleArn)) {
if (accessToken) {
const accessEncToken = encryptSymmetric128BitHexKeyUTF8(accessToken, key);
updateDoc.accessIV = accessEncToken.iv;

@@ -243,6 +244,12 @@ export const integrationAuthServiceFactory = ({
updateDoc.accessIdTag = accessEncToken.tag;
updateDoc.accessIdCiphertext = accessEncToken.ciphertext;
}
+if (awsAssumeIamRoleArn) {
+const awsAssumeIamRoleArnEnc = encryptSymmetric128BitHexKeyUTF8(awsAssumeIamRoleArn, key);
+updateDoc.awsAssumeIamRoleArnCipherText = awsAssumeIamRoleArnEnc.ciphertext;
+updateDoc.awsAssumeIamRoleArnIV = awsAssumeIamRoleArnEnc.iv;
+updateDoc.awsAssumeIamRoleArnTag = awsAssumeIamRoleArnEnc.tag;
+}
}
return integrationAuthDAL.create(updateDoc);
};

@@ -251,6 +258,14 @@ export const integrationAuthServiceFactory = ({

const getIntegrationAccessToken = async (integrationAuth: TIntegrationAuths, botKey: string) => {
let accessToken: string | undefined;
let accessId: string | undefined;
+// this means its not access token based
+if (
+integrationAuth.integration === Integrations.AWS_SECRET_MANAGER &&
+integrationAuth.awsAssumeIamRoleArnCipherText
+) {
+return { accessToken: "", accessId: "" };
+}
+
if (integrationAuth.accessTag && integrationAuth.accessIV && integrationAuth.accessCiphertext) {
accessToken = decryptSymmetric128BitHexKeyUTF8({
ciphertext: integrationAuth.accessCiphertext,

@@ -17,6 +17,7 @@ export type TSaveIntegrationAccessTokenDTO = {
url?: string;
namespace?: string;
refreshToken?: string;
+awsAssumeIamRoleArn?: string;
} & TProjectPermission;

export type TDeleteIntegrationAuthsDTO = TProjectPermission & {

@@ -17,14 +17,17 @@ import {
UntagResourceCommand,
UpdateSecretCommand
} from "@aws-sdk/client-secrets-manager";
+import { AssumeRoleCommand, STSClient } from "@aws-sdk/client-sts";
import { Octokit } from "@octokit/rest";
import AWS, { AWSError } from "aws-sdk";
import { AxiosError } from "axios";
+import { randomUUID } from "crypto";
import sodium from "libsodium-wrappers";
import isEqual from "lodash.isequal";
import { z } from "zod";

import { SecretType, TIntegrationAuths, TIntegrations, TSecrets } from "@app/db/schemas";
+import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";

@@ -695,24 +698,61 @@ const syncSecretsAWSSecretManager = async ({
integration,
secrets,
accessId,
-accessToken
+accessToken,
+awsAssumeRoleArn,
+projectId
}: {
integration: TIntegrations;
secrets: Record<string, { value: string; comment?: string }>;
accessId: string | null;
accessToken: string;
+awsAssumeRoleArn: string | null;
+projectId?: string;
}) => {
+const appCfg = getConfig();
const metadata = z.record(z.any()).parse(integration.metadata || {});

-if (!accessId) {
-throw new Error("AWS access ID is required");
+if (!accessId && !awsAssumeRoleArn) {
+throw new Error("AWS access ID/AWS Assume Role is required");
}

+let accessKeyId = "";
+let secretAccessKey = "";
+let sessionToken;
+if (awsAssumeRoleArn) {
+const client = new STSClient({
+region: integration.region as string,
+credentials:
+appCfg.CLIENT_ID_AWS_INTEGRATION && appCfg.CLIENT_SECRET_AWS_INTEGRATION
+? {
+accessKeyId: appCfg.CLIENT_ID_AWS_INTEGRATION,
+secretAccessKey: appCfg.CLIENT_SECRET_AWS_INTEGRATION
+}
+: undefined
+});
+const command = new AssumeRoleCommand({
+RoleArn: awsAssumeRoleArn,
+RoleSessionName: `infisical-sm-${randomUUID()}`,
+DurationSeconds: 900, // 15mins
+ExternalId: projectId
+});
+const response = await client.send(command);
+if (!response.Credentials?.AccessKeyId || !response.Credentials?.SecretAccessKey)
+throw new Error("Failed to assume role");
+accessKeyId = response.Credentials?.AccessKeyId;
+secretAccessKey = response.Credentials?.SecretAccessKey;
+sessionToken = response.Credentials?.SessionToken;
+} else {
+accessKeyId = accessId as string;
+secretAccessKey = accessToken;
+}
+
const secretsManager = new SecretsManagerClient({
region: integration.region as string,
credentials: {
-accessKeyId: accessId,
-secretAccessKey: accessToken
+accessKeyId,
+secretAccessKey,
+sessionToken
}
});
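
Note: for the AssumeRole flow above to succeed, the customer's IAM role must trust the caller, and because `ExternalId` is set to the project ID (a standard confused-deputy mitigation), the trust policy should require that same external ID. An illustrative trust policy, with the account/ARN values as placeholders:

    // Sketch of the trust policy attached to the assumed role (placeholder values):
    const trustPolicy = {
      Version: "2012-10-17",
      Statement: [
        {
          Effect: "Allow",
          Principal: { AWS: "arn:aws:iam::<infisical-account-id>:root" }, // placeholder principal
          Action: "sts:AssumeRole",
          // only allow the assumption when the caller presents the project ID as ExternalId
          Condition: { StringEquals: { "sts:ExternalId": "<infisical-project-id>" } }
        }
      ]
    };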
@@ -3568,7 +3608,9 @@ export const syncIntegrationSecrets = async ({
|
||||
secrets,
|
||||
accessId,
|
||||
accessToken,
|
||||
appendices
|
||||
awsAssumeRoleArn,
|
||||
appendices,
|
||||
projectId
|
||||
}: {
|
||||
createManySecretsRawFn: (params: TCreateManySecretsRawFn) => Promise<Array<TSecrets & { _id: string }>>;
|
||||
updateManySecretsRawFn: (params: TUpdateManySecretsRawFn) => Promise<Array<TSecrets & { _id: string }>>;
|
||||
@@ -3585,8 +3627,10 @@ export const syncIntegrationSecrets = async ({
|
||||
integrationAuth: TIntegrationAuths;
|
||||
secrets: Record<string, { value: string; comment?: string }>;
|
||||
accessId: string | null;
|
||||
awsAssumeRoleArn: string | null;
|
||||
accessToken: string;
|
||||
appendices?: { prefix: string; suffix: string };
|
||||
projectId?: string;
|
||||
}) => {
|
||||
let response: { isSynced: boolean; syncMessage: string } | null = null;
|
||||
|
||||
@@ -3620,7 +3664,9 @@ export const syncIntegrationSecrets = async ({
|
||||
integration,
|
||||
secrets,
|
||||
accessId,
|
||||
accessToken
|
||||
accessToken,
|
||||
awsAssumeRoleArn,
|
||||
projectId
|
||||
});
|
||||
break;
|
||||
case Integrations.HEROKU:
|
||||
|
@@ -22,7 +22,7 @@ export const integrationDALFactory = (db: TDbClient) => {
|
||||
|
||||
const find = async (filter: Partial<TIntegrations>, tx?: Knex) => {
|
||||
try {
|
||||
const docs = await integrationFindQuery(tx || db, filter);
|
||||
const docs = await integrationFindQuery(tx || db.replicaNode(), filter);
|
||||
return docs.map(({ envId, envSlug, envName, ...el }) => ({
|
||||
...el,
|
||||
environment: {
|
||||
@@ -38,7 +38,7 @@ export const integrationDALFactory = (db: TDbClient) => {
|
||||
|
||||
const findOne = async (filter: Partial<TIntegrations>, tx?: Knex) => {
|
||||
try {
|
||||
const doc = await integrationFindQuery(tx || db, filter).first();
|
||||
const doc = await integrationFindQuery(tx || db.replicaNode(), filter).first();
|
||||
if (!doc) return;
|
||||
|
||||
const { envName: name, envSlug: slug, envId: id, ...el } = doc;
|
||||
@@ -50,7 +50,7 @@ export const integrationDALFactory = (db: TDbClient) => {
|
||||
|
||||
const findById = async (id: string, tx?: Knex) => {
|
||||
try {
|
||||
const doc = await integrationFindQuery(tx || db, {
|
||||
const doc = await integrationFindQuery(tx || db.replicaNode(), {
|
||||
[`${TableName.Integration}.id` as "id"]: id
|
||||
}).first();
|
||||
if (!doc) return;
|
||||
@@ -64,7 +64,7 @@ export const integrationDALFactory = (db: TDbClient) => {
|
||||
|
||||
const findByProjectId = async (projectId: string, tx?: Knex) => {
|
||||
try {
|
||||
const integrations = await (tx || db)(TableName.Integration)
|
||||
const integrations = await (tx || db.replicaNode())(TableName.Integration)
|
||||
.where(`${TableName.Environment}.projectId`, projectId)
|
||||
.join(TableName.Environment, `${TableName.Integration}.envId`, `${TableName.Environment}.id`)
|
||||
.select(db.ref("name").withSchema(TableName.Environment).as("envName"))
|
||||
@@ -90,7 +90,7 @@ export const integrationDALFactory = (db: TDbClient) => {
|
||||
// used for syncing secrets
|
||||
// this will populate integration auth also
|
||||
const findByProjectIdV2 = async (projectId: string, environment: string, tx?: Knex) => {
|
||||
const docs = await (tx || db)(TableName.Integration)
|
||||
const docs = await (tx || db.replicaNode())(TableName.Integration)
|
||||
.where(`${TableName.Environment}.projectId`, projectId)
|
||||
.where("isActive", true)
|
||||
.where(`${TableName.Environment}.slug`, environment)
|
||||
@@ -120,7 +120,10 @@ export const integrationDALFactory = (db: TDbClient) => {
|
||||
db.ref("accessExpiresAt").withSchema(TableName.IntegrationAuth).as("accessExpiresAtAu"),
|
||||
db.ref("metadata").withSchema(TableName.IntegrationAuth).as("metadataAu"),
|
||||
db.ref("algorithm").withSchema(TableName.IntegrationAuth).as("algorithmAu"),
|
||||
db.ref("keyEncoding").withSchema(TableName.IntegrationAuth).as("keyEncodingAu")
|
||||
db.ref("keyEncoding").withSchema(TableName.IntegrationAuth).as("keyEncodingAu"),
|
||||
db.ref("awsAssumeIamRoleArnCipherText").withSchema(TableName.IntegrationAuth),
|
||||
db.ref("awsAssumeIamRoleArnIV").withSchema(TableName.IntegrationAuth),
|
||||
db.ref("awsAssumeIamRoleArnTag").withSchema(TableName.IntegrationAuth)
|
||||
);
|
||||
return docs.map(
|
||||
({
|
||||
@@ -146,6 +149,9 @@ export const integrationDALFactory = (db: TDbClient) => {
|
||||
algorithmAu: algorithm,
|
||||
keyEncodingAu: keyEncoding,
|
||||
accessExpiresAtAu: accessExpiresAt,
|
||||
awsAssumeIamRoleArnIV,
|
||||
awsAssumeIamRoleArnCipherText,
|
||||
awsAssumeIamRoleArnTag,
|
||||
...el
|
||||
}) => ({
|
||||
...el,
|
||||
@@ -174,7 +180,10 @@ export const integrationDALFactory = (db: TDbClient) => {
|
||||
metadata,
|
||||
algorithm,
|
||||
keyEncoding,
|
||||
accessExpiresAt
|
||||
accessExpiresAt,
|
||||
awsAssumeIamRoleArnIV,
|
||||
awsAssumeIamRoleArnCipherText,
|
||||
awsAssumeIamRoleArnTag
|
||||
}
|
||||
})
|
||||
);
|
||||
|
@@ -16,7 +16,7 @@ export const incidentContactDALFactory = (db: TDbClient) => {
|
||||
|
||||
const findByOrgId = async (orgId: string) => {
|
||||
try {
|
||||
const incidentContacts = await db(TableName.IncidentContact).where({ orgId });
|
||||
const incidentContacts = await db.replicaNode()(TableName.IncidentContact).where({ orgId });
|
||||
return incidentContacts;
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ name: "Incident contact list", error });
|
||||
@@ -25,7 +25,8 @@ export const incidentContactDALFactory = (db: TDbClient) => {
|
||||
|
||||
const findOne = async (orgId: string, data: Partial<TIncidentContacts>) => {
|
||||
try {
|
||||
const incidentContacts = await db(TableName.IncidentContact)
|
||||
const incidentContacts = await db
|
||||
.replicaNode()(TableName.IncidentContact)
|
||||
.where({ orgId, ...data })
|
||||
.first();
|
||||
return incidentContacts;
|
||||
|
@@ -20,7 +20,7 @@ export const orgDALFactory = (db: TDbClient) => {

   const findOrgById = async (orgId: string) => {
     try {
-      const org = await db(TableName.Organization).where({ id: orgId }).first();
+      const org = await db.replicaNode()(TableName.Organization).where({ id: orgId }).first();
       return org;
     } catch (error) {
       throw new DatabaseError({ error, name: "Find org by id" });
@@ -30,7 +30,8 @@ export const orgDALFactory = (db: TDbClient) => {
   // special query
   const findAllOrgsByUserId = async (userId: string): Promise<TOrganizations[]> => {
     try {
-      const org = await db(TableName.OrgMembership)
+      const org = await db
+        .replicaNode()(TableName.OrgMembership)
         .where({ userId })
         .join(TableName.Organization, `${TableName.OrgMembership}.orgId`, `${TableName.Organization}.id`)
         .select(selectAllTableCols(TableName.Organization));
@@ -42,7 +43,8 @@ export const orgDALFactory = (db: TDbClient) => {

   const findOrgByProjectId = async (projectId: string): Promise<TOrganizations> => {
     try {
-      const [org] = await db(TableName.Project)
+      const [org] = await db
+        .replicaNode()(TableName.Project)
         .where({ [`${TableName.Project}.id` as "id"]: projectId })
         .join(TableName.Organization, `${TableName.Project}.orgId`, `${TableName.Organization}.id`)
         .select(selectAllTableCols(TableName.Organization));
@@ -56,7 +58,8 @@ export const orgDALFactory = (db: TDbClient) => {
   // special query
   const findAllOrgMembers = async (orgId: string) => {
     try {
-      const members = await db(TableName.OrgMembership)
+      const members = await db
+        .replicaNode()(TableName.OrgMembership)
         .where(`${TableName.OrgMembership}.orgId`, orgId)
         .join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
         .leftJoin<TUserEncryptionKeys>(
@@ -95,7 +98,8 @@ export const orgDALFactory = (db: TDbClient) => {
       count: string;
     }

-    const count = await db(TableName.OrgMembership)
+    const count = await db
+      .replicaNode()(TableName.OrgMembership)
       .where(`${TableName.OrgMembership}.orgId`, orgId)
       .count("*")
       .join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
@@ -110,7 +114,8 @@ export const orgDALFactory = (db: TDbClient) => {

   const findOrgMembersByUsername = async (orgId: string, usernames: string[]) => {
     try {
-      const members = await db(TableName.OrgMembership)
+      const members = await db
+        .replicaNode()(TableName.OrgMembership)
         .where(`${TableName.OrgMembership}.orgId`, orgId)
         .join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
         .leftJoin<TUserEncryptionKeys>(
@@ -145,7 +150,8 @@ export const orgDALFactory = (db: TDbClient) => {

   const findOrgGhostUser = async (orgId: string) => {
     try {
-      const member = await db(TableName.OrgMembership)
+      const member = await db
+        .replicaNode()(TableName.OrgMembership)
         .where({ orgId })
         .join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
         .leftJoin(TableName.UserEncryptionKey, `${TableName.UserEncryptionKey}.userId`, `${TableName.Users}.id`)
@@ -169,7 +175,8 @@ export const orgDALFactory = (db: TDbClient) => {

   const ghostUserExists = async (orgId: string) => {
     try {
-      const member = await db(TableName.OrgMembership)
+      const member = await db
+        .replicaNode()(TableName.OrgMembership)
         .where({ orgId })
         .join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
         .leftJoin(TableName.UserEncryptionKey, `${TableName.UserEncryptionKey}.userId`, `${TableName.Users}.id`)
@@ -257,7 +264,7 @@ export const orgDALFactory = (db: TDbClient) => {
     { offset, limit, sort, tx }: TFindOpt<TOrgMemberships> = {}
   ) => {
     try {
-      const query = (tx || db)(TableName.OrgMembership)
+      const query = (tx || db.replicaNode())(TableName.OrgMembership)
         // eslint-disable-next-line
         .where(buildFindFilter(filter))
         .join(TableName.Users, `${TableName.Users}.id`, `${TableName.OrgMembership}.userId`)

@@ -420,13 +420,20 @@ export const orgServiceFactory = ({
     }

     const plan = await licenseService.getPlan(orgId);
-    if (plan.memberLimit !== null && plan.membersUsed >= plan.memberLimit) {
-      // case: limit imposed on number of members allowed
-      // case: number of members used exceeds the number of members allowed
+    if (plan?.memberLimit && plan.membersUsed >= plan.memberLimit) {
+      // limit imposed on number of members allowed / number of members used exceeds the number of members allowed
       throw new BadRequestError({
         message: "Failed to invite member due to member limit reached. Upgrade plan to invite more members."
       });
     }

+    if (plan?.identityLimit && plan.identitiesUsed >= plan.identityLimit) {
+      // limit imposed on number of identities allowed / number of identities used exceeds the number of identities allowed
+      throw new BadRequestError({
+        message: "Failed to invite member due to member limit reached. Upgrade plan to invite more members."
+      });
+    }
+
     const invitee = await orgDAL.transaction(async (tx) => {
       const inviteeUser = await userDAL.findUserByUsername(inviteeEmail, tx);
       if (inviteeUser) {

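One subtle consequence of the rewritten guard above: the old check compared memberLimit !== null, while the new plan?.memberLimit && check relies on truthiness, so a limit of 0 now reads as "no limit". (Note also that the new identity branch reuses the member-limit error message verbatim, which reads like a copy-paste leftover.) A small self-contained illustration, with the plan type assumed for the sketch:

    type TPlan = { memberLimit: number | null; membersUsed: number };

    const isAtMemberLimit = (plan?: TPlan) =>
      !!plan?.memberLimit && plan.membersUsed >= plan.memberLimit;

    isAtMemberLimit({ memberLimit: 5, membersUsed: 5 });    // true: invite blocked
    isAtMemberLimit({ memberLimit: null, membersUsed: 3 }); // false: unlimited plan
    isAtMemberLimit({ memberLimit: 0, membersUsed: 3 });    // false: 0 is treated as "no limit" under the new check
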
@@ -12,7 +12,7 @@ export const projectBotDALFactory = (db: TDbClient) => {

   const findOne = async (filter: Partial<TProjectBots>, tx?: Knex) => {
     try {
-      const bot = await (tx || db)(TableName.ProjectBot)
+      const bot = await (tx || db.replicaNode())(TableName.ProjectBot)
         .where(filter)
         .leftJoin(TableName.Users, `${TableName.ProjectBot}.senderId`, `${TableName.Users}.id`)
         .leftJoin(TableName.UserEncryptionKey, `${TableName.UserEncryptionKey}.userId`, `${TableName.Users}.id`)

@@ -12,7 +12,9 @@ export const projectEnvDALFactory = (db: TDbClient) => {

   const findBySlugs = async (projectId: string, env: string[], tx?: Knex) => {
     try {
-      const envs = await (tx || db)(TableName.Environment).where("projectId", projectId).whereIn("slug", env);
+      const envs = await (tx || db.replicaNode())(TableName.Environment)
+        .where("projectId", projectId)
+        .whereIn("slug", env);
       return envs;
     } catch (error) {
       throw new DatabaseError({ error, name: "Find by slugs" });

@@ -16,7 +16,7 @@ export const projectKeyDALFactory = (db: TDbClient) => {
     tx?: Knex
   ): Promise<(TProjectKeys & { sender: { publicKey: string } }) | undefined> => {
     try {
-      const projectKey = await (tx || db)(TableName.ProjectKeys)
+      const projectKey = await (tx || db.replicaNode())(TableName.ProjectKeys)
         .join(TableName.Users, `${TableName.ProjectKeys}.senderId`, `${TableName.Users}.id`)
         .join(TableName.UserEncryptionKey, `${TableName.UserEncryptionKey}.userId`, `${TableName.Users}.id`)
         .where({ projectId, receiverId: userId })
@@ -34,7 +34,7 @@ export const projectKeyDALFactory = (db: TDbClient) => {

   const findAllProjectUserPubKeys = async (projectId: string, tx?: Knex) => {
     try {
-      const pubKeys = await (tx || db)(TableName.ProjectMembership)
+      const pubKeys = await (tx || db.replicaNode())(TableName.ProjectMembership)
         .where({ projectId })
         .join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`)
         .join(TableName.UserEncryptionKey, `${TableName.Users}.id`, `${TableName.UserEncryptionKey}.userId`)

@@ -13,7 +13,8 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
   // special query
   const findAllProjectMembers = async (projectId: string, filter: { usernames?: string[]; username?: string } = {}) => {
     try {
-      const docs = await db(TableName.ProjectMembership)
+      const docs = await db
+        .replicaNode()(TableName.ProjectMembership)
         .where({ [`${TableName.ProjectMembership}.projectId` as "projectId"]: projectId })
         .join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`)
         .where((qb) => {
@@ -108,7 +109,7 @@ export const projectMembershipDALFactory = (db: TDbClient) => {

   const findProjectGhostUser = async (projectId: string, tx?: Knex) => {
     try {
-      const ghostUser = await (tx || db)(TableName.ProjectMembership)
+      const ghostUser = await (tx || db.replicaNode())(TableName.ProjectMembership)
         .where({ projectId })
         .join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`)
         .select(selectAllTableCols(TableName.Users))
@@ -123,7 +124,8 @@ export const projectMembershipDALFactory = (db: TDbClient) => {

   const findMembershipsByUsername = async (projectId: string, usernames: string[]) => {
     try {
-      const members = await db(TableName.ProjectMembership)
+      const members = await db
+        .replicaNode()(TableName.ProjectMembership)
         .where({ projectId })
         .join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`)
         .join<TUserEncryptionKeys>(
@@ -149,7 +151,8 @@ export const projectMembershipDALFactory = (db: TDbClient) => {

   const findProjectMembershipsByUserId = async (orgId: string, userId: string) => {
     try {
-      const memberships = await db(TableName.ProjectMembership)
+      const memberships = await db
+        .replicaNode()(TableName.ProjectMembership)
         .where({ userId })
         .join(TableName.Project, `${TableName.ProjectMembership}.projectId`, `${TableName.Project}.id`)
         .where({ [`${TableName.Project}.orgId` as "orgId"]: orgId })

@@ -14,7 +14,8 @@ export const projectDALFactory = (db: TDbClient) => {

   const findAllProjects = async (userId: string) => {
     try {
-      const workspaces = await db(TableName.ProjectMembership)
+      const workspaces = await db
+        .replicaNode()(TableName.ProjectMembership)
         .where({ userId })
         .join(TableName.Project, `${TableName.ProjectMembership}.projectId`, `${TableName.Project}.id`)
         .leftJoin(TableName.Environment, `${TableName.Environment}.projectId`, `${TableName.Project}.id`)
@@ -83,7 +84,7 @@ export const projectDALFactory = (db: TDbClient) => {

   const findProjectGhostUser = async (projectId: string, tx?: Knex) => {
     try {
-      const ghostUser = await (tx || db)(TableName.ProjectMembership)
+      const ghostUser = await (tx || db.replicaNode())(TableName.ProjectMembership)
         .where({ projectId })
         .join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`)
         .select(selectAllTableCols(TableName.Users))
@@ -109,7 +110,8 @@ export const projectDALFactory = (db: TDbClient) => {

   const findAllProjectsByIdentity = async (identityId: string) => {
     try {
-      const workspaces = await db(TableName.IdentityProjectMembership)
+      const workspaces = await db
+        .replicaNode()(TableName.IdentityProjectMembership)
         .where({ identityId })
         .join(TableName.Project, `${TableName.IdentityProjectMembership}.projectId`, `${TableName.Project}.id`)
         .leftJoin(TableName.Environment, `${TableName.Environment}.projectId`, `${TableName.Project}.id`)
@@ -151,7 +153,8 @@ export const projectDALFactory = (db: TDbClient) => {

   const findProjectById = async (id: string) => {
     try {
-      const workspaces = await db(TableName.Project)
+      const workspaces = await db
+        .replicaNode()(TableName.Project)
         .where(`${TableName.Project}.id`, id)
         .leftJoin(TableName.Environment, `${TableName.Environment}.projectId`, `${TableName.Project}.id`)
         .select(
@@ -198,7 +201,8 @@ export const projectDALFactory = (db: TDbClient) => {
       throw new BadRequestError({ message: "Organization ID is required when querying with slugs" });
     }

-    const projects = await db(TableName.Project)
+    const projects = await db
+      .replicaNode()(TableName.Project)
       .where(`${TableName.Project}.slug`, slug)
       .where(`${TableName.Project}.orgId`, orgId)
       .leftJoin(TableName.Environment, `${TableName.Environment}.projectId`, `${TableName.Project}.id`)

@@ -12,7 +12,7 @@ export const secretBlindIndexDALFactory = (db: TDbClient) => {

   const countOfSecretsWithNullSecretBlindIndex = async (projectId: string, tx?: Knex) => {
     try {
-      const doc = await (tx || db)(TableName.Secret)
+      const doc = await (tx || db.replicaNode())(TableName.Secret)
         .leftJoin(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.Secret}.folderId`)
         .leftJoin(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`)
         .where({ projectId })
@@ -26,7 +26,7 @@ export const secretBlindIndexDALFactory = (db: TDbClient) => {

   const findAllSecretsByProjectId = async (projectId: string, tx?: Knex) => {
     try {
-      const docs = await (tx || db)(TableName.Secret)
+      const docs = await (tx || db.replicaNode())(TableName.Secret)
         .leftJoin(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.Secret}.folderId`)
         .leftJoin(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`)
         .where({ projectId })
@@ -43,7 +43,7 @@ export const secretBlindIndexDALFactory = (db: TDbClient) => {

   const findSecretsByProjectId = async (projectId: string, secretIds: string[], tx?: Knex) => {
     try {
-      const docs = await (tx || db)(TableName.Secret)
+      const docs = await (tx || db.replicaNode())(TableName.Secret)
         .leftJoin(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.Secret}.folderId`)
         .leftJoin(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`)
         .where({ projectId })

@@ -211,7 +211,12 @@ export const secretFolderDALFactory = (db: TDbClient) => {

   const findBySecretPath = async (projectId: string, environment: string, path: string, tx?: Knex) => {
     try {
-      const folder = await sqlFindFolderByPathQuery(tx || db, projectId, environment, removeTrailingSlash(path))
+      const folder = await sqlFindFolderByPathQuery(
+        tx || db.replicaNode(),
+        projectId,
+        environment,
+        removeTrailingSlash(path)
+      )
         .orderBy("depth", "desc")
         .first();
       if (folder && folder.path !== removeTrailingSlash(path)) {
@@ -230,7 +235,12 @@ export const secretFolderDALFactory = (db: TDbClient) => {
   // it will stop automatically at /path2
   const findClosestFolder = async (projectId: string, environment: string, path: string, tx?: Knex) => {
     try {
-      const folder = await sqlFindFolderByPathQuery(tx || db, projectId, environment, removeTrailingSlash(path))
+      const folder = await sqlFindFolderByPathQuery(
+        tx || db.replicaNode(),
+        projectId,
+        environment,
+        removeTrailingSlash(path)
+      )
         .orderBy("depth", "desc")
         .first();
       if (!folder) return;
@@ -247,7 +257,7 @@ export const secretFolderDALFactory = (db: TDbClient) => {
         envId,
         secretPath: removeTrailingSlash(secretPath)
       }));
-      const folders = await sqlFindMultipleFolderByEnvPathQuery(tx || db, formatedQuery);
+      const folders = await sqlFindMultipleFolderByEnvPathQuery(tx || db.replicaNode(), formatedQuery);
       return formatedQuery.map(({ envId, secretPath }) =>
         folders.find(({ path: targetPath, envId: targetEnvId }) => targetPath === secretPath && targetEnvId === envId)
       );
@@ -260,7 +270,7 @@ export const secretFolderDALFactory = (db: TDbClient) => {
   // that is instances in which for a given folderid find the secret path
   const findSecretPathByFolderIds = async (projectId: string, folderIds: string[], tx?: Knex) => {
     try {
-      const folders = await sqlFindSecretPathByFolderId(tx || db, projectId, folderIds);
+      const folders = await sqlFindSecretPathByFolderId(tx || db.replicaNode(), projectId, folderIds);

       // travelling all the way from leaf node to root contains real path
       const rootFolders = groupBy(
@@ -299,7 +309,7 @@ export const secretFolderDALFactory = (db: TDbClient) => {

   const findById = async (id: string, tx?: Knex) => {
     try {
-      const folder = await (tx || db)(TableName.SecretFolder)
+      const folder = await (tx || db.replicaNode())(TableName.SecretFolder)
         .where({ [`${TableName.SecretFolder}.id` as "id"]: id })
         .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
         .select(selectAllTableCols(TableName.SecretFolder))

@@ -13,7 +13,7 @@ export const secretFolderVersionDALFactory = (db: TDbClient) => {
   // This will fetch all latest secret versions from a folder
   const findLatestVersionByFolderId = async (folderId: string, tx?: Knex) => {
     try {
-      const docs = await (tx || db)(TableName.SecretFolderVersion)
+      const docs = await (tx || db.replicaNode())(TableName.SecretFolderVersion)
         .join(TableName.SecretFolder, `${TableName.SecretFolderVersion}.folderId`, `${TableName.SecretFolder}.id`)
         .where({ parentId: folderId, isReserved: false })
         .join<TSecretFolderVersions>(
@@ -38,7 +38,9 @@ export const secretFolderVersionDALFactory = (db: TDbClient) => {

   const findLatestFolderVersions = async (folderIds: string[], tx?: Knex) => {
     try {
-      const docs: Array<TSecretFolderVersions & { max: number }> = await (tx || db)(TableName.SecretFolderVersion)
+      const docs: Array<TSecretFolderVersions & { max: number }> = await (tx || db.replicaNode())(
+        TableName.SecretFolderVersion
+      )
         .whereIn("folderId", folderIds)
         .join(
           (tx || db)(TableName.SecretFolderVersion)

@@ -51,7 +51,7 @@ export const secretImportDALFactory = (db: TDbClient) => {

   const find = async (filter: Partial<TSecretImports & { projectId: string }>, tx?: Knex) => {
     try {
-      const docs = await (tx || db)(TableName.SecretImport)
+      const docs = await (tx || db.replicaNode())(TableName.SecretImport)
         .where(filter)
         .join(TableName.Environment, `${TableName.SecretImport}.importEnv`, `${TableName.Environment}.id`)
         .select(
@@ -72,7 +72,7 @@ export const secretImportDALFactory = (db: TDbClient) => {

   const findByFolderIds = async (folderIds: string[], tx?: Knex) => {
     try {
-      const docs = await (tx || db)(TableName.SecretImport)
+      const docs = await (tx || db.replicaNode())(TableName.SecretImport)
         .whereIn("folderId", folderIds)
         .where("isReplication", false)
         .join(TableName.Environment, `${TableName.SecretImport}.importEnv`, `${TableName.Environment}.id`)

@@ -13,7 +13,7 @@ export const secretTagDALFactory = (db: TDbClient) => {

   const findManyTagsById = async (projectId: string, ids: string[], tx?: Knex) => {
     try {
-      const tags = await (tx || db)(TableName.SecretTag).where({ projectId }).whereIn("id", ids);
+      const tags = await (tx || db.replicaNode())(TableName.SecretTag).where({ projectId }).whereIn("id", ids);
       return tags;
     } catch (error) {
       throw new DatabaseError({ error, name: "Find all by ids" });

@@ -114,7 +114,7 @@ export const secretDALFactory = (db: TDbClient) => {
       userId = undefined;
     }

-    const secs = await (tx || db)(TableName.Secret)
+    const secs = await (tx || db.replicaNode())(TableName.Secret)
       .where({ folderId })
       .where((bd) => {
        void bd.whereNull("userId").orWhere({ userId: userId || null });
@@ -152,7 +152,7 @@ export const secretDALFactory = (db: TDbClient) => {

   const getSecretTags = async (secretId: string, tx?: Knex) => {
     try {
-      const tags = await (tx || db)(TableName.JnSecretTag)
+      const tags = await (tx || db.replicaNode())(TableName.JnSecretTag)
         .join(TableName.SecretTag, `${TableName.JnSecretTag}.${TableName.SecretTag}Id`, `${TableName.SecretTag}.id`)
         .where({ [`${TableName.Secret}Id` as const]: secretId })
         .select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
@@ -179,7 +179,7 @@ export const secretDALFactory = (db: TDbClient) => {
       userId = undefined;
     }

-    const secs = await (tx || db)(TableName.Secret)
+    const secs = await (tx || db.replicaNode())(TableName.Secret)
       .whereIn("folderId", folderIds)
       .where((bd) => {
        void bd.whereNull("userId").orWhere({ userId: userId || null });
@@ -223,7 +223,7 @@ export const secretDALFactory = (db: TDbClient) => {
   ) => {
     if (!blindIndexes.length) return [];
     try {
-      const secrets = await (tx || db)(TableName.Secret)
+      const secrets = await (tx || db.replicaNode())(TableName.Secret)
         .where({ folderId })
         .where((bd) => {
           blindIndexes.forEach((el) => {
@@ -278,7 +278,7 @@ export const secretDALFactory = (db: TDbClient) => {

   const findReferencedSecretReferences = async (projectId: string, envSlug: string, secretPath: string, tx?: Knex) => {
     try {
-      const docs = await (tx || db)(TableName.SecretReference)
+      const docs = await (tx || db.replicaNode())(TableName.SecretReference)
         .where({
           secretPath,
           environment: envSlug
@@ -298,7 +298,7 @@ export const secretDALFactory = (db: TDbClient) => {
   // special query to backfill secret value
   const findAllProjectSecretValues = async (projectId: string, tx?: Knex) => {
     try {
-      const docs = await (tx || db)(TableName.Secret)
+      const docs = await (tx || db.replicaNode())(TableName.Secret)
         .join(TableName.SecretFolder, `${TableName.Secret}.folderId`, `${TableName.SecretFolder}.id`)
         .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
         .where("projectId", projectId)
@@ -313,7 +313,7 @@ export const secretDALFactory = (db: TDbClient) => {

   const findOneWithTags = async (filter: Partial<TSecrets>, tx?: Knex) => {
     try {
-      const rawDocs = await (tx || db)(TableName.Secret)
+      const rawDocs = await (tx || db.replicaNode())(TableName.Secret)
         .where(filter)
         .leftJoin(TableName.JnSecretTag, `${TableName.Secret}.id`, `${TableName.JnSecretTag}.${TableName.Secret}Id`)
         .leftJoin(TableName.SecretTag, `${TableName.JnSecretTag}.${TableName.SecretTag}Id`, `${TableName.SecretTag}.id`)

@@ -525,6 +525,18 @@ export const secretQueueFactory = ({

     const botKey = await projectBotService.getBotKey(projectId);
     const { accessToken, accessId } = await integrationAuthService.getIntegrationAccessToken(integrationAuth, botKey);
+    const awsAssumeRoleArn =
+      integrationAuth.awsAssumeIamRoleArnTag &&
+      integrationAuth.awsAssumeIamRoleArnIV &&
+      integrationAuth.awsAssumeIamRoleArnCipherText
+        ? decryptSymmetric128BitHexKeyUTF8({
+            ciphertext: integrationAuth.awsAssumeIamRoleArnCipherText,
+            iv: integrationAuth.awsAssumeIamRoleArnIV,
+            tag: integrationAuth.awsAssumeIamRoleArnTag,
+            key: botKey
+          })
+        : null;

     const secrets = await getIntegrationSecrets({
       environment,
       projectId,
@@ -544,6 +556,8 @@ export const secretQueueFactory = ({
     }

     try {
+      // akhilmhdh: this needs to be changed later to be easier to use;
+      // at present it is not extendable: adding a new parameter for just one integration requires modifying multiple places
       const response = await syncIntegrationSecrets({
         createManySecretsRawFn,
         updateManySecretsRawFn,
@@ -552,7 +566,9 @@ export const secretQueueFactory = ({
         integrationAuth,
         secrets: Object.keys(suffixedSecrets).length !== 0 ? suffixedSecrets : secrets,
         accessId: accessId as string,
+        awsAssumeRoleArn,
         accessToken,
+        projectId,
         appendices: {
           prefix: metadata?.secretPrefix || "",
           suffix: metadata?.secretSuffix || ""

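With the role ARN decrypted, the AWS-side consumer can exchange it for temporary credentials instead of long-lived keys. The exact call site is not part of this compare view, so the snippet below is a hedged sketch using the standard AWS SDK v3 STS client; the session name is an arbitrary placeholder.

    import { AssumeRoleCommand, STSClient } from "@aws-sdk/client-sts";

    // Sketch: exchange the decrypted role ARN for short-lived credentials.
    const getAssumedRoleCredentials = async (awsAssumeRoleArn: string, region: string) => {
      const sts = new STSClient({ region });
      const { Credentials } = await sts.send(
        new AssumeRoleCommand({
          RoleArn: awsAssumeRoleArn,
          RoleSessionName: "infisical-integration" // placeholder session name
        })
      );
      return Credentials; // contains AccessKeyId, SecretAccessKey, SessionToken
    };
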
@@ -13,7 +13,7 @@ export const secretVersionDALFactory = (db: TDbClient) => {
   // This will fetch all latest secret versions from a folder
   const findLatestVersionByFolderId = async (folderId: string, tx?: Knex) => {
     try {
-      const docs = await (tx || db)(TableName.SecretVersion)
+      const docs = await (tx || db.replicaNode())(TableName.SecretVersion)
         .where(`${TableName.SecretVersion}.folderId`, folderId)
         .join(TableName.Secret, `${TableName.Secret}.id`, `${TableName.SecretVersion}.secretId`)
         .join<TSecretVersions, TSecretVersions & { secretId: string; max: number }>(
@@ -90,7 +90,7 @@ export const secretVersionDALFactory = (db: TDbClient) => {
   const findLatestVersionMany = async (folderId: string, secretIds: string[], tx?: Knex) => {
     try {
       if (!secretIds.length) return {};
-      const docs: Array<TSecretVersions & { max: number }> = await (tx || db)(TableName.SecretVersion)
+      const docs: Array<TSecretVersions & { max: number }> = await (tx || db.replicaNode())(TableName.SecretVersion)
         .where("folderId", folderId)
         .whereIn(`${TableName.SecretVersion}.secretId`, secretIds)
         .join(

@@ -12,7 +12,7 @@ export const serviceTokenDALFactory = (db: TDbClient) => {

   const findById = async (id: string, tx?: Knex) => {
     try {
-      const doc = await (tx || db)(TableName.ServiceToken)
+      const doc = await (tx || db.replicaNode())(TableName.ServiceToken)
         .leftJoin<TUsers>(
           TableName.Users,
           `${TableName.Users}.id`,

@@ -12,7 +12,7 @@ import { AuthMethod } from "../auth/auth-type";
 import { TOrgServiceFactory } from "../org/org-service";
 import { TUserDALFactory } from "../user/user-dal";
 import { TSuperAdminDALFactory } from "./super-admin-dal";
-import { TAdminSignUpDTO } from "./super-admin-types";
+import { LoginMethod, TAdminSignUpDTO } from "./super-admin-types";

 type TSuperAdminServiceFactoryDep = {
   serverCfgDAL: TSuperAdminDALFactory;
@@ -79,7 +79,37 @@ export const superAdminServiceFactory = ({
     return newCfg;
   };

-  const updateServerCfg = async (data: TSuperAdminUpdate) => {
+  const updateServerCfg = async (data: TSuperAdminUpdate, userId: string) => {
+    if (data.enabledLoginMethods) {
+      const superAdminUser = await userDAL.findById(userId);
+      const loginMethodToAuthMethod = {
+        [LoginMethod.EMAIL]: [AuthMethod.EMAIL],
+        [LoginMethod.GOOGLE]: [AuthMethod.GOOGLE],
+        [LoginMethod.GITLAB]: [AuthMethod.GITLAB],
+        [LoginMethod.GITHUB]: [AuthMethod.GITHUB],
+        [LoginMethod.LDAP]: [AuthMethod.LDAP],
+        [LoginMethod.OIDC]: [AuthMethod.OIDC],
+        [LoginMethod.SAML]: [
+          AuthMethod.AZURE_SAML,
+          AuthMethod.GOOGLE_SAML,
+          AuthMethod.JUMPCLOUD_SAML,
+          AuthMethod.KEYCLOAK_SAML,
+          AuthMethod.OKTA_SAML
+        ]
+      };
+
+      if (
+        !data.enabledLoginMethods.some((loginMethod) =>
+          loginMethodToAuthMethod[loginMethod as LoginMethod].some(
+            (authMethod) => superAdminUser.authMethods?.includes(authMethod)
+          )
+        )
+      ) {
+        throw new BadRequestError({
+          message: "You must configure at least one auth method to prevent account lockout"
+        });
+      }
+    }
     const updatedServerCfg = await serverCfgDAL.updateById(ADMIN_CONFIG_DB_UUID, data);

     await keyStore.setItemWithExpiry(ADMIN_CONFIG_KEY, ADMIN_CONFIG_KEY_EXP, JSON.stringify(updatedServerCfg));
@@ -167,7 +197,7 @@ export const superAdminServiceFactory = ({
       orgName: initialOrganizationName
     });

-    await updateServerCfg({ initialized: true });
+    await updateServerCfg({ initialized: true }, userInfo.user.id);
     const token = await authService.generateUserTokens({
       user: userInfo.user,
       authMethod: AuthMethod.EMAIL,

@@ -15,3 +15,13 @@ export type TAdminSignUpDTO = {
   ip: string;
   userAgent: string;
 };
+
+export enum LoginMethod {
+  EMAIL = "email",
+  GOOGLE = "google",
+  GITHUB = "github",
+  GITLAB = "gitlab",
+  SAML = "saml",
+  LDAP = "ldap",
+  OIDC = "oidc"
+}

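To make the lockout guard above concrete, here is a compressed, self-contained restatement of its logic. Only LoginMethod is taken verbatim from the diff; the AuthMethod string values and the abbreviated mapping below are illustrative assumptions.

    enum LoginMethod { EMAIL = "email", GOOGLE = "google", SAML = "saml" }
    enum AuthMethod { EMAIL = "email", GOOGLE = "google", AZURE_SAML = "azure-saml", OKTA_SAML = "okta-saml" }

    // Abbreviated mapping; the real table in the diff covers every provider.
    const loginMethodToAuthMethod: Record<LoginMethod, AuthMethod[]> = {
      [LoginMethod.EMAIL]: [AuthMethod.EMAIL],
      [LoginMethod.GOOGLE]: [AuthMethod.GOOGLE],
      [LoginMethod.SAML]: [AuthMethod.AZURE_SAML, AuthMethod.OKTA_SAML]
    };

    // The update is rejected when none of the login methods that would remain enabled
    // map to an auth method the acting super admin actually uses.
    const wouldLockOutAdmin = (enabled: LoginMethod[], adminAuthMethods: AuthMethod[]) =>
      !enabled.some((method) => loginMethodToAuthMethod[method].some((auth) => adminAuthMethods.includes(auth)));

    wouldLockOutAdmin([LoginMethod.SAML], [AuthMethod.EMAIL]); // true, so the service raises BadRequestError
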
@@ -22,7 +22,8 @@ export const userDALFactory = (db: TDbClient) => {
   // -------------------------
   const findUserEncKeyByUsername = async ({ username }: { username: string }) => {
     try {
-      return await db(TableName.Users)
+      return await db
+        .replicaNode()(TableName.Users)
         .where({
           username,
           isGhost: false
@@ -36,7 +37,7 @@ export const userDALFactory = (db: TDbClient) => {

   const findUserEncKeyByUserIdsBatch = async ({ userIds }: { userIds: string[] }, tx?: Knex) => {
     try {
-      return await (tx || db)(TableName.Users)
+      return await (tx || db.replicaNode())(TableName.Users)
         .where({
           isGhost: false
         })
@@ -49,7 +50,8 @@ export const userDALFactory = (db: TDbClient) => {

   const findUserEncKeyByUserId = async (userId: string) => {
     try {
-      const user = await db(TableName.Users)
+      const user = await db
+        .replicaNode()(TableName.Users)
         .where(`${TableName.Users}.id`, userId)
         .join(TableName.UserEncryptionKey, `${TableName.Users}.id`, `${TableName.UserEncryptionKey}.userId`)
         .first();
@@ -65,7 +67,8 @@ export const userDALFactory = (db: TDbClient) => {

   const findUserByProjectMembershipId = async (projectMembershipId: string) => {
     try {
-      return await db(TableName.ProjectMembership)
+      return await db
+        .replicaNode()(TableName.ProjectMembership)
         .where({ [`${TableName.ProjectMembership}.id` as "id"]: projectMembershipId })
         .join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`)
         .first();
@@ -76,7 +79,8 @@ export const userDALFactory = (db: TDbClient) => {

   const findUsersByProjectMembershipIds = async (projectMembershipIds: string[]) => {
     try {
-      return await db(TableName.ProjectMembership)
+      return await db
+        .replicaNode()(TableName.ProjectMembership)
         .whereIn(`${TableName.ProjectMembership}.id`, projectMembershipIds)
         .join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`)
         .select("*");
@@ -128,7 +132,7 @@ export const userDALFactory = (db: TDbClient) => {
   // ---------------------
   const findOneUserAction = (filter: TUserActionsUpdate, tx?: Knex) => {
     try {
-      return (tx || db)(TableName.UserAction).where(filter).first("*");
+      return (tx || db.replicaNode())(TableName.UserAction).where(filter).first("*");
     } catch (error) {
       throw new DatabaseError({ error, name: "Find one user action" });
     }

@@ -8,6 +8,7 @@ import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
 import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";

 import { AuthMethod } from "../auth/auth-type";
+import { TProjectMembershipDALFactory } from "../project-membership/project-membership-dal";
 import { TUserDALFactory } from "./user-dal";

 type TUserServiceFactoryDep = {
@@ -26,8 +27,9 @@ type TUserServiceFactoryDep = {
     | "delete"
   >;
   userAliasDAL: Pick<TUserAliasDALFactory, "find" | "insertMany">;
-  orgMembershipDAL: Pick<TOrgMembershipDALFactory, "find" | "insertMany">;
+  orgMembershipDAL: Pick<TOrgMembershipDALFactory, "find" | "insertMany" | "findOne" | "updateById">;
   tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser" | "validateTokenForUser">;
+  projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find">;
   smtpService: Pick<TSmtpService, "sendMail">;
 };

@@ -37,6 +39,7 @@ export const userServiceFactory = ({
   userDAL,
   userAliasDAL,
   orgMembershipDAL,
+  projectMembershipDAL,
   tokenService,
   smtpService
 }: TUserServiceFactoryDep) => {
@@ -247,6 +250,51 @@ export const userServiceFactory = ({
     return privateKey;
   };

+  const getUserProjectFavorites = async (userId: string, orgId: string) => {
+    const orgMembership = await orgMembershipDAL.findOne({
+      userId,
+      orgId
+    });
+
+    if (!orgMembership) {
+      throw new BadRequestError({
+        message: "User does not belong in the organization."
+      });
+    }
+
+    return { projectFavorites: orgMembership.projectFavorites || [] };
+  };
+
+  const updateUserProjectFavorites = async (userId: string, orgId: string, projectIds: string[]) => {
+    const orgMembership = await orgMembershipDAL.findOne({
+      userId,
+      orgId
+    });
+
+    if (!orgMembership) {
+      throw new BadRequestError({
+        message: "User does not belong in the organization."
+      });
+    }
+
+    const matchingUserProjectMemberships = await projectMembershipDAL.find({
+      userId,
+      $in: {
+        projectId: projectIds
+      }
+    });
+
+    const memberProjectFavorites = matchingUserProjectMemberships.map(
+      (projectMembership) => projectMembership.projectId
+    );
+
+    const updatedOrgMembership = await orgMembershipDAL.updateById(orgMembership.id, {
+      projectFavorites: memberProjectFavorites
+    });
+
+    return updatedOrgMembership.projectFavorites;
+  };
+
   return {
     sendEmailVerificationCode,
     verifyEmailVerificationCode,
@@ -258,6 +306,8 @@ export const userServiceFactory = ({
     createUserAction,
     getUserAction,
     unlockUser,
-    getUserPrivateKey
+    getUserPrivateKey,
+    getUserProjectFavorites,
+    updateUserProjectFavorites
   };
 };

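A detail worth noting in updateUserProjectFavorites above: the requested projectIds are intersected with the user's actual project memberships, so IDs the user does not belong to are silently dropped rather than rejected. A hypothetical usage sketch (service wiring and IDs are assumed, and the call is presumed to run inside an async context):

    // Suppose the user is a member of project "a" but not of project "b".
    const favorites = await userService.updateUserProjectFavorites(userId, orgId, ["a", "b"]);
    // favorites === ["a"]; "b" was filtered out via projectMembershipDAL.find
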
@@ -22,7 +22,7 @@ export const webhookDALFactory = (db: TDbClient) => {

   const find = async (filter: Partial<TWebhooks>, tx?: Knex) => {
     try {
-      const docs = await webhookFindQuery(tx || db, filter);
+      const docs = await webhookFindQuery(tx || db.replicaNode(), filter);
       return docs.map(({ envId, envSlug, envName, ...el }) => ({
         ...el,
         envId,
@@ -39,7 +39,7 @@ export const webhookDALFactory = (db: TDbClient) => {

   const findOne = async (filter: Partial<TWebhooks>, tx?: Knex) => {
     try {
-      const doc = await webhookFindQuery(tx || db, filter).first();
+      const doc = await webhookFindQuery(tx || db.replicaNode(), filter).first();
       if (!doc) return;

       const { envName: name, envSlug: slug, envId: id, ...el } = doc;
@@ -51,7 +51,7 @@ export const webhookDALFactory = (db: TDbClient) => {

   const findById = async (id: string, tx?: Knex) => {
     try {
-      const doc = await webhookFindQuery(tx || db, {
+      const doc = await webhookFindQuery(tx || db.replicaNode(), {
         [`${TableName.Webhook}.id` as "id"]: id
       }).first();
       if (!doc) return;
@@ -65,7 +65,7 @@ export const webhookDALFactory = (db: TDbClient) => {

   const findAllWebhooks = async (projectId: string, environment?: string, secretPath?: string, tx?: Knex) => {
     try {
-      const webhooks = await (tx || db)(TableName.Webhook)
+      const webhooks = await (tx || db.replicaNode())(TableName.Webhook)
         .where(`${TableName.Environment}.projectId`, projectId)
         .where((qb) => {
           if (environment) {

@@ -4,55 +4,63 @@ import { AxiosError } from "axios";
 import picomatch from "picomatch";

 import { SecretKeyEncoding, TWebhooks } from "@app/db/schemas";
-import { getConfig } from "@app/lib/config/env";
 import { request } from "@app/lib/config/request";
-import { decryptSymmetric, decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
+import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
 import { BadRequestError } from "@app/lib/errors";
 import { logger } from "@app/lib/logger";

 import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
 import { TWebhookDALFactory } from "./webhook-dal";
+import { WebhookType } from "./webhook-types";

 const WEBHOOK_TRIGGER_TIMEOUT = 15 * 1000;
-export const triggerWebhookRequest = async (
-  { url, encryptedSecretKey, iv, tag, keyEncoding }: TWebhooks,
-  data: Record<string, unknown>
-) => {
-  const headers: Record<string, string> = {};
-  const payload = { ...data, timestamp: Date.now() };
-  const appCfg = getConfig();

+export const decryptWebhookDetails = (webhook: TWebhooks) => {
+  const { keyEncoding, iv, encryptedSecretKey, tag, urlCipherText, urlIV, urlTag, url } = webhook;
+
+  let decryptedSecretKey = "";
+  let decryptedUrl = url;
+
   if (encryptedSecretKey) {
-    const encryptionKey = appCfg.ENCRYPTION_KEY;
-    const rootEncryptionKey = appCfg.ROOT_ENCRYPTION_KEY;
-    let secretKey;
-    if (rootEncryptionKey && keyEncoding === SecretKeyEncoding.BASE64) {
-      // case: encoding scheme is base64
-      secretKey = decryptSymmetric({
-        ciphertext: encryptedSecretKey,
-        iv: iv as string,
-        tag: tag as string,
-        key: rootEncryptionKey
-      });
-    } else if (encryptionKey && keyEncoding === SecretKeyEncoding.UTF8) {
-      // case: encoding scheme is utf8
-      secretKey = decryptSymmetric128BitHexKeyUTF8({
-        ciphertext: encryptedSecretKey,
-        iv: iv as string,
-        tag: tag as string,
-        key: encryptionKey
-      });
-    }
-    if (secretKey) {
-      const webhookSign = crypto.createHmac("sha256", secretKey).update(JSON.stringify(payload)).digest("hex");
-      headers["x-infisical-signature"] = `t=${payload.timestamp};${webhookSign}`;
-    }
+    decryptedSecretKey = infisicalSymmetricDecrypt({
+      keyEncoding: keyEncoding as SecretKeyEncoding,
+      ciphertext: encryptedSecretKey,
+      iv: iv as string,
+      tag: tag as string
+    });
   }

+  if (urlCipherText) {
+    decryptedUrl = infisicalSymmetricDecrypt({
+      keyEncoding: keyEncoding as SecretKeyEncoding,
+      ciphertext: urlCipherText,
+      iv: urlIV as string,
+      tag: urlTag as string
+    });
+  }
+
+  return {
+    secretKey: decryptedSecretKey,
+    url: decryptedUrl
+  };
+};
+
+export const triggerWebhookRequest = async (webhook: TWebhooks, data: Record<string, unknown>) => {
+  const headers: Record<string, string> = {};
+  const payload = { ...data, timestamp: Date.now() };
+  const { secretKey, url } = decryptWebhookDetails(webhook);
+
+  if (secretKey) {
+    const webhookSign = crypto.createHmac("sha256", secretKey).update(JSON.stringify(payload)).digest("hex");
+    headers["x-infisical-signature"] = `t=${payload.timestamp};${webhookSign}`;
+  }
+
   const req = await request.post(url, payload, {
     headers,
     timeout: WEBHOOK_TRIGGER_TIMEOUT,
     signal: AbortSignal.timeout(WEBHOOK_TRIGGER_TIMEOUT)
   });

   return req;
 };

@@ -60,15 +68,48 @@ export const getWebhookPayload = (
   eventName: string,
   workspaceId: string,
   environment: string,
-  secretPath?: string
-) => ({
-  event: eventName,
-  project: {
-    workspaceId,
-    environment,
-    secretPath
+  secretPath?: string,
+  type?: string | null
+) => {
+  switch (type) {
+    case WebhookType.SLACK:
+      return {
+        text: "A secret value has been added or modified.",
+        attachments: [
+          {
+            color: "#E7F256",
+            fields: [
+              {
+                title: "Workspace ID",
+                value: workspaceId,
+                short: false
+              },
+              {
+                title: "Environment",
+                value: environment,
+                short: false
+              },
+              {
+                title: "Secret Path",
+                value: secretPath,
+                short: false
+              }
+            ]
+          }
+        ]
+      };
+    case WebhookType.GENERAL:
+    default:
+      return {
+        event: eventName,
+        project: {
+          workspaceId,
+          environment,
+          secretPath
+        }
+      };
   }
-});
+};

@@ -95,9 +136,10 @@ export const fnTriggerWebhook = async ({
   logger.info("Secret webhook job started", { environment, secretPath, projectId });
   const webhooksTriggered = await Promise.allSettled(
     toBeTriggeredHooks.map((hook) =>
-      triggerWebhookRequest(hook, getWebhookPayload("secrets.modified", projectId, environment, secretPath))
+      triggerWebhookRequest(hook, getWebhookPayload("secrets.modified", projectId, environment, secretPath, hook.type))
     )
   );

   // filter hooks by status
   const successWebhooks = webhooksTriggered
     .filter(({ status }) => status === "fulfilled")

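The signing scheme above (HMAC-SHA256 over the JSON payload, sent as x-infisical-signature: t=<timestamp>;<hex digest>) implies a matching verification on the receiving end. The diff does not include one, so this is a hedged receiver-side sketch; the five-minute staleness tolerance is an arbitrary choice.

    import crypto from "crypto";

    // Verify an incoming webhook: recompute the HMAC over the raw request body
    // and compare it, in constant time, against the signature from the header.
    const verifyInfisicalSignature = (rawBody: string, header: string, secretKey: string): boolean => {
      const [tPart, signature = ""] = header.split(";");
      const timestamp = Number(tPart.replace("t=", ""));
      if (Number.isNaN(timestamp) || Date.now() - timestamp > 5 * 60 * 1000) return false; // stale or malformed

      const expected = crypto.createHmac("sha256", secretKey).update(rawBody).digest("hex");
      if (expected.length !== signature.length) return false; // timingSafeEqual requires equal lengths
      return crypto.timingSafeEqual(Buffer.from(expected), Buffer.from(signature));
    };
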
@@ -1,15 +1,14 @@
 import { ForbiddenError } from "@casl/ability";

-import { SecretEncryptionAlgo, SecretKeyEncoding, TWebhooksInsert } from "@app/db/schemas";
+import { TWebhooksInsert } from "@app/db/schemas";
 import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
 import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
-import { getConfig } from "@app/lib/config/env";
-import { encryptSymmetric, encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
+import { infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
 import { BadRequestError } from "@app/lib/errors";

 import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
 import { TWebhookDALFactory } from "./webhook-dal";
-import { getWebhookPayload, triggerWebhookRequest } from "./webhook-fns";
+import { decryptWebhookDetails, getWebhookPayload, triggerWebhookRequest } from "./webhook-fns";
 import {
   TCreateWebhookDTO,
   TDeleteWebhookDTO,
@@ -36,7 +35,8 @@ export const webhookServiceFactory = ({ webhookDAL, projectEnvDAL, permissionSer
     webhookUrl,
     environment,
     secretPath,
-    webhookSecretKey
+    webhookSecretKey,
+    type
   }: TCreateWebhookDTO) => {
     const { permission } = await permissionService.getProjectPermission(
       actor,
@@ -50,30 +50,29 @@ export const webhookServiceFactory = ({ webhookDAL, projectEnvDAL, permissionSer
     if (!env) throw new BadRequestError({ message: "Env not found" });

     const insertDoc: TWebhooksInsert = {
-      url: webhookUrl,
+      url: "", // deprecated - we are moving away from plaintext URLs
       envId: env.id,
       isDisabled: false,
-      secretPath: secretPath || "/"
+      secretPath: secretPath || "/",
+      type
     };

     if (webhookSecretKey) {
-      const appCfg = getConfig();
-      const encryptionKey = appCfg.ENCRYPTION_KEY;
-      const rootEncryptionKey = appCfg.ROOT_ENCRYPTION_KEY;
-      if (rootEncryptionKey) {
-        const { ciphertext, iv, tag } = encryptSymmetric(webhookSecretKey, rootEncryptionKey);
-        insertDoc.encryptedSecretKey = ciphertext;
-        insertDoc.iv = iv;
-        insertDoc.tag = tag;
-        insertDoc.algorithm = SecretEncryptionAlgo.AES_256_GCM;
-        insertDoc.keyEncoding = SecretKeyEncoding.BASE64;
-      } else if (encryptionKey) {
-        const { ciphertext, iv, tag } = encryptSymmetric128BitHexKeyUTF8(webhookSecretKey, encryptionKey);
-        insertDoc.encryptedSecretKey = ciphertext;
-        insertDoc.iv = iv;
-        insertDoc.tag = tag;
-        insertDoc.algorithm = SecretEncryptionAlgo.AES_256_GCM;
-        insertDoc.keyEncoding = SecretKeyEncoding.UTF8;
-      }
+      const { ciphertext, iv, tag, algorithm, encoding } = infisicalSymmetricEncypt(webhookSecretKey);
+      insertDoc.encryptedSecretKey = ciphertext;
+      insertDoc.iv = iv;
+      insertDoc.tag = tag;
+      insertDoc.algorithm = algorithm;
+      insertDoc.keyEncoding = encoding;
+    }
+
+    if (webhookUrl) {
+      const { ciphertext, iv, tag, algorithm, encoding } = infisicalSymmetricEncypt(webhookUrl);
+      insertDoc.urlCipherText = ciphertext;
+      insertDoc.urlIV = iv;
+      insertDoc.urlTag = tag;
+      insertDoc.algorithm = algorithm;
+      insertDoc.keyEncoding = encoding;
     }

     const webhook = await webhookDAL.create(insertDoc);
@@ -131,7 +130,7 @@ export const webhookServiceFactory = ({ webhookDAL, projectEnvDAL, permissionSer
     try {
       await triggerWebhookRequest(
         webhook,
-        getWebhookPayload("test", webhook.projectId, webhook.environment.slug, webhook.secretPath)
+        getWebhookPayload("test", webhook.projectId, webhook.environment.slug, webhook.secretPath, webhook.type)
       );
     } catch (err) {
       webhookError = (err as Error).message;
@@ -162,7 +161,14 @@ export const webhookServiceFactory = ({ webhookDAL, projectEnvDAL, permissionSer
     );
     ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Webhooks);

-    return webhookDAL.findAllWebhooks(projectId, environment, secretPath);
+    const webhooks = await webhookDAL.findAllWebhooks(projectId, environment, secretPath);
+    return webhooks.map((w) => {
+      const { url } = decryptWebhookDetails(w);
+      return {
+        ...w,
+        url
+      };
+    });
   };

   return {

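The webhook service now delegates to infisicalSymmetricEncypt (sic, as named in the diff) instead of branching on ROOT_ENCRYPTION_KEY vs ENCRYPTION_KEY inline. Judging from the removed code, the helper centralizes exactly that branching and reports which algorithm and encoding it used. Its real body is not in this compare view, so the following is only a plausible sketch inferred from the removed branches, using the same names the surrounding code imports.

    // Plausible shape of the centralized helper, inferred from the removed branches.
    const infisicalSymmetricEncypt = (plaintext: string) => {
      const appCfg = getConfig();
      if (appCfg.ROOT_ENCRYPTION_KEY) {
        const { ciphertext, iv, tag } = encryptSymmetric(plaintext, appCfg.ROOT_ENCRYPTION_KEY);
        return { ciphertext, iv, tag, algorithm: SecretEncryptionAlgo.AES_256_GCM, encoding: SecretKeyEncoding.BASE64 };
      }
      if (appCfg.ENCRYPTION_KEY) {
        const { ciphertext, iv, tag } = encryptSymmetric128BitHexKeyUTF8(plaintext, appCfg.ENCRYPTION_KEY);
        return { ciphertext, iv, tag, algorithm: SecretEncryptionAlgo.AES_256_GCM, encoding: SecretKeyEncoding.UTF8 };
      }
      throw new BadRequestError({ message: "Missing encryption key" }); // assumed failure mode
    };
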
@@ -5,6 +5,7 @@ export type TCreateWebhookDTO = {
   secretPath?: string;
   webhookUrl: string;
   webhookSecretKey?: string;
+  type: string;
 } & TProjectPermission;

 export type TUpdateWebhookDTO = {
@@ -24,3 +25,8 @@ export type TListWebhookDTO = {
   environment?: string;
   secretPath?: string;
 } & TProjectPermission;
+
+export enum WebhookType {
+  GENERAL = "general",
+  SLACK = "slack"
+}

@@ -885,7 +885,7 @@ func SetEncryptedSecrets(secretArgs []string, secretType string, environmentName
   }

   // Key and value from argument
-  key := splitKeyValueFromArg[0]
+  key := strings.TrimSpace(splitKeyValueFromArg[0])
   value := splitKeyValueFromArg[1]

   hashedKey := fmt.Sprintf("%x", sha256.Sum256([]byte(key)))

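The CLI change above guards against an easy-to-miss quirk: an argument like "KEY =value" would previously hash and store the key with its trailing space. The same pitfall reduced to a few lines, written in TypeScript here for consistency with the rest of the diff (the Go change behaves equivalently):

    const arg = "FOO =bar";
    const [rawKey, value] = arg.split("=");

    console.log(JSON.stringify(rawKey));        // "FOO " (note the trailing space)
    console.log(JSON.stringify(rawKey.trim())); // "FOO", which is what the user actually meant
    console.log(value);                         // "bar"
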
@@ -10,4 +10,8 @@ To request time off, just submit a request in Rippling and let Maidul know at le

 ## National holidays

-Since Infisical's team is globally distributed, it is hard for us to keep track of all the various national holidays across many different countries. Whether you'd like to celebrate Christmas or National Brisket Day (which, by the way, is on May 28th), you are welcome to take PTO on those days – just let Maidul know at least a week ahead so that we can adjust our planning.
+Since Infisical's team is globally distributed, it is hard for us to keep track of all the various national holidays across many different countries. Whether you'd like to celebrate Christmas or National Brisket Day (which, by the way, is on May 28th), you are welcome to take PTO on those days – just let Maidul know at least a week ahead so that we can adjust our planning.
+
+## Winter Break
+
+Every year, the Infisical team goes on a company-wide vacation during the winter holidays. This year, the winter break period starts on December 21st, 2024 and ends on January 5th, 2025. You should expect to do no scheduled work during this period, but we will have a rotation process for [high and urgent service disruptions](https://infisical.com/sla).
@@ -64,5 +64,10 @@
   ],
   "integrations": {
     "intercom": "hsg644ru"
   },
+  "analytics": {
+    "koala": {
+      "publicApiKey": "pk_b50d7184e0e39ddd5cdb43cf6abeadd9b97d"
+    }
+  }
 }

@@ -10,7 +10,6 @@

 #sidebar {
   left: 0;
   padding-left: 48px;
   padding-right: 30px;
-  border-right: 1px;
   border-color: #cdd64b;
@@ -18,6 +17,10 @@
   border-right: 1px solid #ebebeb;
 }

+#sidebar-content {
+  padding-left: 2rem;
+}
+
 #sidebar .relative .sticky {
   opacity: 0;
 }

docker-compose.dev-read-replica.yml (new file, 191 lines)
@@ -0,0 +1,191 @@
version: "3.9"

services:
  nginx:
    container_name: infisical-dev-nginx
    image: nginx
    restart: always
    ports:
      - 8080:80
    volumes:
      - ./nginx/default.dev.conf:/etc/nginx/conf.d/default.conf:ro
    depends_on:
      - backend
      - frontend

  db:
    image: bitnami/postgresql:14
    ports:
      - "5432:5432"
    volumes:
      - postgres-data:/var/lib/postgresql/data
    environment:
      POSTGRESQL_PASSWORD: infisical
      POSTGRESQL_USERNAME: infisical
      POSTGRESQL_DATABASE: infisical
      POSTGRESQL_REPLICATION_MODE: master
      POSTGRESQL_REPLICATION_USER: repl_user
      POSTGRESQL_REPLICATION_PASSWORD: repl_password
      POSTGRESQL_SYNCHRONOUS_COMMIT_MODE: on
      POSTGRESQL_NUM_SYNCHRONOUS_REPLICAS: 1

  db-slave:
    image: bitnami/postgresql:14
    ports:
      - "5433:5432"
    volumes:
      # note: corrected to the dedicated replica volume; the replica must not share
      # the primary's data directory
      - postgres-slave-data:/var/lib/postgresql/data
    environment:
      POSTGRESQL_PASSWORD: infisical
      POSTGRESQL_USERNAME: infisical
      POSTGRESQL_DATABASE: infisical
      POSTGRESQL_REPLICATION_MODE: slave
      POSTGRESQL_REPLICATION_USER: repl_user
      POSTGRESQL_REPLICATION_PASSWORD: repl_password
      POSTGRESQL_MASTER_HOST: db
      POSTGRESQL_MASTER_PORT_NUMBER: 5432

  redis:
    image: redis
    container_name: infisical-dev-redis
    environment:
      - ALLOW_EMPTY_PASSWORD=yes
    ports:
      - 6379:6379
    volumes:
      - redis_data:/data

  redis-commander:
    container_name: infisical-dev-redis-commander
    image: rediscommander/redis-commander
    restart: always
    depends_on:
      - redis
    environment:
      - REDIS_HOSTS=local:redis:6379
    ports:
      - "8085:8081"

  db-test:
    profiles: ["test"]
    image: postgres:14-alpine
    ports:
      - "5430:5432"
    environment:
      POSTGRES_PASSWORD: infisical
      POSTGRES_USER: infisical
      POSTGRES_DB: infisical-test

  db-migration:
    container_name: infisical-db-migration
    depends_on:
      - db
    build:
      context: ./backend
      dockerfile: Dockerfile.dev
    env_file: .env
    environment:
      - DB_CONNECTION_URI=postgres://infisical:infisical@db/infisical?sslmode=disable
    command: npm run migration:latest
    volumes:
      - ./backend/src:/app/src

  backend:
    container_name: infisical-dev-api
    build:
      context: ./backend
      dockerfile: Dockerfile.dev
    depends_on:
      db:
        condition: service_started
      redis:
        condition: service_started
      db-migration:
        condition: service_completed_successfully
    env_file:
      - .env
    ports:
      - 4000:4000
    environment:
      - NODE_ENV=development
      - DB_CONNECTION_URI=postgres://infisical:infisical@db/infisical?sslmode=disable
      - TELEMETRY_ENABLED=false
    volumes:
      - ./backend/src:/app/src
    extra_hosts:
      - "host.docker.internal:host-gateway"

  frontend:
    container_name: infisical-dev-frontend
    restart: unless-stopped
    depends_on:
      - backend
    build:
      context: ./frontend
      dockerfile: Dockerfile.dev
    volumes:
      - ./frontend/src:/app/src/ # mounted whole src to avoid missing reload on new files
      - ./frontend/public:/app/public
    env_file: .env
    environment:
      - NEXT_PUBLIC_ENV=development
      - INFISICAL_TELEMETRY_ENABLED=false

  pgadmin:
    image: dpage/pgadmin4
    restart: always
    environment:
      PGADMIN_DEFAULT_EMAIL: admin@example.com
      PGADMIN_DEFAULT_PASSWORD: pass
    ports:
      - 5050:80
    depends_on:
      - db

  smtp-server:
    container_name: infisical-dev-smtp-server
    image: lytrax/mailhog:latest # https://github.com/mailhog/MailHog/issues/353#issuecomment-821137362
    restart: always
    logging:
      driver: "none" # disable saving logs
    ports:
      - 1025:1025 # SMTP server
      - 8025:8025 # Web UI

  openldap: # note: more advanced configuration is available
    image: osixia/openldap:1.5.0
    restart: always
    environment:
      LDAP_ORGANISATION: Acme
      LDAP_DOMAIN: acme.com
      LDAP_ADMIN_PASSWORD: admin
    ports:
      - 389:389
      - 636:636
    volumes:
      - ldap_data:/var/lib/ldap
      - ldap_config:/etc/ldap/slapd.d
    profiles: [ldap]

  phpldapadmin: # username: cn=admin,dc=acme,dc=com, pass is admin
    image: osixia/phpldapadmin:latest
    restart: always
    environment:
      - PHPLDAPADMIN_LDAP_HOSTS=openldap
      - PHPLDAPADMIN_HTTPS=false
    ports:
      - 6433:80
    depends_on:
      - openldap
    profiles: [ldap]

volumes:
  postgres-data:
    driver: local
  postgres-slave-data:
    driver: local
  redis_data:
    driver: local
  ldap_data:
  ldap_config:
docs/documentation/platform/dynamic-secrets/mssql.mdx (new file, 118 lines)
@@ -0,0 +1,118 @@
---
title: "MS SQL"
description: "How to dynamically generate MS SQL database users."
---

The Infisical MS SQL dynamic secret allows you to generate Microsoft SQL Server database credentials on demand, based on a configured role.

## Prerequisite

Create a user with the required permissions in your SQL instance. This user will be used to create new accounts on demand.

## Set up Dynamic Secrets with MS SQL

<Steps>
  <Step title="Open Secret Overview Dashboard">
    Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
  </Step>
  <Step title="Click on the 'Add Dynamic Secret' button">
    (screenshot omitted)
  </Step>
  <Step title="Select `SQL Database`">
    (screenshot omitted)
  </Step>
  <Step title="Provide the inputs for dynamic secret parameters">
    <ParamField path="Secret Name" type="string" required>
      Name by which you want the secret to be referenced
    </ParamField>

    <ParamField path="Default TTL" type="string" required>
      Default time-to-live for a generated secret (it is possible to modify this value when a secret is generated)
    </ParamField>

    <ParamField path="Max TTL" type="string" required>
      Maximum time-to-live for a generated secret
    </ParamField>

    <ParamField path="Service" type="string" required>
      Choose the service you want to generate dynamic secrets for. This must be selected as **MS SQL**.
    </ParamField>

    <ParamField path="Host" type="string" required>
      Database host
    </ParamField>

    <ParamField path="Port" type="number" required>
      Database port
    </ParamField>

    <ParamField path="User" type="string" required>
      Username that will be used to create dynamic secrets
    </ParamField>

    <ParamField path="Password" type="string" required>
      Password that will be used to create dynamic secrets
    </ParamField>

    <ParamField path="Database Name" type="string" required>
      Name of the database for which you want to create dynamic secrets
    </ParamField>

    <ParamField path="CA(SSL)" type="string">
      A CA may be required if your DB requires it for incoming connections. AWS RDS instances with default settings will require a CA, which can be downloaded [here](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/UsingWithRDS.SSL.html#UsingWithRDS.SSL.CertificatesAllRegions).
    </ParamField>

    (screenshot omitted)
  </Step>
  <Step title="(Optional) Modify SQL Statements">
    If you want to provide specific privileges for the generated dynamic credentials, you can modify the SQL statement to your needs. This is useful if you want to grant access to only a specific table or tables.

    (screenshot omitted)
  </Step>
  <Step title="Click 'Submit'">
    After submitting the form, you will see a dynamic secret created in the dashboard.

    <Note>
      If this step fails, you may have to add the CA certificate.
    </Note>

    (screenshot omitted)
  </Step>
  <Step title="Generate dynamic secrets">
    Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
    To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
    Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.

    (screenshots omitted)

    When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.

    (screenshot omitted)

    <Tip>
      Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret.
    </Tip>

    Once you click the `Submit` button, a new secret lease will be generated and its credentials will be shown to you.

    (screenshot omitted)
  </Step>
</Steps>

## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you to see the expiration time of each lease and to delete a lease before its time to live expires.

(screenshot omitted)

## Renew Leases
To extend the life of a generated dynamic secret lease past its initial time to live, simply click on **Renew** as illustrated below.
(screenshot omitted)

<Warning>
  Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret
</Warning>