Mirror of https://github.com/Infisical/infisical.git, synced 2025-07-02 16:55:02 +00:00

Compare commits (190 commits)
doc/update ... infisical/
Author | SHA1 | Date | |
---|---|---|---|
c3f8c55672 | |||
75aeef3897 | |||
c97fe77aec | |||
6bf4b4a380 | |||
9dedaa6779 | |||
8eab7d2f01 | |||
4e796e7e41 | |||
c6fa647825 | |||
496cebb08f | |||
33db6df7f2 | |||
88d25e97e9 | |||
4ad9fa1ad1 | |||
1642fb42d8 | |||
3983c2bc4a | |||
34d87ca30f | |||
12b6f27151 | |||
ea426e8b2d | |||
4d567f0b08 | |||
6548372e3b | |||
77af640c4c | |||
90f85152bc | |||
cfa8770bdc | |||
be8562824d | |||
7503876ca0 | |||
36b5a3dc90 | |||
dfe36f346f | |||
b1b61842c6 | |||
f9ca9b51b2 | |||
7e7e6ade5c | |||
4010817916 | |||
eea367c3bc | |||
860ebb73a9 | |||
56567ee7c9 | |||
1cd17a451c | |||
6b7bc2a3c4 | |||
cb52568ebd | |||
9d30fb3870 | |||
161ac5e097 | |||
bb5b585cf6 | |||
fa94191c40 | |||
6a5eabc411 | |||
c956a0f91f | |||
df7b55606e | |||
5f14b27f41 | |||
02b2395276 | |||
402fa2b0e0 | |||
3725241f52 | |||
10b457a695 | |||
3912e2082d | |||
7dd6eac20a | |||
5664e1ff26 | |||
a27a428329 | |||
b196251c19 | |||
b18d8d542f | |||
3c287600ab | |||
759d11ff21 | |||
2bd817765c | |||
7aa9c5dd00 | |||
b693c035ce | |||
c65a991943 | |||
3a3811cb3c | |||
332ca61f5d | |||
64f43e59d0 | |||
ccaf4c00af | |||
e3ba1c59bf | |||
ce0bc191d8 | |||
489ccb8e15 | |||
ae8f695b6f | |||
19357d4bd7 | |||
776d0a0fe1 | |||
85dec28667 | |||
21ea7dd317 | |||
57e214ef50 | |||
1986fe9617 | |||
1309f30af9 | |||
89a4fc91ca | |||
af0ec2400d | |||
770e73e40b | |||
39fdeabdea | |||
25c26f2cde | |||
1ca8b9ba08 | |||
14d9fe01e0 | |||
216810f289 | |||
f530b78eb8 | |||
c3809ed22b | |||
9f85d8bba1 | |||
1056645ee3 | |||
5e9914b738 | |||
1ea52e6a80 | |||
20da697de8 | |||
16abf48081 | |||
e73ae485bc | |||
621f73e223 | |||
93e69bd34e | |||
e382135384 | |||
f2a554b5fd | |||
df5bdf3773 | |||
8401048daf | |||
335a87d856 | |||
1add9dd965 | |||
df46daf93d | |||
f82f7ae8d0 | |||
8536a1c987 | |||
b3cf43b46d | |||
9d4dbb63ae | |||
9c6f23fba6 | |||
babe483ca9 | |||
38ede687cd | |||
5f465c4832 | |||
a0618086b0 | |||
9a9bb4ca43 | |||
b68ddfae1b | |||
7646670378 | |||
d18be0f74c | |||
ec96db3503 | |||
7245aaa9ec | |||
d32f69e052 | |||
726477e3d7 | |||
a4ca996a1b | |||
303312fe91 | |||
f3f2879d6d | |||
d0f3d96b3e | |||
70d2a21fbc | |||
418ae42d94 | |||
273c6b3842 | |||
6be8d5d2a7 | |||
9eb7640755 | |||
741138c4bd | |||
bed620aad0 | |||
2ddf75d2e6 | |||
02d9dbb987 | |||
0ed333c2b2 | |||
55db45cd36 | |||
2d82273158 | |||
b3e61f579d | |||
d0bcbe15c6 | |||
657130eb80 | |||
3841394eb7 | |||
b1ba770a71 | |||
3552119c7d | |||
7a46725523 | |||
0515c994c7 | |||
e0d0e22e39 | |||
2f79ae42ab | |||
3bc39c6cec | |||
b5b1e57fe7 | |||
1a5f66fe46 | |||
a01f235808 | |||
b9a1629db0 | |||
203422c131 | |||
35826c288e | |||
fae4e1fa55 | |||
8094ef607a | |||
104bff0586 | |||
0fb5fa0c8b | |||
f407022e16 | |||
34d6525418 | |||
911479baff | |||
05bdbbf59d | |||
c8e47771d4 | |||
e0cbcb0318 | |||
f8d65f44e3 | |||
58ce623a2c | |||
7ae28596ec | |||
833398ef39 | |||
4e6ebcc8d9 | |||
ce8689f568 | |||
e9ab19b7f9 | |||
f2b852a09e | |||
a1c2bc695c | |||
00573ebfda | |||
3b2b8ca013 | |||
2afc6b133e | |||
b6a1ab2376 | |||
d03f890471 | |||
5ef81cd935 | |||
3e8f1d8de7 | |||
558a809b4c | |||
a749e70815 | |||
6f44f3ae21 | |||
b062ca3075 | |||
a1397f0a66 | |||
91c11d61f1 | |||
93218d5a3f | |||
d6ffd4fa5f | |||
1c32dd5d8a | |||
c183ef2b4f | |||
b6955d0e9b | |||
f4ba441ec3 | |||
0f314c45b4 |
@ -78,3 +78,5 @@ PLAIN_API_KEY=
PLAIN_WISH_LABEL_IDS=

SSL_CLIENT_CERTIFICATE_HEADER_KEY=

ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=
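Note: the variables above are plain key=value entries added to the example env file. A minimal sketch of how such flags might be surfaced in a typed config via a zod-based env loader; the loader shape, optionality, and the boolean transform are assumptions for illustration, not the project's actual config module.

import { z } from "zod";

// Hypothetical loader: only the variable names come from the diff above;
// types and defaults are illustrative.
const envSchema = z.object({
  SSL_CLIENT_CERTIFICATE_HEADER_KEY: z.string().optional(),
  ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT: z
    .string()
    .optional()
    .transform((v) => v === "true")
});

export const env = envSchema.parse(process.env);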
@ -7,12 +7,12 @@ permissions:

jobs:
  infisical-tests:
    name: Run tests before deployment
    name: Integration tests
    # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
    uses: ./.github/workflows/run-backend-tests.yml

  infisical-image:
    name: Build backend image
    name: Build
    runs-on: ubuntu-latest
    needs: [infisical-tests]
    steps:
@ -104,8 +104,8 @@ jobs:
          cluster: infisical-gamma-stage
          wait-for-service-stability: true

  production-postgres-deployment:
    name: Deploy to production
  production-us:
    name: US production deploy
    runs-on: ubuntu-latest
    needs: [gamma-deployment]
    environment:
@ -159,3 +159,54 @@ jobs:
          service: infisical-core-platform
          cluster: infisical-core-platform
          wait-for-service-stability: true

  production-eu:
    name: EU production deploy
    runs-on: ubuntu-latest
    needs: [production-us]
    environment:
      name: production-eu
    steps:
      - uses: twingate/github-action@v1
        with:
          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          audience: sts.amazonaws.com
          aws-region: eu-central-1
          role-to-assume: arn:aws:iam::345594589636:role/gha-make-prod-deployment
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Setup Node.js environment
        uses: actions/setup-node@v2
        with:
          node-version: "20"
      - name: Change directory to backend and install dependencies
        env:
          DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
        run: |
          cd backend
          npm install
          npm run migration:latest
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: Download task definition
        run: |
          aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
      - name: Render Amazon ECS task definition
        id: render-web-container
        uses: aws-actions/amazon-ecs-render-task-definition@v1
        with:
          task-definition: task-definition.json
          container-name: infisical-core-platform
          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
          environment-variables: "LOG_LEVEL=info"
      - name: Deploy to Amazon ECS service
        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
        with:
          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
          service: infisical-core-platform
          cluster: infisical-core-platform
          wait-for-service-stability: true
@ -34,7 +34,7 @@ describe("Identity v1", async () => {
|
||||
test("Create identity", async () => {
|
||||
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
|
||||
expect(newIdentity.name).toBe("mac1");
|
||||
expect(newIdentity.authMethod).toBeNull();
|
||||
expect(newIdentity.authMethods).toEqual([]);
|
||||
|
||||
await deleteIdentity(newIdentity.id);
|
||||
});
|
||||
@ -42,7 +42,7 @@ describe("Identity v1", async () => {
|
||||
test("Update identity", async () => {
|
||||
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
|
||||
expect(newIdentity.name).toBe("mac1");
|
||||
expect(newIdentity.authMethod).toBeNull();
|
||||
expect(newIdentity.authMethods).toEqual([]);
|
||||
|
||||
const updatedIdentity = await testServer.inject({
|
||||
method: "PATCH",
|
||||
|
@ -118,9 +118,9 @@ describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])(
|
||||
value: "stage-value"
|
||||
});
|
||||
|
||||
// wait for 5 second for replication to finish
|
||||
// wait for 10 second for replication to finish
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 5000); // time to breathe for db
|
||||
setTimeout(resolve, 10000); // time to breathe for db
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
@ -173,9 +173,9 @@ describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])(
|
||||
value: "prod-value"
|
||||
});
|
||||
|
||||
// wait for 5 second for replication to finish
|
||||
// wait for 10 second for replication to finish
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 5000); // time to breathe for db
|
||||
setTimeout(resolve, 10000); // time to breathe for db
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
@ -343,9 +343,9 @@ describe.each([{ path: "/" }, { path: "/deep" }])(
|
||||
value: "prod-value"
|
||||
});
|
||||
|
||||
// wait for 5 second for replication to finish
|
||||
// wait for 10 second for replication to finish
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 5000); // time to breathe for db
|
||||
setTimeout(resolve, 10000); // time to breathe for db
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
|
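Note: the three hunks above double a fixed sleep from 5 s to 10 s so secret replication can finish before the assertion. A fixed timeout is the simplest fix; a hedged alternative is to poll until the replicated secret becomes visible. The polling wrapper below is a sketch of ours, not code from the commit; fetchSecret stands in for a call like the getSecretByNameV2 helper already used in these tests.

// Sketch: poll for a condition instead of sleeping a fixed 10 seconds.
async function waitForReplication<T>(
  fetchSecret: () => Promise<T | undefined>,
  { timeoutMs = 10_000, intervalMs = 500 } = {}
): Promise<T> {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    const secret = await fetchSecret();
    if (secret) return secret;
    await new Promise((resolve) => {
      setTimeout(resolve, intervalMs);
    });
  }
  throw new Error("Replication did not finish within the timeout");
}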
@ -56,7 +56,10 @@ describe("Secret expansion", () => {
|
||||
}
|
||||
];
|
||||
|
||||
await Promise.all(secrets.map((el) => createSecretV2(el)));
|
||||
for (const secret of secrets) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await createSecretV2(secret);
|
||||
}
|
||||
|
||||
const expandedSecret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
@ -123,7 +126,10 @@ describe("Secret expansion", () => {
|
||||
}
|
||||
];
|
||||
|
||||
await Promise.all(secrets.map((el) => createSecretV2(el)));
|
||||
for (const secret of secrets) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await createSecretV2(secret);
|
||||
}
|
||||
|
||||
const expandedSecret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
@ -190,7 +196,11 @@ describe("Secret expansion", () => {
|
||||
}
|
||||
];
|
||||
|
||||
await Promise.all(secrets.map((el) => createSecretV2(el)));
|
||||
for (const secret of secrets) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await createSecretV2(secret);
|
||||
}
|
||||
|
||||
const secretImportFromProdToDev = await createSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
@ -275,7 +285,11 @@ describe("Secret expansion", () => {
|
||||
}
|
||||
];
|
||||
|
||||
await Promise.all(secrets.map((el) => createSecretV2(el)));
|
||||
for (const secret of secrets) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await createSecretV2(secret);
|
||||
}
|
||||
|
||||
const secretImportFromProdToDev = await createSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
|
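Note: each hunk above replaces a parallel Promise.all with a sequential for...of loop around createSecretV2. The commit does not state the motivation; a common reason for this kind of change (hedged interpretation) is to avoid concurrent writes racing on the same folder or path. A generic sketch of the same pattern as a reusable helper, ours rather than code from the repository:

// Runs tasks one at a time, preserving order and avoiding concurrent writes.
async function runSequentially<T, R>(items: T[], task: (item: T) => Promise<R>): Promise<R[]> {
  const results: R[] = [];
  for (const item of items) {
    // eslint-disable-next-line no-await-in-loop
    results.push(await task(item));
  }
  return results;
}

// Usage, mirroring the tests: await runSequentially(secrets, (el) => createSecretV2(el));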
backend/package-lock.json (generated, 3323 lines changed): file diff suppressed because it is too large.
@ -44,7 +44,7 @@
    "test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
    "test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
    "generate:component": "tsx ./scripts/create-backend-file.ts",
    "generate:schema": "tsx ./scripts/generate-schema-types.ts",
    "generate:schema": "tsx ./scripts/generate-schema-types.ts && eslint --fix --ext ts ./src/db/schemas",
    "auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest",
    "auditlog-migration:up": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:up",
    "auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
@ -156,11 +156,12 @@
    "connect-redis": "^7.1.1",
    "cron": "^3.1.7",
    "dotenv": "^16.4.1",
    "fastify": "^4.26.0",
    "fastify": "^4.28.1",
    "fastify-plugin": "^4.5.1",
    "google-auth-library": "^9.9.0",
    "googleapis": "^137.1.0",
    "handlebars": "^4.7.8",
    "hdb": "^0.19.10",
    "ioredis": "^5.3.2",
    "jmespath": "^0.16.0",
    "jsonwebtoken": "^9.0.2",
@ -195,6 +196,7 @@
    "scim2-parse-filter": "^0.2.10",
    "sjcl": "^1.0.8",
    "smee-client": "^2.0.0",
    "snowflake-sdk": "^1.14.0",
    "tedious": "^18.2.1",
    "tweetnacl": "^1.0.3",
    "tweetnacl-util": "^0.15.1",
backend/src/@types/fastify.d.ts (vendored, 2 lines changed)
@ -13,6 +13,7 @@ import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secr
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { TIdentityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
@ -177,6 +178,7 @@ declare module "fastify" {
    dynamicSecretLease: TDynamicSecretLeaseServiceFactory;
    projectUserAdditionalPrivilege: TProjectUserAdditionalPrivilegeServiceFactory;
    identityProjectAdditionalPrivilege: TIdentityProjectAdditionalPrivilegeServiceFactory;
    identityProjectAdditionalPrivilegeV2: TIdentityProjectAdditionalPrivilegeV2ServiceFactory;
    secretSharing: TSecretSharingServiceFactory;
    rateLimit: TRateLimitServiceFactory;
    userEngagement: TUserEngagementServiceFactory;
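Note: the hunk above exposes the new V2 privilege service on Fastify's decorated instance through declaration merging. A minimal standalone sketch of the same technique; the services decoration and its single field here are illustrative, not the project's full interface.

import "fastify";

// Declaration merging: extend Fastify's own interface with a property that
// the application decorates onto the instance at runtime.
declare module "fastify" {
  interface FastifyInstance {
    services: {
      identityProjectAdditionalPrivilegeV2: {
        listIdentityProjectPrivileges: (arg: unknown) => Promise<unknown>;
      };
    };
  }
}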
backend/src/@types/hdb.d.ts (vendored, new file, 4 lines)
@ -0,0 +1,4 @@
declare module "hdb" {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, the function returns `any`.
  function createClient(options): any;
}
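Note: the shim above leaves both the options parameter and the return value untyped. A possible tighter version of the same ambient declaration is sketched below; the option field names are illustrative assumptions, not taken from the hdb package's documentation.

declare module "hdb" {
  interface HdbClientOptions {
    host?: string; // assumption: common connection fields
    port?: number;
    user?: string;
    password?: string;
    [key: string]: unknown;
  }
  // Still returns `any` because the client object itself is untyped upstream.
  function createClient(options: HdbClientOptions): any;
}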
@ -9,7 +9,7 @@ export async function up(knex: Knex): Promise<void> {
    t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
    t.string("integration").notNullable();
    t.string("teamId"); // vercel-specific
    t.string("url"); // for self hosted
    t.string("url"); // for self-hosted
    t.string("namespace"); // hashicorp specific
    t.string("accountId"); // netlify
    t.text("refreshCiphertext");
@ -36,7 +36,7 @@ export async function up(knex: Knex): Promise<void> {
  await knex.schema.createTable(TableName.Integration, (t) => {
    t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
    t.boolean("isActive").notNullable();
    t.string("url"); // self hosted
    t.string("url"); // self-hosted
    t.string("app"); // name of app in provider
    t.string("appId");
    t.string("targetEnvironment");

@ -4,27 +4,40 @@ import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    const hasEncryptedSecret = await knex.schema.hasColumn(TableName.SecretSharing, "encryptedSecret");
    const hasIdentifier = await knex.schema.hasColumn(TableName.SecretSharing, "identifier");

    await knex.schema.alterTable(TableName.SecretSharing, (t) => {
      t.string("iv").nullable().alter();
      t.string("tag").nullable().alter();
      t.string("encryptedValue").nullable().alter();

      t.binary("encryptedSecret").nullable();
      if (!hasEncryptedSecret) {
        t.binary("encryptedSecret").nullable();
      }
      t.string("hashedHex").nullable().alter();

      t.string("identifier", 64).nullable();
      t.unique("identifier");
      t.index("identifier");
      if (!hasIdentifier) {
        t.string("identifier", 64).nullable();
        t.unique("identifier");
        t.index("identifier");
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptedSecret = await knex.schema.hasColumn(TableName.SecretSharing, "encryptedSecret");
  const hasIdentifier = await knex.schema.hasColumn(TableName.SecretSharing, "identifier");
  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    await knex.schema.alterTable(TableName.SecretSharing, (t) => {
      t.dropColumn("encryptedSecret");
      if (hasEncryptedSecret) {
        t.dropColumn("encryptedSecret");
      }

      t.dropColumn("identifier");
      if (hasIdentifier) {
        t.dropColumn("identifier");
      }
    });
  }
}
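Note: the secret-sharing migration above was reworked to be re-runnable; every column add or drop is now guarded by a hasColumn check. A small sketch of that guard factored into a helper, ours rather than code from the repository:

import { Knex } from "knex";

// Adds a column only when it is not already present, mirroring the
// hasColumn guards used in the migration above.
export async function addColumnIfMissing(
  knex: Knex,
  table: string,
  column: string,
  define: (t: Knex.AlterTableBuilder) => void
): Promise<void> {
  const exists = await knex.schema.hasColumn(table, column);
  if (!exists) {
    await knex.schema.alterTable(table, define);
  }
}

// Usage sketch:
// await addColumnIfMissing(knex, TableName.SecretSharing, "encryptedSecret", (t) => {
//   t.binary("encryptedSecret").nullable();
// });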
@ -7,15 +7,18 @@ export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.KmsKey)) {
    const hasOrgId = await knex.schema.hasColumn(TableName.KmsKey, "orgId");
    const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug");
    const hasProjectId = await knex.schema.hasColumn(TableName.KmsKey, "projectId");

    // drop constraint if exists (won't exist if rolled back, see below)
    await dropConstraintIfExists(TableName.KmsKey, "kms_keys_orgid_slug_unique", knex);

    // projectId for CMEK functionality
    await knex.schema.alterTable(TableName.KmsKey, (table) => {
      table.string("projectId").nullable().references("id").inTable(TableName.Project).onDelete("CASCADE");
      if (!hasProjectId) {
        table.string("projectId").nullable().references("id").inTable(TableName.Project).onDelete("CASCADE");
      }

      if (hasOrgId) {
      if (hasOrgId && hasSlug) {
        table.unique(["orgId", "projectId", "slug"]);
      }

@ -30,6 +33,7 @@ export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.KmsKey)) {
    const hasOrgId = await knex.schema.hasColumn(TableName.KmsKey, "orgId");
    const hasName = await knex.schema.hasColumn(TableName.KmsKey, "name");
    const hasProjectId = await knex.schema.hasColumn(TableName.KmsKey, "projectId");

    // remove projectId for CMEK functionality
    await knex.schema.alterTable(TableName.KmsKey, (table) => {
@ -40,7 +44,9 @@ export async function down(knex: Knex): Promise<void> {
      if (hasOrgId) {
        table.dropUnique(["orgId", "projectId", "slug"]);
      }
      table.dropColumn("projectId");
      if (hasProjectId) {
        table.dropColumn("projectId");
      }
    });
  }
}
@ -0,0 +1,101 @@
/* eslint-disable no-await-in-loop */
import { packRules, unpackRules } from "@casl/ability/extra";
import { Knex } from "knex";

import {
  backfillPermissionV1SchemaToV2Schema,
  ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";

import { TableName } from "../schemas";

const CHUNK_SIZE = 1000;
export async function up(knex: Knex): Promise<void> {
  const hasVersion = await knex.schema.hasColumn(TableName.ProjectRoles, "version");
  if (!hasVersion) {
    await knex.schema.alterTable(TableName.ProjectRoles, (t) => {
      t.integer("version").defaultTo(1).notNullable();
    });

    const docs = await knex(TableName.ProjectRoles).select("*");
    const updatedDocs = docs
      .filter((i) => {
        const permissionString = JSON.stringify(i.permissions || []);
        return (
          !permissionString.includes(ProjectPermissionSub.SecretImports) &&
          !permissionString.includes(ProjectPermissionSub.DynamicSecrets)
        );
      })
      .map((el) => ({
        ...el,
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore-error this is valid ts
        permissions: JSON.stringify(packRules(backfillPermissionV1SchemaToV2Schema(unpackRules(el.permissions), true)))
      }));
    if (updatedDocs.length) {
      for (let i = 0; i < updatedDocs.length; i += CHUNK_SIZE) {
        const chunk = updatedDocs.slice(i, i + CHUNK_SIZE);
        await knex(TableName.ProjectRoles).insert(chunk).onConflict("id").merge();
      }
    }

    // secret permission is split into multiple ones like secrets, folders, imports and dynamic-secrets
    // so we just find all the privileges with respective mapping and map it as needed
    const identityPrivileges = await knex(TableName.IdentityProjectAdditionalPrivilege).select("*");
    const updatedIdentityPrivilegesDocs = identityPrivileges
      .filter((i) => {
        const permissionString = JSON.stringify(i.permissions || []);
        return (
          !permissionString.includes(ProjectPermissionSub.SecretImports) &&
          !permissionString.includes(ProjectPermissionSub.DynamicSecrets) &&
          !permissionString.includes(ProjectPermissionSub.SecretFolders)
        );
      })
      .map((el) => ({
        ...el,
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore-error this is valid ts
        permissions: JSON.stringify(packRules(backfillPermissionV1SchemaToV2Schema(unpackRules(el.permissions))))
      }));
    if (updatedIdentityPrivilegesDocs.length) {
      for (let i = 0; i < updatedIdentityPrivilegesDocs.length; i += CHUNK_SIZE) {
        const chunk = updatedIdentityPrivilegesDocs.slice(i, i + CHUNK_SIZE);
        await knex(TableName.IdentityProjectAdditionalPrivilege).insert(chunk).onConflict("id").merge();
      }
    }

    const userPrivileges = await knex(TableName.ProjectUserAdditionalPrivilege).select("*");
    const updatedUserPrivilegeDocs = userPrivileges
      .filter((i) => {
        const permissionString = JSON.stringify(i.permissions || []);
        return (
          !permissionString.includes(ProjectPermissionSub.SecretImports) &&
          !permissionString.includes(ProjectPermissionSub.DynamicSecrets) &&
          !permissionString.includes(ProjectPermissionSub.SecretFolders)
        );
      })
      .map((el) => ({
        ...el,
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore-error this is valid ts
        permissions: JSON.stringify(packRules(backfillPermissionV1SchemaToV2Schema(unpackRules(el.permissions))))
      }));
    if (docs.length) {
      for (let i = 0; i < updatedUserPrivilegeDocs.length; i += CHUNK_SIZE) {
        const chunk = updatedUserPrivilegeDocs.slice(i, i + CHUNK_SIZE);
        await knex(TableName.ProjectUserAdditionalPrivilege).insert(chunk).onConflict("id").merge();
      }
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasVersion = await knex.schema.hasColumn(TableName.ProjectRoles, "version");
  if (hasVersion) {
    await knex.schema.alterTable(TableName.ProjectRoles, (t) => {
      t.dropColumn("version");
    });

    // permission change can be ignored
  }
}
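Note: the backfill above rewrites stored permissions in chunks of 1000 and upserts each chunk with insert(...).onConflict("id").merge(). That chunked-upsert loop appears three times in the migration; a sketch of it as a single helper, our refactor rather than code from the commit:

import { Knex } from "knex";

const CHUNK_SIZE = 1000;

// Upsert an arbitrary list of rows in fixed-size chunks, merging on "id".
async function upsertInChunks<T extends { id: string }>(knex: Knex, table: string, rows: T[]): Promise<void> {
  for (let i = 0; i < rows.length; i += CHUNK_SIZE) {
    const chunk = rows.slice(i, i + CHUNK_SIZE);
    // eslint-disable-next-line no-await-in-loop
    await knex(table).insert(chunk).onConflict("id").merge();
  }
}

// Usage sketch: await upsertInChunks(knex, TableName.ProjectRoles, updatedDocs);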
@ -0,0 +1,76 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

const BATCH_SIZE = 30_000;

export async function up(knex: Knex): Promise<void> {
  const hasAuthMethodColumnAccessToken = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");

  if (!hasAuthMethodColumnAccessToken) {
    await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
      t.string("authMethod").nullable();
    });

    let nullableAccessTokens = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE);
    let totalUpdated = 0;

    do {
      const batchIds = nullableAccessTokens.map((token) => token.id);

      // ! Update the auth method column in batches for the current batch
      // eslint-disable-next-line no-await-in-loop
      await knex(TableName.IdentityAccessToken)
        .whereIn("id", batchIds)
        .update({
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore because generate schema happens after this
          authMethod: knex(TableName.Identity)
            .select("authMethod")
            .whereRaw(`${TableName.IdentityAccessToken}."identityId" = ${TableName.Identity}.id`)
            .whereNotNull("authMethod")
            .first()
        });

      // eslint-disable-next-line no-await-in-loop
      nullableAccessTokens = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE);

      totalUpdated += batchIds.length;
      console.log(`Updated ${batchIds.length} access tokens in batch <> Total updated: ${totalUpdated}`);
    } while (nullableAccessTokens.length > 0);

    // ! We delete all access tokens where the identity has no auth method set!
    // ! Which means un-configured identities that for some reason have access tokens, will have their access tokens deleted.
    await knex(TableName.IdentityAccessToken)
      .whereNotExists((queryBuilder) => {
        void queryBuilder
          .select("id")
          .from(TableName.Identity)
          .whereRaw(`${TableName.IdentityAccessToken}."identityId" = ${TableName.Identity}.id`)
          .whereNotNull("authMethod");
      })
      .delete();

    // Finally we set the authMethod to notNullable after populating the column.
    // This will fail if the data is not populated correctly, so it's safe.
    await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
      t.string("authMethod").notNullable().alter();
    });
  }

  // ! We aren't dropping the authMethod column from the Identity itself, because we wan't to be able to easily rollback for the time being.
}

// eslint-disable-next-line @typescript-eslint/no-unused-vars
export async function down(knex: Knex): Promise<void> {
  const hasAuthMethodColumnAccessToken = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");

  if (hasAuthMethodColumnAccessToken) {
    await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
      t.dropColumn("authMethod");
    });
  }
}

const config = { transaction: false };
export { config };
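Note: two details of this migration are worth calling out. The correlated-subquery update backfills authMethod from the owning identity in batches of 30,000, and the exported config = { transaction: false } asks knex to run this migration outside its usual wrapping transaction, so the large batched updates are not held in one long transaction. A generic sketch of the batching loop; the table and column names are parameters here and only the overall pattern comes from the migration:

import { Knex } from "knex";

// Keep updating rows that still have a NULL column until none are left.
// `updateBatch` must clear the NULLs it is given, or the loop will not terminate.
export async function backfillInBatches(
  knex: Knex,
  table: string,
  column: string,
  batchSize: number,
  updateBatch: (ids: string[]) => Promise<void>
): Promise<number> {
  let total = 0;
  // eslint-disable-next-line no-constant-condition
  while (true) {
    const rows = await knex(table).whereNull(column).limit(batchSize).select("id");
    if (rows.length === 0) break;
    await updateBatch(rows.map((r) => r.id as string));
    total += rows.length;
  }
  return total;
}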
@ -20,7 +20,8 @@ export const IdentityAccessTokensSchema = z.object({
  identityId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  name: z.string().nullable().optional()
  name: z.string().nullable().optional(),
  authMethod: z.string()
});

export type TIdentityAccessTokens = z.infer<typeof IdentityAccessTokensSchema>;

@ -189,7 +189,7 @@ export enum ProjectUpgradeStatus {

export enum IdentityAuthMethod {
  TOKEN_AUTH = "token-auth",
  Univeral = "universal-auth",
  UNIVERSAL_AUTH = "universal-auth",
  KUBERNETES_AUTH = "kubernetes-auth",
  GCP_AUTH = "gcp-auth",
  AWS_AUTH = "aws-auth",

@ -16,7 +16,7 @@ export async function seed(knex: Knex): Promise<void> {
      // @ts-ignore
      id: seedData1.machineIdentity.id,
      name: seedData1.machineIdentity.name,
      authMethod: IdentityAuthMethod.Univeral
      authMethod: IdentityAuthMethod.UNIVERSAL_AUTH
    }
  ]);
  const identityUa = await knex(TableName.IdentityUniversalAuth)
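Note: the schema hunk adds a required authMethod string to IdentityAccessTokensSchema, and the enum hunk corrects the misspelled Univeral member to UNIVERSAL_AUTH. Because string-enum members are just named constants for their values, this kind of rename is value-compatible, and an old spelling can even be kept briefly as a deprecated alias while call sites migrate. A standalone illustration; the example enum below is ours, not the full project enum:

// Two keys may share one string value, so a misspelled member can be kept
// temporarily as a deprecated alias.
enum AuthMethodExample {
  /** @deprecated misspelled alias retained during the transition */
  Univeral = "universal-auth",
  UNIVERSAL_AUTH = "universal-auth",
  TOKEN_AUTH = "token-auth"
}

function isUniversalAuth(method: AuthMethodExample): boolean {
  return method === AuthMethodExample.UNIVERSAL_AUTH;
}

console.log(isUniversalAuth(AuthMethodExample.Univeral)); // true: same underlying value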
@ -1,9 +1,9 @@
|
||||
import { packRules } from "@casl/ability/extra";
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-types";
|
||||
import { backfillPermissionV1SchemaToV2Schema } from "@app/ee/services/permission/project-permission";
|
||||
import { IDENTITY_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
|
||||
import { UnauthorizedError } from "@app/lib/errors";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
@ -79,7 +79,9 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
|
||||
...req.body,
|
||||
slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)),
|
||||
isTemporary: false,
|
||||
permissions: JSON.stringify(packRules(permission))
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore-error this is valid ts
|
||||
permissions: backfillPermissionV1SchemaToV2Schema(permission)
|
||||
});
|
||||
return { privilege };
|
||||
}
|
||||
@ -159,7 +161,9 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
|
||||
...req.body,
|
||||
slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)),
|
||||
isTemporary: true,
|
||||
permissions: JSON.stringify(packRules(permission))
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore-error this is valid ts
|
||||
permissions: backfillPermissionV1SchemaToV2Schema(permission)
|
||||
});
|
||||
return { privilege };
|
||||
}
|
||||
@ -244,7 +248,13 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
|
||||
projectSlug: req.body.projectSlug,
|
||||
data: {
|
||||
...updatedInfo,
|
||||
permissions: permission ? JSON.stringify(packRules(permission)) : undefined
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore-error this is valid ts
|
||||
permissions: permission
|
||||
? // eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore-error this is valid ts
|
||||
backfillPermissionV1SchemaToV2Schema(permission)
|
||||
: undefined
|
||||
}
|
||||
});
|
||||
return { privilege };
|
||||
|
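Note: the router hunks above stop serializing permissions inline with JSON.stringify(packRules(permission)) and instead pass them through backfillPermissionV1SchemaToV2Schema. For readers unfamiliar with @casl/ability/extra: packRules compresses verbose CASL rule objects into compact tuples for storage, and unpackRules reverses it. A tiny illustration; the rule contents are made up, and types are loosened with a cast for brevity:

import { packRules, unpackRules } from "@casl/ability/extra";

const rules = [
  { action: "read", subject: "secrets" },
  { action: "edit", subject: "secrets", conditions: { environment: "dev" } }
];

// Compact, storage-friendly representation...
const packed = packRules(rules as Parameters<typeof packRules>[0]);
// ...and back to full rule objects.
const restored = unpackRules(packed);

console.log(packed.length === restored.length); // true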
@ -81,9 +81,9 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
  await server.register(registerSecretVersionRouter, { prefix: "/secret" });
  await server.register(registerGroupRouter, { prefix: "/groups" });
  await server.register(registerAuditLogStreamRouter, { prefix: "/audit-log-streams" });
  await server.register(registerUserAdditionalPrivilegeRouter, { prefix: "/user-project-additional-privilege" });
  await server.register(
    async (privilegeRouter) => {
      await privilegeRouter.register(registerUserAdditionalPrivilegeRouter, { prefix: "/users" });
      await privilegeRouter.register(registerIdentityProjectAdditionalPrivilegeRouter, { prefix: "/identity" });
    },
    { prefix: "/additional-privilege" }
@ -3,12 +3,16 @@ import slugify from "@sindresorhus/slugify";
|
||||
import { z } from "zod";
|
||||
|
||||
import { ProjectMembershipRole, ProjectMembershipsSchema, ProjectRolesSchema } from "@app/db/schemas";
|
||||
import { ProjectPermissionSchema } from "@app/ee/services/permission/project-permission";
|
||||
import {
|
||||
backfillPermissionV1SchemaToV2Schema,
|
||||
ProjectPermissionV1Schema
|
||||
} from "@app/ee/services/permission/project-permission";
|
||||
import { PROJECT_ROLE } from "@app/lib/api-docs";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { SanitizedRoleSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
import { SanitizedRoleSchemaV1 } from "@app/server/routes/sanitizedSchemas";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { ProjectRoleServiceIdentifierType } from "@app/services/project-role/project-role-types";
|
||||
|
||||
export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
@ -43,11 +47,11 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
.describe(PROJECT_ROLE.CREATE.slug),
|
||||
name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
|
||||
description: z.string().trim().optional().describe(PROJECT_ROLE.CREATE.description),
|
||||
permissions: ProjectPermissionSchema.array().describe(PROJECT_ROLE.CREATE.permissions)
|
||||
permissions: ProjectPermissionV1Schema.array().describe(PROJECT_ROLE.CREATE.permissions)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
role: SanitizedRoleSchema
|
||||
role: SanitizedRoleSchemaV1
|
||||
})
|
||||
}
|
||||
},
|
||||
@ -58,12 +62,16 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actor: req.permission.type,
|
||||
projectSlug: req.params.projectSlug,
|
||||
filter: {
|
||||
type: ProjectRoleServiceIdentifierType.SLUG,
|
||||
projectSlug: req.params.projectSlug
|
||||
},
|
||||
data: {
|
||||
...req.body,
|
||||
permissions: JSON.stringify(packRules(req.body.permissions))
|
||||
permissions: JSON.stringify(packRules(backfillPermissionV1SchemaToV2Schema(req.body.permissions, true)))
|
||||
}
|
||||
});
|
||||
|
||||
return { role };
|
||||
}
|
||||
});
|
||||
@ -103,11 +111,11 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
}),
|
||||
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
|
||||
description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description),
|
||||
permissions: ProjectPermissionSchema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
|
||||
permissions: ProjectPermissionV1Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
role: SanitizedRoleSchema
|
||||
role: SanitizedRoleSchemaV1
|
||||
})
|
||||
}
|
||||
},
|
||||
@ -118,11 +126,12 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actor: req.permission.type,
|
||||
projectSlug: req.params.projectSlug,
|
||||
roleId: req.params.roleId,
|
||||
data: {
|
||||
...req.body,
|
||||
permissions: req.body.permissions ? JSON.stringify(packRules(req.body.permissions)) : undefined
|
||||
permissions: req.body.permissions
|
||||
? JSON.stringify(packRules(backfillPermissionV1SchemaToV2Schema(req.body.permissions, true)))
|
||||
: undefined
|
||||
}
|
||||
});
|
||||
return { role };
|
||||
@ -148,7 +157,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
role: SanitizedRoleSchema
|
||||
role: SanitizedRoleSchemaV1
|
||||
})
|
||||
}
|
||||
},
|
||||
@ -159,7 +168,6 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actor: req.permission.type,
|
||||
projectSlug: req.params.projectSlug,
|
||||
roleId: req.params.roleId
|
||||
});
|
||||
return { role };
|
||||
@ -195,7 +203,10 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actor: req.permission.type,
|
||||
projectSlug: req.params.projectSlug
|
||||
filter: {
|
||||
type: ProjectRoleServiceIdentifierType.SLUG,
|
||||
projectSlug: req.params.projectSlug
|
||||
}
|
||||
});
|
||||
return { roles };
|
||||
}
|
||||
@ -214,7 +225,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
role: SanitizedRoleSchema
|
||||
role: SanitizedRoleSchemaV1
|
||||
})
|
||||
}
|
||||
},
|
||||
@ -225,9 +236,13 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actor: req.permission.type,
|
||||
projectSlug: req.params.projectSlug,
|
||||
filter: {
|
||||
type: ProjectRoleServiceIdentifierType.SLUG,
|
||||
projectSlug: req.params.projectSlug
|
||||
},
|
||||
roleSlug: req.params.slug
|
||||
});
|
||||
|
||||
return { role };
|
||||
}
|
||||
});
|
||||
|
@ -2,17 +2,18 @@ import slugify from "@sindresorhus/slugify";
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { ProjectUserAdditionalPrivilegeSchema } from "@app/db/schemas";
|
||||
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
|
||||
import { ProjectUserAdditionalPrivilegeTemporaryMode } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-types";
|
||||
import { PROJECT_USER_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { SanitizedUserProjectAdditionalPrivilegeSchema } from "@app/server/routes/santizedSchemas/user-additional-privilege";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
url: "/permanent",
|
||||
url: "/",
|
||||
method: "POST",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
@ -31,66 +32,30 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
|
||||
})
|
||||
.optional()
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
|
||||
permissions: z.any().array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
privilege: ProjectUserAdditionalPrivilegeSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const privilege = await server.services.projectUserAdditionalPrivilege.create({
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
...req.body,
|
||||
slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)),
|
||||
isTemporary: false,
|
||||
permissions: JSON.stringify(req.body.permissions)
|
||||
});
|
||||
return { privilege };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/temporary",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
body: z.object({
|
||||
projectMembershipId: z.string().min(1).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.projectMembershipId),
|
||||
slug: z
|
||||
.string()
|
||||
.min(1)
|
||||
.max(60)
|
||||
.trim()
|
||||
.refine((v) => v.toLowerCase() === v, "Slug must be lowercase")
|
||||
.refine((v) => slugify(v) === v, {
|
||||
message: "Slug must be a valid slug"
|
||||
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions),
|
||||
type: z.discriminatedUnion("isTemporary", [
|
||||
z.object({
|
||||
isTemporary: z.literal(false)
|
||||
}),
|
||||
z.object({
|
||||
isTemporary: z.literal(true),
|
||||
temporaryMode: z
|
||||
.nativeEnum(ProjectUserAdditionalPrivilegeTemporaryMode)
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.temporaryMode),
|
||||
temporaryRange: z
|
||||
.string()
|
||||
.refine((val) => ms(val) > 0, "Temporary range must be a positive number")
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.temporaryRange),
|
||||
temporaryAccessStartTime: z
|
||||
.string()
|
||||
.datetime()
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.temporaryAccessStartTime)
|
||||
})
|
||||
.optional()
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
|
||||
permissions: z.any().array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions),
|
||||
temporaryMode: z
|
||||
.nativeEnum(ProjectUserAdditionalPrivilegeTemporaryMode)
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.temporaryMode),
|
||||
temporaryRange: z
|
||||
.string()
|
||||
.refine((val) => ms(val) > 0, "Temporary range must be a positive number")
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.temporaryRange),
|
||||
temporaryAccessStartTime: z
|
||||
.string()
|
||||
.datetime()
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.temporaryAccessStartTime)
|
||||
])
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
privilege: ProjectUserAdditionalPrivilegeSchema
|
||||
privilege: SanitizedUserProjectAdditionalPrivilegeSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
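Note: the request body above switches from independent optional temporary* fields to a z.discriminatedUnion keyed on isTemporary, so the temporary fields are only required when isTemporary is true. A standalone sketch of how that behaves; the field names mirror the route schema, the values are made up:

import { z } from "zod";

// The literal value of `isTemporary` selects the branch, so the temporary*
// fields are only required on the `true` branch.
const TemporaryTypeSchema = z.discriminatedUnion("isTemporary", [
  z.object({ isTemporary: z.literal(false) }),
  z.object({
    isTemporary: z.literal(true),
    temporaryRange: z.string(),
    temporaryAccessStartTime: z.string().datetime()
  })
]);

TemporaryTypeSchema.parse({ isTemporary: false }); // ok
TemporaryTypeSchema.parse({
  isTemporary: true,
  temporaryRange: "1h",
  temporaryAccessStartTime: new Date().toISOString()
}); // ok
// TemporaryTypeSchema.parse({ isTemporary: true }); // would throw: missing temporary fields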
@ -101,10 +66,10 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
|
||||
actor: req.permission.type,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
...req.body,
|
||||
slug: req.body.slug ? slugify(req.body.slug) : `privilege-${slugify(alphaNumericNanoId(12))}`,
|
||||
isTemporary: true,
|
||||
permissions: JSON.stringify(req.body.permissions)
|
||||
projectMembershipId: req.body.projectMembershipId,
|
||||
...req.body.type,
|
||||
slug: req.body.slug || slugify(alphaNumericNanoId(8)),
|
||||
permissions: req.body.permissions
|
||||
});
|
||||
return { privilege };
|
||||
}
|
||||
@ -131,24 +96,31 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
|
||||
message: "Slug must be a valid slug"
|
||||
})
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.slug),
|
||||
permissions: z.any().array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
|
||||
isTemporary: z.boolean().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.isTemporary),
|
||||
temporaryMode: z
|
||||
.nativeEnum(ProjectUserAdditionalPrivilegeTemporaryMode)
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.temporaryMode),
|
||||
temporaryRange: z
|
||||
.string()
|
||||
.refine((val) => ms(val) > 0, "Temporary range must be a positive number")
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.temporaryRange),
|
||||
temporaryAccessStartTime: z
|
||||
.string()
|
||||
.datetime()
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.temporaryAccessStartTime)
|
||||
permissions: ProjectPermissionV2Schema.array()
|
||||
.optional()
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
|
||||
type: z.discriminatedUnion("isTemporary", [
|
||||
z.object({ isTemporary: z.literal(false).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.isTemporary) }),
|
||||
z.object({
|
||||
isTemporary: z.literal(true).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.isTemporary),
|
||||
temporaryMode: z
|
||||
.nativeEnum(ProjectUserAdditionalPrivilegeTemporaryMode)
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.temporaryMode),
|
||||
temporaryRange: z
|
||||
.string()
|
||||
.refine((val) => typeof val === "undefined" || ms(val) > 0, "Temporary range must be a positive number")
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.temporaryRange),
|
||||
temporaryAccessStartTime: z
|
||||
.string()
|
||||
.datetime()
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.temporaryAccessStartTime)
|
||||
})
|
||||
])
|
||||
})
|
||||
.partial(),
|
||||
response: {
|
||||
200: z.object({
|
||||
privilege: ProjectUserAdditionalPrivilegeSchema
|
||||
privilege: SanitizedUserProjectAdditionalPrivilegeSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
@ -160,7 +132,12 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
|
||||
actorOrgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
...req.body,
|
||||
permissions: req.body.permissions ? JSON.stringify(req.body.permissions) : undefined,
|
||||
...req.body.type,
|
||||
permissions: req.body.permissions
|
||||
? // eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore-error this is valid ts
|
||||
req.body.permissions
|
||||
: undefined,
|
||||
privilegeId: req.params.privilegeId
|
||||
});
|
||||
return { privilege };
|
||||
@ -179,7 +156,7 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
privilege: ProjectUserAdditionalPrivilegeSchema
|
||||
privilege: SanitizedUserProjectAdditionalPrivilegeSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
@ -208,7 +185,7 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
privileges: ProjectUserAdditionalPrivilegeSchema.array()
|
||||
privileges: SanitizedUserProjectAdditionalPrivilegeSchema.omit({ permissions: true }).array()
|
||||
})
|
||||
}
|
||||
},
|
||||
@ -233,11 +210,11 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
privilegeId: z.string().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.GET_BY_PRIVILEGEID.privilegeId)
|
||||
privilegeId: z.string().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.GET_BY_PRIVILEGE_ID.privilegeId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
privilege: ProjectUserAdditionalPrivilegeSchema
|
||||
privilege: SanitizedUserProjectAdditionalPrivilegeSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
|
@ -0,0 +1,305 @@
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-types";
|
||||
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
|
||||
import { IDENTITY_ADDITIONAL_PRIVILEGE_V2 } from "@app/lib/api-docs";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { SanitizedIdentityPrivilegeSchema } from "@app/server/routes/santizedSchemas/identitiy-additional-privilege";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Add an additional privilege for identity.",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
body: z.object({
|
||||
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.identityId),
|
||||
projectId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.projectId),
|
||||
slug: z
|
||||
.string()
|
||||
.min(1)
|
||||
.max(60)
|
||||
.trim()
|
||||
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
|
||||
.refine((v) => slugify(v) === v, {
|
||||
message: "Slug must be a valid slug"
|
||||
})
|
||||
.optional()
|
||||
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.slug),
|
||||
permissions: ProjectPermissionV2Schema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.permission),
|
||||
type: z.discriminatedUnion("isTemporary", [
|
||||
z.object({
|
||||
isTemporary: z.literal(false)
|
||||
}),
|
||||
z.object({
|
||||
isTemporary: z.literal(true),
|
||||
temporaryMode: z
|
||||
.nativeEnum(IdentityProjectAdditionalPrivilegeTemporaryMode)
|
||||
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.temporaryMode),
|
||||
temporaryRange: z
|
||||
.string()
|
||||
.refine((val) => ms(val) > 0, "Temporary range must be a positive number")
|
||||
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.temporaryRange),
|
||||
temporaryAccessStartTime: z
|
||||
.string()
|
||||
.datetime()
|
||||
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.temporaryAccessStartTime)
|
||||
})
|
||||
])
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
privilege: SanitizedIdentityPrivilegeSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const privilege = await server.services.identityProjectAdditionalPrivilegeV2.create({
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actor: req.permission.type,
|
||||
projectId: req.body.projectId,
|
||||
identityId: req.body.identityId,
|
||||
...req.body.type,
|
||||
slug: req.body.slug || slugify(alphaNumericNanoId(8)),
|
||||
permissions: req.body.permissions
|
||||
});
|
||||
return { privilege };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "PATCH",
|
||||
url: "/:id",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Update a specific identity privilege.",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
id: z.string().trim().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.id)
|
||||
}),
|
||||
body: z.object({
|
||||
slug: z
|
||||
.string()
|
||||
.min(1)
|
||||
.max(60)
|
||||
.trim()
|
||||
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
|
||||
.refine((v) => slugify(v) === v, {
|
||||
message: "Slug must be a valid slug"
|
||||
})
|
||||
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.slug),
|
||||
permissions: ProjectPermissionV2Schema.array()
|
||||
.optional()
|
||||
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.privilegePermission),
|
||||
type: z.discriminatedUnion("isTemporary", [
|
||||
z.object({ isTemporary: z.literal(false).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.isTemporary) }),
|
||||
z.object({
|
||||
isTemporary: z.literal(true).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.isTemporary),
|
||||
temporaryMode: z
|
||||
.nativeEnum(IdentityProjectAdditionalPrivilegeTemporaryMode)
|
||||
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.temporaryMode),
|
||||
temporaryRange: z
|
||||
.string()
|
||||
.refine((val) => typeof val === "undefined" || ms(val) > 0, "Temporary range must be a positive number")
|
||||
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.temporaryRange),
|
||||
temporaryAccessStartTime: z
|
||||
.string()
|
||||
.datetime()
|
||||
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.temporaryAccessStartTime)
|
||||
})
|
||||
])
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
privilege: SanitizedIdentityPrivilegeSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const privilege = await server.services.identityProjectAdditionalPrivilegeV2.updateById({
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
id: req.params.id,
|
||||
data: {
|
||||
...req.body,
|
||||
...req.body.type,
|
||||
permissions: req.body.permissions || undefined
|
||||
}
|
||||
});
|
||||
return { privilege };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/:id",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Delete the specified identity privilege.",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
id: z.string().trim().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.DELETE.id)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
privilege: SanitizedIdentityPrivilegeSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const privilege = await server.services.identityProjectAdditionalPrivilegeV2.deleteById({
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
id: req.params.id
|
||||
});
|
||||
return { privilege };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:id",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Retrieve details of a specific privilege by id.",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
id: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.GET_BY_ID.id)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
privilege: SanitizedIdentityPrivilegeSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const privilege = await server.services.identityProjectAdditionalPrivilegeV2.getPrivilegeDetailsById({
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actor: req.permission.type,
|
||||
actorOrgId: req.permission.orgId,
|
||||
id: req.params.id
|
||||
});
|
||||
return { privilege };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/slug/:privilegeSlug",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Retrieve details of a specific privilege by slug.",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
privilegeSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.GET_BY_SLUG.slug)
|
||||
}),
|
||||
querystring: z.object({
|
||||
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.GET_BY_SLUG.identityId),
|
||||
projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.GET_BY_SLUG.projectSlug)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
privilege: SanitizedIdentityPrivilegeSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const privilege = await server.services.identityProjectAdditionalPrivilegeV2.getPrivilegeDetailsBySlug({
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actor: req.permission.type,
|
||||
actorOrgId: req.permission.orgId,
|
||||
slug: req.params.privilegeSlug,
|
||||
...req.query
|
||||
});
|
||||
return { privilege };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
description: "List privileges for the specified identity by project.",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
querystring: z.object({
|
||||
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.LIST.identityId),
|
||||
projectId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.LIST.projectId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
privileges: SanitizedIdentityPrivilegeSchema.omit({ permissions: true }).array()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const privileges = await server.services.identityProjectAdditionalPrivilegeV2.listIdentityProjectPrivileges({
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.query
|
||||
});
|
||||
return {
|
||||
privileges
|
||||
};
|
||||
}
|
||||
});
|
||||
};
|
backend/src/ee/routes/v2/index.ts (new file, 16 lines)
@ -0,0 +1,16 @@
import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
import { registerProjectRoleRouter } from "./project-role-router";

export const registerV2EERoutes = async (server: FastifyZodProvider) => {
  // org role starts with organization
  await server.register(
    async (projectRouter) => {
      await projectRouter.register(registerProjectRoleRouter);
    },
    { prefix: "/workspace" }
  );

  await server.register(registerIdentityProjectAdditionalPrivilegeRouter, {
    prefix: "/identity-project-additional-privilege"
  });
};
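Note: registerV2EERoutes above uses Fastify's nested-prefix registration, where a wrapper plugin contributes "/workspace" and the routes registered inside inherit it. A minimal standalone sketch of that mechanism; the example route and handler are illustrative, not taken from the router files in this diff:

import Fastify from "fastify";

async function buildServer() {
  const server = Fastify();

  await server.register(
    async (projectRouter) => {
      // Served at GET /workspace/:projectId/roles
      projectRouter.get("/:projectId/roles", async () => ({ roles: [] }));
    },
    { prefix: "/workspace" }
  );

  return server;
}

void buildServer();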
backend/src/ee/routes/v2/project-role-router.ts (new file, 242 lines)
@ -0,0 +1,242 @@
|
||||
import { packRules } from "@casl/ability/extra";
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import { z } from "zod";
|
||||
|
||||
import { ProjectMembershipRole, ProjectRolesSchema } from "@app/db/schemas";
|
||||
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
|
||||
import { PROJECT_ROLE } from "@app/lib/api-docs";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { SanitizedRoleSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { ProjectRoleServiceIdentifierType } from "@app/services/project-role/project-role-types";
|
||||
|
||||
export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/:projectId/roles",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Create a project role",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
projectId: z.string().trim().describe(PROJECT_ROLE.CREATE.projectId)
|
||||
}),
|
||||
body: z.object({
|
||||
slug: z
|
||||
.string()
|
||||
.toLowerCase()
|
||||
.trim()
|
||||
.min(1)
|
||||
.refine(
|
||||
(val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
|
||||
"Please choose a different slug, the slug you have entered is reserved"
|
||||
)
|
||||
.refine((v) => slugify(v) === v, {
|
||||
message: "Slug must be a valid"
|
||||
})
|
||||
.describe(PROJECT_ROLE.CREATE.slug),
|
||||
name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
|
||||
description: z.string().trim().optional().describe(PROJECT_ROLE.CREATE.description),
|
||||
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.CREATE.permissions)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
role: SanitizedRoleSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const role = await server.services.projectRole.createRole({
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actor: req.permission.type,
|
||||
filter: {
|
||||
type: ProjectRoleServiceIdentifierType.ID,
|
||||
projectId: req.params.projectId
|
||||
},
|
||||
data: {
|
||||
...req.body,
|
||||
permissions: JSON.stringify(packRules(req.body.permissions))
|
||||
}
|
||||
});
|
||||
return { role };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "PATCH",
|
||||
url: "/:projectId/roles/:roleId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Update a project role",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
projectId: z.string().trim().describe(PROJECT_ROLE.UPDATE.projectId),
|
||||
roleId: z.string().trim().describe(PROJECT_ROLE.UPDATE.roleId)
|
||||
}),
|
||||
body: z.object({
|
||||
slug: z
|
||||
.string()
|
||||
.toLowerCase()
|
||||
.trim()
|
||||
.optional()
|
||||
.describe(PROJECT_ROLE.UPDATE.slug)
|
||||
.refine(
|
||||
(val) =>
|
||||
typeof val === "undefined" ||
|
||||
!Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
|
||||
"Please choose a different slug, the slug you have entered is reserved"
|
||||
)
|
||||
.refine((val) => typeof val === "undefined" || slugify(val) === val, {
|
||||
message: "Slug must be a valid"
|
||||
}),
|
||||
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
|
||||
description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description),
|
||||
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
role: SanitizedRoleSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const role = await server.services.projectRole.updateRole({
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actor: req.permission.type,
|
||||
roleId: req.params.roleId,
|
||||
data: {
|
||||
...req.body,
|
||||
permissions: req.body.permissions ? JSON.stringify(packRules(req.body.permissions)) : undefined
|
||||
}
|
||||
});
|
||||
return { role };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/:projectId/roles/:roleId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Delete a project role",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
projectId: z.string().trim().describe(PROJECT_ROLE.DELETE.projectId),
|
||||
roleId: z.string().trim().describe(PROJECT_ROLE.DELETE.roleId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
role: SanitizedRoleSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const role = await server.services.projectRole.deleteRole({
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actor: req.permission.type,
|
||||
roleId: req.params.roleId
|
||||
});
|
||||
return { role };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:projectId/roles",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
description: "List project role",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
projectId: z.string().trim().describe(PROJECT_ROLE.LIST.projectId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
roles: ProjectRolesSchema.omit({ permissions: true }).array()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const roles = await server.services.projectRole.listRoles({
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actor: req.permission.type,
|
||||
filter: {
|
||||
type: ProjectRoleServiceIdentifierType.ID,
|
||||
projectId: req.params.projectId
|
||||
}
|
||||
});
|
||||
return { roles };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:projectId/roles/slug/:roleSlug",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
projectId: z.string().trim().describe(PROJECT_ROLE.GET_ROLE_BY_SLUG.projectId),
|
||||
roleSlug: z.string().trim().describe(PROJECT_ROLE.GET_ROLE_BY_SLUG.roleSlug)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
role: SanitizedRoleSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const role = await server.services.projectRole.getRoleBySlug({
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actor: req.permission.type,
|
||||
filter: {
|
||||
type: ProjectRoleServiceIdentifierType.ID,
|
||||
projectId: req.params.projectId
|
||||
},
|
||||
roleSlug: req.params.roleSlug
|
||||
});
|
||||
return { role };
|
||||
}
|
||||
});
|
||||
};
|
@@ -14,7 +14,7 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {

  const accessApprovalPolicyFindQuery = async (
    tx: Knex,
    filter: TFindFilter<TAccessApprovalPolicies>,
    filter: TFindFilter<TAccessApprovalPolicies & { projectId: string }>,
    customFilter?: {
      policyId?: string;
    }
backend/src/ee/services/access-approval-policy/access-approval-policy-fns.ts (deleted file, 36 lines)
@@ -1,36 +0,0 @@
import { ForbiddenError, subject } from "@casl/ability";

import { ActorType } from "@app/services/auth/auth-type";

import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
import { TIsApproversValid } from "./access-approval-policy-types";

export const isApproversValid = async ({
  userIds,
  projectId,
  orgId,
  envSlug,
  actorAuthMethod,
  secretPath,
  permissionService
}: TIsApproversValid) => {
  try {
    for await (const userId of userIds) {
      const { permission: approverPermission } = await permissionService.getProjectPermission(
        ActorType.USER,
        userId,
        projectId,
        actorAuthMethod,
        orgId
      );

      ForbiddenError.from(approverPermission).throwUnlessCan(
        ProjectPermissionActions.Create,
        subject(ProjectPermissionSub.Secrets, { environment: envSlug, secretPath })
      );
    }
  } catch {
    return false;
  }
  return true;
};
@ -11,7 +11,6 @@ import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||
import { TGroupDALFactory } from "../group/group-dal";
|
||||
import { TAccessApprovalPolicyApproverDALFactory } from "./access-approval-policy-approver-dal";
|
||||
import { TAccessApprovalPolicyDALFactory } from "./access-approval-policy-dal";
|
||||
import { isApproversValid } from "./access-approval-policy-fns";
|
||||
import {
|
||||
ApproverType,
|
||||
TCreateAccessApprovalPolicy,
|
||||
@ -134,22 +133,6 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
.map((user) => user.id);
|
||||
verifyAllApprovers.push(...verifyGroupApprovers);
|
||||
|
||||
const approversValid = await isApproversValid({
|
||||
projectId: project.id,
|
||||
orgId: actorOrgId,
|
||||
envSlug: environment,
|
||||
secretPath,
|
||||
actorAuthMethod,
|
||||
permissionService,
|
||||
userIds: verifyAllApprovers
|
||||
});
|
||||
|
||||
if (!approversValid) {
|
||||
throw new BadRequestError({
|
||||
message: "One or more approvers doesn't have access to be specified secret path"
|
||||
});
|
||||
}
|
||||
|
||||
const accessApproval = await accessApprovalPolicyDAL.transaction(async (tx) => {
|
||||
const doc = await accessApprovalPolicyDAL.create(
|
||||
{
|
||||
@ -293,22 +276,6 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
userApproverIds = userApproverIds.concat(approverUsers.map((user) => user.id));
|
||||
}
|
||||
|
||||
const approversValid = await isApproversValid({
|
||||
projectId: accessApprovalPolicy.projectId,
|
||||
orgId: actorOrgId,
|
||||
envSlug: accessApprovalPolicy.environment.slug,
|
||||
secretPath: doc.secretPath!,
|
||||
actorAuthMethod,
|
||||
permissionService,
|
||||
userIds: userApproverIds
|
||||
});
|
||||
|
||||
if (!approversValid) {
|
||||
throw new BadRequestError({
|
||||
message: "One or more approvers doesn't have access to be specified secret path"
|
||||
});
|
||||
}
|
||||
|
||||
await accessApprovalPolicyApproverDAL.insertMany(
|
||||
userApproverIds.map((userId) => ({
|
||||
approverUserId: userId,
|
||||
@ -319,45 +286,6 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
}
|
||||
|
||||
if (groupApprovers) {
|
||||
const usersPromises: Promise<
|
||||
{
|
||||
id: string;
|
||||
email: string | null | undefined;
|
||||
username: string;
|
||||
firstName: string | null | undefined;
|
||||
lastName: string | null | undefined;
|
||||
isPartOfGroup: boolean;
|
||||
}[]
|
||||
>[] = [];
|
||||
|
||||
for (const groupId of groupApprovers) {
|
||||
usersPromises.push(
|
||||
groupDAL
|
||||
.findAllGroupPossibleMembers({ orgId: actorOrgId, groupId, offset: 0 })
|
||||
.then((group) => group.members)
|
||||
);
|
||||
}
|
||||
const verifyGroupApprovers = (await Promise.all(usersPromises))
|
||||
.flat()
|
||||
.filter((user) => user.isPartOfGroup)
|
||||
.map((user) => user.id);
|
||||
|
||||
const approversValid = await isApproversValid({
|
||||
projectId: accessApprovalPolicy.projectId,
|
||||
orgId: actorOrgId,
|
||||
envSlug: accessApprovalPolicy.environment.slug,
|
||||
secretPath: doc.secretPath!,
|
||||
actorAuthMethod,
|
||||
permissionService,
|
||||
userIds: verifyGroupApprovers
|
||||
});
|
||||
|
||||
if (!approversValid) {
|
||||
throw new BadRequestError({
|
||||
message: "One or more approvers doesn't have access to be specified secret path"
|
||||
});
|
||||
}
|
||||
|
||||
await accessApprovalPolicyApproverDAL.insertMany(
|
||||
groupApprovers.map((groupId) => ({
|
||||
approverGroupId: groupId,
|
||||
|
@@ -17,7 +17,6 @@ import { TUserDALFactory } from "@app/services/user/user-dal";

import { TAccessApprovalPolicyApproverDALFactory } from "../access-approval-policy/access-approval-policy-approver-dal";
import { TAccessApprovalPolicyDALFactory } from "../access-approval-policy/access-approval-policy-dal";
import { isApproversValid } from "../access-approval-policy/access-approval-policy-fns";
import { TGroupDALFactory } from "../group/group-dal";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { TProjectUserAdditionalPrivilegeDALFactory } from "../project-user-additional-privilege/project-user-additional-privilege-dal";
@@ -78,7 +77,6 @@ export const accessApprovalRequestServiceFactory = ({
  permissionService,
  accessApprovalRequestDAL,
  accessApprovalRequestReviewerDAL,
  projectMembershipDAL,
  accessApprovalPolicyDAL,
  accessApprovalPolicyApproverDAL,
  additionalPrivilegeDAL,
@@ -331,22 +329,6 @@ export const accessApprovalRequestServiceFactory = ({
      throw new ForbiddenRequestError({ message: "You are not authorized to approve this request" });
    }

    const reviewerProjectMembership = await projectMembershipDAL.findById(membership.id);

    const approversValid = await isApproversValid({
      projectId: accessApprovalRequest.projectId,
      orgId: actorOrgId,
      envSlug: accessApprovalRequest.environment,
      secretPath: accessApprovalRequest.policy.secretPath!,
      actorAuthMethod,
      permissionService,
      userIds: [reviewerProjectMembership.userId]
    });

    if (!approversValid) {
      throw new ForbiddenRequestError({ message: "You don't have access to approve this request" });
    }

    const existingReviews = await accessApprovalRequestReviewerDAL.find({ requestId: accessApprovalRequest.id });
    if (existingReviews.some((review) => review.status === ApprovalStatus.REJECTED)) {
      throw new BadRequestError({ message: "The request has already been rejected by another reviewer" });
@@ -4,7 +4,10 @@ import ms from "ms";

import { SecretKeyEncoding } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import {
  ProjectPermissionDynamicSecretActions,
  ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { getConfig } from "@app/lib/config/env";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
@@ -72,8 +75,8 @@ export const dynamicSecretLeaseServiceFactory = ({
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(
      ProjectPermissionActions.Read,
      subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
      ProjectPermissionDynamicSecretActions.Lease,
      subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
    );

    const plan = await licenseService.getPlan(actorOrgId);
@@ -151,8 +154,8 @@ export const dynamicSecretLeaseServiceFactory = ({
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(
      ProjectPermissionActions.Edit,
      subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
      ProjectPermissionDynamicSecretActions.Lease,
      subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
    );

    const plan = await licenseService.getPlan(actorOrgId);
@@ -230,8 +233,8 @@ export const dynamicSecretLeaseServiceFactory = ({
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(
      ProjectPermissionActions.Delete,
      subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
      ProjectPermissionDynamicSecretActions.Lease,
      subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
    );

    const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
@@ -299,8 +302,8 @@ export const dynamicSecretLeaseServiceFactory = ({
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(
      ProjectPermissionActions.Read,
      subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
      ProjectPermissionDynamicSecretActions.Lease,
      subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
    );

    const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
@@ -341,8 +344,8 @@ export const dynamicSecretLeaseServiceFactory = ({
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(
      ProjectPermissionActions.Read,
      subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
      ProjectPermissionDynamicSecretActions.Lease,
      subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
    );

    const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
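Editor's note: the five hunks above all make the same substitution — lease operations are now gated on the dedicated Lease action for the DynamicSecrets subject rather than the generic Secrets read/edit/delete actions. A hedged sketch of that check factored into a helper is shown below; the MongoAbility typing is an assumption about the ability object returned by permissionService and is not part of this diff.

import { ForbiddenError, MongoAbility, subject } from "@casl/ability";

import {
  ProjectPermissionDynamicSecretActions,
  ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";

// Hedged sketch: `permission` stands in for the ability object built by permissionService above.
export const assertCanLeaseDynamicSecret = (permission: MongoAbility, environmentSlug: string, path: string) =>
  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionDynamicSecretActions.Lease,
    subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
  );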
backend/src/ee/services/dynamic-secret/dynamic-secret-fns.ts (new file, 20 lines)
@@ -0,0 +1,20 @@
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { getDbConnectionHost } from "@app/lib/knex";

export const verifyHostInputValidity = (host: string) => {
  const appCfg = getConfig();
  const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);

  if (
    appCfg.isCloud &&
    // localhost
    // internal ips
    (host === "host.docker.internal" || host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
  )
    throw new BadRequestError({ message: "Invalid db host" });

  if (host === "localhost" || host === "127.0.0.1" || dbHost === host) {
    throw new BadRequestError({ message: "Invalid db host" });
  }
};
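Editor's note: to make the shared check concrete, here is a small sketch of how a caller might probe it. The sample host names and the try/catch wrapper are illustrative only; the rejection rules mirror the function above (cloud deployments additionally reject host.docker.internal and 10.x / 192.168.x addresses, while localhost, 127.0.0.1 and the backend's own database host are rejected everywhere).

import { verifyHostInputValidity } from "./dynamic-secret-fns";

// Illustrative hosts only — the first should normally pass; the others throw BadRequestError
// (host.docker.internal is rejected only on cloud deployments).
const candidateHosts = ["db.example.com", "127.0.0.1", "host.docker.internal"];

for (const host of candidateHosts) {
  try {
    verifyHostInputValidity(host);
    console.log(`${host}: accepted`);
  } catch {
    console.log(`${host}: rejected as an invalid db host`);
  }
}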
@ -3,10 +3,13 @@ import { ForbiddenError, subject } from "@casl/ability";
|
||||
import { SecretKeyEncoding } from "@app/db/schemas";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import {
|
||||
ProjectPermissionDynamicSecretActions,
|
||||
ProjectPermissionSub
|
||||
} from "@app/ee/services/permission/project-permission";
|
||||
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { OrderByDirection } from "@app/lib/types";
|
||||
import { OrderByDirection, ProjectServiceActor } from "@app/lib/types";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
|
||||
|
||||
@ -19,6 +22,7 @@ import {
|
||||
TDeleteDynamicSecretDTO,
|
||||
TDetailsDynamicSecretDTO,
|
||||
TGetDynamicSecretsCountDTO,
|
||||
TListDynamicSecretsByFolderMappingsDTO,
|
||||
TListDynamicSecretsDTO,
|
||||
TListDynamicSecretsMultiEnvDTO,
|
||||
TUpdateDynamicSecretDTO
|
||||
@ -77,8 +81,8 @@ export const dynamicSecretServiceFactory = ({
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Create,
|
||||
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
|
||||
ProjectPermissionDynamicSecretActions.CreateRootCredential,
|
||||
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
|
||||
);
|
||||
|
||||
const plan = await licenseService.getPlan(actorOrgId);
|
||||
@ -148,8 +152,8 @@ export const dynamicSecretServiceFactory = ({
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
|
||||
ProjectPermissionDynamicSecretActions.EditRootCredential,
|
||||
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
|
||||
);
|
||||
|
||||
const plan = await licenseService.getPlan(actorOrgId);
|
||||
@ -231,8 +235,8 @@ export const dynamicSecretServiceFactory = ({
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
|
||||
ProjectPermissionDynamicSecretActions.DeleteRootCredential,
|
||||
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
|
||||
);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
|
||||
@ -291,8 +295,12 @@ export const dynamicSecretServiceFactory = ({
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
|
||||
ProjectPermissionDynamicSecretActions.ReadRootCredential,
|
||||
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionDynamicSecretActions.EditRootCredential,
|
||||
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
|
||||
);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
|
||||
@ -340,8 +348,8 @@ export const dynamicSecretServiceFactory = ({
|
||||
// verify user has access to each env in request
|
||||
environmentSlugs.forEach((environmentSlug) =>
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
|
||||
ProjectPermissionDynamicSecretActions.ReadRootCredential,
|
||||
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
|
||||
)
|
||||
);
|
||||
}
|
||||
@ -380,8 +388,8 @@ export const dynamicSecretServiceFactory = ({
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
|
||||
ProjectPermissionDynamicSecretActions.ReadRootCredential,
|
||||
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
|
||||
);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
|
||||
@ -428,8 +436,8 @@ export const dynamicSecretServiceFactory = ({
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
|
||||
ProjectPermissionDynamicSecretActions.ReadRootCredential,
|
||||
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
|
||||
);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
|
||||
@@ -447,8 +455,44 @@
    return dynamicSecretCfg;
  };

  const listDynamicSecretsByFolderIds = async (
    { folderMappings, filters, projectId }: TListDynamicSecretsByFolderMappingsDTO,
    actor: ProjectServiceActor
  ) => {
    const { permission } = await permissionService.getProjectPermission(
      actor.type,
      actor.id,
      projectId,
      actor.authMethod,
      actor.orgId
    );

    const userAccessibleFolderMappings = folderMappings.filter(({ path, environment }) =>
      permission.can(
        ProjectPermissionDynamicSecretActions.ReadRootCredential,
        subject(ProjectPermissionSub.DynamicSecrets, { environment, secretPath: path })
      )
    );

    const groupedFolderMappings = new Map(userAccessibleFolderMappings.map((path) => [path.folderId, path]));

    const dynamicSecrets = await dynamicSecretDAL.listDynamicSecretsByFolderIds({
      folderIds: userAccessibleFolderMappings.map(({ folderId }) => folderId),
      ...filters
    });

    return dynamicSecrets.map((dynamicSecret) => {
      const { environment, path } = groupedFolderMappings.get(dynamicSecret.folderId)!;
      return {
        ...dynamicSecret,
        environment,
        path
      };
    });
  };

  // get dynamic secrets for multiple envs
  const listDynamicSecretsByFolderIds = async ({
  const listDynamicSecretsByEnvs = async ({
    actorAuthMethod,
    actorOrgId,
    actorId,
@@ -471,8 +515,8 @@ export const dynamicSecretServiceFactory = ({
    // verify user has access to each env in request
    environmentSlugs.forEach((environmentSlug) =>
      ForbiddenError.from(permission).throwUnlessCan(
        ProjectPermissionActions.Read,
        subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
        ProjectPermissionDynamicSecretActions.ReadRootCredential,
        subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
      )
    );
  }
@@ -514,9 +558,10 @@
    deleteByName,
    getDetails,
    listDynamicSecretsByEnv,
    listDynamicSecretsByFolderIds,
    listDynamicSecretsByEnvs,
    getDynamicSecretCount,
    getCountMultiEnv,
    fetchAzureEntraIdUsers
    fetchAzureEntraIdUsers,
    listDynamicSecretsByFolderIds
  };
};
@@ -48,17 +48,27 @@ export type TDetailsDynamicSecretDTO = {
  projectSlug: string;
} & Omit<TProjectPermission, "projectId">;

export type TListDynamicSecretsDTO = {
  path: string;
  environmentSlug: string;
  projectSlug?: string;
  projectId?: string;
export type ListDynamicSecretsFilters = {
  offset?: number;
  limit?: number;
  orderBy?: SecretsOrderBy;
  orderDirection?: OrderByDirection;
  search?: string;
} & Omit<TProjectPermission, "projectId">;
};

export type TListDynamicSecretsDTO = {
  path: string;
  environmentSlug: string;
  projectSlug?: string;
  projectId?: string;
} & ListDynamicSecretsFilters &
  Omit<TProjectPermission, "projectId">;

export type TListDynamicSecretsByFolderMappingsDTO = {
  projectId: string;
  folderMappings: { folderId: string; path: string; environment: string }[];
  filters: ListDynamicSecretsFilters;
};

export type TListDynamicSecretsMultiEnvDTO = Omit<
  TListDynamicSecretsDTO,
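Editor's note: for reference, a hedged example of the new folder-mappings DTO as a caller might construct it. All IDs, paths and environment slugs are placeholders, and the relative import path assumes a sibling module.

import { TListDynamicSecretsByFolderMappingsDTO } from "./dynamic-secret-types";

const exampleInput: TListDynamicSecretsByFolderMappingsDTO = {
  projectId: "project-id-placeholder",
  folderMappings: [
    { folderId: "folder-id-1", path: "/", environment: "dev" },
    { folderId: "folder-id-2", path: "/api", environment: "prod" }
  ],
  // Every filter field is optional (see ListDynamicSecretsFilters above).
  filters: { offset: 0, limit: 20, search: "db" }
};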
@@ -2,10 +2,9 @@ import { Client as ElasticSearchClient } from "@elastic/elasticsearch";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models";

const generatePassword = () => {
@@ -19,23 +18,8 @@ const generateUsername = () => {

export const ElasticSearchProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const appCfg = getConfig();
    const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not

    const providerInputs = await DynamicSecretElasticSearchSchema.parseAsync(inputs);
    if (
      isCloud &&
      // localhost
      // internal ips
      (providerInputs.host === "host.docker.internal" ||
        providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
        providerInputs.host.match(/^192\.168\.\d+\.\d+/))
    ) {
      throw new BadRequestError({ message: "Invalid db host" });
    }
    if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
      throw new BadRequestError({ message: "Invalid db host" });
    }
    verifyHostInputValidity(providerInputs.host);

    return providerInputs;
  };
@@ -1,3 +1,5 @@
import { SnowflakeProvider } from "@app/ee/services/dynamic-secret/providers/snowflake";

import { AwsElastiCacheDatabaseProvider } from "./aws-elasticache";
import { AwsIamProvider } from "./aws-iam";
import { AzureEntraIDProvider } from "./azure-entra-id";
@@ -9,6 +11,7 @@ import { MongoAtlasProvider } from "./mongo-atlas";
import { MongoDBProvider } from "./mongo-db";
import { RabbitMqProvider } from "./rabbit-mq";
import { RedisDatabaseProvider } from "./redis";
import { SapHanaProvider } from "./sap-hana";
import { SqlDatabaseProvider } from "./sql-database";

export const buildDynamicSecretProviders = () => ({
@@ -22,5 +25,7 @@ export const buildDynamicSecretProviders = () => ({
  [DynamicSecretProviders.ElasticSearch]: ElasticSearchProvider(),
  [DynamicSecretProviders.RabbitMq]: RabbitMqProvider(),
  [DynamicSecretProviders.AzureEntraID]: AzureEntraIDProvider(),
  [DynamicSecretProviders.Ldap]: LdapProvider()
  [DynamicSecretProviders.Ldap]: LdapProvider(),
  [DynamicSecretProviders.SapHana]: SapHanaProvider(),
  [DynamicSecretProviders.Snowflake]: SnowflakeProvider()
});
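Editor's note: a small hedged sketch of how the extended provider map can be consumed from a sibling module. The import paths are assumptions based on the file layout shown in this diff.

import { buildDynamicSecretProviders } from "./providers";
import { DynamicSecretProviders } from "./providers/models";

// Every entry implements TDynamicProviderFns, so callers can dispatch on the enum value.
const validateSnowflakeConnection = async (inputs: unknown) => {
  const providers = buildDynamicSecretProviders();
  const provider = providers[DynamicSecretProviders.Snowflake];
  return provider.validateConnection(inputs); // inputs are schema-validated inside the provider
};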
@@ -166,6 +166,27 @@ export const DynamicSecretMongoDBSchema = z.object({
  )
});

export const DynamicSecretSapHanaSchema = z.object({
  host: z.string().trim().toLowerCase(),
  port: z.number(),
  username: z.string().trim(),
  password: z.string().trim(),
  creationStatement: z.string().trim(),
  revocationStatement: z.string().trim(),
  renewStatement: z.string().trim().optional(),
  ca: z.string().optional()
});

export const DynamicSecretSnowflakeSchema = z.object({
  accountId: z.string().trim().min(1),
  orgId: z.string().trim().min(1),
  username: z.string().trim().min(1),
  password: z.string().trim().min(1),
  creationStatement: z.string().trim().min(1),
  revocationStatement: z.string().trim().min(1),
  renewStatement: z.string().trim().optional()
});

export const AzureEntraIDSchema = z.object({
  tenantId: z.string().trim().min(1),
  userId: z.string().trim().min(1),
@@ -196,7 +217,9 @@ export enum DynamicSecretProviders {
  MongoDB = "mongo-db",
  RabbitMq = "rabbit-mq",
  AzureEntraID = "azure-entra-id",
  Ldap = "ldap"
  Ldap = "ldap",
  SapHana = "sap-hana",
  Snowflake = "snowflake"
}

export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
@@ -204,13 +227,15 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
  z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.Redis), inputs: DynamicSecretRedisDBSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.SapHana), inputs: DynamicSecretSapHanaSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.AwsElastiCache), inputs: DynamicSecretAwsElastiCacheSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.MongoAtlas), inputs: DynamicSecretMongoAtlasSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.ElasticSearch), inputs: DynamicSecretElasticSearchSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.MongoDB), inputs: DynamicSecretMongoDBSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.RabbitMq), inputs: DynamicSecretRabbitMqSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.AzureEntraID), inputs: AzureEntraIDSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema })
  z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema })
]);

export type TDynamicProviderFns = {
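Editor's note: to illustrate the new union members, a hedged example of a Snowflake provider payload that the discriminated union accepts. Every value is a placeholder; the SQL statements simply follow the {{username}}/{{password}}/{{expiration}} template variables used by the provider implementation later in this diff and are not prescribed by the schema itself.

import { DynamicSecretProviders, DynamicSecretProviderSchema } from "./models";

// Placeholder configuration — parse() throws if the shape does not satisfy DynamicSecretSnowflakeSchema.
const snowflakeProvider = DynamicSecretProviderSchema.parse({
  type: DynamicSecretProviders.Snowflake,
  inputs: {
    accountId: "ab12345",
    orgId: "my-org",
    username: "INFISICAL_SVC",
    password: "placeholder-password",
    creationStatement:
      "CREATE USER {{username}} PASSWORD = '{{password}}' DEFAULT_ROLE = PUBLIC DAYS_TO_EXPIRY = {{expiration}};",
    revocationStatement: "DROP USER IF EXISTS {{username}};"
  }
});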
@@ -2,10 +2,9 @@ import { MongoClient } from "mongodb";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretMongoDBSchema, TDynamicProviderFns } from "./models";

const generatePassword = (size = 48) => {
@@ -19,22 +18,8 @@ const generateUsername = () => {

export const MongoDBProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const appCfg = getConfig();
    const providerInputs = await DynamicSecretMongoDBSchema.parseAsync(inputs);
    if (
      appCfg.isCloud &&
      // localhost
      // internal ips
      (providerInputs.host === "host.docker.internal" ||
        providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
        providerInputs.host.match(/^192\.168\.\d+\.\d+/))
    )
      throw new BadRequestError({ message: "Invalid db host" });

    if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
      throw new BadRequestError({ message: "Invalid db host" });
    }

    verifyHostInputValidity(providerInputs.host);
    return providerInputs;
  };
@@ -3,12 +3,11 @@ import https from "https";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRabbitMqSchema, TDynamicProviderFns } from "./models";

const generatePassword = () => {
@@ -79,23 +78,8 @@ async function deleteRabbitMqUser({ axiosInstance, usernameToDelete }: TDeleteRa

export const RabbitMqProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const appCfg = getConfig();
    const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not

    const providerInputs = await DynamicSecretRabbitMqSchema.parseAsync(inputs);
    if (
      isCloud &&
      // localhost
      // internal ips
      (providerInputs.host === "host.docker.internal" ||
        providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
        providerInputs.host.match(/^192\.168\.\d+\.\d+/))
    ) {
      throw new BadRequestError({ message: "Invalid db host" });
    }
    if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
      throw new BadRequestError({ message: "Invalid db host" });
    }
    verifyHostInputValidity(providerInputs.host);

    return providerInputs;
  };
@@ -3,11 +3,10 @@ import { Redis } from "ioredis";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { getDbConnectionHost } from "@app/lib/knex";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";

const generatePassword = () => {
@@ -51,22 +50,8 @@ const executeTransactions = async (connection: Redis, commands: string[]): Promi

export const RedisDatabaseProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const appCfg = getConfig();
    const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not
    const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);

    const providerInputs = await DynamicSecretRedisDBSchema.parseAsync(inputs);
    if (
      isCloud &&
      // localhost
      // internal ips
      (providerInputs.host === "host.docker.internal" ||
        providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
        providerInputs.host.match(/^192\.168\.\d+\.\d+/))
    )
      throw new BadRequestError({ message: "Invalid db host" });
    if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1" || dbHost === providerInputs.host)
      throw new BadRequestError({ message: "Invalid db host" });
    verifyHostInputValidity(providerInputs.host);
    return providerInputs;
  };
backend/src/ee/services/dynamic-secret/providers/sap-hana.ts (new file, 174 lines)
@@ -0,0 +1,174 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-call */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-return */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
|
||||
|
||||
import handlebars from "handlebars";
|
||||
import hdb from "hdb";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { verifyHostInputValidity } from "../dynamic-secret-fns";
|
||||
import { DynamicSecretSapHanaSchema, TDynamicProviderFns } from "./models";
|
||||
|
||||
const generatePassword = (size = 48) => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
|
||||
return customAlphabet(charset, 48)(size);
|
||||
};
|
||||
|
||||
const generateUsername = () => {
|
||||
return alphaNumericNanoId(32);
|
||||
};
|
||||
|
||||
export const SapHanaProvider = (): TDynamicProviderFns => {
|
||||
const validateProviderInputs = async (inputs: unknown) => {
|
||||
const providerInputs = await DynamicSecretSapHanaSchema.parseAsync(inputs);
|
||||
|
||||
verifyHostInputValidity(providerInputs.host);
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
|
||||
const client = hdb.createClient({
|
||||
host: providerInputs.host,
|
||||
port: providerInputs.port,
|
||||
user: providerInputs.username,
|
||||
password: providerInputs.password,
|
||||
...(providerInputs.ca
|
||||
? {
|
||||
ca: providerInputs.ca
|
||||
}
|
||||
: {})
|
||||
});
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
client.connect((err: any) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
if (client.readyState) {
|
||||
return resolve(true);
|
||||
}
|
||||
|
||||
reject(new Error("SAP HANA client not ready"));
|
||||
});
|
||||
});
|
||||
|
||||
return client;
|
||||
};
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const testResult: boolean = await new Promise((resolve, reject) => {
|
||||
client.exec("SELECT 1 FROM DUMMY;", (err: any) => {
|
||||
if (err) {
|
||||
reject();
|
||||
}
|
||||
|
||||
resolve(true);
|
||||
});
|
||||
});
|
||||
|
||||
return testResult;
|
||||
};
|
||||
|
||||
const create = async (inputs: unknown, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const password = generatePassword();
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
|
||||
const client = await getClient(providerInputs);
|
||||
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
|
||||
username,
|
||||
password,
|
||||
expiration
|
||||
});
|
||||
|
||||
const queries = creationStatement.toString().split(";").filter(Boolean);
|
||||
for await (const query of queries) {
|
||||
await new Promise((resolve, reject) => {
|
||||
client.exec(query, (err: any) => {
|
||||
if (err) {
|
||||
reject(
|
||||
new BadRequestError({
|
||||
message: err.message
|
||||
})
|
||||
);
|
||||
}
|
||||
resolve(true);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, username: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
|
||||
const queries = revokeStatement.toString().split(";").filter(Boolean);
|
||||
for await (const query of queries) {
|
||||
await new Promise((resolve, reject) => {
|
||||
client.exec(query, (err: any) => {
|
||||
if (err) {
|
||||
reject(
|
||||
new BadRequestError({
|
||||
message: err.message
|
||||
})
|
||||
);
|
||||
}
|
||||
resolve(true);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, username: string, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
try {
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
|
||||
const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration });
|
||||
const queries = renewStatement.toString().split(";").filter(Boolean);
|
||||
for await (const query of queries) {
|
||||
await new Promise((resolve, reject) => {
|
||||
client.exec(query, (err: any) => {
|
||||
if (err) {
|
||||
reject(
|
||||
new BadRequestError({
|
||||
message: err.message
|
||||
})
|
||||
);
|
||||
}
|
||||
resolve(true);
|
||||
});
|
||||
});
|
||||
}
|
||||
} finally {
|
||||
client.disconnect();
|
||||
}
|
||||
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
return {
|
||||
validateProviderInputs,
|
||||
validateConnection,
|
||||
create,
|
||||
revoke,
|
||||
renew
|
||||
};
|
||||
};
|
backend/src/ee/services/dynamic-secret/providers/snowflake.ts (new file, 174 lines)
@@ -0,0 +1,174 @@
|
||||
import handlebars from "handlebars";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import snowflake from "snowflake-sdk";
|
||||
import { z } from "zod";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";
|
||||
|
||||
// destroy client requires callback...
|
||||
const noop = () => {};
|
||||
|
||||
const generatePassword = (size = 48) => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
return customAlphabet(charset, 48)(size);
|
||||
};
|
||||
|
||||
const generateUsername = () => {
|
||||
return `infisical_${alphaNumericNanoId(32)}`; // username must start with alpha character, hence prefix
|
||||
};
|
||||
|
||||
const getDaysToExpiry = (expiryDate: Date) => {
|
||||
const start = new Date().getTime();
|
||||
const end = new Date(expiryDate).getTime();
|
||||
const diffTime = Math.abs(end - start);
|
||||
|
||||
return Math.ceil(diffTime / (1000 * 60 * 60 * 24));
|
||||
};
|
||||
|
||||
export const SnowflakeProvider = (): TDynamicProviderFns => {
|
||||
const validateProviderInputs = async (inputs: unknown) => {
|
||||
const providerInputs = await DynamicSecretSnowflakeSchema.parseAsync(inputs);
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretSnowflakeSchema>) => {
|
||||
const client = snowflake.createConnection({
|
||||
account: `${providerInputs.orgId}-${providerInputs.accountId}`,
|
||||
username: providerInputs.username,
|
||||
password: providerInputs.password,
|
||||
application: "Infisical"
|
||||
});
|
||||
|
||||
await client.connectAsync(noop);
|
||||
|
||||
return client;
|
||||
};
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
let isValidConnection: boolean;
|
||||
|
||||
try {
|
||||
isValidConnection = await Promise.race([
|
||||
client.isValidAsync(),
|
||||
new Promise((resolve) => {
|
||||
setTimeout(resolve, 10000);
|
||||
}).then(() => {
|
||||
throw new BadRequestError({ message: "Unable to establish connection - verify credentials" });
|
||||
})
|
||||
]);
|
||||
} finally {
|
||||
client.destroy(noop);
|
||||
}
|
||||
|
||||
return isValidConnection;
|
||||
};
|
||||
|
||||
const create = async (inputs: unknown, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const password = generatePassword();
|
||||
|
||||
try {
|
||||
const expiration = getDaysToExpiry(new Date(expireAt));
|
||||
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
|
||||
username,
|
||||
password,
|
||||
expiration
|
||||
});
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
client.execute({
|
||||
sqlText: creationStatement,
|
||||
complete(err) {
|
||||
if (err) {
|
||||
return reject(new BadRequestError({ name: "CreateLease", message: err.message }));
|
||||
}
|
||||
|
||||
return resolve(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
} finally {
|
||||
client.destroy(noop);
|
||||
}
|
||||
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, username: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
try {
|
||||
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
client.execute({
|
||||
sqlText: revokeStatement,
|
||||
complete(err) {
|
||||
if (err) {
|
||||
return reject(new BadRequestError({ name: "RevokeLease", message: err.message }));
|
||||
}
|
||||
|
||||
return resolve(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
} finally {
|
||||
client.destroy(noop);
|
||||
}
|
||||
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, username: string, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
|
||||
if (!providerInputs.renewStatement) return { entityId: username };
|
||||
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
try {
|
||||
const expiration = getDaysToExpiry(new Date(expireAt));
|
||||
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
|
||||
username,
|
||||
expiration
|
||||
});
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
client.execute({
|
||||
sqlText: renewStatement,
|
||||
complete(err) {
|
||||
if (err) {
|
||||
return reject(new BadRequestError({ name: "RenewLease", message: err.message }));
|
||||
}
|
||||
|
||||
return resolve(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
} finally {
|
||||
client.destroy(noop);
|
||||
}
|
||||
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
return {
|
||||
validateProviderInputs,
|
||||
validateConnection,
|
||||
create,
|
||||
revoke,
|
||||
renew
|
||||
};
|
||||
};
|
@@ -3,11 +3,9 @@ import knex from "knex";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { getDbConnectionHost } from "@app/lib/knex";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSqlDBSchema, SqlProviders, TDynamicProviderFns } from "./models";

const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
@@ -29,27 +27,8 @@ const generateUsername = (provider: SqlProviders) => {

export const SqlDatabaseProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const appCfg = getConfig();
    const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not
    const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);

    const providerInputs = await DynamicSecretSqlDBSchema.parseAsync(inputs);
    if (
      isCloud &&
      // localhost
      // internal ips
      (providerInputs.host === "host.docker.internal" ||
        providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
        providerInputs.host.match(/^192\.168\.\d+\.\d+/))
    )
      throw new BadRequestError({ message: "Invalid db host" });
    if (
      providerInputs.host === "localhost" ||
      providerInputs.host === "127.0.0.1" ||
      // database infisical uses
      dbHost === providerInputs.host
    )
      throw new BadRequestError({ message: "Invalid db host" });
    verifyHostInputValidity(providerInputs.host);
    return providerInputs;
  };
@@ -0,0 +1,12 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TIdentityProjectAdditionalPrivilegeV2DALFactory = ReturnType<
  typeof identityProjectAdditionalPrivilegeV2DALFactory
>;

export const identityProjectAdditionalPrivilegeV2DALFactory = (db: TDbClient) => {
  const orm = ormify(db, TableName.IdentityProjectAdditionalPrivilege);
  return orm;
};
@ -0,0 +1,343 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import { packRules } from "@casl/ability/extra";
|
||||
import ms from "ms";
|
||||
|
||||
import { TableName } from "@app/db/schemas";
|
||||
import { isAtLeastAsPrivileged } from "@app/lib/casl";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { unpackPermissions } from "@app/server/routes/santizedSchemas/permission";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
|
||||
import { TIdentityProjectAdditionalPrivilegeV2DALFactory } from "./identity-project-additional-privilege-v2-dal";
import {
  IdentityProjectAdditionalPrivilegeTemporaryMode,
  TCreateIdentityPrivilegeDTO,
  TDeleteIdentityPrivilegeByIdDTO,
  TGetIdentityPrivilegeDetailsByIdDTO,
  TGetIdentityPrivilegeDetailsBySlugDTO,
  TListIdentityPrivilegesDTO,
  TUpdateIdentityPrivilegeByIdDTO
} from "./identity-project-additional-privilege-v2-types";

type TIdentityProjectAdditionalPrivilegeV2ServiceFactoryDep = {
  identityProjectAdditionalPrivilegeDAL: TIdentityProjectAdditionalPrivilegeV2DALFactory;
  identityProjectDAL: Pick<TIdentityProjectDALFactory, "findOne" | "findById">;
  projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
  permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
};

export type TIdentityProjectAdditionalPrivilegeV2ServiceFactory = ReturnType<
  typeof identityProjectAdditionalPrivilegeV2ServiceFactory
>;

export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
  identityProjectAdditionalPrivilegeDAL,
  identityProjectDAL,
  projectDAL,
  permissionService
}: TIdentityProjectAdditionalPrivilegeV2ServiceFactoryDep) => {
  const create = async ({
    slug,
    actor,
    actorId,
    projectId,
    actorOrgId,
    identityId,
    permissions: customPermission,
    actorAuthMethod,
    ...dto
  }: TCreateIdentityPrivilegeDTO) => {
    const identityProjectMembership = await identityProjectDAL.findOne({ identityId, projectId });
    if (!identityProjectMembership)
      throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` });

    const { permission } = await permissionService.getProjectPermission(
      actor,
      actorId,
      identityProjectMembership.projectId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
    const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
      ActorType.IDENTITY,
      identityId,
      identityProjectMembership.projectId,
      actorAuthMethod,
      actorOrgId
    );

    // we need to validate that the privilege given is not higher than the assigning users permission
    // @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
    targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission));
    const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
    if (!hasRequiredPriviledges)
      throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });

    const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
      slug,
      projectMembershipId: identityProjectMembership.id
    });
    if (existingSlug) throw new BadRequestError({ message: "Additional privilege with provided slug already exists" });

    const packedPermission = JSON.stringify(packRules(customPermission));
    if (!dto.isTemporary) {
      const additionalPrivilege = await identityProjectAdditionalPrivilegeDAL.create({
        projectMembershipId: identityProjectMembership.id,
        slug,
        permissions: packedPermission
      });

      return {
        ...additionalPrivilege,
        permissions: unpackPermissions(additionalPrivilege.permissions)
      };
    }

    const relativeTempAllocatedTimeInMs = ms(dto.temporaryRange);
    const additionalPrivilege = await identityProjectAdditionalPrivilegeDAL.create({
      projectMembershipId: identityProjectMembership.id,
      slug,
      permissions: packedPermission,
      isTemporary: true,
      temporaryMode: IdentityProjectAdditionalPrivilegeTemporaryMode.Relative,
      temporaryRange: dto.temporaryRange,
      temporaryAccessStartTime: new Date(dto.temporaryAccessStartTime),
      temporaryAccessEndTime: new Date(new Date(dto.temporaryAccessStartTime).getTime() + relativeTempAllocatedTimeInMs)
    });
    return {
      ...additionalPrivilege,
      permissions: unpackPermissions(additionalPrivilege.permissions)
    };
  };

  const updateById = async ({
    id,
    data,
    actorOrgId,
    actor,
    actorId,
    actorAuthMethod
  }: TUpdateIdentityPrivilegeByIdDTO) => {
    const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findById(id);
    if (!identityPrivilege) throw new NotFoundError({ message: `Identity privilege with ${id} not found` });

    const identityProjectMembership = await identityProjectDAL.findOne({ id: identityPrivilege.projectMembershipId });
    if (!identityProjectMembership)
      throw new NotFoundError({
        message: `Failed to find identity with membership ${identityPrivilege.projectMembershipId}`
      });

    const { permission } = await permissionService.getProjectPermission(
      actor,
      actorId,
      identityProjectMembership.projectId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
    const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
      ActorType.IDENTITY,
      identityProjectMembership.identityId,
      identityProjectMembership.projectId,
      actorAuthMethod,
      actorOrgId
    );

    // we need to validate that the privilege given is not higher than the assigning users permission
    // @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
    targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || []));
    const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
    if (!hasRequiredPriviledges)
      throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });

    if (data?.slug) {
      const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
        slug: data.slug,
        projectMembershipId: identityProjectMembership.id
      });
      if (existingSlug && existingSlug.id !== identityPrivilege.id)
        throw new BadRequestError({ message: "Additional privilege with provided slug already exists" });
    }

    const isTemporary = typeof data?.isTemporary !== "undefined" ? data.isTemporary : identityPrivilege.isTemporary;
    const packedPermission = data.permissions ? JSON.stringify(packRules(data.permissions)) : undefined;
    if (isTemporary) {
      const temporaryAccessStartTime = data?.temporaryAccessStartTime || identityPrivilege?.temporaryAccessStartTime;
      const temporaryRange = data?.temporaryRange || identityPrivilege?.temporaryRange;
      const additionalPrivilege = await identityProjectAdditionalPrivilegeDAL.updateById(identityPrivilege.id, {
        slug: data.slug,
        permissions: packedPermission,
        isTemporary: data.isTemporary,
        temporaryRange: data.temporaryRange,
        temporaryMode: data.temporaryMode,
        temporaryAccessStartTime: new Date(temporaryAccessStartTime || ""),
        temporaryAccessEndTime: new Date(new Date(temporaryAccessStartTime || "").getTime() + ms(temporaryRange || ""))
      });
      return {
        ...additionalPrivilege,
        permissions: unpackPermissions(additionalPrivilege.permissions)
      };
    }

    const additionalPrivilege = await identityProjectAdditionalPrivilegeDAL.updateById(identityPrivilege.id, {
      slug: data.slug,
      permissions: packedPermission,
      isTemporary: false,
      temporaryAccessStartTime: null,
      temporaryAccessEndTime: null,
      temporaryRange: null,
      temporaryMode: null
    });
    return {
      ...additionalPrivilege,
      permissions: unpackPermissions(additionalPrivilege.permissions)
    };
  };

  const deleteById = async ({ actorId, id, actor, actorOrgId, actorAuthMethod }: TDeleteIdentityPrivilegeByIdDTO) => {
    const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findById(id);
    if (!identityPrivilege) throw new NotFoundError({ message: `Identity privilege with ${id} not found` });

    const identityProjectMembership = await identityProjectDAL.findOne({ id: identityPrivilege.projectMembershipId });
    if (!identityProjectMembership)
      throw new NotFoundError({
        message: `Failed to find identity with membership ${identityPrivilege.projectMembershipId}`
      });

    const { permission } = await permissionService.getProjectPermission(
      actor,
      actorId,
      identityProjectMembership.projectId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.Identity);
    const { permission: identityRolePermission } = await permissionService.getProjectPermission(
      ActorType.IDENTITY,
      identityProjectMembership.identityId,
      identityProjectMembership.projectId,
      actorAuthMethod,
      actorOrgId
    );
    const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, identityRolePermission);
    if (!hasRequiredPriviledges)
      throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });

    const deletedPrivilege = await identityProjectAdditionalPrivilegeDAL.deleteById(identityPrivilege.id);
    return {
      ...deletedPrivilege,
      permissions: unpackPermissions(deletedPrivilege.permissions)
    };
  };

  const getPrivilegeDetailsById = async ({
    id,
    actorOrgId,
    actor,
    actorId,
    actorAuthMethod
  }: TGetIdentityPrivilegeDetailsByIdDTO) => {
    const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findById(id);
    if (!identityPrivilege) throw new NotFoundError({ message: `Identity privilege with ${id} not found` });

    const identityProjectMembership = await identityProjectDAL.findOne({ id: identityPrivilege.projectMembershipId });
    if (!identityProjectMembership)
      throw new NotFoundError({
        message: `Failed to find identity with membership ${identityPrivilege.projectMembershipId}`
      });

    const { permission } = await permissionService.getProjectPermission(
      actor,
      actorId,
      identityProjectMembership.projectId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);

    return {
      ...identityPrivilege,
      permissions: unpackPermissions(identityPrivilege.permissions)
    };
  };

  const getPrivilegeDetailsBySlug = async ({
    identityId,
    slug,
    projectSlug,
    actorOrgId,
    actor,
    actorId,
    actorAuthMethod
  }: TGetIdentityPrivilegeDetailsBySlugDTO) => {
    const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
    if (!project) throw new NotFoundError({ message: `Project with slug ${projectSlug} not found` });
    const projectId = project.id;
    const identityProjectMembership = await identityProjectDAL.findOne({ identityId, projectId });
    if (!identityProjectMembership)
      throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` });
    const { permission } = await permissionService.getProjectPermission(
      actor,
      actorId,
      identityProjectMembership.projectId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);

    const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({
      slug,
      projectMembershipId: identityProjectMembership.id
    });
    if (!identityPrivilege) throw new NotFoundError({ message: "Identity additional privilege not found" });

    return {
      ...identityPrivilege,
      permissions: unpackPermissions(identityPrivilege.permissions)
    };
  };

  const listIdentityProjectPrivileges = async ({
    identityId,
    actorOrgId,
    actor,
    actorId,
    actorAuthMethod,
    projectId
  }: TListIdentityPrivilegesDTO) => {
    const identityProjectMembership = await identityProjectDAL.findOne({ identityId, projectId });
    if (!identityProjectMembership)
      throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` });
    const { permission } = await permissionService.getProjectPermission(
      actor,
      actorId,
      identityProjectMembership.projectId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);

    const identityPrivileges = await identityProjectAdditionalPrivilegeDAL.find(
      {
        projectMembershipId: identityProjectMembership.id
      },
      { sort: [[`${TableName.IdentityProjectAdditionalPrivilege}.slug` as "slug", "asc"]] }
    );
    return identityPrivileges;
  };

  return {
    getPrivilegeDetailsById,
    getPrivilegeDetailsBySlug,
    listIdentityProjectPrivileges,
    create,
    updateById,
    deleteById
  };
};
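Editor's note: the service above persists CASL rules in packed, stringified form and unpacks them on every read path. A minimal sketch of that round-trip using only the public @casl/ability/extra API; the rule shape below is invented for illustration and is not taken from the diff.

import { packRules, unpackRules } from "@casl/ability/extra";

// A hypothetical rule of the same general shape the service handles.
const rules = [{ action: ["read"], subject: "secrets", conditions: { environment: "dev" } }];

// create()/updateById() store JSON.stringify(packRules(...)) in the permissions column...
const stored = JSON.stringify(packRules(rules));

// ...and unpackPermissions() reverses it with unpackRules() before returning to callers.
const restored = unpackRules(JSON.parse(stored));
console.log(restored); // equivalent rules, ready to be validated against UnpackedPermissionSchema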
@ -0,0 +1,55 @@
import { TProjectPermission } from "@app/lib/types";

import { TProjectPermissionV2Schema } from "../permission/project-permission";

export enum IdentityProjectAdditionalPrivilegeTemporaryMode {
  Relative = "relative"
}

export type TCreateIdentityPrivilegeDTO = {
  permissions: TProjectPermissionV2Schema[];
  identityId: string;
  projectId: string;
  slug: string;
} & (
  | {
      isTemporary: false;
    }
  | {
      isTemporary: true;
      temporaryMode: IdentityProjectAdditionalPrivilegeTemporaryMode.Relative;
      temporaryRange: string;
      temporaryAccessStartTime: string;
    }
) &
  Omit<TProjectPermission, "projectId">;

export type TUpdateIdentityPrivilegeByIdDTO = { id: string } & Omit<TProjectPermission, "projectId"> & {
    data: Partial<{
      permissions: TProjectPermissionV2Schema[];
      slug: string;
      isTemporary: boolean;
      temporaryMode: IdentityProjectAdditionalPrivilegeTemporaryMode.Relative;
      temporaryRange: string;
      temporaryAccessStartTime: string;
    }>;
  };

export type TDeleteIdentityPrivilegeByIdDTO = Omit<TProjectPermission, "projectId"> & {
  id: string;
};

export type TGetIdentityPrivilegeDetailsByIdDTO = Omit<TProjectPermission, "projectId"> & {
  id: string;
};

export type TListIdentityPrivilegesDTO = Omit<TProjectPermission, "projectId"> & {
  identityId: string;
  projectId: string;
};

export type TGetIdentityPrivilegeDetailsBySlugDTO = Omit<TProjectPermission, "projectId"> & {
  slug: string;
  identityId: string;
  projectSlug: string;
};
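Editor's note: for the temporary variants above, temporaryRange is a human-readable duration that the service feeds through ms() to compute the access window. A small illustration; the values are made up.

import ms from "ms";

const temporaryAccessStartTime = "2024-01-01T00:00:00.000Z"; // hypothetical input
const temporaryRange = "1h";                                 // ms("1h") === 3_600_000

// Mirrors how the service derives temporaryAccessEndTime from the start time and range.
const temporaryAccessEndTime = new Date(new Date(temporaryAccessStartTime).getTime() + ms(temporaryRange));
// -> 2024-01-01T01:00:00.000Z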
@ -1,10 +1,10 @@
import { ForbiddenError, MongoAbility, RawRuleOf } from "@casl/ability";
import { PackRule, unpackRules } from "@casl/ability/extra";
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import ms from "ms";
import { z } from "zod";

import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
@ -32,16 +32,6 @@ export type TIdentityProjectAdditionalPrivilegeServiceFactory = ReturnType<
  typeof identityProjectAdditionalPrivilegeServiceFactory
>;

// TODO(akhilmhdh): move this to more centralized
export const UnpackedPermissionSchema = z.object({
  subject: z
    .union([z.string().min(1), z.string().array()])
    .transform((el) => (typeof el !== "string" ? el[0] : el))
    .optional(),
  action: z.union([z.string().min(1), z.string().array()]).transform((el) => (typeof el === "string" ? [el] : el)),
  conditions: z.unknown().optional()
});

const unpackPermissions = (permissions: unknown) =>
  UnpackedPermissionSchema.array().parse(
    unpackRules((permissions || []) as PackRule<RawRuleOf<MongoAbility<ProjectPermissionSet>>>[])
@ -80,14 +70,18 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
    const { permission: identityRolePermission } = await permissionService.getProjectPermission(
    const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
      ActorType.IDENTITY,
      identityId,
      identityProjectMembership.projectId,
      actorAuthMethod,
      actorOrgId
    );
    const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, identityRolePermission);

    // we need to validate that the privilege given is not higher than the assigning users permission
    // @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
    targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission));
    const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
    if (!hasRequiredPriviledges)
      throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });

@ -97,11 +91,12 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
    });
    if (existingSlug) throw new BadRequestError({ message: "Additional privilege of provided slug exist" });

    const packedPermission = JSON.stringify(packRules(customPermission));
    if (!dto.isTemporary) {
      const additionalPrivilege = await identityProjectAdditionalPrivilegeDAL.create({
        projectMembershipId: identityProjectMembership.id,
        slug,
        permissions: customPermission
        permissions: packedPermission
      });
      return {
        ...additionalPrivilege,
@ -113,7 +108,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
    const additionalPrivilege = await identityProjectAdditionalPrivilegeDAL.create({
      projectMembershipId: identityProjectMembership.id,
      slug,
      permissions: customPermission,
      permissions: packedPermission,
      isTemporary: true,
      temporaryMode: IdentityProjectAdditionalPrivilegeTemporaryMode.Relative,
      temporaryRange: dto.temporaryRange,
@ -152,14 +147,19 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
    const { permission: identityRolePermission } = await permissionService.getProjectPermission(

    const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
      ActorType.IDENTITY,
      identityProjectMembership.identityId,
      identityProjectMembership.projectId,
      actorAuthMethod,
      actorOrgId
    );
    const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, identityRolePermission);

    // we need to validate that the privilege given is not higher than the assigning users permission
    // @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
    targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || []));
    const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
    if (!hasRequiredPriviledges)
      throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });

@ -182,23 +182,29 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
    }

    const isTemporary = typeof data?.isTemporary !== "undefined" ? data.isTemporary : identityPrivilege.isTemporary;

    const packedPermission = data.permissions ? JSON.stringify(packRules(data.permissions)) : undefined;
    if (isTemporary) {
      const temporaryAccessStartTime = data?.temporaryAccessStartTime || identityPrivilege?.temporaryAccessStartTime;
      const temporaryRange = data?.temporaryRange || identityPrivilege?.temporaryRange;
      const additionalPrivilege = await identityProjectAdditionalPrivilegeDAL.updateById(identityPrivilege.id, {
        ...data,
        slug: data.slug,
        permissions: packedPermission,
        isTemporary: data.isTemporary,
        temporaryRange: data.temporaryRange,
        temporaryMode: data.temporaryMode,
        temporaryAccessStartTime: new Date(temporaryAccessStartTime || ""),
        temporaryAccessEndTime: new Date(new Date(temporaryAccessStartTime || "").getTime() + ms(temporaryRange || ""))
      });
      return {
        ...additionalPrivilege,

        permissions: unpackPermissions(additionalPrivilege.permissions)
      };
    }

    const additionalPrivilege = await identityProjectAdditionalPrivilegeDAL.updateById(identityPrivilege.id, {
      ...data,
      slug: data.slug,
      permissions: packedPermission,
      isTemporary: false,
      temporaryAccessStartTime: null,
      temporaryAccessEndTime: null,
@ -207,7 +213,6 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
    });
    return {
      ...additionalPrivilege,

      permissions: unpackPermissions(additionalPrivilege.permissions)
    };
  };
@ -289,7 +294,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);

    const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({
      slug,
@ -335,7 +340,6 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
    });
    return identityPrivileges.map((el) => ({
      ...el,

      permissions: unpackPermissions(el.permissions)
    }));
  };
@ -1,11 +1,13 @@
import { TProjectPermission } from "@app/lib/types";

import { TProjectPermissionV2Schema } from "../permission/project-permission";

export enum IdentityProjectAdditionalPrivilegeTemporaryMode {
  Relative = "relative"
}

export type TCreateIdentityPrivilegeDTO = {
  permissions: unknown;
  permissions: TProjectPermissionV2Schema[];
  identityId: string;
  projectSlug: string;
  slug: string;
@ -27,7 +29,7 @@ export type TUpdateIdentityPrivilegeDTO = { slug: string; identityId: string; pr
  "projectId"
> & {
  data: Partial<{
    permissions: unknown;
    permissions: TProjectPermissionV2Schema[];
    slug: string;
    isTemporary: boolean;
    temporaryMode: IdentityProjectAdditionalPrivilegeTemporaryMode.Relative;
@ -129,7 +129,7 @@ export const licenseServiceFactory = ({
        }
      }

      // this means this is self hosted oss version
      // this means this is the self-hosted oss version
      // else it would reach catch statement
      isValidLicense = true;
    } catch (error) {
@ -67,7 +67,7 @@ export const permissionServiceFactory = ({
          throw new NotFoundError({ name: "OrgRoleInvalid", message: `Organization role '${role}' not found` });
        }
      })
      .reduce((curr, prev) => prev.concat(curr), []);
      .reduce((prev, curr) => prev.concat(curr), []);

    return createMongoAbility<OrgPermissionSet>(rules, {
      conditionsMatcher
@ -98,7 +98,7 @@ export const permissionServiceFactory = ({
        });
        }
      })
      .reduce((curr, prev) => prev.concat(curr), []);
      .reduce((prev, curr) => prev.concat(curr), []);

    return rules;
  };
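Editor's note: the reduce fix above matters because the callback's first parameter is the accumulator; naming it `curr` and concatenating onto the element reverses the rule order. A minimal illustration with plain arrays rather than the real rule objects:

const roleRules = [["a"], ["b"], ["c"]];

// Old form: the accumulator is still the first argument even though it was named `curr`,
// so each step concatenates the accumulator onto the new element and the order flips.
const reversed = roleRules.reduce((curr, prev) => prev.concat(curr), [] as string[]); // ["c", "b", "a"]

// Fixed form: conventional (accumulator, current) naming keeps role order intact,
// which matters because overlapping CASL rules are resolved by their order.
const ordered = roleRules.reduce((prev, curr) => prev.concat(curr), [] as string[]); // ["a", "b", "c"]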
@ -11,8 +11,8 @@ export enum PermissionConditionOperators {
}

export const PermissionConditionSchema = {
  [PermissionConditionOperators.$IN]: z.string().min(1).array(),
  [PermissionConditionOperators.$ALL]: z.string().min(1).array(),
  [PermissionConditionOperators.$IN]: z.string().trim().min(1).array(),
  [PermissionConditionOperators.$ALL]: z.string().trim().min(1).array(),
  [PermissionConditionOperators.$REGEX]: z
    .string()
    .min(1)
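Editor's note: adding .trim() above means whitespace-only condition values are rejected rather than silently accepted, since min(1) now runs on the trimmed string. A small sketch, assuming zod's documented chaining behaviour:

import { z } from "zod";

const Operand = z.string().trim().min(1).array();

Operand.parse(["dev "]);   // ok -> ["dev"]
// Operand.parse(["   "]); // would now throw: the value is empty after trimming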
@ -1,9 +1,8 @@
import { AbilityBuilder, createMongoAbility, ForcedSubject, MongoAbility } from "@casl/ability";
import { z } from "zod";

import { TableName } from "@app/db/schemas";
import { conditionsMatcher } from "@app/lib/casl";
import { BadRequestError } from "@app/lib/errors";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";

import { PermissionConditionOperators, PermissionConditionSchema } from "./permission-types";

@ -23,6 +22,14 @@ export enum ProjectPermissionCmekActions {
  Decrypt = "decrypt"
}

export enum ProjectPermissionDynamicSecretActions {
  ReadRootCredential = "read-root-credential",
  CreateRootCredential = "create-root-credential",
  EditRootCredential = "edit-root-credential",
  DeleteRootCredential = "delete-root-credential",
  Lease = "lease"
}

export enum ProjectPermissionSub {
  Role = "role",
  Member = "member",
@ -38,6 +45,8 @@ export enum ProjectPermissionSub {
  Project = "workspace",
  Secrets = "secrets",
  SecretFolders = "secret-folders",
  SecretImports = "secret-imports",
  DynamicSecrets = "dynamic-secrets",
  SecretRollback = "secret-rollback",
  SecretApproval = "secret-approval",
  SecretRotation = "secret-rotation",
@ -54,19 +63,8 @@ export enum ProjectPermissionSub {
export type SecretSubjectFields = {
  environment: string;
  secretPath: string;
  // secretName: string;
  // secretTags: string[];
};

export const CaslSecretsV2SubjectKnexMapper = (field: string) => {
  switch (field) {
    case "secretName":
      return `${TableName.SecretV2}.key`;
    case "secretTags":
      return `${TableName.SecretTag}.slug`;
    default:
      break;
  }
  secretName?: string;
  secretTags?: string[];
};

export type SecretFolderSubjectFields = {
@ -74,6 +72,16 @@ export type SecretFolderSubjectFields = {
  secretPath: string;
};

export type DynamicSecretSubjectFields = {
  environment: string;
  secretPath: string;
};

export type SecretImportSubjectFields = {
  environment: string;
  secretPath: string;
};

export type ProjectPermissionSet =
  | [
      ProjectPermissionActions,
@ -86,6 +94,20 @@ export type ProjectPermissionSet =
        | (ForcedSubject<ProjectPermissionSub.SecretFolders> & SecretFolderSubjectFields)
      )
    ]
  | [
      ProjectPermissionDynamicSecretActions,
      (
        | ProjectPermissionSub.DynamicSecrets
        | (ForcedSubject<ProjectPermissionSub.DynamicSecrets> & DynamicSecretSubjectFields)
      )
    ]
  | [
      ProjectPermissionActions,
      (
        | ProjectPermissionSub.SecretImports
        | (ForcedSubject<ProjectPermissionSub.SecretImports> & SecretImportSubjectFields)
      )
    ]
  | [ProjectPermissionActions, ProjectPermissionSub.Role]
  | [ProjectPermissionActions, ProjectPermissionSub.Tags]
  | [ProjectPermissionActions, ProjectPermissionSub.Member]
@ -120,7 +142,9 @@ const CASL_ACTION_SCHEMA_NATIVE_ENUM = <ACTION extends z.EnumLike>(actions: ACTI
const CASL_ACTION_SCHEMA_ENUM = <ACTION extends z.EnumValues>(actions: ACTION) =>
  z.union([z.enum(actions), z.enum(actions).array().min(1)]).transform((el) => (typeof el === "string" ? [el] : el));

const SecretConditionSchema = z
// akhilmhdh: don't modify this for v2
// if you want to update create a new schema
const SecretConditionV1Schema = z
  .object({
    environment: z.union([
      z.string(),
@ -146,16 +170,50 @@ const SecretConditionSchema = z
  })
  .partial();

export const ProjectPermissionSchema = z.discriminatedUnion("subject", [
  z.object({
    subject: z.literal(ProjectPermissionSub.Secrets).describe("The entity this permission pertains to."),
    action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
      "Describe what action an entity can take."
    ),
    conditions: SecretConditionSchema.describe(
      "When specified, only matching conditions will be allowed to access given resource."
    ).optional()
  }),
const SecretConditionV2Schema = z
  .object({
    environment: z.union([
      z.string(),
      z
        .object({
          [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
          [PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
          [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
          [PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
        })
        .partial()
    ]),
    secretPath: z.union([
      z.string(),
      z
        .object({
          [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
          [PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
          [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
          [PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
        })
        .partial()
    ]),
    secretName: z.union([
      z.string(),
      z
        .object({
          [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
          [PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
          [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
          [PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
        })
        .partial()
    ]),
    secretTags: z
      .object({
        [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
      })
      .partial()
  })
  .partial();

const GeneralPermissionSchema = [
  z.object({
    subject: z.literal(ProjectPermissionSub.SecretApproval).describe("The entity this permission pertains to."),
    action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
@ -259,7 +317,7 @@ export const ProjectPermissionSchema = z.discriminatedUnion("subject", [
    )
  }),
  z.object({
    subject: z.literal(ProjectPermissionSub.CertificateTemplates).describe("The entity this permission pertains to. "),
    subject: z.literal(ProjectPermissionSub.CertificateTemplates).describe("The entity this permission pertains to."),
    action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
      "Describe what action an entity can take."
    )
@ -288,26 +346,90 @@ export const ProjectPermissionSchema = z.discriminatedUnion("subject", [
      "Describe what action an entity can take."
    )
  }),
  z.object({
    subject: z.literal(ProjectPermissionSub.SecretFolders).describe("The entity this permission pertains to."),
    action: CASL_ACTION_SCHEMA_ENUM([ProjectPermissionActions.Read]).describe(
      "Describe what action an entity can take."
    )
  }),
  z.object({
    subject: z.literal(ProjectPermissionSub.Cmek).describe("The entity this permission pertains to."),
    inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
    action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionCmekActions).describe(
      "Describe what action an entity can take."
    )
  })
];

export const ProjectPermissionV1Schema = z.discriminatedUnion("subject", [
  z.object({
    subject: z.literal(ProjectPermissionSub.Secrets).describe("The entity this permission pertains to."),
    inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
    action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
      "Describe what action an entity can take."
    ),
    conditions: SecretConditionV1Schema.describe(
      "When specified, only matching conditions will be allowed to access given resource."
    ).optional()
  }),
  z.object({
    subject: z.literal(ProjectPermissionSub.SecretFolders).describe("The entity this permission pertains to."),
    inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
    action: CASL_ACTION_SCHEMA_ENUM([ProjectPermissionActions.Read]).describe(
      "Describe what action an entity can take."
    )
  }),
  ...GeneralPermissionSchema
]);

export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
  z.object({
    subject: z.literal(ProjectPermissionSub.Secrets).describe("The entity this permission pertains to."),
    inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
    action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
      "Describe what action an entity can take."
    ),
    conditions: SecretConditionV2Schema.describe(
      "When specified, only matching conditions will be allowed to access given resource."
    ).optional()
  }),
  z.object({
    subject: z.literal(ProjectPermissionSub.SecretFolders).describe("The entity this permission pertains to."),
    inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
    action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
      "Describe what action an entity can take."
    ),
    conditions: SecretConditionV1Schema.describe(
      "When specified, only matching conditions will be allowed to access given resource."
    ).optional()
  }),
  z.object({
    subject: z.literal(ProjectPermissionSub.SecretImports).describe("The entity this permission pertains to."),
    inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
    action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
      "Describe what action an entity can take."
    ),
    conditions: SecretConditionV1Schema.describe(
      "When specified, only matching conditions will be allowed to access given resource."
    ).optional()
  }),
  z.object({
    subject: z.literal(ProjectPermissionSub.DynamicSecrets).describe("The entity this permission pertains to."),
    inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
    action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionDynamicSecretActions).describe(
      "Describe what action an entity can take."
    ),
    conditions: SecretConditionV1Schema.describe(
      "When specified, only matching conditions will be allowed to access given resource."
    ).optional()
  }),
  ...GeneralPermissionSchema
]);

export type TProjectPermissionV2Schema = z.infer<typeof ProjectPermissionV2Schema>;

const buildAdminPermissionRules = () => {
  const { can, rules } = new AbilityBuilder<MongoAbility<ProjectPermissionSet>>(createMongoAbility);

  // Admins get full access to everything
  [
    ProjectPermissionSub.Secrets,
    ProjectPermissionSub.SecretFolders,
    ProjectPermissionSub.SecretImports,
    ProjectPermissionSub.SecretApproval,
    ProjectPermissionSub.SecretRotation,
    ProjectPermissionSub.Member,
@ -339,6 +461,17 @@ const buildAdminPermissionRules = () => {
    );
  });

  can(
    [
      ProjectPermissionDynamicSecretActions.ReadRootCredential,
      ProjectPermissionDynamicSecretActions.EditRootCredential,
      ProjectPermissionDynamicSecretActions.CreateRootCredential,
      ProjectPermissionDynamicSecretActions.DeleteRootCredential,
      ProjectPermissionDynamicSecretActions.Lease
    ],
    ProjectPermissionSub.DynamicSecrets
  );

  can([ProjectPermissionActions.Edit, ProjectPermissionActions.Delete], ProjectPermissionSub.Project);
  can([ProjectPermissionActions.Read, ProjectPermissionActions.Create], ProjectPermissionSub.SecretRollback);
  can([ProjectPermissionActions.Edit], ProjectPermissionSub.Kms);
@ -370,6 +503,34 @@ const buildMemberPermissionRules = () => {
    ],
    ProjectPermissionSub.Secrets
  );
  can(
    [
      ProjectPermissionActions.Read,
      ProjectPermissionActions.Edit,
      ProjectPermissionActions.Create,
      ProjectPermissionActions.Delete
    ],
    ProjectPermissionSub.SecretFolders
  );
  can(
    [
      ProjectPermissionDynamicSecretActions.ReadRootCredential,
      ProjectPermissionDynamicSecretActions.EditRootCredential,
      ProjectPermissionDynamicSecretActions.CreateRootCredential,
      ProjectPermissionDynamicSecretActions.DeleteRootCredential,
      ProjectPermissionDynamicSecretActions.Lease
    ],
    ProjectPermissionSub.DynamicSecrets
  );
  can(
    [
      ProjectPermissionActions.Read,
      ProjectPermissionActions.Edit,
      ProjectPermissionActions.Create,
      ProjectPermissionActions.Delete
    ],
    ProjectPermissionSub.SecretImports
  );

  can([ProjectPermissionActions.Read], ProjectPermissionSub.SecretApproval);
  can([ProjectPermissionActions.Read], ProjectPermissionSub.SecretRotation);
@ -493,6 +654,9 @@ const buildViewerPermissionRules = () => {
  const { can, rules } = new AbilityBuilder<MongoAbility<ProjectPermissionSet>>(createMongoAbility);

  can(ProjectPermissionActions.Read, ProjectPermissionSub.Secrets);
  can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretFolders);
  can(ProjectPermissionDynamicSecretActions.ReadRootCredential, ProjectPermissionSub.DynamicSecrets);
  can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretImports);
  can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);
  can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
  can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRotation);
@ -530,31 +694,35 @@ export const buildServiceTokenProjectPermission = (
  const canRead = permission.includes("read");
  const { can, build } = new AbilityBuilder<MongoAbility<ProjectPermissionSet>>(createMongoAbility);
  scopes.forEach(({ secretPath, environment }) => {
    if (canWrite) {
      // TODO: @Akhi
      // @ts-expect-error type
      can(ProjectPermissionActions.Edit, ProjectPermissionSub.Secrets, {
        secretPath: { $glob: secretPath },
        environment
      });
      // @ts-expect-error type
      can(ProjectPermissionActions.Create, ProjectPermissionSub.Secrets, {
        secretPath: { $glob: secretPath },
        environment
      });
      // @ts-expect-error type
      can(ProjectPermissionActions.Delete, ProjectPermissionSub.Secrets, {
        secretPath: { $glob: secretPath },
        environment
      });
    }
    if (canRead) {
      // @ts-expect-error type
      can(ProjectPermissionActions.Read, ProjectPermissionSub.Secrets, {
        secretPath: { $glob: secretPath },
        environment
      });
    }
    [ProjectPermissionSub.Secrets, ProjectPermissionSub.SecretImports, ProjectPermissionSub.SecretFolders].forEach(
      (subject) => {
        if (canWrite) {
          // TODO: @Akhi
          // @ts-expect-error type
          can(ProjectPermissionActions.Edit, subject, {
            secretPath: { $glob: secretPath },
            environment
          });
          // @ts-expect-error type
          can(ProjectPermissionActions.Create, subject, {
            secretPath: { $glob: secretPath },
            environment
          });
          // @ts-expect-error type
          can(ProjectPermissionActions.Delete, subject, {
            secretPath: { $glob: secretPath },
            environment
          });
        }
        if (canRead) {
          // @ts-expect-error type
          can(ProjectPermissionActions.Read, subject, {
            secretPath: { $glob: secretPath },
            environment
          });
        }
      }
    );
  });

  return build({ conditionsMatcher });
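Editor's note: with the loop above, a service token's scopes now grant the same path-scoped rules to secret-imports and secret-folders as to secrets. A rough illustration of the resulting ability; the call shape and subject helper usage are assumptions, not taken verbatim from the diff.

import { subject } from "@casl/ability";

// Assuming a token scoped to environment "dev" and path "/app" with "read" and "write":
// const ability = buildServiceTokenProjectPermission(
//   [{ environment: "dev", secretPath: "/app" }],
//   ["read", "write"]
// );
// ability.can("read", subject("secret-imports", { environment: "dev", secretPath: "/app" })); // true
// ability.can("edit", subject("secret-folders", { environment: "dev", secretPath: "/app" })); // true
// ability.can("read", subject("secrets", { environment: "prod", secretPath: "/app" }));       // false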
@ -595,17 +763,63 @@ export const isAtLeastAsPrivilegedWorkspace = (
};
/* eslint-enable */

export const SecretV2SubjectFieldMapper = (arg: string) => {
  switch (arg) {
    case "environment":
      return null;
    case "secretPath":
      return null;
    case "secretName":
      return `${TableName.SecretV2}.key`;
    case "secretTags":
      return `${TableName.SecretTag}.slug`;
    default:
      throw new BadRequestError({ message: `Invalid dynamic knex operator field: ${arg}` });
export const backfillPermissionV1SchemaToV2Schema = (
  data: z.infer<typeof ProjectPermissionV1Schema>[],
  dontRemoveReadFolderPermission?: boolean
) => {
  let formattedData = UnpackedPermissionSchema.array().parse(data);
  const secretSubjects = formattedData.filter((el) => el.subject === ProjectPermissionSub.Secrets);

  // this means the folder permission as readonly is set
  const hasReadOnlyFolder = formattedData.filter((el) => el.subject === ProjectPermissionSub.SecretFolders);
  const secretImportPolicies = secretSubjects.map(({ subject, ...el }) => ({
    ...el,
    subject: ProjectPermissionSub.SecretImports as const
  }));

  const secretFolderPolicies = secretSubjects
    .map(({ subject, ...el }) => ({
      ...el,
      // read permission is not needed anymore
      action: el.action.filter((caslAction) => caslAction !== ProjectPermissionActions.Read),
      subject: ProjectPermissionSub.SecretFolders
    }))
    .filter((el) => el.action?.length > 0);

  const dynamicSecretPolicies = secretSubjects.map(({ subject, ...el }) => {
    const action = el.action.map((e) => {
      switch (e) {
        case ProjectPermissionActions.Edit:
          return ProjectPermissionDynamicSecretActions.EditRootCredential;
        case ProjectPermissionActions.Create:
          return ProjectPermissionDynamicSecretActions.CreateRootCredential;
        case ProjectPermissionActions.Delete:
          return ProjectPermissionDynamicSecretActions.DeleteRootCredential;
        case ProjectPermissionActions.Read:
          return ProjectPermissionDynamicSecretActions.ReadRootCredential;
        default:
          return ProjectPermissionDynamicSecretActions.ReadRootCredential;
      }
    });

    return {
      ...el,
      action: el.action.includes(ProjectPermissionActions.Edit)
        ? [...action, ProjectPermissionDynamicSecretActions.Lease]
        : action,
      subject: ProjectPermissionSub.DynamicSecrets
    };
  });

  if (!dontRemoveReadFolderPermission) {
    formattedData = formattedData.filter((i) => i.subject !== ProjectPermissionSub.SecretFolders);
  }

  return formattedData.concat(
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
    // @ts-ignore-error this is valid ts
    secretImportPolicies,
    dynamicSecretPolicies,
    hasReadOnlyFolder.length ? [] : secretFolderPolicies
  );
};
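Editor's note: a rough before/after of the V1-to-V2 backfill defined above, to make the fan-out concrete; the input policy is invented for illustration.

// Input: a single V1 policy over plain secrets.
const v1Policies = [{ subject: "secrets", action: ["read", "edit"], conditions: { environment: "dev" } }];

// backfillPermissionV1SchemaToV2Schema(v1Policies) keeps the secrets policy and, roughly, adds:
//   { subject: "secret-imports",  action: ["read", "edit"], conditions: { environment: "dev" } }
//   { subject: "secret-folders",  action: ["edit"],         conditions: { environment: "dev" } } // read dropped
//   { subject: "dynamic-secrets", action: ["read-root-credential", "edit-root-credential", "lease"], ... }
// unless an explicit read-only secret-folders policy already exists, in which case the
// synthesized secret-folders policies are skipped.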
@ -1,11 +1,16 @@
import { ForbiddenError } from "@casl/ability";
import { ForbiddenError, MongoAbility, RawRuleOf } from "@casl/ability";
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import ms from "ms";

import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { TableName } from "@app/db/schemas";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";

import { TPermissionServiceFactory } from "../permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
import { ProjectPermissionActions, ProjectPermissionSet, ProjectPermissionSub } from "../permission/project-permission";
import { TProjectUserAdditionalPrivilegeDALFactory } from "./project-user-additional-privilege-dal";
import {
  ProjectUserAdditionalPrivilegeTemporaryMode,
@ -26,6 +31,11 @@ export type TProjectUserAdditionalPrivilegeServiceFactory = ReturnType<
  typeof projectUserAdditionalPrivilegeServiceFactory
>;

const unpackPermissions = (permissions: unknown) =>
  UnpackedPermissionSchema.array().parse(
    unpackRules((permissions || []) as PackRule<RawRuleOf<MongoAbility<ProjectPermissionSet>>>[])
  );

export const projectUserAdditionalPrivilegeServiceFactory = ({
  projectUserAdditionalPrivilegeDAL,
  projectMembershipDAL,
@ -43,7 +53,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
  }: TCreateUserPrivilegeDTO) => {
    const projectMembership = await projectMembershipDAL.findById(projectMembershipId);
    if (!projectMembership)
      throw new NotFoundError({ message: `Project membership with ID '${projectMembershipId}' not found` });
      throw new NotFoundError({ message: `Project membership with ID ${projectMembershipId} not found` });

    const { permission } = await permissionService.getProjectPermission(
      actor,
@ -53,22 +63,41 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Member);
    const { permission: targetUserPermission } = await permissionService.getProjectPermission(
      ActorType.USER,
      projectMembership.userId,
      projectMembership.projectId,
      actorAuthMethod,
      actorOrgId
    );

    // we need to validate that the privilege given is not higher than the assigning users permission
    // @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
    targetUserPermission.update(targetUserPermission.rules.concat(customPermission));
    const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetUserPermission);
    if (!hasRequiredPriviledges)
      throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });

    const existingSlug = await projectUserAdditionalPrivilegeDAL.findOne({
      slug,
      projectId: projectMembership.projectId,
      userId: projectMembership.userId
    });
    if (existingSlug) throw new BadRequestError({ message: "Additional privilege of provided slug exist" });
    if (existingSlug)
      throw new BadRequestError({ message: `Additional privilege with provided slug ${slug} already exists` });

    const packedPermission = JSON.stringify(packRules(customPermission));
    if (!dto.isTemporary) {
      const additionalPrivilege = await projectUserAdditionalPrivilegeDAL.create({
        userId: projectMembership.userId,
        projectId: projectMembership.projectId,
        slug,
        permissions: customPermission
        permissions: packedPermission
      });
      return additionalPrivilege;
      return {
        ...additionalPrivilege,
        permissions: unpackPermissions(additionalPrivilege.permissions)
      };
    }

    const relativeTempAllocatedTimeInMs = ms(dto.temporaryRange);
@ -76,14 +105,17 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
      projectId: projectMembership.projectId,
      userId: projectMembership.userId,
      slug,
      permissions: customPermission,
      permissions: packedPermission,
      isTemporary: true,
      temporaryMode: ProjectUserAdditionalPrivilegeTemporaryMode.Relative,
      temporaryRange: dto.temporaryRange,
      temporaryAccessStartTime: new Date(dto.temporaryAccessStartTime),
      temporaryAccessEndTime: new Date(new Date(dto.temporaryAccessStartTime).getTime() + relativeTempAllocatedTimeInMs)
    });
    return additionalPrivilege;
    return {
      ...additionalPrivilege,
      permissions: unpackPermissions(additionalPrivilege.permissions)
    };
  };

  const updateById = async ({
@ -96,7 +128,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
  }: TUpdateUserPrivilegeDTO) => {
    const userPrivilege = await projectUserAdditionalPrivilegeDAL.findById(privilegeId);
    if (!userPrivilege)
      throw new NotFoundError({ message: `User additional privilege with ID '${privilegeId}' not found` });
      throw new NotFoundError({ message: `User additional privilege with ID ${privilegeId} not found` });

    const projectMembership = await projectMembershipDAL.findOne({
      userId: userPrivilege.userId,
@ -116,6 +148,20 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Member);
    const { permission: targetUserPermission } = await permissionService.getProjectPermission(
      ActorType.USER,
      projectMembership.userId,
      projectMembership.projectId,
      actorAuthMethod,
      actorOrgId
    );

    // we need to validate that the privilege given is not higher than the assigning users permission
    // @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
    targetUserPermission.update(targetUserPermission.rules.concat(dto.permissions || []));
    const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetUserPermission);
    if (!hasRequiredPriviledges)
      throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });

    if (dto?.slug) {
      const existingSlug = await projectUserAdditionalPrivilegeDAL.findOne({
@ -124,36 +170,50 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
        projectId: projectMembership.projectId
      });
      if (existingSlug && existingSlug.id !== userPrivilege.id)
        throw new BadRequestError({ message: "Additional privilege of provided slug exist" });
        throw new BadRequestError({ message: `Additional privilege with provided slug ${dto.slug} already exists` });
    }

    const isTemporary = typeof dto?.isTemporary !== "undefined" ? dto.isTemporary : userPrivilege.isTemporary;

    const packedPermission = dto.permissions && JSON.stringify(packRules(dto.permissions));
    if (isTemporary) {
      const temporaryAccessStartTime = dto?.temporaryAccessStartTime || userPrivilege?.temporaryAccessStartTime;
      const temporaryRange = dto?.temporaryRange || userPrivilege?.temporaryRange;
      const additionalPrivilege = await projectUserAdditionalPrivilegeDAL.updateById(userPrivilege.id, {
        ...dto,
        slug: dto.slug,
        permissions: packedPermission,
        isTemporary: dto.isTemporary,
        temporaryRange: dto.temporaryRange,
        temporaryMode: dto.temporaryMode,
        temporaryAccessStartTime: new Date(temporaryAccessStartTime || ""),
        temporaryAccessEndTime: new Date(new Date(temporaryAccessStartTime || "").getTime() + ms(temporaryRange || ""))
      });
      return additionalPrivilege;

      return {
        ...additionalPrivilege,
        permissions: unpackPermissions(additionalPrivilege.permissions)
      };
    }

    const additionalPrivilege = await projectUserAdditionalPrivilegeDAL.updateById(userPrivilege.id, {
      ...dto,
      slug: dto.slug,
      permissions: packedPermission,
      isTemporary: false,
      temporaryAccessStartTime: null,
      temporaryAccessEndTime: null,
      temporaryRange: null,
      temporaryMode: null
    });
    return additionalPrivilege;
    return {
      ...additionalPrivilege,
      permissions: unpackPermissions(additionalPrivilege.permissions)
    };
  };

  const deleteById = async ({ actorId, actor, actorOrgId, actorAuthMethod, privilegeId }: TDeleteUserPrivilegeDTO) => {
    const userPrivilege = await projectUserAdditionalPrivilegeDAL.findById(privilegeId);
    if (!userPrivilege)
      throw new NotFoundError({ message: `User additional privilege with ID '${privilegeId}' not found` });
      throw new NotFoundError({ message: `User additional privilege with ID ${privilegeId} not found` });

    const projectMembership = await projectMembershipDAL.findOne({
      userId: userPrivilege.userId,
@ -174,7 +234,10 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Member);

    const deletedPrivilege = await projectUserAdditionalPrivilegeDAL.deleteById(userPrivilege.id);
    return deletedPrivilege;
    return {
      ...deletedPrivilege,
      permissions: unpackPermissions(deletedPrivilege.permissions)
    };
  };

  const getPrivilegeDetailsById = async ({
@ -186,7 +249,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
  }: TGetUserPrivilegeDetailsDTO) => {
    const userPrivilege = await projectUserAdditionalPrivilegeDAL.findById(privilegeId);
    if (!userPrivilege)
      throw new NotFoundError({ message: `User additional privilege with ID '${privilegeId}' not found` });
      throw new NotFoundError({ message: `User additional privilege with ID ${privilegeId} not found` });

    const projectMembership = await projectMembershipDAL.findOne({
      userId: userPrivilege.userId,
@ -206,7 +269,10 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
    );
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Member);

    return userPrivilege;
    return {
      ...userPrivilege,
      permissions: unpackPermissions(userPrivilege.permissions)
    };
  };

  const listPrivileges = async ({
@ -218,7 +284,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
  }: TListUserPrivilegesDTO) => {
    const projectMembership = await projectMembershipDAL.findById(projectMembershipId);
    if (!projectMembership)
      throw new NotFoundError({ message: `Project membership with ID '${projectMembershipId}' not found` });
      throw new NotFoundError({ message: `Project membership with ID ${projectMembershipId} not found` });

    const { permission } = await permissionService.getProjectPermission(
      actor,
@ -229,10 +295,13 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
    );
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Member);

    const userPrivileges = await projectUserAdditionalPrivilegeDAL.find({
      userId: projectMembership.userId,
      projectId: projectMembership.projectId
    });
    const userPrivileges = await projectUserAdditionalPrivilegeDAL.find(
      {
        userId: projectMembership.userId,
        projectId: projectMembership.projectId
      },
      { sort: [[`${TableName.ProjectUserAdditionalPrivilege}.slug` as "slug", "asc"]] }
    );
    return userPrivileges;
  };
@ -1,18 +1,20 @@
import { TProjectPermission } from "@app/lib/types";

import { TProjectPermissionV2Schema } from "../permission/project-permission";

export enum ProjectUserAdditionalPrivilegeTemporaryMode {
  Relative = "relative"
}

export type TCreateUserPrivilegeDTO = (
  | {
      permissions: unknown;
      permissions: TProjectPermissionV2Schema[];
      projectMembershipId: string;
      slug: string;
      isTemporary: false;
    }
  | {
      permissions: unknown;
      permissions: TProjectPermissionV2Schema[];
      projectMembershipId: string;
      slug: string;
      isTemporary: true;
@ -25,7 +27,7 @@ export type TCreateUserPrivilegeDTO = (

export type TUpdateUserPrivilegeDTO = { privilegeId: string } & Omit<TProjectPermission, "projectId"> &
  Partial<{
    permissions: unknown;
    permissions: TProjectPermissionV2Schema[];
    slug: string;
    isTemporary: boolean;
    temporaryMode: ProjectUserAdditionalPrivilegeTemporaryMode.Relative;
@ -14,7 +14,7 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {

  const secretApprovalPolicyFindQuery = (
    tx: Knex,
    filter: TFindFilter<TSecretApprovalPolicies>,
    filter: TFindFilter<TSecretApprovalPolicies & { projectId: string }>,
    customFilter?: {
      sapId?: string;
    }
@ -1,4 +1,4 @@
import { ForbiddenError, subject } from "@casl/ability";
import { ForbiddenError } from "@casl/ability";
import picomatch from "picomatch";

import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
@ -356,17 +356,8 @@ export const secretApprovalPolicyServiceFactory = ({
    environment,
    secretPath
  }: TGetBoardSapDTO) => {
    const { permission } = await permissionService.getProjectPermission(
      actor,
      actorId,
      projectId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(
      ProjectPermissionActions.Read,
      subject(ProjectPermissionSub.Secrets, { secretPath, environment })
    );
    await permissionService.getProjectPermission(actor, actorId, projectId, actorAuthMethod, actorOrgId);

    return getSecretApprovalPolicy(projectId, environment, secretPath);
  };
@ -43,7 +43,7 @@ import {
|
||||
fnSecretBulkDelete as fnSecretV2BridgeBulkDelete,
|
||||
fnSecretBulkInsert as fnSecretV2BridgeBulkInsert,
|
||||
fnSecretBulkUpdate as fnSecretV2BridgeBulkUpdate,
|
||||
getAllNestedSecretReferences as getAllNestedSecretReferencesV2Bridge
|
||||
getAllSecretReferences as getAllSecretReferencesV2Bridge
|
||||
} from "@app/services/secret-v2-bridge/secret-v2-bridge-fns";
|
||||
import { TSecretVersionV2DALFactory } from "@app/services/secret-v2-bridge/secret-version-dal";
|
||||
import { TSecretVersionV2TagDALFactory } from "@app/services/secret-v2-bridge/secret-version-tag-dal";
|
||||
@ -267,7 +267,8 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
: "",
|
||||
secretComment: el.secretVersion.encryptedComment
|
||||
? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedComment }).toString()
|
||||
: ""
|
||||
: "",
|
||||
tags: el.secretVersion.tags
|
||||
}
|
||||
: undefined
|
||||
}));
|
||||
@ -531,11 +532,11 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
skipMultilineEncoding: el.skipMultilineEncoding,
|
||||
key: el.key,
|
||||
references: el.encryptedValue
|
||||
? getAllNestedSecretReferencesV2Bridge(
|
||||
? getAllSecretReferencesV2Bridge(
|
||||
secretManagerDecryptor({
|
||||
cipherTextBlob: el.encryptedValue
|
||||
}).toString()
|
||||
)
|
||||
).nestedReferences
|
||||
: [],
|
||||
type: SecretType.Shared
|
||||
})),
|
||||
@ -555,11 +556,11 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
? {
|
||||
encryptedValue: el.encryptedValue as Buffer,
|
||||
references: el.encryptedValue
|
||||
? getAllNestedSecretReferencesV2Bridge(
|
||||
? getAllSecretReferencesV2Bridge(
|
||||
secretManagerDecryptor({
|
||||
cipherTextBlob: el.encryptedValue
|
||||
}).toString()
|
||||
)
|
||||
).nestedReferences
|
||||
: []
|
||||
}
|
||||
: {};
|
||||
@ -571,7 +572,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
reminderNote: el.reminderNote,
|
||||
skipMultilineEncoding: el.skipMultilineEncoding,
|
||||
key: el.key,
|
||||
tagIds: el?.tags.map(({ id }) => id),
|
||||
tags: el?.tags.map(({ id }) => id),
|
||||
...encryptedValue
|
||||
}
|
||||
};
|
||||
@ -1143,10 +1144,6 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
|
||||
);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
|
||||
if (!folder)
|
||||
@ -1309,7 +1306,24 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
|
||||
const tagIds = unique(Object.values(commitTagIds).flat());
|
||||
const tags = tagIds.length ? await secretTagDAL.findManyTagsById(projectId, tagIds) : [];
|
||||
if (tagIds.length !== tags.length) throw new NotFoundError({ message: "One or more tags not found" });
|
||||
if (tagIds.length !== tags.length) throw new NotFoundError({ message: "Tag not found" });
|
||||
const tagsGroupById = groupBy(tags, (i) => i.id);
|
||||
|
||||
commits.forEach((commit) => {
|
||||
let action = ProjectPermissionActions.Create;
|
||||
if (commit.op === SecretOperations.Update) action = ProjectPermissionActions.Edit;
|
||||
if (commit.op === SecretOperations.Delete) action = ProjectPermissionActions.Delete;
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
action,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment,
|
||||
secretPath,
|
||||
secretName: commit.key,
|
||||
secretTags: commitTagIds?.[commit.key]?.map((secretTagId) => tagsGroupById[secretTagId][0].slug)
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
const secretApprovalRequest = await secretApprovalRequestDAL.transaction(async (tx) => {
|
||||
const doc = await secretApprovalRequestDAL.create(
|
||||
|
@ -28,8 +28,7 @@ import { TSecretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret
|
||||
import {
|
||||
fnSecretBulkInsert as fnSecretV2BridgeBulkInsert,
|
||||
fnSecretBulkUpdate as fnSecretV2BridgeBulkUpdate,
|
||||
getAllNestedSecretReferences,
|
||||
getAllNestedSecretReferences as getAllNestedSecretReferencesV2Bridge
|
||||
getAllSecretReferences
|
||||
} from "@app/services/secret-v2-bridge/secret-v2-bridge-fns";
|
||||
import { TSecretVersionV2DALFactory } from "@app/services/secret-v2-bridge/secret-version-dal";
|
||||
import { TSecretVersionV2TagDALFactory } from "@app/services/secret-v2-bridge/secret-version-tag-dal";
|
||||
@ -253,11 +252,12 @@ export const secretReplicationServiceFactory = ({
|
||||
const sourceLocalSecrets = await secretV2BridgeDAL.find({ folderId: folder.id, type: SecretType.Shared });
|
||||
const sourceSecretImports = await secretImportDAL.find({ folderId: folder.id });
|
||||
const sourceImportedSecrets = await fnSecretsV2FromImports({
|
||||
allowedImports: sourceSecretImports,
|
||||
secretImports: sourceSecretImports,
|
||||
secretDAL: secretV2BridgeDAL,
|
||||
folderDAL,
|
||||
secretImportDAL,
|
||||
decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : "")
|
||||
decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : ""),
|
||||
hasSecretAccess: () => true
|
||||
});
|
||||
// secrets that gets replicated across imports
|
||||
const sourceDecryptedLocalSecrets = sourceLocalSecrets.map((el) => ({
|
||||
@ -419,7 +419,7 @@ export const secretReplicationServiceFactory = ({
|
||||
encryptedValue: doc.encryptedValue,
|
||||
encryptedComment: doc.encryptedComment,
|
||||
skipMultilineEncoding: doc.skipMultilineEncoding,
|
||||
references: doc.secretValue ? getAllNestedSecretReferencesV2Bridge(doc.secretValue) : []
|
||||
references: doc.secretValue ? getAllSecretReferences(doc.secretValue).nestedReferences : []
|
||||
};
|
||||
})
|
||||
});
|
||||
@ -445,7 +445,7 @@ export const secretReplicationServiceFactory = ({
|
||||
encryptedValue: doc.encryptedValue as Buffer,
|
||||
encryptedComment: doc.encryptedComment,
|
||||
skipMultilineEncoding: doc.skipMultilineEncoding,
|
||||
references: doc.secretValue ? getAllNestedSecretReferencesV2Bridge(doc.secretValue) : []
|
||||
references: doc.secretValue ? getAllSecretReferences(doc.secretValue).nestedReferences : []
|
||||
}
|
||||
};
|
||||
})
|
||||
@ -694,7 +694,7 @@ export const secretReplicationServiceFactory = ({
|
||||
secretCommentTag: doc.secretCommentTag,
|
||||
secretCommentCiphertext: doc.secretCommentCiphertext,
|
||||
skipMultilineEncoding: doc.skipMultilineEncoding,
|
||||
references: getAllNestedSecretReferences(doc.secretValue)
|
||||
references: getAllSecretReferences(doc.secretValue).nestedReferences
|
||||
};
|
||||
})
|
||||
});
|
||||
@ -730,7 +730,7 @@ export const secretReplicationServiceFactory = ({
|
||||
secretCommentTag: doc.secretCommentTag,
|
||||
secretCommentCiphertext: doc.secretCommentCiphertext,
|
||||
skipMultilineEncoding: doc.skipMultilineEncoding,
|
||||
references: getAllNestedSecretReferences(doc.secretValue)
|
||||
references: getAllSecretReferences(doc.secretValue).nestedReferences
|
||||
}
|
||||
};
|
||||
})
|
||||
|
@@ -85,7 +85,8 @@ export const secretRotationDbFn = async ({
  password,
  username,
  client,
  variables
  variables,
  options
}: TSecretRotationDbFn) => {
  const appCfg = getConfig();

@@ -117,7 +118,8 @@ export const secretRotationDbFn = async ({
      password,
      connectionTimeoutMillis: EXTERNAL_REQUEST_TIMEOUT,
      ssl,
      pool: { min: 0, max: 1 }
      pool: { min: 0, max: 1 },
      options
    }
  });
  const data = await db.raw(query, variables);
@@ -153,6 +155,14 @@ export const getDbSetQuery = (db: TDbProviderClients, variables: { username: str
      variables: [variables.username]
    };
  }

  if (db === TDbProviderClients.MsSqlServer) {
    return {
      query: `ALTER LOGIN ?? WITH PASSWORD = '${variables.password}'`,
      variables: [variables.username]
    };
  }

  // add more based on client
  return {
    query: `ALTER USER ?? IDENTIFIED BY '${variables.password}'`,

@@ -24,4 +24,5 @@ export type TSecretRotationDbFn = {
  query: string;
  variables: unknown[];
  ca?: string;
  options?: Record<string, unknown>;
};
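The new MsSqlServer branch of getDbSetQuery rotates the login password with ALTER LOGIN. A short sketch of what that branch returns and how `db.raw(query, variables)` consumes it; in Knex, `??` is the identifier placeholder, so the login name is interpolated as an identifier while the password is templated into the string. Values below are made up.

```ts
// Sketch: the object produced by the MsSqlServer branch and its effective SQL.
const variables = { username: "infisical-sql-user1", password: "new-random-password" }; // hypothetical values

const mssqlRotation = {
  query: `ALTER LOGIN ?? WITH PASSWORD = '${variables.password}'`,
  variables: [variables.username]
};
// db.raw(mssqlRotation.query, mssqlRotation.variables) effectively runs:
//   ALTER LOGIN [infisical-sql-user1] WITH PASSWORD = '<new password>'
```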
@@ -94,7 +94,9 @@ export const secretRotationQueueFactory = ({
          // on prod this will be in days, in development this will be in seconds
          every: appCfg.NODE_ENV === "development" ? secondsToMillis(interval) : daysToMillisecond(interval),
          immediately: true
        }
        },
        removeOnComplete: true,
        removeOnFail: true
      }
    );
  };
@@ -114,6 +116,7 @@ export const secretRotationQueueFactory = ({

  queue.start(QueueName.SecretRotation, async (job) => {
    const { rotationId } = job.data;
    const appCfg = getConfig();
    logger.info(`secretRotationQueue.process: [rotationDocument=${rotationId}]`);
    const secretRotation = await secretRotationDAL.findById(rotationId);
    const rotationProvider = rotationTemplates.find(({ name }) => name === secretRotation?.provider);
@@ -172,6 +175,15 @@ export const secretRotationQueueFactory = ({
        // set a random value for new password
        newCredential.internal.rotated_password = alphaNumericNanoId(32);
        const { admin_username: username, admin_password: password, host, database, port, ca } = newCredential.inputs;

        const options =
          provider.template.client === TDbProviderClients.MsSqlServer
            ? ({
                encrypt: appCfg.ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT,
                cryptoCredentialsDetails: ca ? { ca } : {}
              } as Record<string, unknown>)
            : undefined;

        const dbFunctionArg = {
          username,
          password,
@@ -179,8 +191,10 @@ export const secretRotationQueueFactory = ({
          database,
          port,
          ca: ca as string,
          client: provider.template.client === TDbProviderClients.MySql ? "mysql2" : provider.template.client
          client: provider.template.client === TDbProviderClients.MySql ? "mysql2" : provider.template.client,
          options
        } as TSecretRotationDbFn;

        // set function
        await secretRotationDbFn({
          ...dbFunctionArg,
@@ -189,12 +203,17 @@ export const secretRotationQueueFactory = ({
          username: newCredential.internal.username as string
        })
        });

        // test function
        const testQuery =
          provider.template.client === TDbProviderClients.MsSqlServer ? "SELECT GETDATE()" : "SELECT NOW()";

        await secretRotationDbFn({
          ...dbFunctionArg,
          query: "SELECT NOW()",
          query: testQuery,
          variables: []
        });

        newCredential.outputs.db_username = newCredential.internal.username;
        newCredential.outputs.db_password = newCredential.internal.rotated_password;
        // clean up
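The queue change above only builds the extra `options` object for the MsSqlServer client and threads it through to the database connection. The sketch below shows an assumed way those fields end up at the mssql/tedious driver; with that driver, Knex forwards `connection.options`, where `encrypt` toggles TLS and `cryptoCredentialsDetails.ca` pins the server CA. The env parsing and CA source here are hypothetical stand-ins for the app config used in the diff.

```ts
// Sketch (assumptions noted): the MSSQL-only options object and where it ends up.
// In the diff, `encrypt` comes from appCfg.ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT; the
// env-string parsing and MSSQL_CA_CERT variable below are illustrative only.
const ca: string | undefined = process.env.MSSQL_CA_CERT;

const mssqlOptions: Record<string, unknown> = {
  encrypt: process.env.ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT !== "false",
  cryptoCredentialsDetails: ca ? { ca } : {}
};
// secretRotationDbFn then spreads this into the Knex config:
//   knex({ client: "mssql", connection: { host, port, user, password, options: mssqlOptions, ... } })
```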
@@ -1,7 +1,7 @@
import { ForbiddenError, subject } from "@casl/ability";
import Ajv from "ajv";

import { ProjectVersion } from "@app/db/schemas";
import { ProjectVersion, TableName } from "@app/db/schemas";
import { decryptSymmetric128BitHexKeyUTF8, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { TProjectPermission } from "@app/lib/types";
@@ -103,13 +103,14 @@ export const secretRotationServiceFactory = ({
      ProjectPermissionActions.Edit,
      subject(ProjectPermissionSub.Secrets, { environment, secretPath })
    );

    const project = await projectDAL.findById(projectId);
    const shouldUseBridge = project.version === ProjectVersion.V3;

    if (shouldUseBridge) {
      const selectedSecrets = await secretV2BridgeDAL.find({
        folderId: folder.id,
        $in: { id: Object.values(outputs) }
        $in: { [`${TableName.SecretV2}.id` as "id"]: Object.values(outputs) }
      });
      if (selectedSecrets.length !== Object.values(outputs).length)
        throw new NotFoundError({ message: `Secrets not found in folder with ID '${folder.id}'` });
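The `$in` key above is now table-qualified. The likely motivation: when the DAL's find() joins another table that also has an `id` column, a bare `whereIn("id", ...)` is ambiguous in Postgres. The sketch below illustrates the idea; the join and table names are assumptions (TableName.SecretV2 is assumed to resolve to "secrets_v2"), not the repository's actual query.

```ts
// Sketch: qualifying the whereIn column, mirroring `${TableName.SecretV2}.id`.
import type { Knex } from "knex";

const findRotationOutputSecrets = (db: Knex, folderId: string, secretIds: string[]) =>
  db("secrets_v2")
    .leftJoin("secret_v2_tag_junction as j", "j.secrets_v2Id", "secrets_v2.id") // hypothetical join
    .where("secrets_v2.folderId", folderId)
    .whereIn("secrets_v2.id", secretIds) // qualified, so the column reference stays unambiguous
    .select("secrets_v2.*");
```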
@@ -1,4 +1,5 @@
import { AWS_IAM_TEMPLATE } from "./aws-iam";
import { MSSQL_TEMPLATE } from "./mssql";
import { MYSQL_TEMPLATE } from "./mysql";
import { POSTGRES_TEMPLATE } from "./postgres";
import { SENDGRID_TEMPLATE } from "./sendgrid";
@@ -26,6 +27,13 @@ export const rotationTemplates: TSecretRotationProviderTemplate[] = [
    description: "Rotate MySQL@7/MariaDB user credentials",
    template: MYSQL_TEMPLATE
  },
  {
    name: "mssql",
    title: "Microsoft SQL Server",
    image: "mssqlserver.png",
    description: "Rotate Microsoft SQL server user credentials",
    template: MSSQL_TEMPLATE
  },
  {
    name: "aws-iam",
    title: "AWS IAM",
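The new "mssql" entry is resolved by name at rotation time, mirroring the `rotationTemplates.find(({ name }) => name === secretRotation?.provider)` lookup shown in the queue diff above. A small sketch of that lookup for the new entry; the rotation object and import path are hypothetical.

```ts
// Sketch: resolving a rotation document's provider name to the new MSSQL template.
import { rotationTemplates } from "./index"; // path assumed for illustration

const secretRotation = { provider: "mssql" }; // hypothetical rotation document

const rotationProvider = rotationTemplates.find(({ name }) => name === secretRotation.provider);
if (!rotationProvider) throw new Error("Provider not found"); // the real queue handles this with its own error type

console.log(rotationProvider.template.client); // -> "mssql" (TDbProviderClients.MsSqlServer)
```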
backend/src/ee/services/secret-rotation/templates/mssql.ts (new file, 33 lines)
@@ -0,0 +1,33 @@
import { TDbProviderClients, TProviderFunctionTypes } from "./types";

export const MSSQL_TEMPLATE = {
  type: TProviderFunctionTypes.DB as const,
  client: TDbProviderClients.MsSqlServer,
  inputs: {
    type: "object" as const,
    properties: {
      admin_username: { type: "string" as const },
      admin_password: { type: "string" as const },
      host: { type: "string" as const },
      database: { type: "string" as const, default: "master" },
      port: { type: "integer" as const, default: "1433" },
      username1: {
        type: "string",
        default: "infisical-sql-user1",
        desc: "SQL Server login name that must be created at server level with a matching database user"
      },
      username2: {
        type: "string",
        default: "infisical-sql-user2",
        desc: "SQL Server login name that must be created at server level with a matching database user"
      },
      ca: { type: "string", desc: "SSL certificate for db auth(string)" }
    },
    required: ["admin_username", "admin_password", "host", "database", "username1", "username2", "port"],
    additionalProperties: false
  },
  outputs: {
    db_username: { type: "string" },
    db_password: { type: "string" }
  }
};
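MSSQL_TEMPLATE.inputs is a JSON-Schema-style object, and the rotation service imports Ajv (see the secret-rotation-service diff above), so provider inputs can plausibly be validated against it. The sketch below shows one way to do that; the concrete input values are invented, and `strict: false` is used because the schema carries non-standard keywords like `desc`.

```ts
// Sketch: validating hypothetical rotation inputs against the new template schema.
import Ajv from "ajv";

const ajv = new Ajv({ strict: false });
const validate = ajv.compile(MSSQL_TEMPLATE.inputs);

const inputs = {
  admin_username: "sa",
  admin_password: "some-admin-password",
  host: "mssql.internal.example.com",
  database: "master",
  port: 1433,
  username1: "infisical-sql-user1",
  username2: "infisical-sql-user2"
};

if (!validate(inputs)) {
  throw new Error(`Invalid rotation inputs: ${ajv.errorsText(validate.errors)}`);
}
```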
@@ -8,7 +8,9 @@ export enum TDbProviderClients {
  // postgres, cockroach db, amazon red shift
  Pg = "pg",
  // mysql and maria db
  MySql = "mysql"
  MySql = "mysql",

  MsSqlServer = "mssql"
}

export enum TAwsProviderSystems {
@@ -29,7 +29,7 @@ export const KeyStorePrefixes = {
};

export const KeyStoreTtls = {
  SetSyncSecretIntegrationLastRunTimestampInSeconds: 10,
  SetSyncSecretIntegrationLastRunTimestampInSeconds: 60,
  AccessTokenStatusUpdateInSeconds: 120
};
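The TTL bump from 10 to 60 seconds widens the window during which a "last run" marker suppresses back-to-back integration syncs. The sketch below shows the general pattern under an assumed keystore interface; the `getItem`/`setItemWithExpiry` signatures and the key shape are not taken from this diff and are labeled as assumptions.

```ts
// Sketch (keystore interface assumed, not shown in this diff): using the 60s TTL
// to rate-limit integration syncs.
type TKeyStore = {
  getItem: (key: string) => Promise<string | null>;
  setItemWithExpiry: (key: string, expiryInSeconds: number, value: string) => Promise<unknown>;
};

const KeyStoreTtls = { SetSyncSecretIntegrationLastRunTimestampInSeconds: 60 };

const shouldSkipSync = async (keyStore: TKeyStore, projectId: string, environment: string, secretPath: string) => {
  const lastRunKey = `sync-integration-last-run-${projectId}-${environment}-${secretPath}`; // hypothetical key shape
  const lastRun = await keyStore.getItem(lastRunKey);
  if (lastRun) return true; // a sync already ran within the last 60 seconds
  await keyStore.setItemWithExpiry(
    lastRunKey,
    KeyStoreTtls.SetSyncSecretIntegrationLastRunTimestampInSeconds,
    new Date().toISOString()
  );
  return false;
};
```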
@ -5,31 +5,31 @@ export const GROUPS = {
|
||||
role: "The role of the group to create."
|
||||
},
|
||||
UPDATE: {
|
||||
id: "The id of the group to update",
|
||||
id: "The ID of the group to update.",
|
||||
name: "The new name of the group to update to.",
|
||||
slug: "The new slug of the group to update to.",
|
||||
role: "The new role of the group to update to."
|
||||
},
|
||||
DELETE: {
|
||||
id: "The id of the group to delete",
|
||||
slug: "The slug of the group to delete"
|
||||
id: "The ID of the group to delete.",
|
||||
slug: "The slug of the group to delete."
|
||||
},
|
||||
LIST_USERS: {
|
||||
id: "The id of the group to list users for",
|
||||
id: "The ID of the group to list users for.",
|
||||
offset: "The offset to start from. If you enter 10, it will start from the 10th user.",
|
||||
limit: "The number of users to return.",
|
||||
username: "The username to search for.",
|
||||
search: "The text string that user email or name will be filtered by."
|
||||
},
|
||||
ADD_USER: {
|
||||
id: "The id of the group to add the user to.",
|
||||
id: "The ID of the group to add the user to.",
|
||||
username: "The username of the user to add to the group."
|
||||
},
|
||||
GET_BY_ID: {
|
||||
id: "The id of the group to fetch"
|
||||
id: "The ID of the group to fetch."
|
||||
},
|
||||
DELETE_USER: {
|
||||
id: "The id of the group to remove the user from.",
|
||||
id: "The ID of the group to remove the user from.",
|
||||
username: "The username of the user to remove from the group."
|
||||
}
|
||||
} as const;
|
||||
@ -119,7 +119,7 @@ export const AWS_AUTH = {
|
||||
identityId: "The ID of the identity to login.",
|
||||
iamHttpRequestMethod: "The HTTP request method used in the signed request.",
|
||||
iamRequestUrl:
|
||||
"The base64-encoded HTTP URL used in the signed request. Most likely, the base64-encoding of https://sts.amazonaws.com/",
|
||||
"The base64-encoded HTTP URL used in the signed request. Most likely, the base64-encoding of https://sts.amazonaws.com/.",
|
||||
iamRequestBody:
|
||||
"The base64-encoded body of the signed request. Most likely, the base64-encoding of Action=GetCallerIdentity&Version=2011-06-15.",
|
||||
iamRequestHeaders: "The base64-encoded headers of the sts:GetCallerIdentity signed request."
|
||||
@ -130,8 +130,8 @@ export const AWS_AUTH = {
|
||||
"The comma-separated list of trusted IAM principal ARNs that are allowed to authenticate with Infisical.",
|
||||
allowedAccountIds:
|
||||
"The comma-separated list of trusted AWS account IDs that are allowed to authenticate with Infisical.",
|
||||
accessTokenTTL: "The lifetime for an acccess token in seconds.",
|
||||
accessTokenMaxTTL: "The maximum lifetime for an acccess token in seconds.",
|
||||
accessTokenTTL: "The lifetime for an access token in seconds.",
|
||||
accessTokenMaxTTL: "The maximum lifetime for an access token in seconds.",
|
||||
stsEndpoint: "The endpoint URL for the AWS STS API.",
|
||||
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used.",
|
||||
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from."
|
||||
@ -142,8 +142,8 @@ export const AWS_AUTH = {
|
||||
"The new comma-separated list of trusted IAM principal ARNs that are allowed to authenticate with Infisical.",
|
||||
allowedAccountIds:
|
||||
"The new comma-separated list of trusted AWS account IDs that are allowed to authenticate with Infisical.",
|
||||
accessTokenTTL: "The new lifetime for an acccess token in seconds.",
|
||||
accessTokenMaxTTL: "The new maximum lifetime for an acccess token in seconds.",
|
||||
accessTokenTTL: "The new lifetime for an access token in seconds.",
|
||||
accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.",
|
||||
stsEndpoint: "The new endpoint URL for the AWS STS API.",
|
||||
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used.",
|
||||
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from."
|
||||
@ -167,8 +167,8 @@ export const AZURE_AUTH = {
|
||||
allowedServicePrincipalIds:
|
||||
"The comma-separated list of Azure AD service principal IDs that are allowed to authenticate with Infisical.",
|
||||
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.",
|
||||
accessTokenTTL: "The lifetime for an acccess token in seconds.",
|
||||
accessTokenMaxTTL: "The maximum lifetime for an acccess token in seconds.",
|
||||
accessTokenTTL: "The lifetime for an access token in seconds.",
|
||||
accessTokenMaxTTL: "The maximum lifetime for an access token in seconds.",
|
||||
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used."
|
||||
},
|
||||
UPDATE: {
|
||||
@ -178,8 +178,8 @@ export const AZURE_AUTH = {
|
||||
allowedServicePrincipalIds:
|
||||
"The new comma-separated list of Azure AD service principal IDs that are allowed to authenticate with Infisical.",
|
||||
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.",
|
||||
accessTokenTTL: "The new lifetime for an acccess token in seconds.",
|
||||
accessTokenMaxTTL: "The new maximum lifetime for an acccess token in seconds.",
|
||||
accessTokenTTL: "The new lifetime for an access token in seconds.",
|
||||
accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.",
|
||||
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used."
|
||||
},
|
||||
RETRIEVE: {
|
||||
@ -203,8 +203,8 @@ export const GCP_AUTH = {
|
||||
allowedZones:
|
||||
"The comma-separated list of trusted zones that the GCE instances must belong to authenticate with Infisical.",
|
||||
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.",
|
||||
accessTokenTTL: "The lifetime for an acccess token in seconds.",
|
||||
accessTokenMaxTTL: "The maximum lifetime for an acccess token in seconds.",
|
||||
accessTokenTTL: "The lifetime for an access token in seconds.",
|
||||
accessTokenMaxTTL: "The maximum lifetime for an access token in seconds.",
|
||||
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used."
|
||||
},
|
||||
UPDATE: {
|
||||
@ -216,8 +216,8 @@ export const GCP_AUTH = {
|
||||
allowedZones:
|
||||
"The new comma-separated list of trusted zones that the GCE instances must belong to authenticate with Infisical.",
|
||||
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.",
|
||||
accessTokenTTL: "The new lifetime for an acccess token in seconds.",
|
||||
accessTokenMaxTTL: "The new maximum lifetime for an acccess token in seconds.",
|
||||
accessTokenTTL: "The new lifetime for an access token in seconds.",
|
||||
accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.",
|
||||
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used."
|
||||
},
|
||||
RETRIEVE: {
|
||||
@ -244,8 +244,8 @@ export const KUBERNETES_AUTH = {
|
||||
allowedAudience:
|
||||
"The optional audience claim that the service account JWT token must have to authenticate with Infisical.",
|
||||
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.",
|
||||
accessTokenTTL: "The lifetime for an acccess token in seconds.",
|
||||
accessTokenMaxTTL: "The maximum lifetime for an acccess token in seconds.",
|
||||
accessTokenTTL: "The lifetime for an access token in seconds.",
|
||||
accessTokenMaxTTL: "The maximum lifetime for an access token in seconds.",
|
||||
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used."
|
||||
},
|
||||
UPDATE: {
|
||||
@ -276,15 +276,15 @@ export const TOKEN_AUTH = {
|
||||
ATTACH: {
|
||||
identityId: "The ID of the identity to attach the configuration onto.",
|
||||
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.",
|
||||
accessTokenTTL: "The lifetime for an acccess token in seconds.",
|
||||
accessTokenMaxTTL: "The maximum lifetime for an acccess token in seconds.",
|
||||
accessTokenTTL: "The lifetime for an access token in seconds.",
|
||||
accessTokenMaxTTL: "The maximum lifetime for an access token in seconds.",
|
||||
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used."
|
||||
},
|
||||
UPDATE: {
|
||||
identityId: "The ID of the identity to update the auth method for.",
|
||||
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.",
|
||||
accessTokenTTL: "The new lifetime for an acccess token in seconds.",
|
||||
accessTokenMaxTTL: "The new maximum lifetime for an acccess token in seconds.",
|
||||
accessTokenTTL: "The new lifetime for an access token in seconds.",
|
||||
accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.",
|
||||
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used."
|
||||
},
|
||||
RETRIEVE: {
|
||||
@ -296,18 +296,18 @@ export const TOKEN_AUTH = {
|
||||
GET_TOKENS: {
|
||||
identityId: "The ID of the identity to list token metadata for.",
|
||||
offset: "The offset to start from. If you enter 10, it will start from the 10th token.",
|
||||
limit: "The number of tokens to return"
|
||||
limit: "The number of tokens to return."
|
||||
},
|
||||
CREATE_TOKEN: {
|
||||
identityId: "The ID of the identity to create the token for.",
|
||||
name: "The name of the token to create"
|
||||
name: "The name of the token to create."
|
||||
},
|
||||
UPDATE_TOKEN: {
|
||||
tokenId: "The ID of the token to update metadata for",
|
||||
name: "The name of the token to update to"
|
||||
tokenId: "The ID of the token to update metadata for.",
|
||||
name: "The name of the token to update to."
|
||||
},
|
||||
REVOKE_TOKEN: {
|
||||
tokenId: "The ID of the token to revoke"
|
||||
tokenId: "The ID of the token to revoke."
|
||||
}
|
||||
} as const;
|
||||
|
||||
@ -324,8 +324,8 @@ export const OIDC_AUTH = {
|
||||
boundClaims: "The attributes that should be present in the JWT for it to be valid.",
|
||||
boundSubject: "The expected principal that is the subject of the JWT.",
|
||||
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.",
|
||||
accessTokenTTL: "The lifetime for an acccess token in seconds.",
|
||||
accessTokenMaxTTL: "The maximum lifetime for an acccess token in seconds.",
|
||||
accessTokenTTL: "The lifetime for an access token in seconds.",
|
||||
accessTokenMaxTTL: "The maximum lifetime for an access token in seconds.",
|
||||
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used."
|
||||
},
|
||||
UPDATE: {
|
||||
@ -337,8 +337,8 @@ export const OIDC_AUTH = {
|
||||
boundClaims: "The new attributes that should be present in the JWT for it to be valid.",
|
||||
boundSubject: "The new expected principal that is the subject of the JWT.",
|
||||
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.",
|
||||
accessTokenTTL: "The new lifetime for an acccess token in seconds.",
|
||||
accessTokenMaxTTL: "The new maximum lifetime for an acccess token in seconds.",
|
||||
accessTokenTTL: "The new lifetime for an access token in seconds.",
|
||||
accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.",
|
||||
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used."
|
||||
},
|
||||
RETRIEVE: {
|
||||
@ -475,6 +475,7 @@ export const PROJECT_USERS = {
|
||||
},
|
||||
GET_USER_MEMBERSHIP: {
|
||||
workspaceId: "The ID of the project to get memberships from.",
|
||||
membershipId: "The ID of the user's project membership.",
|
||||
username: "The username to get project membership of. Email is the default username."
|
||||
},
|
||||
UPDATE_USER_MEMBERSHIP: {
|
||||
@ -506,8 +507,8 @@ export const PROJECT_IDENTITIES = {
|
||||
isTemporary:
|
||||
"Whether the assigned role is temporary. If isTemporary is set true, must provide temporaryMode, temporaryRange and temporaryAccessStartTime.",
|
||||
temporaryMode: "Type of temporary expiry.",
|
||||
temporaryRange: "Expiry time for temporary access. In relative mode it could be 1s,2m,3h",
|
||||
temporaryAccessStartTime: "Time to which the temporary access starts"
|
||||
temporaryRange: "Expiry time for temporary access. In relative mode it could be 1s, 2m ,3h, etc.",
|
||||
temporaryAccessStartTime: "Time to which the temporary access starts."
|
||||
}
|
||||
},
|
||||
DELETE_IDENTITY_MEMBERSHIP: {
|
||||
@ -524,8 +525,8 @@ export const PROJECT_IDENTITIES = {
|
||||
isTemporary:
|
||||
"Whether the assigned role is temporary. If isTemporary is set true, must provide temporaryMode, temporaryRange and temporaryAccessStartTime.",
|
||||
temporaryMode: "Type of temporary expiry.",
|
||||
temporaryRange: "Expiry time for temporary access. In relative mode it could be 1s,2m,3h",
|
||||
temporaryAccessStartTime: "Time to which the temporary access starts"
|
||||
temporaryRange: "Expiry time for temporary access. In relative mode it could be 1s, 2m, 3h, etc.",
|
||||
temporaryAccessStartTime: "Time to which the temporary access starts."
|
||||
}
|
||||
}
|
||||
};
|
||||
@ -562,7 +563,7 @@ export const FOLDERS = {
|
||||
directory: "The directory to list folders from. (Deprecated in favor of path)"
|
||||
},
|
||||
GET_BY_ID: {
|
||||
folderId: "The id of the folder to get details."
|
||||
folderId: "The ID of the folder to get details."
|
||||
},
|
||||
CREATE: {
|
||||
workspaceId: "The ID of the project to create the folder in.",
|
||||
@ -595,22 +596,22 @@ export const SECRETS = {
|
||||
secretPath: "The path of the secret to attach tags to.",
|
||||
type: "The type of the secret to attach tags to. (shared/personal)",
|
||||
environment: "The slug of the environment where the secret is located",
|
||||
projectSlug: "The slug of the project where the secret is located",
|
||||
projectSlug: "The slug of the project where the secret is located.",
|
||||
tagSlugs: "An array of existing tag slugs to attach to the secret."
|
||||
},
|
||||
DETACH_TAGS: {
|
||||
secretName: "The name of the secret to detach tags from.",
|
||||
secretPath: "The path of the secret to detach tags from.",
|
||||
type: "The type of the secret to attach tags to. (shared/personal)",
|
||||
environment: "The slug of the environment where the secret is located",
|
||||
projectSlug: "The slug of the project where the secret is located",
|
||||
environment: "The slug of the environment where the secret is located.",
|
||||
projectSlug: "The slug of the project where the secret is located.",
|
||||
tagSlugs: "An array of existing tag slugs to detach from the secret."
|
||||
}
|
||||
} as const;
|
||||
|
||||
export const RAW_SECRETS = {
|
||||
LIST: {
|
||||
expand: "Whether or not to expand secret references",
|
||||
expand: "Whether or not to expand secret references.",
|
||||
recursive:
|
||||
"Whether or not to fetch all secrets from the specified base path, and all of its subdirectories. Note, the max depth is 20 deep.",
|
||||
workspaceId: "The ID of the project to list secrets from.",
|
||||
@ -619,7 +620,7 @@ export const RAW_SECRETS = {
|
||||
environment: "The slug of the environment to list secrets from.",
|
||||
secretPath: "The secret path to list secrets from.",
|
||||
includeImports: "Weather to include imported secrets or not.",
|
||||
tagSlugs: "The comma separated tag slugs to filter secrets"
|
||||
tagSlugs: "The comma separated tag slugs to filter secrets."
|
||||
},
|
||||
CREATE: {
|
||||
secretName: "The name of the secret to create.",
|
||||
@ -632,11 +633,11 @@ export const RAW_SECRETS = {
|
||||
type: "The type of the secret to create.",
|
||||
workspaceId: "The ID of the project to create the secret in.",
|
||||
tagIds: "The ID of the tags to be attached to the created secret.",
|
||||
secretReminderRepeatDays: "Interval for secret rotation notifications, measured in days",
|
||||
secretReminderNote: "Note to be attached in notification email"
|
||||
secretReminderRepeatDays: "Interval for secret rotation notifications, measured in days.",
|
||||
secretReminderNote: "Note to be attached in notification email."
|
||||
},
|
||||
GET: {
|
||||
expand: "Whether or not to expand secret references",
|
||||
expand: "Whether or not to expand secret references.",
|
||||
secretName: "The name of the secret to get.",
|
||||
workspaceId: "The ID of the project to get the secret from.",
|
||||
workspaceSlug: "The slug of the project to get the secret from.",
|
||||
@ -650,16 +651,16 @@ export const RAW_SECRETS = {
|
||||
secretName: "The name of the secret to update.",
|
||||
secretComment: "Update comment to the secret.",
|
||||
environment: "The slug of the environment where the secret is located.",
|
||||
secretPath: "The path of the secret to update",
|
||||
secretPath: "The path of the secret to update.",
|
||||
secretValue: "The new value of the secret.",
|
||||
skipMultilineEncoding: "Skip multiline encoding for the secret value.",
|
||||
type: "The type of the secret to update.",
|
||||
projectSlug: "The slug of the project to update the secret in.",
|
||||
workspaceId: "The ID of the project to update the secret in.",
|
||||
tagIds: "The ID of the tags to be attached to the updated secret.",
|
||||
secretReminderRepeatDays: "Interval for secret rotation notifications, measured in days",
|
||||
secretReminderNote: "Note to be attached in notification email",
|
||||
newSecretName: "The new name for the secret"
|
||||
secretReminderRepeatDays: "Interval for secret rotation notifications, measured in days.",
|
||||
secretReminderNote: "Note to be attached in notification email.",
|
||||
newSecretName: "The new name for the secret."
|
||||
},
|
||||
DELETE: {
|
||||
secretName: "The name of the secret to delete.",
|
||||
@ -668,6 +669,12 @@ export const RAW_SECRETS = {
|
||||
type: "The type of the secret to delete.",
|
||||
projectSlug: "The slug of the project to delete the secret in.",
|
||||
workspaceId: "The ID of the project where the secret is located."
|
||||
},
|
||||
GET_REFERENCE_TREE: {
|
||||
secretName: "The name of the secret to get the reference tree for.",
|
||||
workspaceId: "The ID of the project where the secret is located.",
|
||||
environment: "The slug of the environment where the the secret is located.",
|
||||
secretPath: "The folder path where the secret is located."
|
||||
}
|
||||
} as const;
|
||||
|
||||
@ -790,7 +797,7 @@ export const DYNAMIC_SECRETS = {
|
||||
environmentSlug: "The slug of the environment to update the dynamic secret in.",
|
||||
path: "The path to update the dynamic secret in.",
|
||||
name: "The name of the dynamic secret.",
|
||||
inputs: "The new partial values for the configurated provider of the dynamic secret",
|
||||
inputs: "The new partial values for the configured provider of the dynamic secret",
|
||||
defaultTTL: "The default TTL that will be applied for all the leases.",
|
||||
maxTTL: "The maximum limit a TTL can be leases or renewed.",
|
||||
newName: "The new name for the dynamic secret."
|
||||
@ -801,7 +808,7 @@ export const DYNAMIC_SECRETS = {
|
||||
path: "The path to delete the dynamic secret in.",
|
||||
name: "The name of the dynamic secret.",
|
||||
isForced:
|
||||
"A boolean flag to delete the the dynamic secret from infisical without trying to remove it from external provider. Used when the dynamic secret got modified externally."
|
||||
"A boolean flag to delete the the dynamic secret from Infisical without trying to remove it from external provider. Used when the dynamic secret got modified externally."
|
||||
}
|
||||
} as const;
|
||||
|
||||
@ -817,7 +824,7 @@ export const DYNAMIC_SECRET_LEASES = {
|
||||
environmentSlug: "The slug of the environment of the dynamic secret in.",
|
||||
path: "The path of the dynamic secret in.",
|
||||
dynamicSecretName: "The name of the dynamic secret.",
|
||||
ttl: "The lease lifetime ttl. If not provided the default TTL of dynamic secret will be used."
|
||||
ttl: "The lease lifetime TTL. If not provided the default TTL of dynamic secret will be used."
|
||||
},
|
||||
RENEW: {
|
||||
projectSlug: "The slug of the project of the dynamic secret in.",
|
||||
@ -832,7 +839,7 @@ export const DYNAMIC_SECRET_LEASES = {
|
||||
path: "The path of the dynamic secret in.",
|
||||
leaseId: "The ID of the dynamic secret lease.",
|
||||
isForced:
|
||||
"A boolean flag to delete the the dynamic secret from infisical without trying to remove it from external provider. Used when the dynamic secret got modified externally."
|
||||
"A boolean flag to delete the the dynamic secret from Infisical without trying to remove it from external provider. Used when the dynamic secret got modified externally."
|
||||
}
|
||||
} as const;
|
||||
export const SECRET_TAGS = {
|
||||
@ -841,11 +848,11 @@ export const SECRET_TAGS = {
|
||||
},
|
||||
GET_TAG_BY_ID: {
|
||||
projectId: "The ID of the project to get tags from.",
|
||||
tagId: "The ID of the tag to get details"
|
||||
tagId: "The ID of the tag to get details."
|
||||
},
|
||||
GET_TAG_BY_SLUG: {
|
||||
projectId: "The ID of the project to get tags from.",
|
||||
tagSlug: "The slug of the tag to get details"
|
||||
tagSlug: "The slug of the tag to get details."
|
||||
},
|
||||
CREATE: {
|
||||
projectId: "The ID of the project to create the tag in.",
|
||||
@ -855,7 +862,7 @@ export const SECRET_TAGS = {
|
||||
},
|
||||
UPDATE: {
|
||||
projectId: "The ID of the project to update the tag in.",
|
||||
tagId: "The ID of the tag to get details",
|
||||
tagId: "The ID of the tag to get details.",
|
||||
name: "The name of the tag to update.",
|
||||
slug: "The slug of the tag to update.",
|
||||
color: "The color of the tag to update."
|
||||
@ -889,8 +896,8 @@ The permission object for the privilege.
|
||||
privilegePermission: "The permission object for the privilege.",
|
||||
isPackPermission: "Whether the server should pack(compact) the permission object.",
|
||||
isTemporary: "Whether the privilege is temporary.",
|
||||
temporaryMode: "Type of temporary access given. Types: relative",
|
||||
temporaryRange: "TTL for the temporay time. Eg: 1m, 1h, 1d",
|
||||
temporaryMode: "Type of temporary access given. Types: relative.",
|
||||
temporaryRange: "TTL for the temporary time. Eg: 1m, 1h, 1d.",
|
||||
temporaryAccessStartTime: "ISO time for which temporary access should begin."
|
||||
},
|
||||
UPDATE: {
|
||||
@ -915,8 +922,8 @@ The permission object for the privilege.
|
||||
`,
|
||||
privilegePermission: "The permission object for the privilege.",
|
||||
isTemporary: "Whether the privilege is temporary.",
|
||||
temporaryMode: "Type of temporary access given. Types: relative",
|
||||
temporaryRange: "TTL for the temporay time. Eg: 1m, 1h, 1d",
|
||||
temporaryMode: "Type of temporary access given. Types: relative.",
|
||||
temporaryRange: "TTL for the temporary time. Eg: 1m, 1h, 1d.",
|
||||
temporaryAccessStartTime: "ISO time for which temporary access should begin."
|
||||
},
|
||||
DELETE: {
|
||||
@ -932,62 +939,102 @@ The permission object for the privilege.
|
||||
LIST: {
|
||||
projectSlug: "The slug of the project of the identity in.",
|
||||
identityId: "The ID of the identity to list.",
|
||||
unpacked: "Whether the system should send the permissions as unpacked"
|
||||
unpacked: "Whether the system should send the permissions as unpacked."
|
||||
}
|
||||
};
|
||||
|
||||
export const PROJECT_USER_ADDITIONAL_PRIVILEGE = {
|
||||
CREATE: {
|
||||
projectMembershipId: "Project membership id of user",
|
||||
projectMembershipId: "Project membership ID of user.",
|
||||
slug: "The slug of the privilege to create.",
|
||||
permissions:
|
||||
"The permission object for the privilege. Refer https://casl.js.org/v6/en/guide/define-rules#the-shape-of-raw-rule to understand the shape",
|
||||
isPackPermission: "Whether the server should pack(compact) the permission object.",
|
||||
"The permission object for the privilege. Refer https://casl.js.org/v6/en/guide/define-rules#the-shape-of-raw-rule to understand the shape.",
|
||||
isPackPermission: "Whether the server should pack (compact) the permission object.",
|
||||
isTemporary: "Whether the privilege is temporary.",
|
||||
temporaryMode: "Type of temporary access given. Types: relative",
|
||||
temporaryRange: "TTL for the temporay time. Eg: 1m, 1h, 1d",
|
||||
temporaryMode: "Type of temporary access given. Types: relative.",
|
||||
temporaryRange: "TTL for the temporary time. Eg: 1m, 1h, 1d.",
|
||||
temporaryAccessStartTime: "ISO time for which temporary access should begin."
|
||||
},
|
||||
UPDATE: {
|
||||
privilegeId: "The id of privilege object",
|
||||
privilegeId: "The ID of privilege object.",
|
||||
slug: "The slug of the privilege to create.",
|
||||
newSlug: "The new slug of the privilege to create.",
|
||||
permissions:
|
||||
"The permission object for the privilege. Refer https://casl.js.org/v6/en/guide/define-rules#the-shape-of-raw-rule to understand the shape",
|
||||
isPackPermission: "Whether the server should pack(compact) the permission object.",
|
||||
"The permission object for the privilege. Refer https://casl.js.org/v6/en/guide/define-rules#the-shape-of-raw-rule to understand the shape.",
|
||||
isPackPermission: "Whether the server should pack (compact) the permission object.",
|
||||
isTemporary: "Whether the privilege is temporary.",
|
||||
temporaryMode: "Type of temporary access given. Types: relative",
|
||||
temporaryRange: "TTL for the temporay time. Eg: 1m, 1h, 1d",
|
||||
temporaryMode: "Type of temporary access given. Types: relative.",
|
||||
temporaryRange: "TTL for the temporary time. Eg: 1m, 1h, 1d.",
|
||||
temporaryAccessStartTime: "ISO time for which temporary access should begin."
|
||||
},
|
||||
DELETE: {
|
||||
privilegeId: "The id of privilege object"
|
||||
privilegeId: "The ID of privilege object."
|
||||
},
|
||||
GET_BY_PRIVILEGEID: {
|
||||
privilegeId: "The id of privilege object"
|
||||
GET_BY_PRIVILEGE_ID: {
|
||||
privilegeId: "The ID of privilege object."
|
||||
},
|
||||
LIST: {
|
||||
projectMembershipId: "Project membership id of user"
|
||||
projectMembershipId: "Project membership ID of user."
|
||||
}
|
||||
};
|
||||
|
||||
export const IDENTITY_ADDITIONAL_PRIVILEGE_V2 = {
|
||||
CREATE: {
|
||||
identityId: "The ID of the identity to create the privilege for.",
|
||||
projectId: "The ID of the project of the identity in.",
|
||||
slug: "The slug of the privilege to create.",
|
||||
permission: "The permission for the privilege.",
|
||||
isTemporary: "Whether the privilege is temporary or permanent.",
|
||||
temporaryMode: "Type of temporary access given. Types: relative.",
|
||||
temporaryRange: "The TTL for the temporary access given. Eg: 1m, 1h, 1d.",
|
||||
temporaryAccessStartTime: "The start time in ISO format when the temporary access should begin."
|
||||
},
|
||||
UPDATE: {
|
||||
id: "The ID of the identity privilege.",
|
||||
identityId: "The ID of the identity to update.",
|
||||
slug: "The slug of the privilege to update.",
|
||||
privilegePermission: "The permission for the privilege.",
|
||||
isTemporary: "Whether the privilege is temporary.",
|
||||
temporaryMode: "Type of temporary access given. Types: relative.",
|
||||
temporaryRange: "The TTL for the temporary access given. Eg: 1m, 1h, 1d.",
|
||||
temporaryAccessStartTime: "The start time in ISO format when the temporary access should begin."
|
||||
},
|
||||
DELETE: {
|
||||
id: "The ID of the identity privilege.",
|
||||
identityId: "The ID of the identity to delete.",
|
||||
slug: "The slug of the privilege to delete."
|
||||
},
|
||||
GET_BY_SLUG: {
|
||||
projectSlug: "The slug of the project of the identity in.",
|
||||
identityId: "The ID of the identity to list.",
|
||||
slug: "The slug of the privilege."
|
||||
},
|
||||
GET_BY_ID: {
|
||||
id: "The ID of the identity privilege."
|
||||
},
|
||||
LIST: {
|
||||
projectId: "The ID of the project that the identity is in.",
|
||||
identityId: "The ID of the identity to list."
|
||||
}
|
||||
};
|
||||
|
||||
export const INTEGRATION_AUTH = {
|
||||
GET: {
|
||||
integrationAuthId: "The id of integration authentication object."
|
||||
integrationAuthId: "The ID of integration authentication object."
|
||||
},
|
||||
DELETE: {
|
||||
integration: "The slug of the integration to be unauthorized.",
|
||||
projectId: "The ID of the project to delete the integration auth from."
|
||||
},
|
||||
DELETE_BY_ID: {
|
||||
integrationAuthId: "The id of integration authentication object to delete."
|
||||
integrationAuthId: "The ID of integration authentication object to delete."
|
||||
},
|
||||
CREATE_ACCESS_TOKEN: {
|
||||
workspaceId: "The ID of the project to create the integration auth for.",
|
||||
integration: "The slug of integration for the auth object.",
|
||||
accessId: "The unique authorized access id of the external integration provider.",
|
||||
accessId: "The unique authorized access ID of the external integration provider.",
|
||||
accessToken: "The unique authorized access token of the external integration provider.",
|
||||
awsAssumeIamRoleArn: "The AWS IAM Role to be assumed by Infisical",
|
||||
awsAssumeIamRoleArn: "The AWS IAM Role to be assumed by Infisical.",
|
||||
url: "",
|
||||
namespace: "",
|
||||
refreshToken: "The refresh token for integration authorization."
|
||||
@ -1006,16 +1053,16 @@ export const INTEGRATION = {
|
||||
targetEnvironment:
|
||||
"The target environment of the integration provider. Used in cloudflare pages, TeamCity, Gitlab integrations.",
|
||||
targetEnvironmentId:
|
||||
"The target environment id of the integration provider. Used in cloudflare pages, teamcity, gitlab integrations.",
|
||||
"The target environment ID of the integration provider. Used in cloudflare pages, teamcity, gitlab integrations.",
|
||||
targetService:
|
||||
"The service based grouping identifier of the external provider. Used in Terraform cloud, Checkly, Railway and NorthFlank",
|
||||
"The service based grouping identifier of the external provider. Used in Terraform cloud, Checkly, Railway and NorthFlank.",
|
||||
targetServiceId:
|
||||
"The service based grouping identifier ID of the external provider. Used in Terraform cloud, Checkly, Railway and NorthFlank",
|
||||
"The service based grouping identifier ID of the external provider. Used in Terraform cloud, Checkly, Railway and NorthFlank.",
|
||||
owner: "External integration providers service entity owner. Used in Github.",
|
||||
url: "The self-hosted URL of the platform to integrate with",
|
||||
path: "Path to save the synced secrets. Used by Gitlab, AWS Parameter Store, Vault",
|
||||
url: "The self-hosted URL of the platform to integrate with.",
|
||||
path: "Path to save the synced secrets. Used by Gitlab, AWS Parameter Store, Vault.",
|
||||
region: "AWS region to sync secrets to.",
|
||||
scope: "Scope of the provider. Used by Github, Qovery",
|
||||
scope: "Scope of the provider. Used by Github, Qovery.",
|
||||
metadata: {
|
||||
secretPrefix: "The prefix for the saved secret. Used by GCP.",
|
||||
secretSuffix: "The suffix for the saved secret. Used by GCP.",
|
||||
@ -1027,12 +1074,12 @@ export const INTEGRATION = {
|
||||
githubVisibility:
|
||||
"Define where the secrets from the Github Integration should be visible. Option 'selected' lets you directly define which repositories to sync secrets to.",
|
||||
githubVisibilityRepoIds:
|
||||
"The repository IDs to sync secrets to when using the Github Integration. Only applicable when using Organization scope, and visibility is set to 'selected'",
|
||||
"The repository IDs to sync secrets to when using the Github Integration. Only applicable when using Organization scope, and visibility is set to 'selected'.",
|
||||
kmsKeyId: "The ID of the encryption key from AWS KMS.",
|
||||
shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store.",
|
||||
shouldMaskSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Masked'.",
|
||||
shouldProtectSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Protected'.",
|
||||
shouldEnableDelete: "The flag to enable deletion of secrets"
|
||||
shouldEnableDelete: "The flag to enable deletion of secrets."
|
||||
}
|
||||
},
|
||||
UPDATE: {
|
||||
@ -1051,7 +1098,7 @@ export const INTEGRATION = {
|
||||
integrationId: "The ID of the integration object."
|
||||
},
|
||||
SYNC: {
|
||||
integrationId: "The ID of the integration object to manually sync"
|
||||
integrationId: "The ID of the integration object to manually sync."
|
||||
}
|
||||
};
|
||||
|
||||
@ -1059,7 +1106,7 @@ export const AUDIT_LOG_STREAMS = {
|
||||
CREATE: {
|
||||
url: "The HTTP URL to push logs to.",
|
||||
headers: {
|
||||
desc: "The HTTP headers attached for the external prrovider requests.",
|
||||
desc: "The HTTP headers attached for the external provider requests.",
|
||||
key: "The HTTP header key name.",
|
||||
value: "The HTTP header value."
|
||||
}
|
||||
@ -1068,7 +1115,7 @@ export const AUDIT_LOG_STREAMS = {
|
||||
id: "The ID of the audit log stream to update.",
|
||||
url: "The HTTP URL to push logs to.",
|
||||
headers: {
|
||||
desc: "The HTTP headers attached for the external prrovider requests.",
|
||||
desc: "The HTTP headers attached for the external provider requests.",
|
||||
key: "The HTTP header key name.",
|
||||
value: "The HTTP header value."
|
||||
}
|
||||
@ -1084,16 +1131,16 @@ export const AUDIT_LOG_STREAMS = {
|
||||
export const CERTIFICATE_AUTHORITIES = {
|
||||
CREATE: {
|
||||
projectSlug: "Slug of the project to create the CA in.",
|
||||
type: "The type of CA to create",
|
||||
friendlyName: "A friendly name for the CA",
|
||||
organization: "The organization (O) for the CA",
|
||||
ou: "The organization unit (OU) for the CA",
|
||||
country: "The country name (C) for the CA",
|
||||
province: "The state of province name for the CA",
|
||||
locality: "The locality name for the CA",
|
||||
commonName: "The common name (CN) for the CA",
|
||||
notBefore: "The date and time when the CA becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format",
|
||||
notAfter: "The date and time when the CA expires in YYYY-MM-DDTHH:mm:ss.sssZ format",
|
||||
type: "The type of CA to create.",
|
||||
friendlyName: "A friendly name for the CA.",
|
||||
organization: "The organization (O) for the CA.",
|
||||
ou: "The organization unit (OU) for the CA.",
|
||||
country: "The country name (C) for the CA.",
|
||||
province: "The state of province name for the CA.",
|
||||
locality: "The locality name for the CA.",
|
||||
commonName: "The common name (CN) for the CA.",
|
||||
notBefore: "The date and time when the CA becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format.",
|
||||
notAfter: "The date and time when the CA expires in YYYY-MM-DDTHH:mm:ss.sssZ format.",
|
||||
maxPathLength:
|
||||
"The maximum number of intermediate CAs that may follow this CA in the certificate / CA chain. A maxPathLength of -1 implies no path limit on the chain.",
|
||||
keyAlgorithm:
|
||||
@ -1102,238 +1149,240 @@ export const CERTIFICATE_AUTHORITIES = {
|
||||
"Whether or not certificates for this CA can only be issued through certificate templates."
|
||||
},
|
||||
GET: {
|
||||
caId: "The ID of the CA to get"
|
||||
caId: "The ID of the CA to get."
|
||||
},
|
||||
UPDATE: {
|
||||
caId: "The ID of the CA to update",
|
||||
status: "The status of the CA to update to. This can be one of active or disabled",
|
||||
caId: "The ID of the CA to update.",
|
||||
status: "The status of the CA to update to. This can be one of active or disabled.",
|
||||
requireTemplateForIssuance:
|
||||
"Whether or not certificates for this CA can only be issued through certificate templates."
|
||||
},
|
||||
DELETE: {
|
||||
caId: "The ID of the CA to delete"
|
||||
caId: "The ID of the CA to delete."
|
||||
},
|
||||
GET_CSR: {
|
||||
caId: "The ID of the CA to generate CSR from",
|
||||
csr: "The generated CSR from the CA"
|
||||
caId: "The ID of the CA to generate CSR from.",
|
||||
csr: "The generated CSR from the CA."
|
||||
},
|
||||
RENEW_CA_CERT: {
|
||||
caId: "The ID of the CA to renew the CA certificate for",
|
||||
caId: "The ID of the CA to renew the CA certificate for.",
|
||||
type: "The type of behavior to use for the renewal operation. Currently Infisical is only able to renew a CA certificate with the same key pair.",
|
||||
notAfter: "The expiry date and time for the renewed CA certificate in YYYY-MM-DDTHH:mm:ss.sssZ format",
|
||||
certificate: "The renewed CA certificate body",
|
||||
certificateChain: "The certificate chain of the CA",
|
||||
serialNumber: "The serial number of the renewed CA certificate"
|
||||
notAfter: "The expiry date and time for the renewed CA certificate in YYYY-MM-DDTHH:mm:ss.sssZ format.",
|
||||
certificate: "The renewed CA certificate body.",
|
||||
certificateChain: "The certificate chain of the CA.",
|
||||
serialNumber: "The serial number of the renewed CA certificate."
|
||||
},
|
||||
GET_CERT: {
|
||||
caId: "The ID of the CA to get the certificate body and certificate chain from",
|
||||
certificate: "The certificate body of the CA",
|
||||
certificateChain: "The certificate chain of the CA",
|
||||
serialNumber: "The serial number of the CA certificate"
|
||||
caId: "The ID of the CA to get the certificate body and certificate chain from.",
|
||||
certificate: "The certificate body of the CA.",
|
||||
certificateChain: "The certificate chain of the CA.",
|
||||
serialNumber: "The serial number of the CA certificate."
|
||||
},
|
||||
GET_CERT_BY_ID: {
|
||||
caId: "The ID of the CA to get the CA certificate from",
|
||||
caCertId: "The ID of the CA certificate to get"
|
||||
caId: "The ID of the CA to get the CA certificate from.",
|
||||
caCertId: "The ID of the CA certificate to get."
|
||||
},
|
||||
GET_CA_CERTS: {
|
||||
caId: "The ID of the CA to get the CA certificates for",
|
||||
certificate: "The certificate body of the CA certificate",
|
||||
certificateChain: "The certificate chain of the CA certificate",
|
||||
serialNumber: "The serial number of the CA certificate",
|
||||
caId: "The ID of the CA to get the CA certificates for.",
|
||||
certificate: "The certificate body of the CA certificate.",
|
||||
certificateChain: "The certificate chain of the CA certificate.",
|
||||
serialNumber: "The serial number of the CA certificate.",
|
||||
version: "The version of the CA certificate. The version is incremented for each CA renewal operation."
|
||||
},
|
||||
SIGN_INTERMEDIATE: {
|
||||
caId: "The ID of the CA to sign the intermediate certificate with",
|
||||
csr: "The pem-encoded CSR to sign with the CA",
|
||||
notBefore: "The date and time when the intermediate CA becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format",
|
||||
notAfter: "The date and time when the intermediate CA expires in YYYY-MM-DDTHH:mm:ss.sssZ format",
|
||||
caId: "The ID of the CA to sign the intermediate certificate with.",
|
||||
csr: "The pem-encoded CSR to sign with the CA.",
|
||||
notBefore: "The date and time when the intermediate CA becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format.",
|
||||
notAfter: "The date and time when the intermediate CA expires in YYYY-MM-DDTHH:mm:ss.sssZ format.",
|
||||
maxPathLength:
|
||||
"The maximum number of intermediate CAs that may follow this CA in the certificate / CA chain. A maxPathLength of -1 implies no path limit on the chain.",
|
||||
certificate: "The signed intermediate certificate",
|
||||
certificateChain: "The certificate chain of the intermediate certificate",
|
||||
issuingCaCertificate: "The certificate of the issuing CA",
|
||||
serialNumber: "The serial number of the intermediate certificate"
|
||||
certificate: "The signed intermediate certificate.",
|
||||
certificateChain: "The certificate chain of the intermediate certificate.",
|
||||
issuingCaCertificate: "The certificate of the issuing CA.",
|
||||
serialNumber: "The serial number of the intermediate certificate."
|
||||
},
|
||||
IMPORT_CERT: {
|
||||
caId: "The ID of the CA to import the certificate for",
|
||||
certificate: "The certificate body to import",
|
||||
certificateChain: "The certificate chain to import"
|
||||
caId: "The ID of the CA to import the certificate for.",
|
||||
certificate: "The certificate body to import.",
|
||||
certificateChain: "The certificate chain to import."
|
||||
},
|
||||
ISSUE_CERT: {
|
||||
caId: "The ID of the CA to issue the certificate from",
|
||||
certificateTemplateId: "The ID of the certificate template to issue the certificate from",
|
||||
pkiCollectionId: "The ID of the PKI collection to add the certificate to",
|
||||
friendlyName: "A friendly name for the certificate",
|
||||
commonName: "The common name (CN) for the certificate",
|
||||
caId: "The ID of the CA to issue the certificate from.",
|
||||
certificateTemplateId: "The ID of the certificate template to issue the certificate from.",
|
||||
pkiCollectionId: "The ID of the PKI collection to add the certificate to.",
|
||||
friendlyName: "A friendly name for the certificate.",
|
||||
commonName: "The common name (CN) for the certificate.",
|
||||
altNames:
|
||||
"A comma-delimited list of Subject Alternative Names (SANs) for the certificate; these can be host names or email addresses.",
|
||||
ttl: "The time to live for the certificate such as 1m, 1h, 1d, 1y, ...",
|
||||
notBefore: "The date and time when the certificate becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format",
|
||||
notAfter: "The date and time when the certificate expires in YYYY-MM-DDTHH:mm:ss.sssZ format",
|
||||
certificate: "The issued certificate",
|
||||
issuingCaCertificate: "The certificate of the issuing CA",
|
||||
certificateChain: "The certificate chain of the issued certificate",
|
||||
privateKey: "The private key of the issued certificate",
|
||||
serialNumber: "The serial number of the issued certificate",
|
||||
keyUsages: "The key usage extension of the certificate",
|
||||
extendedKeyUsages: "The extended key usage extension of the certificate"
|
||||
notBefore: "The date and time when the certificate becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format.",
|
||||
notAfter: "The date and time when the certificate expires in YYYY-MM-DDTHH:mm:ss.sssZ format.",
|
||||
certificate: "The issued certificate.",
|
||||
issuingCaCertificate: "The certificate of the issuing CA.",
|
||||
certificateChain: "The certificate chain of the issued certificate.",
|
||||
privateKey: "The private key of the issued certificate.",
|
||||
serialNumber: "The serial number of the issued certificate.",
|
||||
keyUsages: "The key usage extension of the certificate.",
|
||||
extendedKeyUsages: "The extended key usage extension of the certificate."
|
||||
},
|
||||
SIGN_CERT: {
|
||||
caId: "The ID of the CA to issue the certificate from",
|
||||
pkiCollectionId: "The ID of the PKI collection to add the certificate to",
|
||||
keyUsages: "The key usage extension of the certificate",
|
||||
extendedKeyUsages: "The extended key usage extension of the certificate",
|
||||
csr: "The pem-encoded CSR to sign with the CA to be used for certificate issuance",
|
||||
friendlyName: "A friendly name for the certificate",
|
||||
commonName: "The common name (CN) for the certificate",
|
||||
caId: "The ID of the CA to issue the certificate from.",
|
||||
pkiCollectionId: "The ID of the PKI collection to add the certificate to.",
|
||||
keyUsages: "The key usage extension of the certificate.",
|
||||
extendedKeyUsages: "The extended key usage extension of the certificate.",
|
||||
csr: "The pem-encoded CSR to sign with the CA to be used for certificate issuance.",
|
||||
friendlyName: "A friendly name for the certificate.",
|
||||
commonName: "The common name (CN) for the certificate.",
|
||||
altNames:
|
||||
"A comma-delimited list of Subject Alternative Names (SANs) for the certificate; these can be host names or email addresses.",
|
||||
ttl: "The time to live for the certificate such as 1m, 1h, 1d, 1y, ...",
|
||||
notBefore: "The date and time when the certificate becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format",
|
||||
notAfter: "The date and time when the certificate expires in YYYY-MM-DDTHH:mm:ss.sssZ format",
|
||||
certificate: "The issued certificate",
|
||||
issuingCaCertificate: "The certificate of the issuing CA",
|
||||
certificateChain: "The certificate chain of the issued certificate",
|
||||
serialNumber: "The serial number of the issued certificate"
|
||||
notBefore: "The date and time when the certificate becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format.",
|
||||
notAfter: "The date and time when the certificate expires in YYYY-MM-DDTHH:mm:ss.sssZ format.",
|
||||
certificate: "The issued certificate.",
|
||||
issuingCaCertificate: "The certificate of the issuing CA.",
|
||||
certificateChain: "The certificate chain of the issued certificate.",
|
||||
serialNumber: "The serial number of the issued certificate."
|
||||
},
|
||||
GET_CRLS: {
|
||||
caId: "The ID of the CA to get the certificate revocation lists (CRLs) for",
|
||||
id: "The ID of certificate revocation list (CRL)",
|
||||
crl: "The certificate revocation list (CRL)"
|
||||
caId: "The ID of the CA to get the certificate revocation lists (CRLs) for.",
|
||||
id: "The ID of certificate revocation list (CRL).",
|
||||
crl: "The certificate revocation list (CRL)."
|
||||
}
|
||||
};
|
||||
|
||||
export const CERTIFICATES = {
|
||||
GET: {
|
||||
serialNumber: "The serial number of the certificate to get"
|
||||
serialNumber: "The serial number of the certificate to get."
|
||||
},
|
||||
REVOKE: {
|
||||
serialNumber:
|
||||
"The serial number of the certificate to revoke. The revoked certificate will be added to the certificate revocation list (CRL) of the CA.",
|
||||
revocationReason: "The reason for revoking the certificate.",
|
||||
revokedAt: "The date and time when the certificate was revoked",
|
||||
revokedAt: "The date and time when the certificate was revoked.",
|
||||
serialNumberRes: "The serial number of the revoked certificate."
|
||||
},
|
||||
DELETE: {
|
||||
serialNumber: "The serial number of the certificate to delete"
|
||||
serialNumber: "The serial number of the certificate to delete."
|
||||
},
|
||||
GET_CERT: {
|
||||
serialNumber: "The serial number of the certificate to get the certificate body and certificate chain for",
|
||||
certificate: "The certificate body of the certificate",
|
||||
certificateChain: "The certificate chain of the certificate",
|
||||
serialNumberRes: "The serial number of the certificate"
|
||||
serialNumber: "The serial number of the certificate to get the certificate body and certificate chain for.",
|
||||
certificate: "The certificate body of the certificate.",
|
||||
certificateChain: "The certificate chain of the certificate.",
|
||||
serialNumberRes: "The serial number of the certificate."
|
||||
}
|
||||
};
|
||||
|
||||
export const CERTIFICATE_TEMPLATES = {
|
||||
CREATE: {
|
||||
caId: "The ID of the certificate authority to associate the template with",
|
||||
pkiCollectionId: "The ID of the PKI collection to bind to the template",
|
||||
name: "The name of the template",
|
||||
commonName: "The regular expression string to use for validating common names",
|
||||
subjectAlternativeName: "The regular expression string to use for validating subject alternative names",
|
||||
ttl: "The max TTL for the template",
|
||||
keyUsages: "The key usage constraint or default value for when template is used during certificate issuance",
|
||||
caId: "The ID of the certificate authority to associate the template with.",
|
||||
pkiCollectionId: "The ID of the PKI collection to bind to the template.",
|
||||
name: "The name of the template.",
|
||||
commonName: "The regular expression string to use for validating common names.",
|
||||
subjectAlternativeName: "The regular expression string to use for validating subject alternative names.",
|
||||
ttl: "The max TTL for the template.",
|
||||
keyUsages: "The key usage constraint or default value for when template is used during certificate issuance.",
|
||||
extendedKeyUsages:
|
||||
"The extended key usage constraint or default value for when template is used during certificate issuance"
|
||||
"The extended key usage constraint or default value for when template is used during certificate issuance."
|
||||
},
|
||||
GET: {
|
||||
certificateTemplateId: "The ID of the certificate template to get"
|
||||
certificateTemplateId: "The ID of the certificate template to get."
|
||||
},
|
||||
UPDATE: {
|
||||
certificateTemplateId: "The ID of the certificate template to update",
|
||||
caId: "The ID of the certificate authority to update the association with the template",
|
||||
pkiCollectionId: "The ID of the PKI collection to update the binding to the template",
|
||||
name: "The updated name of the template",
|
||||
commonName: "The updated regular expression string for validating common names",
|
||||
subjectAlternativeName: "The updated regular expression string for validating subject alternative names",
|
||||
ttl: "The updated max TTL for the template",
|
||||
certificateTemplateId: "The ID of the certificate template to update.",
|
||||
caId: "The ID of the certificate authority to update the association with the template.",
|
||||
pkiCollectionId: "The ID of the PKI collection to update the binding to the template.",
|
||||
name: "The updated name of the template.",
|
||||
commonName: "The updated regular expression string for validating common names.",
|
||||
subjectAlternativeName: "The updated regular expression string for validating subject alternative names.",
|
||||
ttl: "The updated max TTL for the template.",
|
||||
keyUsages:
|
||||
"The updated key usage constraint or default value for when template is used during certificate issuance",
|
||||
"The updated key usage constraint or default value for when template is used during certificate issuance.",
|
||||
extendedKeyUsages:
|
||||
"The updated extended key usage constraint or default value for when template is used during certificate issuance"
|
||||
"The updated extended key usage constraint or default value for when template is used during certificate issuance."
|
||||
},
|
||||
DELETE: {
|
||||
certificateTemplateId: "The ID of the certificate template to delete"
|
||||
certificateTemplateId: "The ID of the certificate template to delete."
|
||||
}
|
||||
};
|
||||
|
||||
export const CA_CRLS = {
|
||||
GET: {
|
||||
crlId: "The ID of the certificate revocation list (CRL) to get",
|
||||
crl: "The certificate revocation list (CRL)"
|
||||
crlId: "The ID of the certificate revocation list (CRL) to get.",
|
||||
crl: "The certificate revocation list (CRL)."
|
||||
}
|
||||
};
|
||||
|
||||
export const ALERTS = {
|
||||
CREATE: {
|
||||
projectId: "The ID of the project to create the alert in",
|
||||
pkiCollectionId: "The ID of the PKI collection to bind to the alert",
|
||||
name: "The name of the alert",
|
||||
alertBeforeDays: "The number of days before the certificate expires to trigger the alert",
|
||||
emails: "The email addresses to send the alert email to"
|
||||
projectId: "The ID of the project to create the alert in.",
|
||||
pkiCollectionId: "The ID of the PKI collection to bind to the alert.",
|
||||
name: "The name of the alert.",
|
||||
alertBeforeDays: "The number of days before the certificate expires to trigger the alert.",
|
||||
emails: "The email addresses to send the alert email to."
|
||||
},
|
||||
GET: {
|
||||
alertId: "The ID of the alert to get"
|
||||
alertId: "The ID of the alert to get."
|
||||
},
|
||||
UPDATE: {
|
||||
alertId: "The ID of the alert to update",
|
||||
name: "The name of the alert to update to",
|
||||
alertBeforeDays: "The number of days before the certificate expires to trigger the alert to update to",
|
||||
pkiCollectionId: "The ID of the PKI collection to bind to the alert to update to",
|
||||
emails: "The email addresses to send the alert email to update to"
|
||||
alertId: "The ID of the alert to update.",
|
||||
name: "The name of the alert to update to.",
|
||||
alertBeforeDays: "The number of days before the certificate expires to trigger the alert to update to.",
|
||||
pkiCollectionId: "The ID of the PKI collection to bind to the alert to update to.",
|
||||
emails: "The email addresses to send the alert email to update to."
|
||||
},
|
||||
DELETE: {
|
||||
alertId: "The ID of the alert to delete"
|
||||
alertId: "The ID of the alert to delete."
|
||||
}
|
||||
};
|
||||
|
||||
export const PKI_COLLECTIONS = {
|
||||
CREATE: {
|
||||
projectId: "The ID of the project to create the PKI collection in",
|
||||
name: "The name of the PKI collection",
|
||||
description: "A description for the PKI collection"
|
||||
projectId: "The ID of the project to create the PKI collection in.",
|
||||
name: "The name of the PKI collection.",
|
||||
description: "A description for the PKI collection."
|
||||
},
|
||||
GET: {
|
||||
collectionId: "The ID of the PKI collection to get"
|
||||
collectionId: "The ID of the PKI collection to get."
|
||||
},
|
||||
UPDATE: {
|
||||
collectionId: "The ID of the PKI collection to update",
|
||||
name: "The name of the PKI collection to update to",
|
||||
description: "The description for the PKI collection to update to"
|
||||
collectionId: "The ID of the PKI collection to update.",
|
||||
name: "The name of the PKI collection to update to.",
|
||||
description: "The description for the PKI collection to update to."
|
||||
},
|
||||
DELETE: {
|
||||
collectionId: "The ID of the PKI collection to delete"
|
||||
collectionId: "The ID of the PKI collection to delete."
|
||||
},
|
||||
LIST_ITEMS: {
|
||||
collectionId: "The ID of the PKI collection to list items from",
|
||||
type: "The type of the PKI collection item to list",
|
||||
offset: "The offset to start from",
|
||||
limit: "The number of items to return"
|
||||
collectionId: "The ID of the PKI collection to list items from.",
|
||||
type: "The type of the PKI collection item to list.",
|
||||
offset: "The offset to start from.",
|
||||
limit: "The number of items to return."
|
||||
},
|
||||
ADD_ITEM: {
|
||||
collectionId: "The ID of the PKI collection to add the item to",
|
||||
type: "The type of the PKI collection item to add",
|
||||
itemId: "The resource ID of the PKI collection item to add"
|
||||
collectionId: "The ID of the PKI collection to add the item to.",
|
||||
type: "The type of the PKI collection item to add.",
|
||||
itemId: "The resource ID of the PKI collection item to add."
|
||||
},
|
||||
DELETE_ITEM: {
|
||||
collectionId: "The ID of the PKI collection to delete the item from",
|
||||
collectionItemId: "The ID of the PKI collection item to delete",
|
||||
type: "The type of the deleted PKI collection item",
|
||||
itemId: "The resource ID of the deleted PKI collection item"
|
||||
collectionId: "The ID of the PKI collection to delete the item from.",
|
||||
collectionItemId: "The ID of the PKI collection item to delete.",
|
||||
type: "The type of the deleted PKI collection item.",
|
||||
itemId: "The resource ID of the deleted PKI collection item."
|
||||
}
|
||||
};
|
||||
|
||||
export const PROJECT_ROLE = {
|
||||
CREATE: {
|
||||
projectSlug: "Slug of the project to create the role for.",
|
||||
projectId: "Id of the project to create the role for.",
|
||||
slug: "The slug of the role.",
|
||||
name: "The name of the role.",
|
||||
description: "The description for the role.",
|
||||
permissions: "The permissions assigned to the role."
|
||||
},
|
||||
UPDATE: {
|
||||
projectSlug: "Slug of the project to update the role for.",
|
||||
projectSlug: "The slug of the project to update the role for.",
|
||||
projectId: "The ID of the project to update the role for.",
|
||||
roleId: "The ID of the role to update",
|
||||
slug: "The slug of the role.",
|
||||
name: "The name of the role.",
|
||||
@ -1341,15 +1390,18 @@ export const PROJECT_ROLE = {
|
||||
permissions: "The permissions assigned to the role."
|
||||
},
|
||||
DELETE: {
|
||||
projectSlug: "Slug of the project to delete this role for.",
|
||||
projectSlug: "The slug of the project to delete this role for.",
|
||||
projectId: "The ID of the project to delete the role for.",
|
||||
roleId: "The ID of the role to update"
|
||||
},
|
||||
GET_ROLE_BY_SLUG: {
|
||||
projectSlug: "The slug of the project.",
|
||||
roleSlug: "The slug of the role to get details"
|
||||
projectId: "The ID of the project.",
|
||||
roleSlug: "The slug of the role to get details."
|
||||
},
|
||||
LIST: {
|
||||
projectSlug: "The slug of the project to list the roles of."
|
||||
projectSlug: "The slug of the project to list the roles of.",
|
||||
projectId: "The ID of the project."
|
||||
}
|
||||
};
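// Illustrative sketch (not part of this diff): these description constants are
// consumed by route schemas through zod's .describe(), as other routers in this
// changeset do (e.g. KMS.DECRYPT.keyId). The exact route shown here is assumed.
import { z } from "zod";
import { PKI_COLLECTIONS } from "@app/lib/api-docs";

const getPkiCollectionParams = z.object({
  collectionId: z.string().trim().describe(PKI_COLLECTIONS.GET.collectionId)
});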
@ -1,111 +0,0 @@
import { AnyAbility, ExtractSubjectType } from "@casl/ability";
import { AbilityQuery, rulesToQuery } from "@casl/ability/extra";
import { Tables } from "knex/types/tables";

import { BadRequestError, UnauthorizedError } from "../errors";
import { TKnexDynamicOperator } from "../knex/dynamic";

type TBuildKnexQueryFromCaslDTO<K extends AnyAbility> = {
ability: K;
subject: ExtractSubjectType<Parameters<K["rulesFor"]>[1]>;
action: Parameters<K["rulesFor"]>[0];
};

export const buildKnexQueryFromCaslOperators = <K extends AnyAbility>({
ability,
subject,
action
}: TBuildKnexQueryFromCaslDTO<K>) => {
const query = rulesToQuery(ability, action, subject, (rule) => {
if (!rule.ast) throw new Error("Ast not defined");
return rule.ast;
});

if (query === null) throw new UnauthorizedError({ message: `You don't have permission to do ${action} ${subject}` });
return query;
};

type TFieldMapper<T extends keyof Tables> = {
[K in T]: `${K}.${Exclude<keyof Tables[K]["base"], symbol>}`;
}[T];

type TFormatCaslFieldsWithTableNames<T extends keyof Tables> = {
// handle any missing operator, else throw an error and let the app break, because this is executed against the db
missingOperatorCallback?: (operator: string) => void;
fieldMapping: (arg: string) => TFieldMapper<T> | null;
dynamicQuery: TKnexDynamicOperator;
};

export const formatCaslOperatorFieldsWithTableNames = <T extends keyof Tables>({
missingOperatorCallback = (arg) => {
throw new BadRequestError({ message: `Unknown permission operator: ${arg}` });
},
dynamicQuery: dynamicQueryAst,
fieldMapping
}: TFormatCaslFieldsWithTableNames<T>) => {
const stack: [TKnexDynamicOperator, TKnexDynamicOperator | null][] = [[dynamicQueryAst, null]];

while (stack.length) {
const [filterAst, parentAst] = stack.pop()!;

if (filterAst.operator === "and" || filterAst.operator === "or" || filterAst.operator === "not") {
filterAst.value.forEach((el) => {
stack.push([el, filterAst]);
});

// eslint-disable-next-line no-continue
continue;
}

if (
filterAst.operator === "eq" ||
filterAst.operator === "ne" ||
filterAst.operator === "in" ||
filterAst.operator === "endsWith" ||
filterAst.operator === "startsWith"
) {
const attrPath = fieldMapping(filterAst.field);
if (attrPath) {
filterAst.field = attrPath;
} else if (parentAst && Array.isArray(parentAst.value)) {
parentAst.value = parentAst.value.filter((childAst) => childAst !== filterAst) as string[];
} else throw new Error("Unknown casl field");
// eslint-disable-next-line no-continue
continue;
}

if (parentAst && Array.isArray(parentAst.value)) {
parentAst.value = parentAst.value.filter((childAst) => childAst !== filterAst) as string[];
} else {
missingOperatorCallback?.(filterAst.operator);
}
}
return dynamicQueryAst;
};

export const convertCaslOperatorToKnexOperator = <T extends keyof Tables>(
caslKnexOperators: AbilityQuery,
fieldMapping: (arg: string) => TFieldMapper<T> | null
) => {
const value = [];
if (caslKnexOperators.$and) {
value.push({
operator: "not" as const,
value: caslKnexOperators.$and as TKnexDynamicOperator[]
});
}
if (caslKnexOperators.$or) {
value.push({
operator: "or" as const,
value: caslKnexOperators.$or as TKnexDynamicOperator[]
});
}

return formatCaslOperatorFieldsWithTableNames({
dynamicQuery: {
operator: "and",
value
},
fieldMapping
});
};
@ -162,7 +162,8 @@ const envSchema = z
DISABLE_AUDIT_LOG_GENERATION: zodStrBool.default("false"),
SSL_CLIENT_CERTIFICATE_HEADER_KEY: zpStr(z.string().optional()).default("x-ssl-client-cert"),
WORKFLOW_SLACK_CLIENT_ID: zpStr(z.string().optional()),
WORKFLOW_SLACK_CLIENT_SECRET: zpStr(z.string().optional())
WORKFLOW_SLACK_CLIENT_SECRET: zpStr(z.string().optional()),
ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT: zodStrBool.default("true")
})
.transform((data) => ({
...data,
@ -81,3 +81,25 @@ export const chunkArray = <T>(array: T[], chunkSize: number): T[][] => {
}
return chunks;
};

/*
* Returns all items from the first list that
* do not exist in the second list.
*/
export const diff = <T>(
root: readonly T[],
other: readonly T[],
identity: (item: T) => string | number | symbol = (t: T) => t as unknown as string | number | symbol
): T[] => {
if (!root?.length && !other?.length) return [];
if (root?.length === undefined) return [...other];
if (!other?.length) return [...root];
const bKeys = other.reduce(
(acc, item) => {
acc[identity(item)] = true;
return acc;
},
{} as Record<string | number | symbol, boolean>
);
return root.filter((a) => !bKeys[identity(a)]);
};
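// Usage sketch for the diff() helper above (the identity callback is needed for
// object items, since the default identity only works for primitive keys).
// The Member type and sample data are illustrative, not part of this diff.
type Member = { id: string };

const current: Member[] = [{ id: "a" }, { id: "b" }, { id: "c" }];
const incoming: Member[] = [{ id: "b" }];

const removed = diff(current, incoming, (member) => member.id);
// => [{ id: "a" }, { id: "c" }]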
@ -2,32 +2,31 @@ import { Knex } from "knex";

import { UnauthorizedError } from "../errors";

type TKnexDynamicPrimitiveOperator = {
type TKnexDynamicPrimitiveOperator<T extends object> = {
operator: "eq" | "ne" | "startsWith" | "endsWith";
value: string;
field: string;
field: Extract<keyof T, string>;
};

type TKnexDynamicInOperator = {
type TKnexDynamicInOperator<T extends object> = {
operator: "in";
value: string[] | number[];
field: string;
field: Extract<keyof T, string>;
};

type TKnexNonGroupOperator = TKnexDynamicInOperator | TKnexDynamicPrimitiveOperator;
type TKnexNonGroupOperator<T extends object> = TKnexDynamicInOperator<T> | TKnexDynamicPrimitiveOperator<T>;

type TKnexGroupOperator = {
type TKnexGroupOperator<T extends object> = {
operator: "and" | "or" | "not";
value: (TKnexNonGroupOperator | TKnexGroupOperator)[];
value: (TKnexNonGroupOperator<T> | TKnexGroupOperator<T>)[];
};

// akhilmhdh: This is still in a pending state and not yet ready. If you want to use it, ping me.
// used when you need to write a complex query with the ORM
// use it when you need complex OR/AND conditions - most of the time not needed
// mainly used with CASL permissions to filter data based on permission
export type TKnexDynamicOperator = TKnexGroupOperator | TKnexNonGroupOperator;
export type TKnexDynamicOperator<T extends object> = TKnexGroupOperator<T> | TKnexNonGroupOperator<T>;

export const buildDynamicKnexQuery = (dynamicQuery: TKnexDynamicOperator, rootQueryBuild: Knex.QueryBuilder) => {
export const buildDynamicKnexQuery = <T extends object>(
rootQueryBuild: Knex.QueryBuilder,
dynamicQuery: TKnexDynamicOperator<T>
) => {
const stack = [{ filterAst: dynamicQuery, queryBuilder: rootQueryBuild }];

while (stack.length) {
@ -50,34 +49,25 @@ export const buildDynamicKnexQuery = (dynamicQuery: TKnexDynamicOperator, rootQu
break;
}
case "and": {
void queryBuilder.andWhere((subQueryBuilder) => {
filterAst.value.forEach((el) => {
stack.push({
queryBuilder: subQueryBuilder,
filterAst: el
});
filterAst.value.forEach((el) => {
void queryBuilder.andWhere((subQueryBuilder) => {
buildDynamicKnexQuery(subQueryBuilder, el);
});
});
break;
}
case "or": {
void queryBuilder.orWhere((subQueryBuilder) => {
filterAst.value.forEach((el) => {
stack.push({
queryBuilder: subQueryBuilder,
filterAst: el
});
filterAst.value.forEach((el) => {
void queryBuilder.orWhere((subQueryBuilder) => {
buildDynamicKnexQuery(subQueryBuilder, el);
});
});
break;
}
case "not": {
void queryBuilder.whereNot((subQueryBuilder) => {
filterAst.value.forEach((el) => {
stack.push({
queryBuilder: subQueryBuilder,
filterAst: el
});
filterAst.value.forEach((el) => {
void queryBuilder.whereNot((subQueryBuilder) => {
buildDynamicKnexQuery(subQueryBuilder, el);
});
});
break;
@ -3,6 +3,7 @@ import { Knex } from "knex";
import { Tables } from "knex/types/tables";

import { DatabaseError } from "../errors";
import { buildDynamicKnexQuery, TKnexDynamicOperator } from "./dynamic";

export * from "./connection";
export * from "./join";
@ -20,9 +21,10 @@ export const withTransaction = <K extends object>(db: Knex, dal: K) => ({
export type TFindFilter<R extends object = object> = Partial<R> & {
$in?: Partial<{ [k in keyof R]: R[k][] }>;
$search?: Partial<{ [k in keyof R]: R[k] }>;
$complex?: TKnexDynamicOperator<R>;
};
export const buildFindFilter =
<R extends object = object>({ $in, $search, ...filter }: TFindFilter<R>) =>
<R extends object = object>({ $in, $search, $complex, ...filter }: TFindFilter<R>) =>
(bd: Knex.QueryBuilder<R, R>) => {
void bd.where(filter);
if ($in) {
@ -39,6 +41,9 @@ export const buildFindFilter =
}
});
}
if ($complex) {
return buildDynamicKnexQuery(bd, $complex);
}
return bd;
};
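// Illustrative sketch of the new $complex filter: a TKnexDynamicOperator tree
// combining and/or groups with primitive operators, passed through buildFindFilter.
// The table shape (environment/key columns) and the commented db call are
// assumptions, not taken from this diff.
const complexFilter = {
  operator: "and" as const,
  value: [
    { operator: "eq" as const, field: "environment", value: "dev" },
    {
      operator: "or" as const,
      value: [
        { operator: "startsWith" as const, field: "key", value: "DB_" },
        { operator: "in" as const, field: "key", value: ["API_TOKEN", "API_URL"] }
      ]
    }
  ]
};

// const rows = await db("secrets").where(buildFindFilter({ $complex: complexFilter }));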
@ -57,3 +57,10 @@ export enum OrderByDirection {
ASC = "asc",
DESC = "desc"
}

export type ProjectServiceActor = {
type: ActorType;
id: string;
authMethod: ActorAuthMethod;
orgId: string;
};
@ -63,7 +63,7 @@ export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider
void res.status(HttpStatusCodes.Forbidden).send({
statusCode: HttpStatusCodes.Forbidden,
error: "PermissionDenied",
message: `You are not allowed to ${error.action} on ${error.subjectType}`
message: `You are not allowed to ${error.action} on ${error.subjectType} - ${JSON.stringify(error.subject)}`
});
} else if (error instanceof ForbiddenRequestError) {
void res.status(HttpStatusCodes.Forbidden).send({
@ -15,8 +15,12 @@ export const fastifySwagger = fp(async (fastify) => {
},
servers: [
{
url: "https://app.infisical.com",
description: "Production server"
url: "https://us.infisical.com",
description: "Production server (US)"
},
{
url: "https://eu.infisical.com",
description: "Production server (EU)"
},
{
url: "http://localhost:8080",
@ -5,6 +5,7 @@ import { z } from "zod";

import { registerCertificateEstRouter } from "@app/ee/routes/est/certificate-est-router";
import { registerV1EERoutes } from "@app/ee/routes/v1";
import { registerV2EERoutes } from "@app/ee/routes/v2";
import { accessApprovalPolicyApproverDALFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-approver-dal";
import { accessApprovalPolicyDALFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-dal";
import { accessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
@ -32,6 +33,7 @@ import { groupServiceFactory } from "@app/ee/services/group/group-service";
import { userGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
import { identityProjectAdditionalPrivilegeDALFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-dal";
import { identityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { identityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
import { ldapConfigDALFactory } from "@app/ee/services/ldap-config/ldap-config-dal";
import { ldapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { ldapGroupMapDALFactory } from "@app/ee/services/ldap-config/ldap-group-map-dal";
@ -1075,9 +1077,16 @@ export const registerRoutes = async (
permissionService,
identityProjectDAL
});

const identityProjectAdditionalPrivilegeV2Service = identityProjectAdditionalPrivilegeV2ServiceFactory({
projectDAL,
identityProjectAdditionalPrivilegeDAL,
permissionService,
identityProjectDAL
});

const identityTokenAuthService = identityTokenAuthServiceFactory({
identityTokenAuthDAL,
identityDAL,
identityOrgMembershipDAL,
identityAccessTokenDAL,
permissionService,
@ -1086,7 +1095,6 @@ export const registerRoutes = async (
const identityUaService = identityUaServiceFactory({
identityOrgMembershipDAL,
permissionService,
identityDAL,
identityAccessTokenDAL,
identityUaClientSecretDAL,
identityUaDAL,
@ -1096,7 +1104,6 @@ export const registerRoutes = async (
identityKubernetesAuthDAL,
identityOrgMembershipDAL,
identityAccessTokenDAL,
identityDAL,
orgBotDAL,
permissionService,
licenseService
@ -1105,7 +1112,6 @@ export const registerRoutes = async (
identityGcpAuthDAL,
identityOrgMembershipDAL,
identityAccessTokenDAL,
identityDAL,
permissionService,
licenseService
});
@ -1114,7 +1120,6 @@ export const registerRoutes = async (
identityAccessTokenDAL,
identityAwsAuthDAL,
identityOrgMembershipDAL,
identityDAL,
licenseService,
permissionService
});
@ -1123,7 +1128,6 @@ export const registerRoutes = async (
identityAzureAuthDAL,
identityOrgMembershipDAL,
identityAccessTokenDAL,
identityDAL,
permissionService,
licenseService
});
@ -1132,7 +1136,6 @@ export const registerRoutes = async (
identityOidcAuthDAL,
identityOrgMembershipDAL,
identityAccessTokenDAL,
identityDAL,
permissionService,
licenseService,
orgBotDAL
@ -1324,6 +1327,7 @@ export const registerRoutes = async (
telemetry: telemetryService,
projectUserAdditionalPrivilege: projectUserAdditionalPrivilegeService,
identityProjectAdditionalPrivilege: identityProjectAdditionalPrivilegeService,
identityProjectAdditionalPrivilegeV2: identityProjectAdditionalPrivilegeV2Service,
secretSharing: secretSharingService,
userEngagement: userEngagementService,
externalKms: externalKmsService,
@ -1422,7 +1426,13 @@ export const registerRoutes = async (
},
{ prefix: "/api/v1" }
);
await server.register(registerV2Routes, { prefix: "/api/v2" });
await server.register(
async (v2Server) => {
await v2Server.register(registerV2EERoutes);
await v2Server.register(registerV2Routes);
},
{ prefix: "/api/v2" }
);
await server.register(registerV3Routes, { prefix: "/api/v3" });

server.addHook("onClose", async () => {
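// Sketch of the Fastify scoping pattern used above: two plugins registered
// inside one scope share the /api/v2 prefix. The plugins here are placeholders,
// not the actual Infisical route modules.
import Fastify, { FastifyInstance } from "fastify";

const app = Fastify();

const pluginA = async (instance: FastifyInstance) => {
  instance.get("/ping", async () => ({ ok: true }));
};

const pluginB = async (instance: FastifyInstance) => {
  instance.get("/health", async () => ({ up: true }));
};

await app.register(
  async (scope) => {
    await scope.register(pluginA);
    await scope.register(pluginB);
  },
  { prefix: "/api/v2" }
);
// Both GET /api/v2/ping and GET /api/v2/health are now served from the same scope.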
@ -9,9 +9,10 @@ import {
SecretApprovalPoliciesSchema,
UsersSchema
} from "@app/db/schemas";
import { UnpackedPermissionSchema } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";

import { UnpackedPermissionSchema } from "./santizedSchemas/permission";

// sometimes the return data must be sanitized to avoid leaking important values
// always prefer pick over omit in zod
export const integrationAuthPubSchema = IntegrationAuthsSchema.pick({
@ -149,13 +150,44 @@ export const ProjectSpecificPrivilegePermissionSchema = z.object({
});

export const SanitizedIdentityPrivilegeSchema = IdentityProjectAdditionalPrivilegeSchema.extend({
permissions: UnpackedPermissionSchema.array()
permissions: UnpackedPermissionSchema.array().transform((permissions) =>
permissions.filter(
(caslRule) =>
![
ProjectPermissionSub.DynamicSecrets,
ProjectPermissionSub.SecretImports,
ProjectPermissionSub.SecretFolders
].includes((caslRule?.subject as ProjectPermissionSub) || "")
)
)
});

export const SanitizedRoleSchema = ProjectRolesSchema.extend({
permissions: UnpackedPermissionSchema.array()
});

export const SanitizedRoleSchemaV1 = ProjectRolesSchema.extend({
permissions: UnpackedPermissionSchema.array().transform((caslPermission) =>
// first map and remove other actions of folder permission
caslPermission
.map((caslRule) =>
caslRule.subject === ProjectPermissionSub.SecretFolders
? {
...caslRule,
action: caslRule.action.filter((caslAction) => caslAction === ProjectPermissionActions.Read)
}
: caslRule
)
// now filter out dynamic secret, secret import permission
.filter(
(caslRule) =>
![ProjectPermissionSub.DynamicSecrets, ProjectPermissionSub.SecretImports].includes(
(caslRule?.subject as ProjectPermissionSub) || ""
) && caslRule.action.length > 0
)
)
});

export const SanitizedDynamicSecretSchema = DynamicSecretsSchema.omit({
inputIV: true,
inputTag: true,
@ -0,0 +1,7 @@
import { IdentityProjectAdditionalPrivilegeSchema } from "@app/db/schemas";

import { UnpackedPermissionSchema } from "./permission";

export const SanitizedIdentityPrivilegeSchema = IdentityProjectAdditionalPrivilegeSchema.extend({
permissions: UnpackedPermissionSchema.array()
});
backend/src/server/routes/santizedSchemas/permission.ts (new file, 16 lines)
@ -0,0 +1,16 @@
import { MongoAbility, RawRuleOf } from "@casl/ability";
import { PackRule, unpackRules } from "@casl/ability/extra";
import { z } from "zod";

export const UnpackedPermissionSchema = z.object({
subject: z
.union([z.string().min(1), z.string().array()])
.transform((el) => (typeof el !== "string" ? el[0] : el))
.optional(),
action: z.union([z.string().min(1), z.string().array()]).transform((el) => (typeof el === "string" ? [el] : el)),
conditions: z.unknown().optional(),
inverted: z.boolean().optional()
});

export const unpackPermissions = (permissions: unknown) =>
UnpackedPermissionSchema.array().parse(unpackRules((permissions || []) as PackRule<RawRuleOf<MongoAbility>>[]));
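// Usage sketch for unpackPermissions: it parses a packed CASL rule set
// (packRules/unpackRules from @casl/ability/extra) into the normalized shape
// above. The sample subjects and actions are illustrative.
import { packRules } from "@casl/ability/extra";

const packed = packRules([
  { subject: "secrets", action: "read", conditions: { environment: "dev" } },
  { subject: "secret-folders", action: ["create", "edit"] }
]);

const permissions = unpackPermissions(packed);
// => [{ subject: "secrets", action: ["read"], conditions: { environment: "dev" } },
//     { subject: "secret-folders", action: ["create", "edit"] }]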
@ -0,0 +1,7 @@
import { ProjectUserAdditionalPrivilegeSchema } from "@app/db/schemas";

import { UnpackedPermissionSchema } from "./permission";

export const SanitizedUserProjectAdditionalPrivilegeSchema = ProjectUserAdditionalPrivilegeSchema.extend({
permissions: UnpackedPermissionSchema.array()
});
@ -29,6 +29,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
}).extend({
isMigrationModeOn: z.boolean(),
defaultAuthOrgSlug: z.string().nullable(),
defaultAuthOrgAuthEnforced: z.boolean().nullish(),
defaultAuthOrgAuthMethod: z.string().nullish(),
isSecretScanningDisabled: z.boolean()
})
})
@ -293,10 +293,10 @@ export const registerCmekRouter = async (server: FastifyZodProvider) => {
schema: {
description: "Decrypt data with KMS key",
params: z.object({
keyId: z.string().uuid().describe(KMS.ENCRYPT.keyId)
keyId: z.string().uuid().describe(KMS.DECRYPT.keyId)
}),
body: z.object({
ciphertext: base64Schema.describe(KMS.ENCRYPT.plaintext)
ciphertext: base64Schema.describe(KMS.DECRYPT.ciphertext)
}),
response: {
200: z.object({
@ -3,7 +3,10 @@ import { z } from "zod";

import { SecretFoldersSchema, SecretImportsSchema, SecretTagsSchema } from "@app/db/schemas";
import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import {
ProjectPermissionDynamicSecretActions,
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { DASHBOARD } from "@app/lib/api-docs";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
@ -17,6 +20,8 @@ import { AuthMode } from "@app/services/auth/auth-type";
import { SecretsOrderBy } from "@app/services/secret/secret-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";

const MAX_DEEP_SEARCH_LIMIT = 500; // arbitrary limit to prevent excessive results

// handle querystring boolean values
const booleanSchema = z
.union([z.boolean(), z.string().trim()])
@ -31,6 +36,35 @@ const booleanSchema = z
.optional()
.default(true);

const parseSecretPathSearch = (search?: string) => {
if (!search)
return {
searchName: "",
searchPath: ""
};

if (!search.includes("/"))
return {
searchName: search,
searchPath: ""
};

if (search === "/")
return {
searchName: "",
searchPath: "/"
};

const [searchName, ...searchPathSegments] = search.split("/").reverse();
let searchPath = removeTrailingSlash(searchPathSegments.reverse().join("/").toLowerCase());
if (!searchPath.startsWith("/")) searchPath = `/${searchPath}`;

return {
searchName,
searchPath
};
};
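// Worked examples (as comments) for parseSecretPathSearch above - the search
// term splits into a name part and a lower-cased, leading-slash path part:
// parseSecretPathSearch()                  -> { searchName: "", searchPath: "" }
// parseSecretPathSearch("DB_PASSWORD")     -> { searchName: "DB_PASSWORD", searchPath: "" }
// parseSecretPathSearch("/")               -> { searchName: "", searchPath: "/" }
// parseSecretPathSearch("backend/api/DB")  -> { searchName: "DB", searchPath: "/backend/api" }
// parseSecretPathSearch("/Backend/TOKEN")  -> { searchName: "TOKEN", searchPath: "/backend" }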
export const registerDashboardRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
@ -131,7 +165,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
let folders: Awaited<ReturnType<typeof server.services.folder.getFoldersMultiEnv>> | undefined;
let secrets: Awaited<ReturnType<typeof server.services.secret.getSecretsRawMultiEnv>> | undefined;
let dynamicSecrets:
| Awaited<ReturnType<typeof server.services.dynamicSecret.listDynamicSecretsByFolderIds>>
| Awaited<ReturnType<typeof server.services.dynamicSecret.listDynamicSecretsByEnvs>>
| undefined;

let totalFolderCount: number | undefined;
@ -192,15 +226,15 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
req.permission.orgId
);

const permissiveEnvs = // filter envs user has access to
const allowedDynamicSecretEnvironments = // filter envs user has access to
environments.filter((environment) =>
permission.can(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
ProjectPermissionDynamicSecretActions.Lease,
subject(ProjectPermissionSub.DynamicSecrets, { environment, secretPath })
)
);

if (includeDynamicSecrets && permissiveEnvs.length) {
if (includeDynamicSecrets && allowedDynamicSecretEnvironments.length) {
// this is the unique count, ie duplicate secrets across envs only count as 1
totalDynamicSecretCount = await server.services.dynamicSecret.getCountMultiEnv({
actor: req.permission.type,
@ -209,13 +243,13 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
actorOrgId: req.permission.orgId,
projectId,
search,
environmentSlugs: permissiveEnvs,
environmentSlugs: allowedDynamicSecretEnvironments,
path: secretPath,
isInternal: true
});

if (remainingLimit > 0 && totalDynamicSecretCount > adjustedOffset) {
dynamicSecrets = await server.services.dynamicSecret.listDynamicSecretsByFolderIds({
dynamicSecrets = await server.services.dynamicSecret.listDynamicSecretsByEnvs({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
@ -224,7 +258,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
search,
orderBy,
orderDirection,
environmentSlugs: permissiveEnvs,
environmentSlugs: allowedDynamicSecretEnvironments,
path: secretPath,
limit: remainingLimit,
offset: adjustedOffset,
@ -241,13 +275,13 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
}
}

if (includeSecrets && permissiveEnvs.length) {
if (includeSecrets) {
// this is the unique count, ie duplicate secrets across envs only count as 1
totalSecretCount = await server.services.secret.getSecretsCountMultiEnv({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
environments: permissiveEnvs,
environments,
actorAuthMethod: req.permission.authMethod,
projectId,
path: secretPath,
@ -260,7 +294,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
environments: permissiveEnvs,
environments,
actorAuthMethod: req.permission.authMethod,
projectId,
path: secretPath,
@ -272,7 +306,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
isInternal: true
});

for await (const environment of permissiveEnvs) {
for await (const environment of environments) {
const secretCountFromEnv = secrets.filter((secret) => secret.environment === environment).length;

if (secretCountFromEnv) {
@ -630,4 +664,180 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
};
}
});

server.route({
method: "GET",
url: "/secrets-deep-search",
config: {
rateLimit: secretsLimit
},
schema: {
security: [
{
bearerAuth: []
}
],
querystring: z.object({
projectId: z.string().trim(),
environments: z.string().trim().transform(decodeURIComponent),
secretPath: z.string().trim().default("/").transform(removeTrailingSlash),
search: z.string().trim().optional(),
tags: z.string().trim().transform(decodeURIComponent).optional()
}),
response: {
200: z.object({
folders: SecretFoldersSchema.extend({ path: z.string() }).array().optional(),
dynamicSecrets: SanitizedDynamicSecretSchema.extend({ path: z.string(), environment: z.string() })
.array()
.optional(),
secrets: secretRawSchema
.extend({
secretPath: z.string().optional(),
tags: SecretTagsSchema.pick({
id: true,
slug: true,
color: true
})
.extend({ name: z.string() })
.array()
.optional()
})
.array()
.optional()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { secretPath, projectId, search } = req.query;

const environments = req.query.environments.split(",").filter((env) => Boolean(env.trim()));
if (!environments.length) throw new BadRequestError({ message: "One or more environments required" });

const tags = req.query.tags?.split(",").filter((tag) => Boolean(tag.trim())) ?? [];
if (!search && !tags.length) throw new BadRequestError({ message: "Search or tags required" });

const searchHasTags = Boolean(tags.length);

const allFolders = await server.services.folder.getFoldersDeepByEnvs(
{
projectId,
environments,
secretPath
},
req.permission
);

const { searchName, searchPath } = parseSecretPathSearch(search);

const folderMappings = allFolders.map((folder) => ({
folderId: folder.id,
path: folder.path,
environment: folder.environment
}));

const sharedFilters = {
search: searchName,
limit: MAX_DEEP_SEARCH_LIMIT,
orderBy: SecretsOrderBy.Name
};

const secrets = await server.services.secret.getSecretsRawByFolderMappings(
{
projectId,
folderMappings,
filters: {
...sharedFilters,
tagSlugs: tags,
includeTagsInSearch: true
}
},
req.permission
);

const dynamicSecrets = searchHasTags
? []
: await server.services.dynamicSecret.listDynamicSecretsByFolderIds(
{
projectId,
folderMappings,
filters: sharedFilters
},
req.permission
);

for await (const environment of environments) {
const secretCountForEnv = secrets.filter((secret) => secret.environment === environment).length;

if (secretCountForEnv) {
await server.services.auditLog.createAuditLog({
projectId,
...req.auditLogInfo,
event: {
type: EventType.GET_SECRETS,
metadata: {
environment,
secretPath,
numberOfSecrets: secretCountForEnv
}
}
});

if (getUserAgentType(req.headers["user-agent"]) !== UserAgentType.K8_OPERATOR) {
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretPulled,
distinctId: getTelemetryDistinctId(req),
properties: {
numberOfSecrets: secretCountForEnv,
workspaceId: projectId,
environment,
secretPath,
channel: getUserAgentType(req.headers["user-agent"]),
...req.auditLogInfo
}
});
}
}
}

const sliceQuickSearch = <T>(array: T[]) => array.slice(0, 25);

return {
secrets: sliceQuickSearch(
searchPath ? secrets.filter((secret) => secret.secretPath.endsWith(searchPath)) : secrets
),
dynamicSecrets: sliceQuickSearch(
searchPath
? dynamicSecrets.filter((dynamicSecret) => dynamicSecret.path.endsWith(searchPath))
: dynamicSecrets
),
folders: searchHasTags
? []
: sliceQuickSearch(
allFolders.filter((folder) => {
const [folderName, ...folderPathSegments] = folder.path.split("/").reverse();
const folderPath = folderPathSegments.reverse().join("/").toLowerCase() || "/";

if (searchPath) {
if (searchPath === "/") {
// only show root folders if no folder name search
if (!searchName) return folderPath === searchPath;

// start partial match on root folders
return folderName.toLowerCase().startsWith(searchName.toLowerCase());
}

// support ending partial path match
return (
folderPath.endsWith(searchPath) && folderName.toLowerCase().startsWith(searchName.toLowerCase())
);
}

// no search path, "fuzzy" match all folders
return folderName.toLowerCase().includes(searchName.toLowerCase());
})
)
};
}
});
};
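// Illustrative client call for the new /secrets-deep-search route. The host and
// the /api/v1/dashboard mount point are assumptions; the query fields
// (projectId, environments, secretPath, search, tags) come from the schema above.
const query = new URLSearchParams({
  projectId: "<project-id>",
  environments: "dev,staging", // comma-delimited, split server-side
  secretPath: "/",
  search: "backend/DB_" // "<path>/<name>" form handled by parseSecretPathSearch
});

const res = await fetch(`https://<infisical-host>/api/v1/dashboard/secrets-deep-search?${query.toString()}`, {
  headers: { Authorization: "Bearer <access-token>" }
});
const { secrets, folders, dynamicSecrets } = await res.json();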
@ -37,7 +37,9 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
identity: IdentitiesSchema
identity: IdentitiesSchema.extend({
authMethods: z.array(z.string())
})
})
}
},
@ -216,7 +218,9 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
permissions: true,
description: true
}).optional(),
identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true })
identity: IdentitiesSchema.pick({ name: true, id: true }).extend({
authMethods: z.array(z.string())
})
})
})
}
@ -261,7 +265,9 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
permissions: true,
description: true
}).optional(),
identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true })
identity: IdentitiesSchema.pick({ name: true, id: true }).extend({
authMethods: z.array(z.string())
})
}).array(),
totalCount: z.number()
})
@ -319,7 +325,9 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
temporaryAccessEndTime: z.date().nullable().optional()
})
),
identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true }),
identity: IdentitiesSchema.pick({ name: true, id: true }).extend({
authMethods: z.array(z.string())
}),
project: SanitizedProjectSchema.pick({ name: true, id: true })
})
)
@ -39,7 +39,8 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
email: true,
firstName: true,
lastName: true,
id: true
id: true,
username: true
}).merge(UserEncryptionKeysSchema.pick({ publicKey: true })),
roles: z.array(
z.object({
@ -56,7 +57,7 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
})
)
})
.omit({ createdAt: true, updatedAt: true })
.omit({ updatedAt: true })
.array()
})
}
@ -74,6 +75,65 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
}
});

server.route({
method: "GET",
url: "/:workspaceId/memberships/:membershipId",
config: {
rateLimit: readLimit
},
schema: {
description: "Return project user membership",
security: [
{
bearerAuth: []
}
],
params: z.object({
workspaceId: z.string().min(1).trim().describe(PROJECT_USERS.GET_USER_MEMBERSHIP.workspaceId),
membershipId: z.string().min(1).trim().describe(PROJECT_USERS.GET_USER_MEMBERSHIP.membershipId)
}),
response: {
200: z.object({
membership: ProjectMembershipsSchema.extend({
user: UsersSchema.pick({
email: true,
firstName: true,
lastName: true,
id: true,
username: true
}).merge(UserEncryptionKeysSchema.pick({ publicKey: true })),
roles: z.array(
z.object({
id: z.string(),
role: z.string(),
customRoleId: z.string().optional().nullable(),
customRoleName: z.string().optional().nullable(),
customRoleSlug: z.string().optional().nullable(),
isTemporary: z.boolean(),
temporaryMode: z.string().optional().nullable(),
temporaryRange: z.string().nullable().optional(),
temporaryAccessStartTime: z.date().nullable().optional(),
temporaryAccessEndTime: z.date().nullable().optional()
})
)
}).omit({ updatedAt: true })
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const membership = await server.services.projectMembership.getProjectMembershipById({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
projectId: req.params.workspaceId,
id: req.params.membershipId
});
return { membership };
}
});
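// Illustrative call to the new single-membership endpoint. The host and the
// /api/v1/workspace mount point are assumptions; the two path parameters come
// from the route definition above.
const res = await fetch(
  "https://<infisical-host>/api/v1/workspace/<workspaceId>/memberships/<membershipId>",
  { headers: { Authorization: "Bearer <access-token>" } }
);
const { membership } = await res.json();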
server.route({
method: "POST",
url: "/:workspaceId/memberships/details",
@ -58,7 +58,9 @@ export const registerIdentityOrgRouter = async (server: FastifyZodProvider) => {
permissions: true,
description: true
}).optional(),
identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true })
identity: IdentitiesSchema.pick({ name: true, id: true }).extend({
authMethods: z.array(z.string())
})
})
).array(),
totalCount: z.number()
@ -264,7 +264,9 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
temporaryAccessEndTime: z.date().nullable().optional()
})
),
identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true }),
identity: IdentitiesSchema.pick({ name: true, id: true }).extend({
authMethods: z.array(z.string())
}),
project: SanitizedProjectSchema.pick({ name: true, id: true })
})
.array(),
@ -285,6 +287,7 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
orderDirection: req.query.orderDirection,
search: req.query.search
});

return { identityMemberships, totalCount };
}
});
@ -328,7 +331,9 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
temporaryAccessEndTime: z.date().nullable().optional()
})
),
identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true }),
identity: IdentitiesSchema.pick({ name: true, id: true }).extend({
authMethods: z.array(z.string())
}),
project: SanitizedProjectSchema.pick({ name: true, id: true })
})
})
@ -23,6 +23,18 @@ import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";

import { secretRawSchema } from "../sanitizedSchemas";

const SecretReferenceNode = z.object({
key: z.string(),
value: z.string().optional(),
environment: z.string(),
secretPath: z.string()
});
type TSecretReferenceNode = z.infer<typeof SecretReferenceNode> & { children: TSecretReferenceNode[] };

const SecretReferenceNodeTree: z.ZodType<TSecretReferenceNode> = SecretReferenceNode.extend({
children: z.lazy(() => SecretReferenceNodeTree.array())
});
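// Illustrative parse with the recursive schema above: z.lazy lets each node
// carry an arbitrarily nested children array. Sample values are made up.
const sampleTree = SecretReferenceNodeTree.parse({
  key: "DATABASE_URL",
  environment: "prod",
  secretPath: "/backend",
  children: [
    { key: "DB_HOST", environment: "prod", secretPath: "/backend", children: [] }
  ]
});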
export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "POST",
|
||||
@ -2102,6 +2114,58 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/raw/:secretName/secret-reference-tree",
|
||||
config: {
|
||||
rateLimit: secretsLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Get secret reference tree",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
secretName: z.string().trim().describe(RAW_SECRETS.GET_REFERENCE_TREE.secretName)
|
||||
}),
|
||||
querystring: z.object({
|
||||
workspaceId: z.string().trim().describe(RAW_SECRETS.GET_REFERENCE_TREE.workspaceId),
|
||||
environment: z.string().trim().describe(RAW_SECRETS.GET_REFERENCE_TREE.environment),
|
||||
secretPath: z
|
||||
.string()
|
||||
.trim()
|
||||
.default("/")
|
||||
.transform(removeTrailingSlash)
|
||||
.describe(RAW_SECRETS.GET_REFERENCE_TREE.secretPath)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
tree: SecretReferenceNodeTree,
|
||||
value: z.string().optional()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const { secretName } = req.params;
|
||||
const { secretPath, environment, workspaceId } = req.query;
|
||||
const { tree, value } = await server.services.secret.getSecretReferenceTree({
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
projectId: workspaceId,
|
||||
secretName,
|
||||
secretPath,
|
||||
environment
|
||||
});
|
||||
|
||||
return { tree, value };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/backfill-secret-references",
|
||||
|
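Note: the recursive SecretReferenceNodeTree schema above relies on z.lazy so the node type can reference itself. A minimal standalone sketch of the same pattern (names and sample data are illustrative, not Infisical's module):

import { z } from "zod";

const Node = z.object({
  key: z.string(),
  value: z.string().optional(),
  environment: z.string(),
  secretPath: z.string()
});

// The TypeScript type is declared manually because the schema is self-referential.
type TNode = z.infer<typeof Node> & { children: TNode[] };

const NodeTree: z.ZodType<TNode> = Node.extend({
  // z.lazy defers evaluation so NodeTree can appear inside its own definition.
  children: z.lazy(() => NodeTree.array())
});

// Example: a root secret referencing one child secret.
NodeTree.parse({
  key: "DB_URL",
  environment: "dev",
  secretPath: "/",
  children: [{ key: "DB_HOST", environment: "dev", secretPath: "/", children: [] }]
});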
@@ -1,9 +1,9 @@
import { ForbiddenError } from "@casl/ability";
import { FastifyRequest } from "fastify";

import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionCmekActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { ProjectServiceActor } from "@app/lib/types";
import {
TCmekDecryptDTO,
TCmekEncryptDTO,
@@ -23,7 +23,7 @@ type TCmekServiceFactoryDep = {
export type TCmekServiceFactory = ReturnType<typeof cmekServiceFactory>;

export const cmekServiceFactory = ({ kmsService, kmsDAL, permissionService }: TCmekServiceFactoryDep) => {
const createCmek = async ({ projectId, ...dto }: TCreateCmekDTO, actor: FastifyRequest["permission"]) => {
const createCmek = async ({ projectId, ...dto }: TCreateCmekDTO, actor: ProjectServiceActor) => {
const { permission } = await permissionService.getProjectPermission(
actor.type,
actor.id,
@@ -43,7 +43,7 @@ export const cmekServiceFactory = ({ kmsService, kmsDAL, permissionService }: TC
return cmek;
};

const updateCmekById = async ({ keyId, ...data }: TUpdabteCmekByIdDTO, actor: FastifyRequest["permission"]) => {
const updateCmekById = async ({ keyId, ...data }: TUpdabteCmekByIdDTO, actor: ProjectServiceActor) => {
const key = await kmsDAL.findById(keyId);

if (!key) throw new NotFoundError({ message: `Key with ID ${keyId} not found` });
@@ -65,7 +65,7 @@ export const cmekServiceFactory = ({ kmsService, kmsDAL, permissionService }: TC
return cmek;
};

const deleteCmekById = async (keyId: string, actor: FastifyRequest["permission"]) => {
const deleteCmekById = async (keyId: string, actor: ProjectServiceActor) => {
const key = await kmsDAL.findById(keyId);

if (!key) throw new NotFoundError({ message: `Key with ID ${keyId} not found` });
@@ -89,7 +89,7 @@ export const cmekServiceFactory = ({ kmsService, kmsDAL, permissionService }: TC

const listCmeksByProjectId = async (
{ projectId, ...filters }: TListCmeksByProjectIdDTO,
actor: FastifyRequest["permission"]
actor: ProjectServiceActor
) => {
const { permission } = await permissionService.getProjectPermission(
actor.type,
@@ -106,7 +106,7 @@ export const cmekServiceFactory = ({ kmsService, kmsDAL, permissionService }: TC
return { cmeks, totalCount };
};

const cmekEncrypt = async ({ keyId, plaintext }: TCmekEncryptDTO, actor: FastifyRequest["permission"]) => {
const cmekEncrypt = async ({ keyId, plaintext }: TCmekEncryptDTO, actor: ProjectServiceActor) => {
const key = await kmsDAL.findById(keyId);

if (!key) throw new NotFoundError({ message: `Key with ID ${keyId} not found` });
@@ -132,7 +132,7 @@ export const cmekServiceFactory = ({ kmsService, kmsDAL, permissionService }: TC
return cipherTextBlob.toString("base64");
};

const cmekDecrypt = async ({ keyId, ciphertext }: TCmekDecryptDTO, actor: FastifyRequest["permission"]) => {
const cmekDecrypt = async ({ keyId, ciphertext }: TCmekDecryptDTO, actor: ProjectServiceActor) => {
const key = await kmsDAL.findById(keyId);

if (!key) throw new NotFoundError({ message: `Key with ID ${keyId} not found` });
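Note: the hunks above swap FastifyRequest["permission"] for ProjectServiceActor so the CMEK service no longer depends on the HTTP layer. A minimal sketch of the idea, with an assumed actor shape based only on the fields used in these hunks (type, id, authMethod, orgId):

// Hypothetical, trimmed-down actor type; the real ProjectServiceActor lives in @app/lib/types.
type ServiceActor = { type: string; id: string; authMethod: string | null; orgId: string };

// A function typed against the plain actor object can be called from any transport,
// not only a Fastify route handler.
const describeActor = (actor: ServiceActor): string => `${actor.type}:${actor.id} (org ${actor.orgId})`;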
@@ -1,9 +1,9 @@
import { ForbiddenError } from "@casl/ability";
import { FastifyRequest } from "fastify";

import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectServiceActor } from "@app/lib/types";
import { constructGroupOrgMembershipRoleMappings } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-fns";
import { TSyncExternalGroupOrgMembershipRoleMappingsDTO } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-types";
import { TOrgRoleDALFactory } from "@app/services/org/org-role-dal";
@@ -25,7 +25,7 @@ export const externalGroupOrgRoleMappingServiceFactory = ({
permissionService,
orgRoleDAL
}: TExternalGroupOrgRoleMappingServiceFactoryDep) => {
const listExternalGroupOrgRoleMappings = async (actor: FastifyRequest["permission"]) => {
const listExternalGroupOrgRoleMappings = async (actor: ProjectServiceActor) => {
const { permission } = await permissionService.getOrgPermission(
actor.type,
actor.id,
@@ -46,7 +46,7 @@ export const externalGroupOrgRoleMappingServiceFactory = ({

const updateExternalGroupOrgRoleMappings = async (
dto: TSyncExternalGroupOrgMembershipRoleMappingsDTO,
actor: FastifyRequest["permission"]
actor: ProjectServiceActor
) => {
const { permission } = await permissionService.getOrgPermission(
actor.type,
@@ -4,7 +4,7 @@ import sjcl from "sjcl";
import tweetnacl from "tweetnacl";
import tweetnaclUtil from "tweetnacl-util";

import { SecretType } from "@app/db/schemas";
import { SecretType, TSecretFolders } from "@app/db/schemas";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { chunkArray } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
@@ -19,7 +19,7 @@ import { TProjectEnvServiceFactory } from "../project-env/project-env-service";
import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
import { TSecretTagDALFactory } from "../secret-tag/secret-tag-dal";
import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal";
import { fnSecretBulkInsert, getAllNestedSecretReferences } from "../secret-v2-bridge/secret-v2-bridge-fns";
import { fnSecretBulkInsert, getAllSecretReferences } from "../secret-v2-bridge/secret-v2-bridge-fns";
import type { TSecretV2BridgeServiceFactory } from "../secret-v2-bridge/secret-v2-bridge-service";
import { TSecretVersionV2DALFactory } from "../secret-v2-bridge/secret-version-dal";
import { TSecretVersionV2TagDALFactory } from "../secret-v2-bridge/secret-version-tag-dal";
@@ -35,7 +35,7 @@ export type TImportDataIntoInfisicalDTO = {
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecretV2" | "create">;
secretVersionTagDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany" | "create">;

folderDAL: Pick<TSecretFolderDALFactory, "create" | "findBySecretPath">;
folderDAL: Pick<TSecretFolderDALFactory, "create" | "findBySecretPath" | "findById">;
projectService: Pick<TProjectServiceFactory, "createProject">;
projectEnvService: Pick<TProjectEnvServiceFactory, "createEnvironment">;
secretV2BridgeService: Pick<TSecretV2BridgeServiceFactory, "createManySecret">;
@@ -67,6 +67,7 @@ export const parseEnvKeyDataFn = async (decryptedJson: string): Promise<Infisica
const infisicalImportData: InfisicalImportData = {
projects: [],
environments: [],
folders: [],
secrets: []
};
@@ -80,25 +81,410 @@ export const parseEnvKeyDataFn = async (decryptedJson: string): Promise<Infisica
envTemplates.set(env.id, env.defaultName);
}

// environments
for (const env of parsedJson.baseEnvironments) {
infisicalImportData.environments.push({
id: env.id,
name: envTemplates.get(env.environmentRoleId)!,
projectId: env.envParentId
});
// custom base environments
for (const env of parsedJson.nonDefaultEnvironmentRoles) {
envTemplates.set(env.id, env.name);
}

// secrets
// environments
for (const env of parsedJson.baseEnvironments) {
const appId = parsedJson.apps.find((a) => a.id === env.envParentId)?.id;

// If we find the app from the envParentId, we know this is a root-level environment.
if (appId) {
infisicalImportData.environments.push({
id: env.id,
name: envTemplates.get(env.environmentRoleId)!,
projectId: appId
});
}
}

const findRootInheritedSecret = (
secret: { val?: string; inheritsEnvironmentId?: string },
secretName: string,
envs: typeof parsedJson.envs
): { val?: string } => {
if (!secret) {
return {
val: ""
};
}

// If we have a direct value, return it
if (secret.val !== undefined) {
return secret;
}

// If there's no inheritance, return the secret as is
if (!secret.inheritsEnvironmentId) {
return secret;
}

const inheritedEnv = envs[secret.inheritsEnvironmentId];
if (!inheritedEnv) return secret;
return findRootInheritedSecret(inheritedEnv.variables[secretName], secretName, envs);
};

const targetIdToFolderIdsMap = new Map<string, string>();
const processBranches = () => {
for (const subEnv of parsedJson.subEnvironments) {
const app = parsedJson.apps.find((a) => a.id === subEnv.envParentId);
const block = parsedJson.blocks.find((b) => b.id === subEnv.envParentId);

if (app) {
// Handle regular app branches
const branchEnvironment = infisicalImportData.environments.find((e) => e.id === subEnv.parentEnvironmentId);

// check if the folder already exists in the same parent environment with the same name

const folderExists = infisicalImportData.folders.some(
(f) => f.name === subEnv.subName && f.parentFolderId === subEnv.parentEnvironmentId
);

// No need to map to target ID's here, because we are not dealing with blocks
if (!folderExists) {
infisicalImportData.folders.push({
name: subEnv.subName,
parentFolderId: subEnv.parentEnvironmentId,
environmentId: branchEnvironment!.id,
id: subEnv.id
});
}
}

if (block) {
// Handle block branches
// 1. Find all apps that use this block
const appsUsingBlock = parsedJson.appBlocks.filter((ab) => ab.blockId === block.id);

for (const { appId, orderIndex } of appsUsingBlock) {
// 2. Find the matching environment in the app based on the environment role
const blockBaseEnv = parsedJson.baseEnvironments.find((be) => be.id === subEnv.parentEnvironmentId);

// eslint-disable-next-line no-continue
if (!blockBaseEnv) continue;

const matchingAppEnv = parsedJson.baseEnvironments.find(
(be) => be.envParentId === appId && be.environmentRoleId === blockBaseEnv.environmentRoleId
);

// eslint-disable-next-line no-continue
if (!matchingAppEnv) continue;

const folderExists = infisicalImportData.folders.some(
(f) => f.name === subEnv.subName && f.parentFolderId === matchingAppEnv.id
);

if (!folderExists) {
// 3. Create a folder in the matching app environment
infisicalImportData.folders.push({
name: subEnv.subName,
parentFolderId: matchingAppEnv.id,
environmentId: matchingAppEnv.id,
id: `${subEnv.id}-${appId}` // Create unique ID for each app's copy of the branch
});
} else {
// folder already exists, so lets map the old folder id to the new folder id
targetIdToFolderIdsMap.set(subEnv.id, `${subEnv.id}-${appId}`);
}

// 4. Process secrets in the block branch for this app
const branchSecrets = parsedJson.envs[subEnv.id]?.variables || {};
for (const [secretName, secretData] of Object.entries(branchSecrets)) {
if (secretData.inheritsEnvironmentId) {
const resolvedSecret = findRootInheritedSecret(secretData, secretName, parsedJson.envs);

// If the secret already exists in the environment, we need to check the orderIndex of the appBlock. The appBlock with the highest orderIndex should take precedence.
const preExistingSecretIndex = infisicalImportData.secrets.findIndex(
(s) => s.name === secretName && s.environmentId === matchingAppEnv.id
);

if (preExistingSecretIndex !== -1) {
const preExistingSecret = infisicalImportData.secrets[preExistingSecretIndex];

if (
preExistingSecret.appBlockOrderIndex !== undefined &&
orderIndex > preExistingSecret.appBlockOrderIndex
) {
// if the existing secret has a lower orderIndex, we should replace it
infisicalImportData.secrets[preExistingSecretIndex] = {
...preExistingSecret,
value: resolvedSecret.val || "",
appBlockOrderIndex: orderIndex
};
}

// eslint-disable-next-line no-continue
continue;
}

infisicalImportData.secrets.push({
id: randomUUID(),
name: secretName,
environmentId: matchingAppEnv.id,
value: resolvedSecret.val || "",
folderId: `${subEnv.id}-${appId}`,
appBlockOrderIndex: orderIndex
});
} else {
// If the secret already exists in the environment, we need to check the orderIndex of the appBlock. The appBlock with the highest orderIndex should take precedence.
const preExistingSecretIndex = infisicalImportData.secrets.findIndex(
(s) => s.name === secretName && s.environmentId === matchingAppEnv.id
);

if (preExistingSecretIndex !== -1) {
const preExistingSecret = infisicalImportData.secrets[preExistingSecretIndex];

if (
preExistingSecret.appBlockOrderIndex !== undefined &&
orderIndex > preExistingSecret.appBlockOrderIndex
) {
// if the existing secret has a lower orderIndex, we should replace it
infisicalImportData.secrets[preExistingSecretIndex] = {
...preExistingSecret,
value: secretData.val || "",
appBlockOrderIndex: orderIndex
};
}

// eslint-disable-next-line no-continue
continue;
}

infisicalImportData.secrets.push({
id: randomUUID(),
name: secretName,
environmentId: matchingAppEnv.id,
value: secretData.val || "",
folderId: `${subEnv.id}-${appId}`,
appBlockOrderIndex: orderIndex
});
}
}
}
}
}
};
const processBlocksForApp = (appIds: string[]) => {
for (const appId of appIds) {
const blocksInApp = parsedJson.appBlocks.filter((ab) => ab.appId === appId);
logger.info(
{
blocksInApp
},
"[processBlocksForApp]: Processing blocks for app"
);

for (const appBlock of blocksInApp) {
// 1. find all base environments for this block
const blockBaseEnvironments = parsedJson.baseEnvironments.filter((env) => env.envParentId === appBlock.blockId);
logger.info(
{
blockBaseEnvironments
},
"[processBlocksForApp]: Processing block base environments"
);

for (const blockBaseEnvironment of blockBaseEnvironments) {
// 2. find the corresponding environment that is not from the block
const matchingEnv = parsedJson.baseEnvironments.find(
(be) =>
be.environmentRoleId === blockBaseEnvironment.environmentRoleId && be.envParentId !== appBlock.blockId
);

if (!matchingEnv) {
throw new Error(`Could not find environment for block ${appBlock.blockId}`);
}

// 3. find all the secrets for this environment block
const blockSecrets = parsedJson.envs[blockBaseEnvironment.id].variables;

logger.info(
{
blockSecretsLength: Object.keys(blockSecrets).length
},
"[processBlocksForApp]: Processing block secrets"
);

// 4. process each secret
for (const secret of Object.keys(blockSecrets)) {
const selectedSecret = blockSecrets[secret];

if (selectedSecret.inheritsEnvironmentId) {
const resolvedSecret = findRootInheritedSecret(selectedSecret, secret, parsedJson.envs);

// If the secret already exists in the environment, we need to check the orderIndex of the appBlock. The appBlock with the highest orderIndex should take precedence.
const preExistingSecretIndex = infisicalImportData.secrets.findIndex(
(s) => s.name === secret && s.environmentId === matchingEnv.id
);

if (preExistingSecretIndex !== -1) {
const preExistingSecret = infisicalImportData.secrets[preExistingSecretIndex];

if (
preExistingSecret.appBlockOrderIndex !== undefined &&
appBlock.orderIndex > preExistingSecret.appBlockOrderIndex
) {
// if the existing secret has a lower orderIndex, we should replace it
infisicalImportData.secrets[preExistingSecretIndex] = {
...preExistingSecret,
value: selectedSecret.val || "",
appBlockOrderIndex: appBlock.orderIndex
};
}

// eslint-disable-next-line no-continue
continue;
}

infisicalImportData.secrets.push({
id: randomUUID(),
name: secret,
environmentId: matchingEnv.id,
value: resolvedSecret.val || "",
appBlockOrderIndex: appBlock.orderIndex
});
} else {
// If the secret already exists in the environment, we need to check the orderIndex of the appBlock. The appBlock with the highest orderIndex should take precedence.
const preExistingSecretIndex = infisicalImportData.secrets.findIndex(
(s) => s.name === secret && s.environmentId === matchingEnv.id
);

if (preExistingSecretIndex !== -1) {
const preExistingSecret = infisicalImportData.secrets[preExistingSecretIndex];

if (
preExistingSecret.appBlockOrderIndex !== undefined &&
appBlock.orderIndex > preExistingSecret.appBlockOrderIndex
) {
// if the existing secret has a lower orderIndex, we should replace it
infisicalImportData.secrets[preExistingSecretIndex] = {
...preExistingSecret,
value: selectedSecret.val || "",
appBlockOrderIndex: appBlock.orderIndex
};
}

// eslint-disable-next-line no-continue
continue;
}

infisicalImportData.secrets.push({
id: randomUUID(),
name: secret,
environmentId: matchingEnv.id,
value: selectedSecret.val || "",
appBlockOrderIndex: appBlock.orderIndex
});
}
}
}
}
}
};

processBranches();
processBlocksForApp(infisicalImportData.projects.map((app) => app.id));
for (const env of Object.keys(parsedJson.envs)) {
if (!env.includes("|")) {
const envData = parsedJson.envs[env];
for (const secret of Object.keys(envData.variables)) {
// Skip user-specific environments
// eslint-disable-next-line no-continue
if (env.includes("|")) continue;

const envData = parsedJson.envs[env];
const baseEnv = parsedJson.baseEnvironments.find((be) => be.id === env);
const subEnv = parsedJson.subEnvironments.find((se) => se.id === env);

// Skip if we can't find either a base environment or sub-environment
if (!baseEnv && !subEnv) {
logger.info(
{
envId: env
},
"[parseEnvKeyDataFn]: Could not find base or sub environment for env, skipping"
);
// eslint-disable-next-line no-continue
continue;
}

// If this is a base environment of a block, skip it (handled by processBlocksForApp)
if (baseEnv) {
const isBlock = parsedJson.appBlocks.some((block) => block.blockId === baseEnv.envParentId);
if (isBlock) {
logger.info(
{
envId: env,
baseEnv
},
"[parseEnvKeyDataFn]: Skipping block environment (handled separately)"
);
// eslint-disable-next-line no-continue
continue;
}
}

// Process each secret in this environment or branch
for (const [secretName, secretData] of Object.entries(envData.variables)) {
const indexOfExistingSecret = infisicalImportData.secrets.findIndex(
(s) =>
s.name === secretName &&
(s.environmentId === subEnv?.parentEnvironmentId || s.environmentId === env) &&
(s.folderId ? s.folderId === subEnv?.id : true) &&
(secretData.val ? s.value === secretData.val : true)
);

if (secretData.inheritsEnvironmentId) {
const resolvedSecret = findRootInheritedSecret(secretData, secretName, parsedJson.envs);
// Check if there's already a secret with this name in the environment, if there is, we should override it. Because if there's already one, we know its coming from a block.
// Variables from the normal environment should take precedence over variables from the block.
if (indexOfExistingSecret !== -1) {
// if a existing secret is found, we should replace it directly
const newSecret: (typeof infisicalImportData.secrets)[number] = {
...infisicalImportData.secrets[indexOfExistingSecret],
value: resolvedSecret.val || ""
};

infisicalImportData.secrets[indexOfExistingSecret] = newSecret;

// eslint-disable-next-line no-continue
continue;
}

infisicalImportData.secrets.push({
id: randomUUID(),
name: secret,
environmentId: env,
value: envData.variables[secret].val
name: secretName,
environmentId: subEnv ? subEnv.parentEnvironmentId : env,
value: resolvedSecret.val || "",
...(subEnv && { folderId: subEnv.id }) // Add folderId if this is a branch secret
});
} else {
// Check if there's already a secret with this name in the environment, if there is, we should override it. Because if there's already one, we know its coming from a block.
// Variables from the normal environment should take precedence over variables from the block.

if (indexOfExistingSecret !== -1) {
// if a existing secret is found, we should replace it directly
const newSecret: (typeof infisicalImportData.secrets)[number] = {
...infisicalImportData.secrets[indexOfExistingSecret],
value: secretData.val || ""
};

infisicalImportData.secrets[indexOfExistingSecret] = newSecret;

// eslint-disable-next-line no-continue
continue;
}

const folderId = targetIdToFolderIdsMap.get(subEnv?.id || "") || subEnv?.id;

infisicalImportData.secrets.push({
id: randomUUID(),
name: secretName,
environmentId: subEnv ? subEnv.parentEnvironmentId : env,
value: secretData.val || "",
...(folderId && { folderId })
});
}
}
@@ -125,7 +511,17 @@ export const importDataIntoInfisicalFn = async ({
}

const originalToNewProjectId = new Map<string, string>();
const originalToNewEnvironmentId = new Map<string, string>();
const originalToNewEnvironmentId = new Map<
string,
{ envId: string; envSlug: string; rootFolderId: string; projectId: string }
>();
const originalToNewFolderId = new Map<
string,
{
folderId: string;
projectId: string;
}
>();
const projectsNotImported: string[] = [];

await projectDAL.transaction(async (tx) => {
@@ -170,65 +566,176 @@ export const importDataIntoInfisicalFn = async ({

const lastPos = await projectEnvDAL.findLastEnvPosition(projectId, tx);
const doc = await projectEnvDAL.create({ slug, name: environment.name, projectId, position: lastPos + 1 }, tx);
await folderDAL.create({ name: "root", parentId: null, envId: doc.id, version: 1 }, tx);
const folder = await folderDAL.create({ name: "root", parentId: null, envId: doc.id, version: 1 }, tx);

originalToNewEnvironmentId.set(environment.id, doc.slug);
originalToNewEnvironmentId.set(environment.id, {
envSlug: doc.slug,
envId: doc.id,
rootFolderId: folder.id,
projectId
});
}
}

if (data.folders) {
for await (const folder of data.folders) {
const parentEnv = originalToNewEnvironmentId.get(folder.parentFolderId as string);

if (!parentEnv) {
// eslint-disable-next-line no-continue
continue;
}

const newFolder = await folderDAL.create(
{
name: folder.name,
envId: parentEnv.envId,
parentId: parentEnv.rootFolderId
},
tx
);

originalToNewFolderId.set(folder.id, {
folderId: newFolder.id,
projectId: parentEnv.projectId
});
}
}

// Useful for debugging:
// console.log("data.secrets", data.secrets);
// console.log("data.folders", data.folders);
// console.log("data.environment", data.environments);

if (data.secrets && data.secrets.length > 0) {
const mappedToEnvironmentId = new Map<
string,
{
secretKey: string;
secretValue: string;
folderId?: string;
isFromBlock?: boolean;
}[]
>();

for (const secret of data.secrets) {
if (!originalToNewEnvironmentId.get(secret.environmentId)) {
const targetId = secret.folderId || secret.environmentId;

// Skip if we can't find either an environment or folder mapping for this secret
if (!originalToNewEnvironmentId.get(secret.environmentId) && !originalToNewFolderId.get(targetId)) {
logger.info({ secret }, "[importDataIntoInfisicalFn]: Could not find environment or folder for secret");

// eslint-disable-next-line no-continue
continue;
}

if (!mappedToEnvironmentId.has(secret.environmentId)) {
mappedToEnvironmentId.set(secret.environmentId, []);
if (!mappedToEnvironmentId.has(targetId)) {
mappedToEnvironmentId.set(targetId, []);
}
mappedToEnvironmentId.get(secret.environmentId)!.push({

const alreadyHasSecret = mappedToEnvironmentId
.get(targetId)!
.find((el) => el.secretKey === secret.name && el.folderId === secret.folderId);

if (alreadyHasSecret && alreadyHasSecret.isFromBlock) {
// remove the existing secret if any
mappedToEnvironmentId
.get(targetId)!
.splice(mappedToEnvironmentId.get(targetId)!.indexOf(alreadyHasSecret), 1);
}
mappedToEnvironmentId.get(targetId)!.push({
secretKey: secret.name,
secretValue: secret.value || ""
secretValue: secret.value || "",
folderId: secret.folderId,
isFromBlock: secret.appBlockOrderIndex !== undefined
});
}
// for each of the mappedEnvironmentId
for await (const [envId, secrets] of mappedToEnvironmentId) {
const environment = data.environments.find((env) => env.id === envId);
const projectId = originalToNewProjectId.get(environment?.projectId as string)!;
for await (const [targetId, secrets] of mappedToEnvironmentId) {
logger.info("[importDataIntoInfisicalFn]: Processing secrets for targetId", targetId);

if (!projectId) {
throw new BadRequestError({ message: `Failed to import secret, project not found` });
let selectedFolder: TSecretFolders | undefined;
let selectedProjectId: string | undefined;

// Case 1: Secret belongs to a folder / branch / branch of a block
const foundFolder = originalToNewFolderId.get(targetId);
if (foundFolder) {
logger.info("[importDataIntoInfisicalFn]: Processing secrets for folder");
selectedFolder = await folderDAL.findById(foundFolder.folderId, tx);
selectedProjectId = foundFolder.projectId;
} else {
logger.info("[importDataIntoInfisicalFn]: Processing secrets for normal environment");
const environment = data.environments.find((env) => env.id === targetId);
if (!environment) {
logger.info(
{
targetId
},
"[importDataIntoInfisicalFn]: Could not find environment for secret"
);
// eslint-disable-next-line no-continue
continue;
}

const projectId = originalToNewProjectId.get(environment.projectId)!;

if (!projectId) {
throw new BadRequestError({ message: `Failed to import secret, project not found` });
}

const env = originalToNewEnvironmentId.get(targetId);
if (!env) {
logger.info(
{
targetId
},
"[importDataIntoInfisicalFn]: Could not find environment for secret"
);

// eslint-disable-next-line no-continue
continue;
}

const folder = await folderDAL.findBySecretPath(projectId, env.envSlug, "/", tx);

if (!folder) {
throw new NotFoundError({
message: `Folder not found for the given environment slug (${env.envSlug}) & secret path (/)`,
name: "Create secret"
});
}

selectedFolder = folder;
selectedProjectId = projectId;
}

if (!selectedFolder) {
throw new NotFoundError({
message: `Folder not found for the given environment slug & secret path`,
name: "CreateSecret"
});
}

if (!selectedProjectId) {
throw new NotFoundError({
message: `Project not found for the given environment slug & secret path`,
name: "CreateSecret"
});
}

const { encryptor: secretManagerEncrypt } = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.SecretManager,
projectId
projectId: selectedProjectId
},
tx
);

const envSlug = originalToNewEnvironmentId.get(envId)!;
const folder = await folderDAL.findBySecretPath(projectId, envSlug, "/", tx);
if (!folder)
throw new NotFoundError({
message: `Folder not found for the given environment slug (${envSlug}) & secret path (/)`,
name: "Create secret"
});

const secretBatches = chunkArray(secrets, 2500);
for await (const secretBatch of secretBatches) {
const secretsByKeys = await secretDAL.findBySecretKeys(
folder.id,
selectedFolder.id,
secretBatch.map((el) => ({
key: el.secretKey,
type: SecretType.Shared
@@ -242,7 +749,7 @@ export const importDataIntoInfisicalFn = async ({
}
await fnSecretBulkInsert({
inputSecrets: secretBatch.map((el) => {
const references = getAllNestedSecretReferences(el.secretValue);
const references = getAllSecretReferences(el.secretValue).nestedReferences;

return {
version: 1,
@@ -254,7 +761,7 @@ export const importDataIntoInfisicalFn = async ({
type: SecretType.Shared
};
}),
folderId: folder.id,
folderId: selectedFolder.id,
secretDAL,
secretVersionDAL,
secretTagDAL,
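Note: several hunks above repeat the rule that, when two EnvKey blocks supply the same secret, the block with the higher orderIndex wins. A minimal sketch of that precedence check in isolation (types and names here are illustrative, not the actual helpers):

type ImportSecret = { name: string; environmentId: string; value: string; appBlockOrderIndex?: number };

// Insert a block-provided secret, replacing an existing entry only when the incoming
// block has a higher orderIndex than the block that produced the existing value.
const upsertByBlockPrecedence = (secrets: ImportSecret[], incoming: ImportSecret): void => {
  const idx = secrets.findIndex((s) => s.name === incoming.name && s.environmentId === incoming.environmentId);
  if (idx === -1) {
    secrets.push(incoming);
    return;
  }
  const existing = secrets[idx];
  if (
    existing.appBlockOrderIndex !== undefined &&
    (incoming.appBlockOrderIndex ?? -1) > existing.appBlockOrderIndex
  ) {
    secrets[idx] = incoming;
  }
};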
@@ -31,7 +31,7 @@ export type TExternalMigrationQueueFactoryDep = {
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecretV2" | "create">;
secretVersionTagDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany" | "create">;

folderDAL: Pick<TSecretFolderDALFactory, "create" | "findBySecretPath">;
folderDAL: Pick<TSecretFolderDALFactory, "create" | "findBySecretPath" | "findOne" | "findById">;
projectService: Pick<TProjectServiceFactory, "createProject">;
projectEnvService: Pick<TProjectEnvServiceFactory, "createEnvironment">;
secretV2BridgeService: Pick<TSecretV2BridgeServiceFactory, "createManySecret">;
@@ -2,8 +2,16 @@ import { ActorAuthMethod, ActorType } from "../auth/auth-type";

export type InfisicalImportData = {
projects: Array<{ name: string; id: string }>;
environments: Array<{ name: string; id: string; projectId: string }>;
secrets: Array<{ name: string; id: string; environmentId: string; value: string }>;
environments: Array<{ name: string; id: string; projectId: string; envParentId?: string }>;
folders: Array<{ id: string; name: string; environmentId: string; parentFolderId?: string }>;
secrets: Array<{
id: string;
name: string;
environmentId: string;
value: string;
folderId?: string;
appBlockOrderIndex?: number; // Not used for infisical import, only used for building the import structure to determine which block(s) take precedence.
}>;
};

export type TImportEnvKeyDataCreate = {
@@ -28,62 +36,62 @@ export type TEnvKeyExportJSON = {
org: {
id: string;
name: string;
settings: {
auth: {
inviteExpirationMs: number;
deviceGrantExpirationMs: number;
tokenExpirationMs: number;
};
crypto: {
requiresPassphrase: boolean;
requiresLockout: boolean;
};
envs: {
autoCaps: boolean;
autoCommitLocals: boolean;
};
};
};

// Apps are projects
apps: {
id: string;
name: string;
settings: Record<string, unknown>;
}[];
defaultOrgRoles: {
// Blocks are basically global projects that can be imported in other projects
blocks: {
id: string;
defaultName: string;
name: string;
}[];
defaultAppRoles: {
id: string;
defaultName: string;

appBlocks: {
appId: string;
blockId: string;
orderIndex: number;
}[];

defaultEnvironmentRoles: {
id: string;
defaultName: string;
settings: {
autoCommit: boolean;
};
}[];

nonDefaultEnvironmentRoles: {
id: string;
name: string;
}[];

baseEnvironments: {
id: string;
envParentId: string;
environmentRoleId: string;
settings: Record<string, unknown>;
}[];
orgUsers: {

// Branches for both blocks and apps
subEnvironments: {
id: string;
firstName: string;
lastName: string;
email: string;
provider: string;
orgRoleId: string;
uid: string;
envParentId: string;
environmentRoleId: string;
parentEnvironmentId: string;
subName: string;
}[];

envs: Record<
string,
{
variables: Record<string, { val: string }>;
inherits: Record<string, unknown>;
variables: Record<
string,
{
val?: string;
inheritsEnvironmentId?: string;
}
>;

inherits: Record<string, string[]>;
}
>;
};
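Note: a small object satisfying the updated InfisicalImportData shape above, showing how a branch folder and a block-provided secret fit together (IDs, names, and values are made up for illustration):

const sample: InfisicalImportData = {
  projects: [{ id: "app-1", name: "Demo App" }],
  environments: [{ id: "env-1", name: "development", projectId: "app-1" }],
  // Branch imported as a folder under the parent environment.
  folders: [{ id: "sub-1-app-1", name: "feature-branch", environmentId: "env-1", parentFolderId: "env-1" }],
  secrets: [
    // appBlockOrderIndex only matters while building the structure: higher index wins on conflicts.
    { id: "sec-1", name: "DB_URL", environmentId: "env-1", value: "postgres://localhost", folderId: "sub-1-app-1", appBlockOrderIndex: 2 }
  ]
};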
@@ -158,6 +158,7 @@ export const groupProjectDALFactory = (db: TDbClient) => {
)
.select(
db.ref("id").withSchema(TableName.UserGroupMembership),
db.ref("createdAt").withSchema(TableName.UserGroupMembership),
db.ref("isGhost").withSchema(TableName.Users),
db.ref("username").withSchema(TableName.Users),
db.ref("email").withSchema(TableName.Users),
@@ -181,7 +182,18 @@ export const groupProjectDALFactory = (db: TDbClient) => {

const members = sqlNestRelationships({
data: docs,
parentMapper: ({ email, firstName, username, lastName, publicKey, isGhost, id, userId, projectName }) => ({
parentMapper: ({
email,
firstName,
username,
lastName,
publicKey,
isGhost,
id,
userId,
projectName,
createdAt
}) => ({
isGroupMember: true,
id,
userId,
@@ -190,7 +202,8 @@ export const groupProjectDALFactory = (db: TDbClient) => {
id: projectId,
name: projectName
},
user: { email, username, firstName, lastName, id: userId, publicKey, isGhost }
user: { email, username, firstName, lastName, id: userId, publicKey, isGhost },
createdAt
}),
key: "id",
childrenMapper: [
@@ -1,7 +1,7 @@
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import { IdentityAuthMethod, TableName, TIdentityAccessTokens } from "@app/db/schemas";
import { TableName, TIdentityAccessTokens } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
@@ -17,54 +17,27 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
const doc = await (tx || db.replicaNode())(TableName.IdentityAccessToken)
.where(filter)
.join(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityAccessToken}.identityId`)
.leftJoin(TableName.IdentityUaClientSecret, (qb) => {
qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.Univeral])).andOn(
`${TableName.IdentityAccessToken}.identityUAClientSecretId`,
`${TableName.IdentityUaClientSecret}.id`
);
})
.leftJoin(TableName.IdentityUniversalAuth, (qb) => {
qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.Univeral])).andOn(
`${TableName.IdentityUaClientSecret}.identityUAId`,
`${TableName.IdentityUniversalAuth}.id`
);
})
.leftJoin(TableName.IdentityGcpAuth, (qb) => {
qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.GCP_AUTH])).andOn(
`${TableName.Identity}.id`,
`${TableName.IdentityGcpAuth}.identityId`
);
})
.leftJoin(TableName.IdentityAwsAuth, (qb) => {
qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.AWS_AUTH])).andOn(
`${TableName.Identity}.id`,
`${TableName.IdentityAwsAuth}.identityId`
);
})
.leftJoin(TableName.IdentityAzureAuth, (qb) => {
qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.AZURE_AUTH])).andOn(
`${TableName.Identity}.id`,
`${TableName.IdentityAzureAuth}.identityId`
);
})
.leftJoin(TableName.IdentityKubernetesAuth, (qb) => {
qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.KUBERNETES_AUTH])).andOn(
`${TableName.Identity}.id`,
`${TableName.IdentityKubernetesAuth}.identityId`
);
})
.leftJoin(TableName.IdentityOidcAuth, (qb) => {
qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.OIDC_AUTH])).andOn(
`${TableName.Identity}.id`,
`${TableName.IdentityOidcAuth}.identityId`
);
})
.leftJoin(TableName.IdentityTokenAuth, (qb) => {
qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.TOKEN_AUTH])).andOn(
`${TableName.Identity}.id`,
`${TableName.IdentityTokenAuth}.identityId`
);
})
.leftJoin(
TableName.IdentityUaClientSecret,
`${TableName.IdentityAccessToken}.identityUAClientSecretId`,
`${TableName.IdentityUaClientSecret}.id`
)
.leftJoin(
TableName.IdentityUniversalAuth,
`${TableName.IdentityUaClientSecret}.identityUAId`,
`${TableName.IdentityUniversalAuth}.id`
)
.leftJoin(TableName.IdentityGcpAuth, `${TableName.Identity}.id`, `${TableName.IdentityGcpAuth}.identityId`)
.leftJoin(TableName.IdentityAwsAuth, `${TableName.Identity}.id`, `${TableName.IdentityAwsAuth}.identityId`)
.leftJoin(TableName.IdentityAzureAuth, `${TableName.Identity}.id`, `${TableName.IdentityAzureAuth}.identityId`)
.leftJoin(
TableName.IdentityKubernetesAuth,
`${TableName.Identity}.id`,
`${TableName.IdentityKubernetesAuth}.identityId`
)
.leftJoin(TableName.IdentityOidcAuth, `${TableName.Identity}.id`, `${TableName.IdentityOidcAuth}.identityId`)
.leftJoin(TableName.IdentityTokenAuth, `${TableName.Identity}.id`, `${TableName.IdentityTokenAuth}.identityId`)

.select(selectAllTableCols(TableName.IdentityAccessToken))
.select(
db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityUniversalAuth).as("accessTokenTrustedIpsUa"),
@@ -82,14 +55,13 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {

return {
...doc,
accessTokenTrustedIps:
doc.accessTokenTrustedIpsUa ||
doc.accessTokenTrustedIpsGcp ||
doc.accessTokenTrustedIpsAws ||
doc.accessTokenTrustedIpsAzure ||
doc.accessTokenTrustedIpsK8s ||
doc.accessTokenTrustedIpsOidc ||
doc.accessTokenTrustedIpsToken
trustedIpsUniversalAuth: doc.accessTokenTrustedIpsUa,
trustedIpsGcpAuth: doc.accessTokenTrustedIpsGcp,
trustedIpsAwsAuth: doc.accessTokenTrustedIpsAws,
trustedIpsAzureAuth: doc.accessTokenTrustedIpsAzure,
trustedIpsKubernetesAuth: doc.accessTokenTrustedIpsK8s,
trustedIpsOidcAuth: doc.accessTokenTrustedIpsOidc,
trustedIpsAccessTokenAuth: doc.accessTokenTrustedIpsToken
};
} catch (error) {
throw new DatabaseError({ error, name: "IdAccessTokenFindOne" });
@@ -1,6 +1,6 @@
import jwt, { JwtPayload } from "jsonwebtoken";

import { TableName, TIdentityAccessTokens } from "@app/db/schemas";
import { IdentityAuthMethod, TableName, TIdentityAccessTokens } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
import { checkIPAgainstBlocklist, TIp } from "@app/lib/ip";
@@ -164,10 +164,22 @@ export const identityAccessTokenServiceFactory = ({
message: "Failed to authorize revoked access token, access token is revoked"
});

if (ipAddress && identityAccessToken) {
const trustedIpsMap: Record<IdentityAuthMethod, unknown> = {
[IdentityAuthMethod.UNIVERSAL_AUTH]: identityAccessToken.trustedIpsUniversalAuth,
[IdentityAuthMethod.GCP_AUTH]: identityAccessToken.trustedIpsGcpAuth,
[IdentityAuthMethod.AWS_AUTH]: identityAccessToken.trustedIpsAwsAuth,
[IdentityAuthMethod.AZURE_AUTH]: identityAccessToken.trustedIpsAzureAuth,
[IdentityAuthMethod.KUBERNETES_AUTH]: identityAccessToken.trustedIpsKubernetesAuth,
[IdentityAuthMethod.OIDC_AUTH]: identityAccessToken.trustedIpsOidcAuth,
[IdentityAuthMethod.TOKEN_AUTH]: identityAccessToken.trustedIpsAccessTokenAuth
};

const trustedIps = trustedIpsMap[identityAccessToken.authMethod as IdentityAuthMethod];

if (ipAddress) {
checkIPAgainstBlocklist({
ipAddress,
trustedIps: identityAccessToken?.accessTokenTrustedIps as TIp[]
trustedIps: trustedIps as TIp[]
});
}
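Note: the hunks above replace the single accessTokenTrustedIps value with per-auth-method fields and select the right list via a map keyed by the token's auth method. A standalone sketch of that lookup pattern (the string values and field names here are assumptions for illustration, not the real IdentityAuthMethod enum):

type AuthMethod = "universal-auth" | "gcp-auth" | "aws-auth";
type TrustedIp = { ipAddress: string; prefix?: number };
type TokenRow = {
  authMethod: AuthMethod;
  trustedIpsUniversalAuth?: TrustedIp[];
  trustedIpsGcpAuth?: TrustedIp[];
  trustedIpsAwsAuth?: TrustedIp[];
};

// Pick the trusted-IP list that matches how the token was issued.
const pickTrustedIps = (token: TokenRow): TrustedIp[] => {
  const byMethod: Record<AuthMethod, TrustedIp[] | undefined> = {
    "universal-auth": token.trustedIpsUniversalAuth,
    "gcp-auth": token.trustedIpsGcpAuth,
    "aws-auth": token.trustedIpsAwsAuth
  };
  return byMethod[token.authMethod] ?? [];
};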
@@ -13,7 +13,6 @@ import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedErro
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";

import { ActorType, AuthTokenType } from "../auth/auth-type";
import { TIdentityDALFactory } from "../identity/identity-dal";
import { TIdentityOrgDALFactory } from "../identity/identity-org-dal";
import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal";
import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identity-access-token-types";
@@ -33,7 +32,6 @@ type TIdentityAwsAuthServiceFactoryDep = {
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
identityAwsAuthDAL: Pick<TIdentityAwsAuthDALFactory, "findOne" | "transaction" | "create" | "updateById" | "delete">;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
identityDAL: Pick<TIdentityDALFactory, "updateById">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
};
@@ -44,7 +42,6 @@ export const identityAwsAuthServiceFactory = ({
identityAccessTokenDAL,
identityAwsAuthDAL,
identityOrgMembershipDAL,
identityDAL,
licenseService,
permissionService
}: TIdentityAwsAuthServiceFactoryDep) => {
@@ -113,7 +110,8 @@ export const identityAwsAuthServiceFactory = ({
accessTokenTTL: identityAwsAuth.accessTokenTTL,
accessTokenMaxTTL: identityAwsAuth.accessTokenMaxTTL,
accessTokenNumUses: 0,
accessTokenNumUsesLimit: identityAwsAuth.accessTokenNumUsesLimit
accessTokenNumUsesLimit: identityAwsAuth.accessTokenNumUsesLimit,
authMethod: IdentityAuthMethod.AWS_AUTH
},
tx
);
@@ -155,10 +153,12 @@ export const identityAwsAuthServiceFactory = ({
}: TAttachAwsAuthDTO) => {
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
if (identityMembershipOrg.identity.authMethod)

if (identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.AWS_AUTH)) {
throw new BadRequestError({
message: "Failed to add AWS Auth to already configured identity"
});
}

if (accessTokenMaxTTL > 0 && accessTokenTTL > accessTokenMaxTTL) {
throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" });
@@ -206,13 +206,6 @@ export const identityAwsAuthServiceFactory = ({
},
tx
);
await identityDAL.updateById(
identityMembershipOrg.identityId,
{
authMethod: IdentityAuthMethod.AWS_AUTH
},
tx
);
return doc;
});
return { ...identityAwsAuth, orgId: identityMembershipOrg.orgId };
@@ -234,10 +227,12 @@ export const identityAwsAuthServiceFactory = ({
}: TUpdateAwsAuthDTO) => {
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.AWS_AUTH)
throw new BadRequestError({
message: "Failed to update AWS Auth"

if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.AWS_AUTH)) {
throw new NotFoundError({
message: "The identity does not have AWS Auth attached"
});
}

const identityAwsAuth = await identityAwsAuthDAL.findOne({ identityId });

@@ -293,10 +288,12 @@ export const identityAwsAuthServiceFactory = ({
const getAwsAuth = async ({ identityId, actorId, actor, actorAuthMethod, actorOrgId }: TGetAwsAuthDTO) => {
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.AWS_AUTH)

if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.AWS_AUTH)) {
throw new BadRequestError({
message: "The identity does not have AWS Auth attached"
});
}

const awsIdentityAuth = await identityAwsAuthDAL.findOne({ identityId });

@@ -320,10 +317,11 @@ export const identityAwsAuthServiceFactory = ({
}: TRevokeAwsAuthDTO) => {
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.AWS_AUTH)
if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.AWS_AUTH)) {
throw new BadRequestError({
message: "The identity does not have aws auth"
});
}
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
@@ -348,7 +346,6 @@ export const identityAwsAuthServiceFactory = ({

const revokedIdentityAwsAuth = await identityAwsAuthDAL.transaction(async (tx) => {
const deletedAwsAuth = await identityAwsAuthDAL.delete({ identityId }, tx);
await identityDAL.updateById(identityId, { authMethod: null }, tx);
return { ...deletedAwsAuth?.[0], orgId: identityMembershipOrg.orgId };
});
return revokedIdentityAwsAuth;
@@ -11,7 +11,6 @@ import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedErro
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";

import { ActorType, AuthTokenType } from "../auth/auth-type";
import { TIdentityDALFactory } from "../identity/identity-dal";
import { TIdentityOrgDALFactory } from "../identity/identity-org-dal";
import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal";
import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identity-access-token-types";
@@ -32,7 +31,6 @@ type TIdentityAzureAuthServiceFactoryDep = {
>;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
identityDAL: Pick<TIdentityDALFactory, "updateById">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};
@@ -43,7 +41,6 @@ export const identityAzureAuthServiceFactory = ({
identityAzureAuthDAL,
identityOrgMembershipDAL,
identityAccessTokenDAL,
identityDAL,
permissionService,
licenseService
}: TIdentityAzureAuthServiceFactoryDep) => {
@@ -84,7 +81,8 @@ export const identityAzureAuthServiceFactory = ({
accessTokenTTL: identityAzureAuth.accessTokenTTL,
accessTokenMaxTTL: identityAzureAuth.accessTokenMaxTTL,
accessTokenNumUses: 0,
accessTokenNumUsesLimit: identityAzureAuth.accessTokenNumUsesLimit
accessTokenNumUsesLimit: identityAzureAuth.accessTokenNumUsesLimit,
authMethod: IdentityAuthMethod.AZURE_AUTH
},
tx
);
@@ -126,11 +124,12 @@ export const identityAzureAuthServiceFactory = ({
}: TAttachAzureAuthDTO) => {
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
if (identityMembershipOrg.identity.authMethod)

if (identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.AZURE_AUTH)) {
throw new BadRequestError({
message: "Failed to add Azure Auth to already configured identity"
});

}
if (accessTokenMaxTTL > 0 && accessTokenTTL > accessTokenMaxTTL) {
throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" });
}
@@ -176,13 +175,7 @@ export const identityAzureAuthServiceFactory = ({
},
tx
);
await identityDAL.updateById(
identityMembershipOrg.identityId,
{
authMethod: IdentityAuthMethod.AZURE_AUTH
},
tx
);

return doc;
});
return { ...identityAzureAuth, orgId: identityMembershipOrg.orgId };
@@ -204,10 +197,11 @@ export const identityAzureAuthServiceFactory = ({
}: TUpdateAzureAuthDTO) => {
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.AZURE_AUTH)
if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.AZURE_AUTH)) {
throw new BadRequestError({
message: "Failed to update Azure Auth"
});
}

const identityGcpAuth = await identityAzureAuthDAL.findOne({ identityId });

@@ -266,10 +260,11 @@ export const identityAzureAuthServiceFactory = ({
const getAzureAuth = async ({ identityId, actorId, actor, actorAuthMethod, actorOrgId }: TGetAzureAuthDTO) => {
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.AZURE_AUTH)
if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.AZURE_AUTH)) {
throw new BadRequestError({
message: "The identity does not have Azure Auth attached"
});
}

const identityAzureAuth = await identityAzureAuthDAL.findOne({ identityId });

@@ -294,10 +289,11 @@ export const identityAzureAuthServiceFactory = ({
}: TRevokeAzureAuthDTO) => {
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.AZURE_AUTH)
if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.AZURE_AUTH)) {
throw new BadRequestError({
message: "The identity does not have azure auth"
});
}
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
@@ -321,7 +317,6 @@ export const identityAzureAuthServiceFactory = ({

const revokedIdentityAzureAuth = await identityAzureAuthDAL.transaction(async (tx) => {
const deletedAzureAuth = await identityAzureAuthDAL.delete({ identityId }, tx);
await identityDAL.updateById(identityId, { authMethod: null }, tx);
return { ...deletedAzureAuth?.[0], orgId: identityMembershipOrg.orgId };
});
return revokedIdentityAzureAuth;
@@ -11,7 +11,6 @@ import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedErro
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";

import { ActorType, AuthTokenType } from "../auth/auth-type";
import { TIdentityDALFactory } from "../identity/identity-dal";
import { TIdentityOrgDALFactory } from "../identity/identity-org-dal";
import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal";
import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identity-access-token-types";
@@ -30,7 +29,6 @@ type TIdentityGcpAuthServiceFactoryDep = {
  identityGcpAuthDAL: Pick<TIdentityGcpAuthDALFactory, "findOne" | "transaction" | "create" | "updateById" | "delete">;
  identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
  identityDAL: Pick<TIdentityDALFactory, "updateById">;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};
@@ -41,7 +39,6 @@ export const identityGcpAuthServiceFactory = ({
  identityGcpAuthDAL,
  identityOrgMembershipDAL,
  identityAccessTokenDAL,
  identityDAL,
  permissionService,
  licenseService
}: TIdentityGcpAuthServiceFactoryDep) => {
@@ -125,7 +122,8 @@ export const identityGcpAuthServiceFactory = ({
          accessTokenTTL: identityGcpAuth.accessTokenTTL,
          accessTokenMaxTTL: identityGcpAuth.accessTokenMaxTTL,
          accessTokenNumUses: 0,
          accessTokenNumUsesLimit: identityGcpAuth.accessTokenNumUsesLimit
          accessTokenNumUsesLimit: identityGcpAuth.accessTokenNumUsesLimit,
          authMethod: IdentityAuthMethod.GCP_AUTH
        },
        tx
      );
@@ -168,10 +166,12 @@ export const identityGcpAuthServiceFactory = ({
  }: TAttachGcpAuthDTO) => {
    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
    if (identityMembershipOrg.identity.authMethod)

    if (identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.GCP_AUTH)) {
      throw new BadRequestError({
        message: "Failed to add GCP Auth to already configured identity"
      });
    }

    if (accessTokenMaxTTL > 0 && accessTokenTTL > accessTokenMaxTTL) {
      throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" });
@@ -219,13 +219,6 @@ export const identityGcpAuthServiceFactory = ({
        },
        tx
      );
      await identityDAL.updateById(
        identityMembershipOrg.identityId,
        {
          authMethod: IdentityAuthMethod.GCP_AUTH
        },
        tx
      );
      return doc;
    });
    return { ...identityGcpAuth, orgId: identityMembershipOrg.orgId };
@@ -248,10 +241,12 @@ export const identityGcpAuthServiceFactory = ({
  }: TUpdateGcpAuthDTO) => {
    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
    if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.GCP_AUTH)

    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.GCP_AUTH)) {
      throw new BadRequestError({
        message: "Failed to update GCP Auth"
      });
    }

    const identityGcpAuth = await identityGcpAuthDAL.findOne({ identityId });

@@ -311,10 +306,12 @@ export const identityGcpAuthServiceFactory = ({
  const getGcpAuth = async ({ identityId, actorId, actor, actorAuthMethod, actorOrgId }: TGetGcpAuthDTO) => {
    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
    if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.GCP_AUTH)

    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.GCP_AUTH)) {
      throw new BadRequestError({
        message: "The identity does not have GCP Auth attached"
      });
    }

    const identityGcpAuth = await identityGcpAuthDAL.findOne({ identityId });

@@ -339,10 +336,12 @@ export const identityGcpAuthServiceFactory = ({
  }: TRevokeGcpAuthDTO) => {
    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
    if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.GCP_AUTH)

    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.GCP_AUTH)) {
      throw new BadRequestError({
        message: "The identity does not have gcp auth"
      });
    }
    const { permission } = await permissionService.getOrgPermission(
      actor,
      actorId,
@@ -366,7 +365,6 @@ export const identityGcpAuthServiceFactory = ({

    const revokedIdentityGcpAuth = await identityGcpAuthDAL.transaction(async (tx) => {
      const deletedGcpAuth = await identityGcpAuthDAL.delete({ identityId }, tx);
      await identityDAL.updateById(identityId, { authMethod: null }, tx);
      return { ...deletedGcpAuth?.[0], orgId: identityMembershipOrg.orgId };
    });
    return revokedIdentityGcpAuth;
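Alongside the guard change, each login path above now stamps the access-token record with the method that minted it (authMethod: IdentityAuthMethod.GCP_AUTH here, with Kubernetes and OIDC equivalents below). A hedged sketch of how a consumer of that record might use the field once an identity can hold several methods; the row shape and helper below are hypothetical, not part of this diff.

// Hypothetical shapes for illustration; the field names mirror the columns written in the diff.
type IdentityAuthMethod = "gcp-auth" | "kubernetes-auth" | "oidc-auth" | "azure-auth";

type IdentityAccessTokenRow = {
  identityId: string;
  accessTokenTTL: number;
  accessTokenMaxTTL: number;
  accessTokenNumUses: number;
  accessTokenNumUsesLimit: number;
  authMethod: IdentityAuthMethod; // newly recorded at login time
};

type IdentityRecord = { id: string; authMethods: IdentityAuthMethod[] };

// A token minted through a method that has since been revoked should no longer be trusted.
const isTokenMethodStillAttached = (token: IdentityAccessTokenRow, identity: IdentityRecord): boolean =>
  identity.authMethods.includes(token.authMethod);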
@@ -22,7 +22,6 @@ import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";

import { ActorType, AuthTokenType } from "../auth/auth-type";
import { TIdentityDALFactory } from "../identity/identity-dal";
import { TIdentityOrgDALFactory } from "../identity/identity-org-dal";
import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal";
import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identity-access-token-types";
@@ -44,7 +43,6 @@ type TIdentityKubernetesAuthServiceFactoryDep = {
  >;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
  identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "findById">;
  identityDAL: Pick<TIdentityDALFactory, "updateById">;
  orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "transaction" | "create">;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
@@ -56,7 +54,6 @@ export const identityKubernetesAuthServiceFactory = ({
  identityKubernetesAuthDAL,
  identityOrgMembershipDAL,
  identityAccessTokenDAL,
  identityDAL,
  orgBotDAL,
  permissionService,
  licenseService
@@ -215,7 +212,8 @@ export const identityKubernetesAuthServiceFactory = ({
          accessTokenTTL: identityKubernetesAuth.accessTokenTTL,
          accessTokenMaxTTL: identityKubernetesAuth.accessTokenMaxTTL,
          accessTokenNumUses: 0,
          accessTokenNumUsesLimit: identityKubernetesAuth.accessTokenNumUsesLimit
          accessTokenNumUsesLimit: identityKubernetesAuth.accessTokenNumUsesLimit,
          authMethod: IdentityAuthMethod.KUBERNETES_AUTH
        },
        tx
      );
@@ -260,10 +258,12 @@ export const identityKubernetesAuthServiceFactory = ({
  }: TAttachKubernetesAuthDTO) => {
    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
    if (identityMembershipOrg.identity.authMethod)

    if (identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.KUBERNETES_AUTH)) {
      throw new BadRequestError({
        message: "Failed to add Kubernetes Auth to already configured identity"
      });
    }

    if (accessTokenMaxTTL > 0 && accessTokenTTL > accessTokenMaxTTL) {
      throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" });
@@ -372,13 +372,6 @@ export const identityKubernetesAuthServiceFactory = ({
        },
        tx
      );
      await identityDAL.updateById(
        identityMembershipOrg.identityId,
        {
          authMethod: IdentityAuthMethod.KUBERNETES_AUTH
        },
        tx
      );
      return doc;
    });

@@ -404,10 +397,12 @@ export const identityKubernetesAuthServiceFactory = ({
  }: TUpdateKubernetesAuthDTO) => {
    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
    if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.KUBERNETES_AUTH)

    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.KUBERNETES_AUTH)) {
      throw new BadRequestError({
        message: "Failed to update Kubernetes Auth"
      });
    }

    const identityKubernetesAuth = await identityKubernetesAuthDAL.findOne({ identityId });

@@ -532,11 +527,12 @@ export const identityKubernetesAuthServiceFactory = ({
  }: TGetKubernetesAuthDTO) => {
    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
    if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.KUBERNETES_AUTH)

    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.KUBERNETES_AUTH)) {
      throw new BadRequestError({
        message: "The identity does not have Kubernetes Auth attached"
      });

    }
    const identityKubernetesAuth = await identityKubernetesAuthDAL.findOne({ identityId });

    const { permission } = await permissionService.getOrgPermission(
@@ -597,10 +593,12 @@ export const identityKubernetesAuthServiceFactory = ({
  }: TRevokeKubernetesAuthDTO) => {
    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
    if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.KUBERNETES_AUTH)

    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.KUBERNETES_AUTH)) {
      throw new BadRequestError({
        message: "The identity does not have kubernetes auth"
      });
    }
    const { permission } = await permissionService.getOrgPermission(
      actor,
      actorId,
@@ -624,7 +622,6 @@ export const identityKubernetesAuthServiceFactory = ({

    const revokedIdentityKubernetesAuth = await identityKubernetesAuthDAL.transaction(async (tx) => {
      const deletedKubernetesAuth = await identityKubernetesAuthDAL.delete({ identityId }, tx);
      await identityDAL.updateById(identityId, { authMethod: null }, tx);
      return { ...deletedKubernetesAuth?.[0], orgId: identityMembershipOrg.orgId };
    });
    return revokedIdentityKubernetesAuth;
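The attach paths change meaning as well: the old guard if (identityMembershipOrg.identity.authMethod) rejected an identity that had any method configured, while the new guard only rejects attaching the same method twice, and the follow-up identityDAL.updateById write of authMethod is dropped because the per-method table row is now the record of what is attached. A small sketch of the relaxed precondition; the assertMethodNotAttached helper is hypothetical, not code from this diff.

// Hypothetical helper illustrating the relaxed attach precondition.
type AuthMethod = "kubernetes-auth" | "token-auth" | "universal-auth";

class BadRequestError extends Error {}

// Old rule: any already-configured identity was rejected outright.
// New rule: only a duplicate of the same method is rejected, so an identity can hold,
// say, Kubernetes Auth for in-cluster workloads and Token Auth for CI at the same time.
const assertMethodNotAttached = (attached: AuthMethod[], incoming: AuthMethod): void => {
  if (attached.includes(incoming)) {
    throw new BadRequestError(`Failed to add ${incoming} to already configured identity`);
  }
};

assertMethodNotAttached(["token-auth"], "kubernetes-auth"); // allowed now, rejected before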
@@ -22,7 +22,6 @@ import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedErro
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";

import { ActorType, AuthTokenType } from "../auth/auth-type";
import { TIdentityDALFactory } from "../identity/identity-dal";
import { TIdentityOrgDALFactory } from "../identity/identity-org-dal";
import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal";
import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identity-access-token-types";
@@ -41,7 +40,6 @@ type TIdentityOidcAuthServiceFactoryDep = {
  identityOidcAuthDAL: TIdentityOidcAuthDALFactory;
  identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
  identityDAL: Pick<TIdentityDALFactory, "updateById">;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
  orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "transaction" | "create">;
@@ -52,7 +50,6 @@ export type TIdentityOidcAuthServiceFactory = ReturnType<typeof identityOidcAuth
export const identityOidcAuthServiceFactory = ({
  identityOidcAuthDAL,
  identityOrgMembershipDAL,
  identityDAL,
  permissionService,
  licenseService,
  identityAccessTokenDAL,
@@ -61,7 +58,7 @@ export const identityOidcAuthServiceFactory = ({
  const login = async ({ identityId, jwt: oidcJwt }: TLoginOidcAuthDTO) => {
    const identityOidcAuth = await identityOidcAuthDAL.findOne({ identityId });
    if (!identityOidcAuth) {
      throw new NotFoundError({ message: "GCP auth method not found for identity, did you configure GCP auth?" });
      throw new NotFoundError({ message: "OIDC auth method not found for identity, did you configure OIDC auth?" });
    }

    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({
@@ -181,7 +178,8 @@ export const identityOidcAuthServiceFactory = ({
          accessTokenTTL: identityOidcAuth.accessTokenTTL,
          accessTokenMaxTTL: identityOidcAuth.accessTokenMaxTTL,
          accessTokenNumUses: 0,
          accessTokenNumUsesLimit: identityOidcAuth.accessTokenNumUsesLimit
          accessTokenNumUsesLimit: identityOidcAuth.accessTokenNumUsesLimit,
          authMethod: IdentityAuthMethod.OIDC_AUTH
        },
        tx
      );
@@ -228,10 +226,11 @@ export const identityOidcAuthServiceFactory = ({
    if (!identityMembershipOrg) {
    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
    }
    if (identityMembershipOrg.identity.authMethod)
    if (identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.OIDC_AUTH)) {
      throw new BadRequestError({
        message: "Failed to add OIDC Auth to already configured identity"
      });
    }

    if (accessTokenMaxTTL > 0 && accessTokenTTL > accessTokenMaxTTL) {
      throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" });
@@ -334,13 +333,6 @@ export const identityOidcAuthServiceFactory = ({
        },
        tx
      );
      await identityDAL.updateById(
        identityMembershipOrg.identityId,
        {
          authMethod: IdentityAuthMethod.OIDC_AUTH
        },
        tx
      );
      return doc;
    });
    return { ...identityOidcAuth, orgId: identityMembershipOrg.orgId, caCert };
@@ -364,11 +356,9 @@ export const identityOidcAuthServiceFactory = ({
    actorOrgId
  }: TUpdateOidcAuthDTO) => {
    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
    if (!identityMembershipOrg) {
    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
    }
    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });

    if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.OIDC_AUTH) {
    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.OIDC_AUTH)) {
      throw new BadRequestError({
        message: "Failed to update OIDC Auth"
      });
@@ -467,11 +457,9 @@ export const identityOidcAuthServiceFactory = ({

  const getOidcAuth = async ({ identityId, actorId, actor, actorAuthMethod, actorOrgId }: TGetOidcAuthDTO) => {
    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
    if (!identityMembershipOrg) {
    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
    }
    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });

    if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.OIDC_AUTH) {
    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.OIDC_AUTH)) {
      throw new BadRequestError({
        message: "The identity does not have OIDC Auth attached"
      });
@@ -519,7 +507,7 @@ export const identityOidcAuthServiceFactory = ({
      throw new NotFoundError({ message: "Failed to find identity" });
    }

    if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.OIDC_AUTH) {
    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.OIDC_AUTH)) {
      throw new BadRequestError({
        message: "The identity does not have OIDC auth"
      });
@@ -551,7 +539,6 @@ export const identityOidcAuthServiceFactory = ({

    const revokedIdentityOidcAuth = await identityOidcAuthDAL.transaction(async (tx) => {
      const deletedOidcAuth = await identityOidcAuthDAL.delete({ identityId }, tx);
      await identityDAL.updateById(identityId, { authMethod: null }, tx);
      return { ...deletedOidcAuth?.[0], orgId: identityMembershipOrg.orgId };
    });
@@ -1,12 +1,24 @@
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import { TableName, TIdentities } from "@app/db/schemas";
import {
  TableName,
  TIdentities,
  TIdentityAwsAuths,
  TIdentityAzureAuths,
  TIdentityGcpAuths,
  TIdentityKubernetesAuths,
  TIdentityOidcAuths,
  TIdentityTokenAuths,
  TIdentityUniversalAuths
} from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
import { OrderByDirection } from "@app/lib/types";
import { ProjectIdentityOrderBy, TListProjectIdentityDTO } from "@app/services/identity-project/identity-project-types";

import { buildAuthMethods } from "../identity/identity-fns";

export type TIdentityProjectDALFactory = ReturnType<typeof identityProjectDALFactory>;

export const identityProjectDALFactory = (db: TDbClient) => {
@@ -33,11 +45,48 @@ export const identityProjectDALFactory = (db: TDbClient) => {
        `${TableName.IdentityProjectMembership}.id`,
        `${TableName.IdentityProjectAdditionalPrivilege}.projectMembershipId`
      )

      .leftJoin(
        TableName.IdentityUniversalAuth,
        `${TableName.IdentityProjectMembership}.identityId`,
        `${TableName.IdentityUniversalAuth}.identityId`
      )
      .leftJoin(
        TableName.IdentityGcpAuth,
        `${TableName.IdentityProjectMembership}.identityId`,
        `${TableName.IdentityGcpAuth}.identityId`
      )
      .leftJoin(
        TableName.IdentityAwsAuth,
        `${TableName.IdentityProjectMembership}.identityId`,
        `${TableName.IdentityAwsAuth}.identityId`
      )
      .leftJoin(
        TableName.IdentityKubernetesAuth,
        `${TableName.IdentityProjectMembership}.identityId`,
        `${TableName.IdentityKubernetesAuth}.identityId`
      )
      .leftJoin(
        TableName.IdentityOidcAuth,
        `${TableName.IdentityProjectMembership}.identityId`,
        `${TableName.IdentityOidcAuth}.identityId`
      )
      .leftJoin(
        TableName.IdentityAzureAuth,
        `${TableName.IdentityProjectMembership}.identityId`,
        `${TableName.IdentityAzureAuth}.identityId`
      )
      .leftJoin(
        TableName.IdentityTokenAuth,
        `${TableName.IdentityProjectMembership}.identityId`,
        `${TableName.IdentityTokenAuth}.identityId`
      )

      .select(
        db.ref("id").withSchema(TableName.IdentityProjectMembership),
        db.ref("createdAt").withSchema(TableName.IdentityProjectMembership),
        db.ref("updatedAt").withSchema(TableName.IdentityProjectMembership),
        db.ref("authMethod").as("identityAuthMethod").withSchema(TableName.Identity),

        db.ref("id").as("identityId").withSchema(TableName.Identity),
        db.ref("name").as("identityName").withSchema(TableName.Identity),
        db.ref("id").withSchema(TableName.IdentityProjectMembership),
@@ -52,12 +101,33 @@ export const identityProjectDALFactory = (db: TDbClient) => {
        db.ref("temporaryAccessStartTime").withSchema(TableName.IdentityProjectMembershipRole),
        db.ref("temporaryAccessEndTime").withSchema(TableName.IdentityProjectMembershipRole),
        db.ref("projectId").withSchema(TableName.IdentityProjectMembership),
        db.ref("name").as("projectName").withSchema(TableName.Project)
        db.ref("name").as("projectName").withSchema(TableName.Project),
        db.ref("id").as("uaId").withSchema(TableName.IdentityUniversalAuth),
        db.ref("id").as("gcpId").withSchema(TableName.IdentityGcpAuth),
        db.ref("id").as("awsId").withSchema(TableName.IdentityAwsAuth),
        db.ref("id").as("kubernetesId").withSchema(TableName.IdentityKubernetesAuth),
        db.ref("id").as("oidcId").withSchema(TableName.IdentityOidcAuth),
        db.ref("id").as("azureId").withSchema(TableName.IdentityAzureAuth),
        db.ref("id").as("tokenId").withSchema(TableName.IdentityTokenAuth)
      );

    const members = sqlNestRelationships({
      data: docs,
      parentMapper: ({ identityName, identityAuthMethod, id, createdAt, updatedAt, projectId, projectName }) => ({
      parentMapper: ({
        identityName,
        uaId,
        awsId,
        gcpId,
        kubernetesId,
        oidcId,
        azureId,
        tokenId,
        id,
        createdAt,
        updatedAt,
        projectId,
        projectName
      }) => ({
        id,
        identityId,
        createdAt,
@@ -65,7 +135,15 @@ export const identityProjectDALFactory = (db: TDbClient) => {
        identity: {
          id: identityId,
          name: identityName,
          authMethod: identityAuthMethod
          authMethods: buildAuthMethods({
            uaId,
            awsId,
            gcpId,
            kubernetesId,
            oidcId,
            azureId,
            tokenId
          })
        },
        project: {
          id: projectId,
@@ -150,7 +228,7 @@ export const identityProjectDALFactory = (db: TDbClient) => {
      })
      .where((qb) => {
        if (filter.identityId) {
          void qb.where("identityId", filter.identityId);
          void qb.where(`${TableName.IdentityProjectMembership}.identityId`, filter.identityId);
        }
      })
      .join(
@@ -168,6 +246,43 @@ export const identityProjectDALFactory = (db: TDbClient) => {
        `${TableName.IdentityProjectMembership}.id`,
        `${TableName.IdentityProjectAdditionalPrivilege}.projectMembershipId`
      )

      .leftJoin<TIdentityUniversalAuths>(
        TableName.IdentityUniversalAuth,
        `${TableName.Identity}.id`,
        `${TableName.IdentityUniversalAuth}.identityId`
      )
      .leftJoin<TIdentityGcpAuths>(
        TableName.IdentityGcpAuth,
        `${TableName.Identity}.id`,
        `${TableName.IdentityGcpAuth}.identityId`
      )
      .leftJoin<TIdentityAwsAuths>(
        TableName.IdentityAwsAuth,
        `${TableName.Identity}.id`,
        `${TableName.IdentityAwsAuth}.identityId`
      )
      .leftJoin<TIdentityKubernetesAuths>(
        TableName.IdentityKubernetesAuth,
        `${TableName.Identity}.id`,
        `${TableName.IdentityKubernetesAuth}.identityId`
      )
      .leftJoin<TIdentityOidcAuths>(
        TableName.IdentityOidcAuth,
        `${TableName.Identity}.id`,
        `${TableName.IdentityOidcAuth}.identityId`
      )
      .leftJoin<TIdentityAzureAuths>(
        TableName.IdentityAzureAuth,
        `${TableName.Identity}.id`,
        `${TableName.IdentityAzureAuth}.identityId`
      )
      .leftJoin<TIdentityTokenAuths>(
        TableName.IdentityTokenAuth,
        `${TableName.Identity}.id`,
        `${TableName.IdentityTokenAuth}.identityId`
      )

      .select(
        db.ref("id").withSchema(TableName.IdentityProjectMembership),
        db.ref("createdAt").withSchema(TableName.IdentityProjectMembership),
@@ -186,7 +301,14 @@ export const identityProjectDALFactory = (db: TDbClient) => {
        db.ref("temporaryRange").withSchema(TableName.IdentityProjectMembershipRole),
        db.ref("temporaryAccessStartTime").withSchema(TableName.IdentityProjectMembershipRole),
        db.ref("temporaryAccessEndTime").withSchema(TableName.IdentityProjectMembershipRole),
        db.ref("name").as("projectName").withSchema(TableName.Project)
        db.ref("name").as("projectName").withSchema(TableName.Project),
        db.ref("id").as("uaId").withSchema(TableName.IdentityUniversalAuth),
        db.ref("id").as("gcpId").withSchema(TableName.IdentityGcpAuth),
        db.ref("id").as("awsId").withSchema(TableName.IdentityAwsAuth),
        db.ref("id").as("kubernetesId").withSchema(TableName.IdentityKubernetesAuth),
        db.ref("id").as("oidcId").withSchema(TableName.IdentityOidcAuth),
        db.ref("id").as("azureId").withSchema(TableName.IdentityAzureAuth),
        db.ref("id").as("tokenId").withSchema(TableName.IdentityTokenAuth)
      );

    // TODO: scott - joins seem to reorder identities so need to order again, for the sake of urgency will optimize at a later point
@@ -204,7 +326,21 @@ export const identityProjectDALFactory = (db: TDbClient) => {

    const members = sqlNestRelationships({
      data: docs,
      parentMapper: ({ identityId, identityName, identityAuthMethod, id, createdAt, updatedAt, projectName }) => ({
      parentMapper: ({
        identityId,
        identityName,
        uaId,
        awsId,
        gcpId,
        kubernetesId,
        oidcId,
        azureId,
        tokenId,
        id,
        createdAt,
        updatedAt,
        projectName
      }) => ({
        id,
        identityId,
        createdAt,
@@ -212,7 +348,15 @@ export const identityProjectDALFactory = (db: TDbClient) => {
        identity: {
          id: identityId,
          name: identityName,
          authMethod: identityAuthMethod
          authMethods: buildAuthMethods({
            uaId,
            awsId,
            gcpId,
            kubernetesId,
            oidcId,
            azureId,
            tokenId
          })
        },
        project: {
          id: projectId,
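The DAL changes above lean on buildAuthMethods from ../identity/identity-fns, whose implementation is not part of this diff: each auth table is left-joined and its id column aliased (uaId, gcpId, awsId, kubernetesId, oidcId, azureId, tokenId), so a non-null alias means that method's configuration row exists for the identity. A plausible sketch of such a helper, written as an assumption about its shape rather than a copy of the real function:

// Assumed enum values; the real IdentityAuthMethod enum lives in the shared schemas.
enum IdentityAuthMethod {
  UNIVERSAL_AUTH = "universal-auth",
  GCP_AUTH = "gcp-auth",
  AWS_AUTH = "aws-auth",
  KUBERNETES_AUTH = "kubernetes-auth",
  OIDC_AUTH = "oidc-auth",
  AZURE_AUTH = "azure-auth",
  TOKEN_AUTH = "token-auth"
}

type TAuthMethodIds = {
  uaId?: string | null;
  gcpId?: string | null;
  awsId?: string | null;
  kubernetesId?: string | null;
  oidcId?: string | null;
  azureId?: string | null;
  tokenId?: string | null;
};

// Map each non-null joined id to its auth method; the result feeds identity.authMethods.
const buildAuthMethods = (ids: TAuthMethodIds): IdentityAuthMethod[] =>
  [
    ids.uaId ? IdentityAuthMethod.UNIVERSAL_AUTH : null,
    ids.gcpId ? IdentityAuthMethod.GCP_AUTH : null,
    ids.awsId ? IdentityAuthMethod.AWS_AUTH : null,
    ids.kubernetesId ? IdentityAuthMethod.KUBERNETES_AUTH : null,
    ids.oidcId ? IdentityAuthMethod.OIDC_AUTH : null,
    ids.azureId ? IdentityAuthMethod.AZURE_AUTH : null,
    ids.tokenId ? IdentityAuthMethod.TOKEN_AUTH : null
  ].filter((method): method is IdentityAuthMethod => Boolean(method));

Either way, the aliased ids only signal presence; the per-method configuration itself still comes from the dedicated auth DAL for each method.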
Some files were not shown because too many files have changed in this diff.