Mirror of https://github.com/Infisical/infisical.git (synced 2025-06-29 04:31:59 +00:00)

Compare commits
175 Commits
misc/add-e ... infisical/
SHA1:
cd4f2cccf8, ff4ff0588f, 993024662a, a03c152abf, 45d2cc05b3, 483f26d863, da094383b8, fce772bc20, 5e1a7cfb6e, 323d5d2d27,
dd79d0385a, 0a28ac4a7d, 196c616986, bf6060d353, 438e2dfa07, 3ad50a4386, ed94e7a8e7, 09ad1cce96, d7f9cff43e, 5d8d75ac93,
db5a85d3ca, a1a931d3dd, e639f5ee49, a2c9c4529b, 0a338ee539, 2a7679005e, 838d132898, 68d07f0136, 10a3c7015e, 03b0334fa0,
10a3658328, e8ece6be3f, c765c20539, 5cdabd3e61, 75ca093b24, 6c0889f117, 8f49d45309, a4a162ab65, 5b11232325, 8b53f63d69,
6c0975554d, 697543e4a2, 73b5ca5b4f, a1318d54b1, 44afe2fc1d, 956d0f6c5d, c376add0fa, 977c02357b, d4125443a3, 8e3ac6ca29,
fa9bdd21ff, accf42de2e, 348a412cda, c5a5ad93a8, d55ddcd577, 67a0e5ae68, 37cbb4c55b, 506b56b657, 351304fda6, 2af515c486,
cdfec32195, 8d6bd5d537, b6d67df966, 3897f0ece5, 7719ebb112, f03f02786d, c60840e979, 6fe7a5f069, 14b7d763ad, bc1b7ddcc5,
dff729ffc1, 786f5d9e09, ef6abedfe0, 9a5633fda4, f8a96576c9, 88d3d62894, ac40dcc2c6, 6482e88dfc, a01249e903, 7b3e1f12bd,
031c8d67b1, 778b0d4368, 95b57e144d, 1d26269993, ffee1701fc, 871be7132a, 5fe3c9868f, c936aa7157, 05005f4258, c179d7e5ae,
c8553fba2b, 26a9d68823, af5b3aa171, d4728e31c1, f9a5b46365, d65deab0af, 61591742e4, 54b13a9daa, 4adf0aa1e2, 3d3ee746cf,
07e4358d00, 962dd5d919, 52bd1afb0a, d918dd8967, e2e0f6a346, 326cb99732, 341b63c61c, 81b026865c, f50c72c033, e1046e2d56,
ed3fa8add1, d123283849, d7fd44b845, 3ffee049ee, 9924ef3a71, 524462d7bc, 6ebc766308, 6f9a66a0d7, cca7b68dd0, ab39f13e03,
351e573fea, f1bc26e2e5, 8aeb607f6e, e530b7a788, bf61090b5a, 106b068a51, 6f0a97a2fa, 5d604be091, 905cf47d90, 2c40d316f4,
32521523c1, 3a2e8939b1, a6d9c74054, 07bd527cc1, fa7843983f, 2d5b7afda7, 82520a7f0a, af236ba892, c4b7d4618d, 003f2b003d,
4f08801ae8, cfe2bbe125, 29dcf229d8, 747b5ec68d, ed0dc324a3, 1c13ed54af, 8abfea0409, ce4adccc80, dcd3b5df56, f6425480ca,
a3e9392a2f, 633a2ae985, e67a8f9c05, ad110f490c, 3741201b87, 63d325c208, 2149c0a9d1, 430f8458cb, bdb7cb4cbf, 54d002d718,
dc2358bbaa, fc651f6645, cc2c4b16bf, 1b05b7cf2c, dcc3509a33, 9dbe45a730, 7875bcc067, 9c702b27b2, db8a4bd26d, 2b7e1b465f,
b7b294f024, a3fb7c9f00, 5ed164de24, 596378208e, 943d0ddb69
.github/workflows/release-k8-operator-helm.yml — 27 changes (vendored, new file)

@@ -0,0 +1,27 @@
name: Release K8 Operator Helm Chart
on:
  workflow_dispatch:

jobs:
  release-helm:
    name: Release Helm Chart
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2

      - name: Install Helm
        uses: azure/setup-helm@v3
        with:
          version: v3.10.0

      - name: Install python
        uses: actions/setup-python@v4

      - name: Install Cloudsmith CLI
        run: pip install --upgrade cloudsmith-cli

      - name: Build and push helm package to CloudSmith
        run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
@@ -1,17 +1,87 @@
name: Release image + Helm chart K8s Operator
name: Release K8 Operator Docker Image
on:
  push:
    tags:
      - "infisical-k8-operator/v*.*.*"

permissions:
  contents: write
  pull-requests: write

jobs:
  release:
  release-image:
    name: Generate Helm Chart PR
    runs-on: ubuntu-latest
    outputs:
      pr_number: ${{ steps.create-pr.outputs.pull-request-number }}
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"
      - uses: actions/checkout@v2

      - name: Checkout code
        uses: actions/checkout@v2

      # Dependency for helm generation
      - name: Install Helm
        uses: azure/setup-helm@v3
        with:
          version: v3.10.0

      # Dependency for helm generation
      - name: Install Go
        uses: actions/setup-go@v4
        with:
          go-version: 1.21

      # Install binaries for helm generation
      - name: Install dependencies
        working-directory: k8-operator
        run: |
          make helmify
          make kustomize
          make controller-gen

      - name: Generate Helm Chart
        working-directory: k8-operator
        run: make helm

      - name: Update Helm Chart Version
        run: ./k8-operator/scripts/update-version.sh ${{ steps.extract_version.outputs.version }}

      - name: Debug - Check file changes
        run: |
          echo "Current git status:"
          git status
          echo ""
          echo "Modified files:"
          git diff --name-only

          # If there is no diff, exit with error. Version should always be changed, so if there is no diff, something is wrong and we should exit.
          if [ -z "$(git diff --name-only)" ]; then
            echo "No helm changes or version changes. Invalid release detected, Exiting."
            exit 1
          fi

      - name: Create Helm Chart PR
        id: create-pr
        uses: peter-evans/create-pull-request@v5
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          commit-message: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
          committer: GitHub <noreply@github.com>
          author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>
          branch: helm-update-${{ steps.extract_version.outputs.version }}
          delete-branch: true
          title: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
          body: |
            This PR updates the Helm chart to version `${{ steps.extract_version.outputs.version }}`.
            Additionally the helm chart has been updated to match the latest operator code changes.

            Associated Release Workflow: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}

            Once you have approved this PR, you can trigger the helm release workflow manually.
          base: main

      - name: 🔧 Set up QEMU
        uses: docker/setup-qemu-action@v1
@@ -35,18 +105,3 @@ jobs:
          tags: |
            infisical/kubernetes-operator:latest
            infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}

      - name: Checkout
        uses: actions/checkout@v2
      - name: Install Helm
        uses: azure/setup-helm@v3
        with:
          version: v3.10.0
      - name: Install python
        uses: actions/setup-python@v4
      - name: Install Cloudsmith CLI
        run: pip install --upgrade cloudsmith-cli
      - name: Build and push helm package to Cloudsmith
        run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
@@ -8,3 +8,9 @@ frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/S
docs/mint.json:generic-api-key:651
backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134
docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:104
docs/cli/commands/bootstrap.mdx:jwt:86
docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:102
docs/self-hosting/guides/automated-bootstrapping.mdx:jwt:74
frontend/src/pages/secret-manager/SecretDashboardPage/components/SecretListView/SecretDetailSidebar.tsx:generic-api-key:72
k8-operator/config/samples/crd/pushsecret/source-secret-with-templating.yaml:private-key:11
k8-operator/config/samples/crd/pushsecret/push-secret-with-template.yaml:private-key:52
backend/src/@types/fastify.d.ts — 1 change (vendored)

@@ -106,6 +106,7 @@ declare module "@fastify/request-context" {
        claims: Record<string, string>;
      };
    };
    identityPermissionMetadata?: Record<string, unknown>; // filled by permission service
  }
}
@@ -16,7 +16,7 @@ const createAuditLogPartition = async (knex: Knex, startDate: Date, endDate: Dat
  const startDateStr = formatPartitionDate(startDate);
  const endDateStr = formatPartitionDate(endDate);

  const partitionName = `${TableName.AuditLog}_${startDateStr.replace(/-/g, "")}_${endDateStr.replace(/-/g, "")}`;
  const partitionName = `${TableName.AuditLog}_${startDateStr.replaceAll("-", "")}_${endDateStr.replaceAll("-", "")}`;

  await knex.schema.raw(
    `CREATE TABLE ${partitionName} PARTITION OF ${TableName.AuditLog} FOR VALUES FROM ('${startDateStr}') TO ('${endDateStr}')`
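For illustration, the replaceAll form yields the same partition names as the regex-based replace it supersedes; a minimal sketch with made-up dates (the audit_logs prefix is an assumption standing in for TableName.AuditLog):

const startDateStr = "2024-01-01";
const endDateStr = "2024-02-01";
// Both replace(/-/g, "") and replaceAll("-", "") strip every dash from the date strings.
const partitionName = `audit_logs_${startDateStr.replaceAll("-", "")}_${endDateStr.replaceAll("-", "")}`;
// => "audit_logs_20240101_20240201"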
@@ -0,0 +1,30 @@
import { Knex } from "knex";
import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.Organization, "shouldUseNewPrivilegeSystem"))) {
    await knex.schema.alterTable(TableName.Organization, (t) => {
      t.boolean("shouldUseNewPrivilegeSystem");
      t.string("privilegeUpgradeInitiatedByUsername");
      t.dateTime("privilegeUpgradeInitiatedAt");
    });

    await knex(TableName.Organization).update({
      shouldUseNewPrivilegeSystem: false
    });

    await knex.schema.alterTable(TableName.Organization, (t) => {
      t.boolean("shouldUseNewPrivilegeSystem").defaultTo(true).notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.Organization, "shouldUseNewPrivilegeSystem")) {
    await knex.schema.alterTable(TableName.Organization, (t) => {
      t.dropColumn("shouldUseNewPrivilegeSystem");
      t.dropColumn("privilegeUpgradeInitiatedByUsername");
      t.dropColumn("privilegeUpgradeInitiatedAt");
    });
  }
}
@@ -0,0 +1,23 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesParentColumExist = await knex.schema.hasColumn(TableName.SecretFolder, "parentId");
  const doesNameColumnExist = await knex.schema.hasColumn(TableName.SecretFolder, "name");
  if (doesParentColumExist && doesNameColumnExist) {
    await knex.schema.alterTable(TableName.SecretFolder, (t) => {
      t.index(["parentId", "name"]);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesParentColumExist = await knex.schema.hasColumn(TableName.SecretFolder, "parentId");
  const doesNameColumnExist = await knex.schema.hasColumn(TableName.SecretFolder, "name");
  if (doesParentColumExist && doesNameColumnExist) {
    await knex.schema.alterTable(TableName.SecretFolder, (t) => {
      t.dropIndex(["parentId", "name"]);
    });
  }
}
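A composite index on (parentId, name) mainly speeds up looking a folder up by name under a given parent. A hedged sketch of such a lookup (the call site and the physical table name are assumptions, not part of this diff):

import { Knex } from "knex";

// Hypothetical query served by the new (parentId, name) index:
// fetch a child folder by name under a specific parent folder.
const findChildFolder = async (knex: Knex, parentId: string, name: string) =>
  knex("secret_folders") // assumed physical name behind TableName.SecretFolder
    .where({ parentId, name })
    .first();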
@@ -0,0 +1,19 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasReviewerJwtCol = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedKubernetesTokenReviewerJwt"
  );
  if (hasReviewerJwtCol) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
      t.binary("encryptedKubernetesTokenReviewerJwt").nullable().alter();
    });
  }
}

export async function down(): Promise<void> {
  // we can't make it back to non nullable, it will fail
}
@@ -0,0 +1,29 @@
import { Knex } from "knex";

import { TableName } from "../schemas/models";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "allowedSelfApprovals"))) {
    await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
      t.boolean("allowedSelfApprovals").notNullable().defaultTo(true);
    });
  }
  if (!(await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "allowedSelfApprovals"))) {
    await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
      t.boolean("allowedSelfApprovals").notNullable().defaultTo(true);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "allowedSelfApprovals")) {
    await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
      t.dropColumn("allowedSelfApprovals");
    });
  }
  if (await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "allowedSelfApprovals")) {
    await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
      t.dropColumn("allowedSelfApprovals");
    });
  }
}
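Defaulting allowedSelfApprovals to true keeps the existing behavior for both policy tables; when a policy disables it, the access approval request service further down in this comparison rejects reviews by the original requester:

if (!policy.allowedSelfApprovals && actorId === accessApprovalRequest.requestedByUserId) {
  throw new BadRequestError({
    message: "Failed to review access approval request. Users are not authorized to review their own request."
  });
}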
@@ -16,7 +16,8 @@ export const AccessApprovalPoliciesSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  enforcementLevel: z.string().default("hard"),
  deletedAt: z.date().nullable().optional()
  deletedAt: z.date().nullable().optional(),
  allowedSelfApprovals: z.boolean().default(true)
});

export type TAccessApprovalPolicies = z.infer<typeof AccessApprovalPoliciesSchema>;

@@ -28,7 +28,7 @@ export const IdentityKubernetesAuthsSchema = z.object({
  allowedNamespaces: z.string(),
  allowedNames: z.string(),
  allowedAudience: z.string(),
  encryptedKubernetesTokenReviewerJwt: zodBuffer,
  encryptedKubernetesTokenReviewerJwt: zodBuffer.nullable().optional(),
  encryptedKubernetesCaCertificate: zodBuffer.nullable().optional()
});

@@ -23,6 +23,9 @@ export const OrganizationsSchema = z.object({
  defaultMembershipRole: z.string().default("member"),
  enforceMfa: z.boolean().default(false),
  selectedMfaMethod: z.string().nullable().optional(),
  shouldUseNewPrivilegeSystem: z.boolean().default(true),
  privilegeUpgradeInitiatedByUsername: z.string().nullable().optional(),
  privilegeUpgradeInitiatedAt: z.date().nullable().optional(),
  allowSecretSharingOutsideOrganization: z.boolean().default(true).nullable().optional()
});

@@ -16,7 +16,8 @@ export const SecretApprovalPoliciesSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  enforcementLevel: z.string().default("hard"),
  deletedAt: z.date().nullable().optional()
  deletedAt: z.date().nullable().optional(),
  allowedSelfApprovals: z.boolean().default(true)
});

export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;

@@ -16,7 +16,7 @@ export const registerCertificateEstRouter = async (server: FastifyZodProvider) =
    // for CSRs sent in PEM, we leave them as is
    // for CSRs sent in base64, we preprocess them to remove new lines and spaces
    if (!csrBody.includes("BEGIN CERTIFICATE REQUEST")) {
      csrBody = csrBody.replace(/\n/g, "").replace(/ /g, "");
      csrBody = csrBody.replaceAll("\n", "").replaceAll(" ", "");
    }

    done(null, csrBody);
@@ -29,7 +29,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
          .array()
          .min(1, { message: "At least one approver should be provided" }),
        approvals: z.number().min(1).default(1),
        enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
        enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
        allowedSelfApprovals: z.boolean().default(true)
      }),
      response: {
        200: z.object({
@@ -147,7 +148,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
          .array()
          .min(1, { message: "At least one approver should be provided" }),
        approvals: z.number().min(1).optional(),
        enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
        enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
        allowedSelfApprovals: z.boolean().default(true)
      }),
      response: {
        200: z.object({

@@ -110,7 +110,8 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
            secretPath: z.string().nullish(),
            envId: z.string(),
            enforcementLevel: z.string(),
            deletedAt: z.date().nullish()
            deletedAt: z.date().nullish(),
            allowedSelfApprovals: z.boolean()
          }),
          reviewers: z
            .object({

@@ -61,8 +61,8 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
      if (ldapConfig.groupSearchBase) {
        const groupFilter = "(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))";
        const groupSearchFilter = (ldapConfig.groupSearchFilter || groupFilter)
          .replace(/{{\.Username}}/g, user.uid)
          .replace(/{{\.UserDN}}/g, user.dn);
          .replaceAll("{{.Username}}", user.uid)
          .replaceAll("{{.UserDN}}", user.dn);

        if (!isValidLdapFilter(groupSearchFilter)) {
          throw new Error("Generated LDAP search filter is invalid.");
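replaceAll with a literal pattern is equivalent to the global-regex replace it supersedes here and avoids having to escape the {{.Username}} / {{.UserDN}} placeholders. A small illustration with made-up values:

const template = "(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))";
const filter = template
  .replaceAll("{{.Username}}", "jdoe")
  .replaceAll("{{.UserDN}}", "uid=jdoe,ou=people,dc=example,dc=com");
// => "(|(memberUid=jdoe)(member=uid=jdoe,ou=people,dc=example,dc=com)(uniqueMember=uid=jdoe,ou=people,dc=example,dc=com))"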
@@ -35,7 +35,8 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
          .array()
          .min(1, { message: "At least one approver should be provided" }),
        approvals: z.number().min(1).default(1),
        enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
        enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
        allowedSelfApprovals: z.boolean().default(true)
      }),
      response: {
        200: z.object({
@@ -85,7 +86,8 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
          .nullable()
          .transform((val) => (val ? removeTrailingSlash(val) : val))
          .transform((val) => (val === "" ? "/" : val)),
        enforcementLevel: z.nativeEnum(EnforcementLevel).optional()
        enforcementLevel: z.nativeEnum(EnforcementLevel).optional(),
        allowedSelfApprovals: z.boolean().default(true)
      }),
      response: {
        200: z.object({

@@ -49,7 +49,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
              .array(),
            secretPath: z.string().optional().nullable(),
            enforcementLevel: z.string(),
            deletedAt: z.date().nullish()
            deletedAt: z.date().nullish(),
            allowedSelfApprovals: z.boolean()
          }),
          committerUser: approvalRequestUser,
          commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
@@ -267,7 +268,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
            approvers: approvalRequestUser.array(),
            secretPath: z.string().optional().nullable(),
            enforcementLevel: z.string(),
            deletedAt: z.date().nullish()
            deletedAt: z.date().nullish(),
            allowedSelfApprovals: z.boolean()
          }),
          environment: z.string(),
          statusChangedByUser: approvalRequestUser.optional(),

@@ -5,9 +5,11 @@ import { SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-type
import { SSH_CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";

export const registerSshCertRouter = async (server: FastifyZodProvider) => {
  server.route({
@@ -73,6 +75,16 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
        }
      });

      await server.services.telemetry.sendPostHogEvents({
        event: PostHogEventTypes.SignSshKey,
        distinctId: getTelemetryDistinctId(req),
        properties: {
          certificateTemplateId: req.body.certificateTemplateId,
          principals: req.body.principals,
          ...req.auditLogInfo
        }
      });

      return {
        serialNumber,
        signedKey: signedPublicKey
@@ -152,6 +164,16 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
        }
      });

      await server.services.telemetry.sendPostHogEvents({
        event: PostHogEventTypes.IssueSshCreds,
        distinctId: getTelemetryDistinctId(req),
        properties: {
          certificateTemplateId: req.body.certificateTemplateId,
          principals: req.body.principals,
          ...req.auditLogInfo
        }
      });

      return {
        serialNumber,
        signedKey: signedPublicKey,
@@ -65,7 +65,8 @@ export const accessApprovalPolicyServiceFactory = ({
    approvers,
    projectSlug,
    environment,
    enforcementLevel
    enforcementLevel,
    allowedSelfApprovals
  }: TCreateAccessApprovalPolicy) => {
    const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
    if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
@@ -153,7 +154,8 @@ export const accessApprovalPolicyServiceFactory = ({
          approvals,
          secretPath,
          name,
          enforcementLevel
          enforcementLevel,
          allowedSelfApprovals
        },
        tx
      );
@@ -216,7 +218,8 @@ export const accessApprovalPolicyServiceFactory = ({
    actorOrgId,
    actorAuthMethod,
    approvals,
    enforcementLevel
    enforcementLevel,
    allowedSelfApprovals
  }: TUpdateAccessApprovalPolicy) => {
    const groupApprovers = approvers
      .filter((approver) => approver.type === ApproverType.Group)
@@ -262,7 +265,8 @@ export const accessApprovalPolicyServiceFactory = ({
        approvals,
        secretPath,
        name,
        enforcementLevel
        enforcementLevel,
        allowedSelfApprovals
      },
      tx
    );

@@ -26,6 +26,7 @@ export type TCreateAccessApprovalPolicy = {
  projectSlug: string;
  name: string;
  enforcementLevel: EnforcementLevel;
  allowedSelfApprovals: boolean;
} & Omit<TProjectPermission, "projectId">;

export type TUpdateAccessApprovalPolicy = {
@@ -35,6 +36,7 @@ export type TUpdateAccessApprovalPolicy = {
  secretPath?: string;
  name?: string;
  enforcementLevel?: EnforcementLevel;
  allowedSelfApprovals: boolean;
} & Omit<TProjectPermission, "projectId">;

export type TDeleteAccessApprovalPolicy = {

@@ -61,6 +61,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
        db.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
        db.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
        db.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
        db.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
        db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId"),
        db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
      )
@@ -119,6 +120,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
          approvals: doc.policyApprovals,
          secretPath: doc.policySecretPath,
          enforcementLevel: doc.policyEnforcementLevel,
          allowedSelfApprovals: doc.policyAllowedSelfApprovals,
          envId: doc.policyEnvId,
          deletedAt: doc.policyDeletedAt
        },
@@ -254,6 +256,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
        tx.ref("slug").withSchema(TableName.Environment).as("environment"),
        tx.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
        tx.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
        tx.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
        tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
        tx.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
      );
@@ -275,6 +278,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
          approvals: el.policyApprovals,
          secretPath: el.policySecretPath,
          enforcementLevel: el.policyEnforcementLevel,
          allowedSelfApprovals: el.policyAllowedSelfApprovals,
          deletedAt: el.policyDeletedAt
        },
        requestedByUser: {
@@ -320,6 +320,11 @@ export const accessApprovalRequestServiceFactory = ({
        message: "The policy associated with this access request has been deleted."
      });
    }
    if (!policy.allowedSelfApprovals && actorId === accessApprovalRequest.requestedByUserId) {
      throw new BadRequestError({
        message: "Failed to review access approval request. Users are not authorized to review their own request."
      });
    }

    const { membership, hasRole } = await permissionService.getProjectPermission({
      actor,

@@ -45,7 +45,6 @@ export const auditLogStreamServiceFactory = ({
  }: TCreateAuditLogStreamDTO) => {
    if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" });

    const appCfg = getConfig();
    const plan = await licenseService.getPlan(actorOrgId);
    if (!plan.auditLogStreams) {
      throw new BadRequestError({
@@ -62,9 +61,8 @@ export const auditLogStreamServiceFactory = ({
    );
    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Settings);

    if (appCfg.isCloud) {
      blockLocalAndPrivateIpAddresses(url);
    }
    const appCfg = getConfig();
    if (appCfg.isCloud) await blockLocalAndPrivateIpAddresses(url);

    const totalStreams = await auditLogStreamDAL.find({ orgId: actorOrgId });
    if (totalStreams.length >= plan.auditLogStreamLimit) {
@@ -135,9 +133,8 @@ export const auditLogStreamServiceFactory = ({
    const { orgId } = logStream;
    const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings);

    const appCfg = getConfig();
    if (url && appCfg.isCloud) blockLocalAndPrivateIpAddresses(url);
    if (url && appCfg.isCloud) await blockLocalAndPrivateIpAddresses(url);

    // testing connection first
    const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json" };

@@ -1,8 +1,10 @@
import { ForbiddenError } from "@casl/ability";
import { requestContext } from "@fastify/request-context";

import { ActionProjectType } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { ActorType } from "@app/services/auth/auth-type";

import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
@@ -81,8 +83,12 @@ export const auditLogServiceFactory = ({
      if (!data.projectId && !data.orgId)
        throw new BadRequestError({ message: "Must specify either project id or org id" });
    }

    return auditLogQueue.pushToLog(data);
    const el = { ...data };
    if (el.actor.type === ActorType.USER || el.actor.type === ActorType.IDENTITY) {
      const permissionMetadata = requestContext.get("identityPermissionMetadata");
      el.actor.metadata.permission = permissionMetadata;
    }
    return auditLogQueue.pushToLog(el);
  };

  return {
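The permission metadata attached to the actor here is read from @fastify/request-context; per the fastify.d.ts augmentation above it is filled by the permission service. A hedged sketch of the producing side (the exact call site and field values are assumptions, not shown in this diff):

import { requestContext } from "@fastify/request-context";

// Somewhere in the permission service, the resolved permission metadata for the
// acting user/identity is stashed on the request context so the audit log
// service can copy it onto actor.metadata.permission.
requestContext.set("identityPermissionMetadata", {
  role: "custom-role-slug", // illustrative fields only
  via: "group-membership"
});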
@@ -290,6 +290,7 @@ interface UserActorMetadata {
  userId: string;
  email?: string | null;
  username: string;
  permission?: Record<string, unknown>;
}

interface ServiceActorMetadata {
@@ -300,6 +301,7 @@ interface ServiceActorMetadata {
interface IdentityActorMetadata {
  identityId: string;
  name: string;
  permission?: Record<string, unknown>;
}

interface ScimClientActorMetadata {}
@@ -966,6 +968,7 @@ interface LoginIdentityOidcAuthEvent {
    identityId: string;
    identityOidcAuthId: string;
    identityAccessTokenId: string;
    oidcClaimsReceived: Record<string, unknown>;
  };
}

@@ -1,5 +1,6 @@
import * as x509 from "@peculiar/x509";

import { extractX509CertFromChain } from "@app/lib/certificates/extract-certificate";
import { BadRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { isCertChainValid } from "@app/services/certificate/certificate-fns";
import { TCertificateAuthorityCertDALFactory } from "@app/services/certificate-authority/certificate-authority-cert-dal";
@@ -67,9 +68,7 @@ export const certificateEstServiceFactory = ({

  const certTemplate = await certificateTemplateDAL.findById(certificateTemplateId);

  const leafCertificate = decodeURIComponent(sslClientCert).match(
    /-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g
  )?.[0];
  const leafCertificate = extractX509CertFromChain(decodeURIComponent(sslClientCert))?.[0];

  if (!leafCertificate) {
    throw new UnauthorizedError({ message: "Missing client certificate" });
@@ -88,10 +87,7 @@ export const certificateEstServiceFactory = ({
  const verifiedChains = await Promise.all(
    caCertChains.map((chain) => {
      const caCert = new x509.X509Certificate(chain.certificate);
      const caChain =
        chain.certificateChain
          .match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
          ?.map((c) => new x509.X509Certificate(c)) || [];
      const caChain = extractX509CertFromChain(chain.certificateChain)?.map((c) => new x509.X509Certificate(c)) || [];

      return isCertChainValid([cert, caCert, ...caChain]);
    })
@@ -172,9 +168,7 @@ export const certificateEstServiceFactory = ({
  }

  if (!estConfig.disableBootstrapCertValidation) {
    const caCerts = estConfig.caChain
      .match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
      ?.map((cert) => {
    const caCerts = extractX509CertFromChain(estConfig.caChain)?.map((cert) => {
      return new x509.X509Certificate(cert);
    });

@@ -182,9 +176,7 @@ export const certificateEstServiceFactory = ({
      throw new BadRequestError({ message: "Failed to parse certificate chain" });
    }

    const leafCertificate = decodeURIComponent(sslClientCert).match(
      /-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g
    )?.[0];
    const leafCertificate = extractX509CertFromChain(decodeURIComponent(sslClientCert))?.[0];

    if (!leafCertificate) {
      throw new BadRequestError({ message: "Missing client certificate" });
@@ -250,13 +242,7 @@ export const certificateEstServiceFactory = ({
      kmsService
    });

    const certificates = caCertChain
      .match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
      ?.map((cert) => new x509.X509Certificate(cert));

    if (!certificates) {
      throw new BadRequestError({ message: "Failed to parse certificate chain" });
    }
    const certificates = extractX509CertFromChain(caCertChain).map((cert) => new x509.X509Certificate(cert));

    const caCertificate = new x509.X509Certificate(caCert);
    return convertRawCertsToPkcs7([caCertificate.rawData, ...certificates.map((cert) => cert.rawData)]);
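extractX509CertFromChain replaces the inline PEM regex used before this change. A minimal sketch of what such a helper can look like, based on the regex it supersedes (the actual implementation in @app/lib/certificates/extract-certificate may differ):

// Sketch: split a PEM bundle into its individual certificate blocks.
export const extractX509CertFromChain = (certificateChain: string): string[] | undefined =>
  certificateChain.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g) ?? undefined;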
@@ -183,7 +183,7 @@ export const dynamicSecretLeaseServiceFactory = ({
    });

    const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
    if (!dynamicSecretLease) {
    if (!dynamicSecretLease || dynamicSecretLease.dynamicSecret.folderId !== folder.id) {
      throw new NotFoundError({ message: `Dynamic secret lease with ID '${leaseId}' not found` });
    }

@@ -256,7 +256,7 @@ export const dynamicSecretLeaseServiceFactory = ({
    });

    const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
    if (!dynamicSecretLease)
    if (!dynamicSecretLease || dynamicSecretLease.dynamicSecret.folderId !== folder.id)
      throw new NotFoundError({ message: `Dynamic secret lease with ID '${leaseId}' not found` });

    const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;

@@ -1,31 +1,51 @@
import crypto from "node:crypto";
import dns from "node:dns/promises";
import net from "node:net";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { isPrivateIp } from "@app/lib/ip/ipRange";
import { getDbConnectionHost } from "@app/lib/knex";

export const verifyHostInputValidity = (host: string, isGateway = false) => {
export const verifyHostInputValidity = async (host: string, isGateway = false) => {
  const appCfg = getConfig();
  const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
  // no need for validation when it's dev
  if (appCfg.NODE_ENV === "development") return;
  // if (appCfg.NODE_ENV === "development") return ["host.docker.internal"]; // incase you want to remove this check in dev

  if (host === "host.docker.internal") throw new BadRequestError({ message: "Invalid db host" });
  const reservedHosts = [appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI)].concat(
    (appCfg.DB_READ_REPLICAS || []).map((el) => getDbConnectionHost(el.DB_CONNECTION_URI)),
    getDbConnectionHost(appCfg.REDIS_URL)
  );

  if (
    appCfg.isCloud &&
    !isGateway &&
    // localhost
    // internal ips
    (host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
  )
    throw new BadRequestError({ message: "Invalid db host" });
  // get host db ip
  const exclusiveIps: string[] = [];
  for await (const el of reservedHosts) {
    if (el) {
      if (net.isIPv4(el)) {
        exclusiveIps.push(el);
      } else {
        const resolvedIps = await dns.resolve4(el);
        exclusiveIps.push(...resolvedIps);
      }
    }
  }

  if (
    host === "localhost" ||
    host === "127.0.0.1" ||
    (dbHost?.length === host.length && crypto.timingSafeEqual(Buffer.from(dbHost || ""), Buffer.from(host)))
  ) {
  const normalizedHost = host.split(":")[0];
  const inputHostIps: string[] = [];
  if (net.isIPv4(host)) {
    inputHostIps.push(host);
  } else {
    if (normalizedHost === "localhost" || normalizedHost === "host.docker.internal") {
      throw new BadRequestError({ message: "Invalid db host" });
    }
    const resolvedIps = await dns.resolve4(host);
    inputHostIps.push(...resolvedIps);
  }

  if (!isGateway) {
    const isInternalIp = inputHostIps.some((el) => isPrivateIp(el));
    if (isInternalIp) throw new BadRequestError({ message: "Invalid db host" });
  }

  const isAppUsedIps = inputHostIps.some((el) => exclusiveIps.includes(el));
  if (isAppUsedIps) throw new BadRequestError({ message: "Invalid db host" });
  return inputHostIps;
};
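After this change verifyHostInputValidity is async and returns the resolved IPv4 addresses for the host, so callers must await it; a short usage sketch mirroring the provider changes below (the hostname is illustrative):

// Throws BadRequestError for localhost, private ranges, and the app's own
// database/Redis hosts (unless the connection goes through a gateway);
// otherwise returns the resolved IPs, which the providers now connect to.
const [hostIp] = await verifyHostInputValidity("db.example.com");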
@@ -13,6 +13,7 @@ import { customAlphabet } from "nanoid";
import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { DynamicSecretAwsElastiCacheSchema, TDynamicProviderFns } from "./models";

@@ -144,6 +145,14 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
    // We can't return the parsed statements here because we need to use the handlebars template to generate the username and password, before we can use the parsed statements.
    CreateElastiCacheUserSchema.parse(JSON.parse(providerInputs.creationStatement));
    DeleteElasticCacheUserSchema.parse(JSON.parse(providerInputs.revocationStatement));
    validateHandlebarTemplate("AWS ElastiCache creation", providerInputs.creationStatement, {
      allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
    });
    if (providerInputs.revocationStatement) {
      validateHandlebarTemplate("AWS ElastiCache revoke", providerInputs.revocationStatement, {
        allowedExpressions: (val) => ["username"].includes(val)
      });
    }

    return providerInputs;
  };
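validateHandlebarTemplate restricts which {{...}} expressions a user-supplied statement template may reference. A minimal sketch of the idea using the Handlebars parser (the real helper in @app/lib/template/validate-handlebars may be implemented differently):

import handlebars from "handlebars";
import { BadRequestError } from "@app/lib/errors";

// Sketch: parse the template and reject any mustache expression whose name
// is not accepted by the allowedExpressions predicate.
const validateHandlebarTemplate = (
  scope: string,
  template: string,
  { allowedExpressions }: { allowedExpressions: (val: string) => boolean }
) => {
  for (const node of handlebars.parse(template).body) {
    if (node.type === "MustacheStatement" && "path" in node) {
      const name = (node as { path: { original?: string } }).path.original ?? "";
      if (!allowedExpressions(name)) {
        throw new BadRequestError({ message: `${scope} template references a disallowed expression: ${name}` });
      }
    }
  }
};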
@@ -3,9 +3,10 @@
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";

const generatePassword = (size = 48) => {
@@ -20,14 +21,28 @@ const generateUsername = () => {
export const CassandraProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretCassandraSchema.parseAsync(inputs);
    if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
      throw new BadRequestError({ message: "Invalid db host" });
    const hostIps = await Promise.all(
      providerInputs.host
        .split(",")
        .filter(Boolean)
        .map((el) => verifyHostInputValidity(el).then((ip) => ip[0]))
    );
    validateHandlebarTemplate("Cassandra creation", providerInputs.creationStatement, {
      allowedExpressions: (val) => ["username", "password", "expiration", "keyspace"].includes(val)
    });
    if (providerInputs.renewStatement) {
      validateHandlebarTemplate("Cassandra renew", providerInputs.renewStatement, {
        allowedExpressions: (val) => ["username", "expiration", "keyspace"].includes(val)
      });
    }
    validateHandlebarTemplate("Cassandra revoke", providerInputs.revocationStatement, {
      allowedExpressions: (val) => ["username"].includes(val)
    });

    return providerInputs;
    return { ...providerInputs, hostIps };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema> & { hostIps: string[] }) => {
    const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
    const client = new cassandra.Client({
      sslOptions,
@@ -40,7 +55,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
      },
      keyspace: providerInputs.keyspace,
      localDataCenter: providerInputs?.localDataCenter,
      contactPoints: providerInputs.host.split(",").filter(Boolean)
      contactPoints: providerInputs.hostIps
    });
    return client;
  };

@@ -19,15 +19,14 @@
export const ElasticSearchProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretElasticSearchSchema.parseAsync(inputs);
    verifyHostInputValidity(providerInputs.host);

    return providerInputs;
    const [hostIp] = await verifyHostInputValidity(providerInputs.host);
    return { ...providerInputs, hostIp };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema> & { hostIp: string }) => {
    const connection = new ElasticSearchClient({
      node: {
        url: new URL(`${providerInputs.host}:${providerInputs.port}`),
        url: new URL(`${providerInputs.hostIp}:${providerInputs.port}`),
        ...(providerInputs.ca && {
          ssl: {
            rejectUnauthorized: false,

@@ -19,15 +19,15 @@
export const MongoDBProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretMongoDBSchema.parseAsync(inputs);
    verifyHostInputValidity(providerInputs.host);
    return providerInputs;
    const [hostIp] = await verifyHostInputValidity(providerInputs.host);
    return { ...providerInputs, hostIp };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema> & { hostIp: string }) => {
    const isSrv = !providerInputs.port;
    const uri = isSrv
      ? `mongodb+srv://${providerInputs.host}`
      : `mongodb://${providerInputs.host}:${providerInputs.port}`;
      ? `mongodb+srv://${providerInputs.hostIp}`
      : `mongodb://${providerInputs.hostIp}:${providerInputs.port}`;

    const client = new MongoClient(uri, {
      auth: {

@@ -3,7 +3,6 @@
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { removeTrailingSlash } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";

@@ -79,14 +78,13 @@ async function deleteRabbitMqUser({ axiosInstance, usernameToDelete }: TDeleteRa
export const RabbitMqProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretRabbitMqSchema.parseAsync(inputs);
    verifyHostInputValidity(providerInputs.host);

    return providerInputs;
    const [hostIp] = await verifyHostInputValidity(providerInputs.host);
    return { ...providerInputs, hostIp };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema> & { hostIp: string }) => {
    const axiosInstance = axios.create({
      baseURL: `${removeTrailingSlash(providerInputs.host)}:${providerInputs.port}/api`,
      baseURL: `${providerInputs.hostIp}:${providerInputs.port}/api`,
      auth: {
        username: providerInputs.username,
        password: providerInputs.password

@@ -5,6 +5,7 @@

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";
@@ -51,16 +52,28 @@ const executeTransactions = async (connection: Redis, commands: string[]): Promi
export const RedisDatabaseProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretRedisDBSchema.parseAsync(inputs);
    verifyHostInputValidity(providerInputs.host);
    return providerInputs;
    const [hostIp] = await verifyHostInputValidity(providerInputs.host);
    validateHandlebarTemplate("Redis creation", providerInputs.creationStatement, {
      allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
    });
    if (providerInputs.renewStatement) {
      validateHandlebarTemplate("Redis renew", providerInputs.renewStatement, {
        allowedExpressions: (val) => ["username", "expiration"].includes(val)
      });
    }
    validateHandlebarTemplate("Redis revoke", providerInputs.revocationStatement, {
      allowedExpressions: (val) => ["username"].includes(val)
    });

    return { ...providerInputs, hostIp };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema> & { hostIp: string }) => {
    let connection: Redis | null = null;
    try {
      connection = new Redis({
        username: providerInputs.username,
        host: providerInputs.host,
        host: providerInputs.hostIp,
        port: providerInputs.port,
        password: providerInputs.password,
        ...(providerInputs.ca && {
@@ -5,6 +5,7 @@

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";
@@ -27,14 +28,25 @@ export const SapAseProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretSapAseSchema.parseAsync(inputs);

    verifyHostInputValidity(providerInputs.host);
    return providerInputs;
    const [hostIp] = await verifyHostInputValidity(providerInputs.host);
    validateHandlebarTemplate("SAP ASE creation", providerInputs.creationStatement, {
      allowedExpressions: (val) => ["username", "password"].includes(val)
    });
    if (providerInputs.revocationStatement) {
      validateHandlebarTemplate("SAP ASE revoke", providerInputs.revocationStatement, {
        allowedExpressions: (val) => ["username"].includes(val)
      });
    }
    return { ...providerInputs, hostIp };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapAseSchema>, useMaster?: boolean) => {
  const $getClient = async (
    providerInputs: z.infer<typeof DynamicSecretSapAseSchema> & { hostIp: string },
    useMaster?: boolean
  ) => {
    const connectionString =
      `DRIVER={FreeTDS};` +
      `SERVER=${providerInputs.host};` +
      `SERVER=${providerInputs.hostIp};` +
      `PORT=${providerInputs.port};` +
      `DATABASE=${useMaster ? "master" : providerInputs.database};` +
      `UID=${providerInputs.username};` +
@@ -83,7 +95,7 @@ export const SapAseProvider = (): TDynamicProviderFns => {
      password
    });

    const queries = creationStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);
    const queries = creationStatement.trim().replaceAll("\n", "").split(";").filter(Boolean);

    for await (const query of queries) {
      // If it's an adduser query, we need to first call sp_addlogin on the MASTER database.
@@ -104,7 +116,7 @@ export const SapAseProvider = (): TDynamicProviderFns => {
      username
    });

    const queries = revokeStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);
    const queries = revokeStatement.trim().replaceAll("\n", "").split(";").filter(Boolean);

    const client = await $getClient(providerInputs);
    const masterClient = await $getClient(providerInputs, true);

@@ -11,6 +11,7 @@

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapHanaSchema, TDynamicProviderFns } from "./models";
@@ -28,13 +29,24 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretSapHanaSchema.parseAsync(inputs);

    verifyHostInputValidity(providerInputs.host);
    return providerInputs;
    const [hostIp] = await verifyHostInputValidity(providerInputs.host);
    validateHandlebarTemplate("SAP Hana creation", providerInputs.creationStatement, {
      allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
    });
    if (providerInputs.renewStatement) {
      validateHandlebarTemplate("SAP Hana renew", providerInputs.renewStatement, {
        allowedExpressions: (val) => ["username", "expiration"].includes(val)
      });
    }
    validateHandlebarTemplate("SAP Hana revoke", providerInputs.revocationStatement, {
      allowedExpressions: (val) => ["username"].includes(val)
    });
    return { ...providerInputs, hostIp };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema> & { hostIp: string }) => {
    const client = hdb.createClient({
      host: providerInputs.host,
      host: providerInputs.hostIp,
      port: providerInputs.port,
      user: providerInputs.username,
      password: providerInputs.password,

@@ -5,6 +5,7 @@

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";

@@ -31,6 +32,18 @@ const getDaysToExpiry = (expiryDate: Date) => {
export const SnowflakeProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretSnowflakeSchema.parseAsync(inputs);
    validateHandlebarTemplate("Snowflake creation", providerInputs.creationStatement, {
      allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
    });
    if (providerInputs.renewStatement) {
      validateHandlebarTemplate("Snowflake renew", providerInputs.renewStatement, {
        allowedExpressions: (val) => ["username", "expiration"].includes(val)
      });
    }
    validateHandlebarTemplate("Snowflake revoke", providerInputs.revocationStatement, {
      allowedExpressions: (val) => ["username"].includes(val)
    });

    return providerInputs;
  };
@@ -5,6 +5,7 @@

import { withGatewayProxy } from "@app/lib/gateway";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
@@ -117,8 +118,21 @@ type TSqlDatabaseProviderDTO = {
export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretSqlDBSchema.parseAsync(inputs);
    verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.projectGatewayId));
    return providerInputs;

    const [hostIp] = await verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.projectGatewayId));
    validateHandlebarTemplate("SQL creation", providerInputs.creationStatement, {
      allowedExpressions: (val) => ["username", "password", "expiration", "database"].includes(val)
    });
    if (providerInputs.renewStatement) {
      validateHandlebarTemplate("SQL renew", providerInputs.renewStatement, {
        allowedExpressions: (val) => ["username", "expiration", "database"].includes(val)
      });
    }
    validateHandlebarTemplate("SQL revoke", providerInputs.revocationStatement, {
      allowedExpressions: (val) => ["username", "database"].includes(val)
    });

    return { ...providerInputs, hostIp };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
@@ -144,7 +158,8 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
          }
          : undefined
      },
      acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT
      acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT,
      pool: { min: 0, max: 7 }
    });
    return db;
  };
@@ -178,7 +193,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    let isConnected = false;
    const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
    const gatewayCallback = async (host = providerInputs.hostIp, port = providerInputs.port) => {
      const db = await $getClient({ ...providerInputs, port, host });
      // oracle needs from keyword
      const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
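The SQL provider now also caps its connection pool. A hedged sketch of the resulting knex configuration in isolation (the client and connection details are illustrative, and 10_000 stands in for the real EXTERNAL_REQUEST_TIMEOUT constant):

import knex from "knex";

// A pool that holds no idle connections (min: 0) and opens at most 7
// concurrent connections, matching the provider change above.
const db = knex({
  client: "pg",
  connection: { host: "db.example.com", port: 5432, user: "app", password: "secret", database: "appdb" },
  acquireConnectionTimeout: 10_000, // EXTERNAL_REQUEST_TIMEOUT in the real code
  pool: { min: 0, max: 7 }
});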
@@ -3,8 +3,7 @@ import slugify from "@sindresorhus/slugify";

import { OrgMembershipRole, TOrgRoles } from "@app/db/schemas";
import { TOidcConfigDALFactory } from "@app/ee/services/oidc/oidc-config-dal";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { BadRequestError, NotFoundError, PermissionBoundaryError, UnauthorizedError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
@@ -14,7 +13,8 @@ import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal
import { TUserDALFactory } from "@app/services/user/user-dal";

import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { OrgPermissionGroupActions, OrgPermissionSubjects } from "../permission/org-permission";
import { constructPermissionErrorMessage, validatePrivilegeChangeOperation } from "../permission/permission-fns";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { TGroupDALFactory } from "./group-dal";
import { addUsersToGroupByUserIds, removeUsersFromGroupByUserIds } from "./group-fns";
@@ -67,14 +67,14 @@ export const groupServiceFactory = ({
  const createGroup = async ({ name, slug, role, actor, actorId, actorAuthMethod, actorOrgId }: TCreateGroupDTO) => {
    if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });

    const { permission } = await permissionService.getOrgPermission(
    const { permission, membership } = await permissionService.getOrgPermission(
      actor,
      actorId,
      actorOrgId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Groups);
    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Create, OrgPermissionSubjects.Groups);

    const plan = await licenseService.getPlan(actorOrgId);
    if (!plan.groups)
@@ -87,14 +87,26 @@ export const groupServiceFactory = ({
      actorOrgId
    );
    const isCustomRole = Boolean(customRole);
    if (role !== OrgMembershipRole.NoAccess) {
      const permissionBoundary = validatePrivilegeChangeOperation(
        membership.shouldUseNewPrivilegeSystem,
        OrgPermissionGroupActions.GrantPrivileges,
        OrgPermissionSubjects.Groups,
        permission,
        rolePermission
      );

    const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
    if (!permissionBoundary.isValid)
      throw new ForbiddenRequestError({
        name: "PermissionBoundaryError",
        message: "Failed to create a more privileged group",
      throw new PermissionBoundaryError({
        message: constructPermissionErrorMessage(
          "Failed to create group",
          membership.shouldUseNewPrivilegeSystem,
          OrgPermissionGroupActions.GrantPrivileges,
          OrgPermissionSubjects.Groups
        ),
        details: { missingPermissions: permissionBoundary.missingPermissions }
      });
    }

    const group = await groupDAL.transaction(async (tx) => {
      const existingGroup = await groupDAL.findOne({ orgId: actorOrgId, name }, tx);
@ -133,14 +145,15 @@ export const groupServiceFactory = ({
|
||||
}: TUpdateGroupDTO) => {
|
||||
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
const { permission, membership } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Groups);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Edit, OrgPermissionSubjects.Groups);
|
||||
|
||||
const plan = await licenseService.getPlan(actorOrgId);
|
||||
if (!plan.groups)
|
||||
@ -161,11 +174,21 @@ export const groupServiceFactory = ({
|
||||
);
|
||||
|
||||
const isCustomRole = Boolean(customOrgRole);
|
||||
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
|
||||
const permissionBoundary = validatePrivilegeChangeOperation(
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
OrgPermissionGroupActions.GrantPrivileges,
|
||||
OrgPermissionSubjects.Groups,
|
||||
permission,
|
||||
rolePermission
|
||||
);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update a more privileged group",
|
||||
throw new PermissionBoundaryError({
|
||||
message: constructPermissionErrorMessage(
|
||||
"Failed to update group",
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
OrgPermissionGroupActions.GrantPrivileges,
|
||||
OrgPermissionSubjects.Groups
|
||||
),
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
if (isCustomRole) customRole = customOrgRole;
|
||||
@ -215,7 +238,7 @@ export const groupServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Delete, OrgPermissionSubjects.Groups);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Delete, OrgPermissionSubjects.Groups);
|
||||
|
||||
const plan = await licenseService.getPlan(actorOrgId);
|
||||
|
||||
@ -242,7 +265,7 @@ export const groupServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Read, OrgPermissionSubjects.Groups);
|
||||
|
||||
const group = await groupDAL.findById(id);
|
||||
if (!group) {
|
||||
@ -275,7 +298,7 @@ export const groupServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Read, OrgPermissionSubjects.Groups);
|
||||
|
||||
const group = await groupDAL.findOne({
|
||||
orgId: actorOrgId,
|
||||
@ -303,14 +326,14 @@ export const groupServiceFactory = ({
|
||||
const addUserToGroup = async ({ id, username, actor, actorId, actorAuthMethod, actorOrgId }: TAddUserToGroupDTO) => {
|
||||
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
const { permission, membership } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Groups);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Edit, OrgPermissionSubjects.Groups);
|
||||
|
||||
// check if group with slug exists
|
||||
const group = await groupDAL.findOne({
|
||||
@ -338,11 +361,22 @@ export const groupServiceFactory = ({
|
||||
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
|
||||
|
||||
// check if user has broader or equal to privileges than group
|
||||
const permissionBoundary = validatePermissionBoundary(permission, groupRolePermission);
|
||||
const permissionBoundary = validatePrivilegeChangeOperation(
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
OrgPermissionGroupActions.AddMembers,
|
||||
OrgPermissionSubjects.Groups,
|
||||
permission,
|
||||
groupRolePermission
|
||||
);
|
||||
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to add user to more privileged group",
|
||||
throw new PermissionBoundaryError({
|
||||
message: constructPermissionErrorMessage(
|
||||
"Failed to add user to more privileged group",
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
OrgPermissionGroupActions.AddMembers,
|
||||
OrgPermissionSubjects.Groups
|
||||
),
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
@ -374,14 +408,14 @@ export const groupServiceFactory = ({
|
||||
}: TRemoveUserFromGroupDTO) => {
|
||||
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
const { permission, membership } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Groups);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Edit, OrgPermissionSubjects.Groups);
|
||||
|
||||
// check if group with slug exists
|
||||
const group = await groupDAL.findOne({
|
||||
@ -409,11 +443,21 @@ export const groupServiceFactory = ({
|
||||
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
|
||||
|
||||
// check if user has broader or equal to privileges than group
|
||||
const permissionBoundary = validatePermissionBoundary(permission, groupRolePermission);
|
||||
const permissionBoundary = validatePrivilegeChangeOperation(
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
OrgPermissionGroupActions.RemoveMembers,
|
||||
OrgPermissionSubjects.Groups,
|
||||
permission,
|
||||
groupRolePermission
|
||||
);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to delete user from more privileged group",
|
||||
throw new PermissionBoundaryError({
|
||||
message: constructPermissionErrorMessage(
|
||||
"Failed to delete user from more privileged group",
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
OrgPermissionGroupActions.RemoveMembers,
|
||||
OrgPermissionSubjects.Groups
|
||||
),
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
|
@ -2,16 +2,17 @@ import { ForbiddenError, subject } from "@casl/ability";
|
||||
import { packRules } from "@casl/ability/extra";
|
||||
|
||||
import { ActionProjectType, TableName } from "@app/db/schemas";
|
||||
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
|
||||
import { unpackPermissions } from "@app/server/routes/sanitizedSchema/permission";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
|
||||
import { constructPermissionErrorMessage, validatePrivilegeChangeOperation } from "../permission/permission-fns";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
|
||||
import { ProjectPermissionIdentityActions, ProjectPermissionSub } from "../permission/project-permission";
|
||||
import { TIdentityProjectAdditionalPrivilegeV2DALFactory } from "./identity-project-additional-privilege-v2-dal";
|
||||
import {
|
||||
IdentityProjectAdditionalPrivilegeTemporaryMode,
|
||||
@ -64,10 +65,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
ProjectPermissionIdentityActions.Edit,
|
||||
subject(ProjectPermissionSub.Identity, { identityId })
|
||||
);
|
||||
const { permission: targetIdentityPermission } = await permissionService.getProjectPermission({
|
||||
const { permission: targetIdentityPermission, membership } = await permissionService.getProjectPermission({
|
||||
actor: ActorType.IDENTITY,
|
||||
actorId: identityId,
|
||||
projectId: identityProjectMembership.projectId,
|
||||
@ -79,13 +80,26 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
|
||||
// we need to validate that the privilege given is not higher than the assigning users permission
|
||||
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
|
||||
targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission));
|
||||
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
|
||||
const permissionBoundary = validatePrivilegeChangeOperation(
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
ProjectPermissionIdentityActions.GrantPrivileges,
|
||||
ProjectPermissionSub.Identity,
|
||||
permission,
|
||||
targetIdentityPermission
|
||||
);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update more privileged identity",
|
||||
throw new PermissionBoundaryError({
|
||||
message: constructPermissionErrorMessage(
|
||||
"Failed to update more privileged identity",
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
ProjectPermissionIdentityActions.GrantPrivileges,
|
||||
ProjectPermissionSub.Identity
|
||||
),
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
validateHandlebarTemplate("Identity Additional Privilege Create", JSON.stringify(customPermission || []), {
|
||||
allowedExpressions: (val) => val.includes("identity.")
|
||||
});
|
||||
|
||||
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
|
||||
slug,
|
||||
@ -150,10 +164,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
ProjectPermissionIdentityActions.Edit,
|
||||
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
|
||||
);
|
||||
const { permission: targetIdentityPermission } = await permissionService.getProjectPermission({
|
||||
const { permission: targetIdentityPermission, membership } = await permissionService.getProjectPermission({
|
||||
actor: ActorType.IDENTITY,
|
||||
actorId: identityProjectMembership.identityId,
|
||||
projectId: identityProjectMembership.projectId,
|
||||
@ -165,14 +179,28 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
|
||||
// we need to validate that the privilege given is not higher than the assigning users permission
|
||||
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
|
||||
targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || []));
|
||||
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
|
||||
const permissionBoundary = validatePrivilegeChangeOperation(
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
ProjectPermissionIdentityActions.GrantPrivileges,
|
||||
ProjectPermissionSub.Identity,
|
||||
permission,
|
||||
targetIdentityPermission
|
||||
);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update more privileged identity",
|
||||
throw new PermissionBoundaryError({
|
||||
message: constructPermissionErrorMessage(
|
||||
"Failed to update more privileged identity",
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
ProjectPermissionIdentityActions.GrantPrivileges,
|
||||
ProjectPermissionSub.Identity
|
||||
),
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
validateHandlebarTemplate("Identity Additional Privilege Update", JSON.stringify(data.permissions || []), {
|
||||
allowedExpressions: (val) => val.includes("identity.")
|
||||
});
|
||||
|
||||
if (data?.slug) {
|
||||
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
|
||||
slug: data.slug,
|
||||
@ -227,7 +255,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
|
||||
message: `Failed to find identity with membership ${identityPrivilege.projectMembershipId}`
|
||||
});
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
const { permission, membership } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
projectId: identityProjectMembership.projectId,
|
||||
@ -236,7 +264,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
ProjectPermissionIdentityActions.Edit,
|
||||
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
|
||||
);
|
||||
const { permission: identityRolePermission } = await permissionService.getProjectPermission({
|
||||
@ -247,11 +275,21 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
const permissionBoundary = validatePermissionBoundary(permission, identityRolePermission);
|
||||
const permissionBoundary = validatePrivilegeChangeOperation(
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
ProjectPermissionIdentityActions.GrantPrivileges,
|
||||
ProjectPermissionSub.Identity,
|
||||
permission,
|
||||
identityRolePermission
|
||||
);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update more privileged identity",
|
||||
throw new PermissionBoundaryError({
|
||||
message: constructPermissionErrorMessage(
|
||||
"Failed to update more privileged identity",
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
ProjectPermissionIdentityActions.GrantPrivileges,
|
||||
ProjectPermissionSub.Identity
|
||||
),
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
@ -287,7 +325,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
ProjectPermissionIdentityActions.Read,
|
||||
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
|
||||
);
|
||||
|
||||
@ -322,7 +360,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
ProjectPermissionIdentityActions.Read,
|
||||
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
|
||||
);
|
||||
|
||||
@ -358,7 +396,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
ProjectPermissionIdentityActions.Read,
|
||||
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
|
||||
);
|
||||
|
||||
|
@ -2,16 +2,21 @@ import { ForbiddenError, MongoAbility, RawRuleOf, subject } from "@casl/ability"
|
||||
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
|
||||
|
||||
import { ActionProjectType } from "@app/db/schemas";
|
||||
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
|
||||
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
|
||||
import { constructPermissionErrorMessage, validatePrivilegeChangeOperation } from "../permission/permission-fns";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSet, ProjectPermissionSub } from "../permission/project-permission";
|
||||
import {
|
||||
ProjectPermissionIdentityActions,
|
||||
ProjectPermissionSet,
|
||||
ProjectPermissionSub
|
||||
} from "../permission/project-permission";
|
||||
import { TIdentityProjectAdditionalPrivilegeDALFactory } from "./identity-project-additional-privilege-dal";
|
||||
import {
|
||||
IdentityProjectAdditionalPrivilegeTemporaryMode,
|
||||
@ -63,7 +68,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
if (!identityProjectMembership)
|
||||
throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` });
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
const { permission, membership } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
projectId: identityProjectMembership.projectId,
|
||||
@ -71,8 +76,9 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
ProjectPermissionIdentityActions.Edit,
|
||||
subject(ProjectPermissionSub.Identity, { identityId })
|
||||
);
|
||||
|
||||
@ -88,11 +94,21 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
// we need to validate that the privilege given is not higher than the assigning users permission
|
||||
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
|
||||
targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission));
|
||||
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
|
||||
const permissionBoundary = validatePrivilegeChangeOperation(
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
ProjectPermissionIdentityActions.GrantPrivileges,
|
||||
ProjectPermissionSub.Identity,
|
||||
permission,
|
||||
targetIdentityPermission
|
||||
);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update more privileged identity",
|
||||
throw new PermissionBoundaryError({
|
||||
message: constructPermissionErrorMessage(
|
||||
"Failed to update more privileged identity",
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
ProjectPermissionIdentityActions.GrantPrivileges,
|
||||
ProjectPermissionSub.Identity
|
||||
),
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
@ -102,6 +118,10 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
});
|
||||
if (existingSlug) throw new BadRequestError({ message: "Additional privilege of provided slug exist" });
|
||||
|
||||
validateHandlebarTemplate("Identity Additional Privilege Create", JSON.stringify(customPermission || []), {
|
||||
allowedExpressions: (val) => val.includes("identity.")
|
||||
});
|
||||
|
||||
const packedPermission = JSON.stringify(packRules(customPermission));
|
||||
if (!dto.isTemporary) {
|
||||
const additionalPrivilege = await identityProjectAdditionalPrivilegeDAL.create({
|
||||
@ -150,7 +170,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
if (!identityProjectMembership)
|
||||
throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` });
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
const { permission, membership } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
projectId: identityProjectMembership.projectId,
|
||||
@ -160,7 +180,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
ProjectPermissionIdentityActions.Edit,
|
||||
subject(ProjectPermissionSub.Identity, { identityId })
|
||||
);
|
||||
|
||||
@ -176,11 +196,21 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
// we need to validate that the privilege given is not higher than the assigning users permission
|
||||
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
|
||||
targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || []));
|
||||
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
|
||||
const permissionBoundary = validatePrivilegeChangeOperation(
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
ProjectPermissionIdentityActions.GrantPrivileges,
|
||||
ProjectPermissionSub.Identity,
|
||||
permission,
|
||||
targetIdentityPermission
|
||||
);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update more privileged identity",
|
||||
throw new PermissionBoundaryError({
|
||||
message: constructPermissionErrorMessage(
|
||||
"Failed to update more privileged identity",
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
ProjectPermissionIdentityActions.GrantPrivileges,
|
||||
ProjectPermissionSub.Identity
|
||||
),
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
@ -203,6 +233,9 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
}
|
||||
|
||||
const isTemporary = typeof data?.isTemporary !== "undefined" ? data.isTemporary : identityPrivilege.isTemporary;
|
||||
validateHandlebarTemplate("Identity Additional Privilege Update", JSON.stringify(data.permissions || []), {
|
||||
allowedExpressions: (val) => val.includes("identity.")
|
||||
});
|
||||
|
||||
const packedPermission = data.permissions ? JSON.stringify(packRules(data.permissions)) : undefined;
|
||||
if (isTemporary) {
|
||||
@ -255,7 +288,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
if (!identityProjectMembership)
|
||||
throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` });
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
const { permission, membership } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
projectId: identityProjectMembership.projectId,
|
||||
@ -264,7 +297,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
ProjectPermissionIdentityActions.Edit,
|
||||
subject(ProjectPermissionSub.Identity, { identityId })
|
||||
);
|
||||
|
||||
@ -276,11 +309,21 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
const permissionBoundary = validatePermissionBoundary(permission, identityRolePermission);
|
||||
const permissionBoundary = validatePrivilegeChangeOperation(
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
ProjectPermissionIdentityActions.GrantPrivileges,
|
||||
ProjectPermissionSub.Identity,
|
||||
permission,
|
||||
identityRolePermission
|
||||
);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to edit more privileged identity",
|
||||
throw new PermissionBoundaryError({
|
||||
message: constructPermissionErrorMessage(
|
||||
"Failed to edit more privileged identity",
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
ProjectPermissionIdentityActions.GrantPrivileges,
|
||||
ProjectPermissionSub.Identity
|
||||
),
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
@ -327,7 +370,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
ProjectPermissionIdentityActions.Read,
|
||||
subject(ProjectPermissionSub.Identity, { identityId })
|
||||
);
|
||||
|
||||
@ -371,7 +414,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
ProjectPermissionIdentityActions.Read,
|
||||
subject(ProjectPermissionSub.Identity, { identityId })
|
||||
);
|
||||
|
||||
|
@@ -4,8 +4,9 @@ import crypto, { KeyObject } from "crypto";

import { ActionProjectType } from "@app/db/schemas";
import { BadRequestError, InternalServerError, NotFoundError } from "@app/lib/errors";
import { isValidHostname, isValidIp } from "@app/lib/ip";
import { isValidIp } from "@app/lib/ip";
import { ms } from "@app/lib/ms";
import { isFQDN } from "@app/lib/validator/validate-url";
import { constructPemChainFromCerts } from "@app/services/certificate/certificate-fns";
import { CertExtendedKeyUsage, CertKeyAlgorithm, CertKeyUsage } from "@app/services/certificate/certificate-types";
import {
@@ -665,7 +666,7 @@ export const kmipServiceFactory = ({
      .split(",")
      .map((name) => name.trim())
      .map((altName) => {
        if (isValidHostname(altName)) {
        if (isFQDN(altName, { allow_wildcard: true })) {
          return {
            type: "dns",
            value: altName
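The KMIP change above swaps the hostname check for an FQDN check that tolerates a leading wildcard label. As a rough, stand-alone approximation of that behaviour (not the project's isFQDN from @app/lib/validator), a check like the following could be used:

// Simplified wildcard-aware FQDN check, for illustration only.
const looksLikeFqdn = (value: string, opts: { allowWildcard?: boolean } = {}): boolean => {
  const labels = value.split(".");
  if (labels.length < 2) return false; // require at least a host label and a TLD
  return labels.every((label, idx) => {
    if (idx === 0 && opts.allowWildcard && label === "*") return true;
    return /^[a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?$/.test(label);
  });
};

looksLikeFqdn("*.kmip.example.com", { allowWildcard: true }); // true
looksLikeFqdn("not_a_hostname", { allowWildcard: true }); // false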
@@ -97,12 +97,14 @@ export const searchGroups = async (

    res.on("searchEntry", (entry) => {
      const dn = entry.dn.toString();
      const regex = /cn=([^,]+)/;
      const match = dn.match(regex);
      // parse the cn from the dn
      const cn = (match && match[1]) as string;
      const cnStartIndex = dn.indexOf("cn=");

      if (cnStartIndex !== -1) {
        const valueStartIndex = cnStartIndex + 3;
        const commaIndex = dn.indexOf(",", valueStartIndex);
        const cn = dn.substring(valueStartIndex, commaIndex === -1 ? undefined : commaIndex);
        groups.push({ dn, cn });
      }
    });
    res.on("error", (error) => {
      ldapClient.unbind();
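The searchGroups change above replaces regex-based cn parsing with index arithmetic. The same extraction, pulled out as a stand-alone helper for clarity (the extractCn name is ours, not the codebase's):

// Stand-alone version of the index-based cn extraction shown in the diff.
const extractCn = (dn: string): string | undefined => {
  const cnStartIndex = dn.indexOf("cn=");
  if (cnStartIndex === -1) return undefined;

  const valueStartIndex = cnStartIndex + 3; // skip past "cn="
  const commaIndex = dn.indexOf(",", valueStartIndex);
  return dn.substring(valueStartIndex, commaIndex === -1 ? undefined : commaIndex);
};

extractCn("cn=platform-admins,ou=groups,dc=example,dc=com"); // "platform-admins"
extractCn("ou=groups,dc=example,dc=com"); // undefined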
backend/src/ee/services/license/licence-enums.ts (new file, +24)
@@ -0,0 +1,24 @@
export const BillingPlanRows = {
  MemberLimit: { name: "Organization member limit", field: "memberLimit" },
  IdentityLimit: { name: "Organization identity limit", field: "identityLimit" },
  WorkspaceLimit: { name: "Project limit", field: "workspaceLimit" },
  EnvironmentLimit: { name: "Environment limit", field: "environmentLimit" },
  SecretVersioning: { name: "Secret versioning", field: "secretVersioning" },
  PitRecovery: { name: "Point in time recovery", field: "pitRecovery" },
  Rbac: { name: "RBAC", field: "rbac" },
  CustomRateLimits: { name: "Custom rate limits", field: "customRateLimits" },
  CustomAlerts: { name: "Custom alerts", field: "customAlerts" },
  AuditLogs: { name: "Audit logs", field: "auditLogs" },
  SamlSSO: { name: "SAML SSO", field: "samlSSO" },
  Hsm: { name: "Hardware Security Module (HSM)", field: "hsm" },
  OidcSSO: { name: "OIDC SSO", field: "oidcSSO" },
  SecretApproval: { name: "Secret approvals", field: "secretApproval" },
  SecretRotation: { name: "Secret rotation", field: "secretRotation" },
  InstanceUserManagement: { name: "Instance User Management", field: "instanceUserManagement" },
  ExternalKms: { name: "External KMS", field: "externalKms" }
} as const;

export const BillingPlanTableHead = {
  Allowed: { name: "Allowed" },
  Used: { name: "Used" }
} as const;
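These constants feed the self-hosted billing table built later in the license service. A rough sketch of how they could be consumed (the featureSet and usage values below are stand-ins, and BillingPlanRows/BillingPlanTableHead are assumed to be imported from the file above):

// Sketch only: turn the row definitions into { name, allowed, used } entries.
type PlanRow = { name: string; allowed: unknown; used: string };

const buildPlanTable = (
  featureSet: Record<string, unknown>,
  usage: Partial<Record<string, number>>
): { head: { name: string }[]; rows: PlanRow[] } => {
  const rows = Object.values(BillingPlanRows).map(({ name, field }) => ({
    name,
    allowed: featureSet[field],
    used: usage[field] !== undefined ? String(usage[field]) : "-"
  }));
  return { head: Object.values(BillingPlanTableHead), rows };
};

// Example with made-up numbers: only member usage is known here.
buildPlanTable({ memberLimit: 25, auditLogs: true }, { memberLimit: 7 });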
@ -12,10 +12,13 @@ import { getConfig } from "@app/lib/config/env";
|
||||
import { verifyOfflineLicense } from "@app/lib/crypto";
|
||||
import { NotFoundError } from "@app/lib/errors";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { TIdentityOrgDALFactory } from "@app/services/identity/identity-org-dal";
|
||||
import { TOrgDALFactory } from "@app/services/org/org-dal";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
import { BillingPlanRows, BillingPlanTableHead } from "./licence-enums";
|
||||
import { TLicenseDALFactory } from "./license-dal";
|
||||
import { getDefaultOnPremFeatures, setupLicenseRequestWithStore } from "./license-fns";
|
||||
import {
|
||||
@ -28,6 +31,7 @@ import {
|
||||
TFeatureSet,
|
||||
TGetOrgBillInfoDTO,
|
||||
TGetOrgTaxIdDTO,
|
||||
TOfflineLicense,
|
||||
TOfflineLicenseContents,
|
||||
TOrgInvoiceDTO,
|
||||
TOrgLicensesDTO,
|
||||
@ -39,10 +43,12 @@ import {
|
||||
} from "./license-types";
|
||||
|
||||
type TLicenseServiceFactoryDep = {
|
||||
orgDAL: Pick<TOrgDALFactory, "findOrgById">;
|
||||
orgDAL: Pick<TOrgDALFactory, "findOrgById" | "countAllOrgMembers">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
licenseDAL: TLicenseDALFactory;
|
||||
keyStore: Pick<TKeyStoreFactory, "setItemWithExpiry" | "getItem" | "deleteItem">;
|
||||
identityOrgMembershipDAL: TIdentityOrgDALFactory;
|
||||
projectDAL: TProjectDALFactory;
|
||||
};
|
||||
|
||||
export type TLicenseServiceFactory = ReturnType<typeof licenseServiceFactory>;
|
||||
@ -57,11 +63,14 @@ export const licenseServiceFactory = ({
|
||||
orgDAL,
|
||||
permissionService,
|
||||
licenseDAL,
|
||||
keyStore
|
||||
keyStore,
|
||||
identityOrgMembershipDAL,
|
||||
projectDAL
|
||||
}: TLicenseServiceFactoryDep) => {
|
||||
let isValidLicense = false;
|
||||
let instanceType = InstanceType.OnPrem;
|
||||
let onPremFeatures: TFeatureSet = getDefaultOnPremFeatures();
|
||||
let selfHostedLicense: TOfflineLicense | null = null;
|
||||
|
||||
const appCfg = getConfig();
|
||||
const licenseServerCloudApi = setupLicenseRequestWithStore(
|
||||
@ -125,6 +134,7 @@ export const licenseServiceFactory = ({
|
||||
instanceType = InstanceType.EnterpriseOnPremOffline;
|
||||
logger.info(`Instance type: ${InstanceType.EnterpriseOnPremOffline}`);
|
||||
isValidLicense = true;
|
||||
selfHostedLicense = contents.license;
|
||||
return;
|
||||
}
|
||||
}
|
||||
@ -348,10 +358,21 @@ export const licenseServiceFactory = ({
|
||||
message: `Organization with ID '${orgId}' not found`
|
||||
});
|
||||
}
|
||||
if (instanceType !== InstanceType.OnPrem && instanceType !== InstanceType.EnterpriseOnPremOffline) {
|
||||
const { data } = await licenseServerCloudApi.request.get(
|
||||
`/api/license-server/v1/customers/${organization.customerId}/cloud-plan/billing`
|
||||
);
|
||||
return data;
|
||||
}
|
||||
|
||||
return {
|
||||
currentPeriodStart: selfHostedLicense?.issuedAt ? Date.parse(selfHostedLicense?.issuedAt) / 1000 : undefined,
|
||||
currentPeriodEnd: selfHostedLicense?.expiresAt ? Date.parse(selfHostedLicense?.expiresAt) / 1000 : undefined,
|
||||
interval: "month",
|
||||
intervalCount: 1,
|
||||
amount: 0,
|
||||
quantity: 1
|
||||
};
|
||||
};
|
||||
|
||||
// returns org current plan feature table
|
||||
@ -365,10 +386,41 @@ export const licenseServiceFactory = ({
|
||||
message: `Organization with ID '${orgId}' not found`
|
||||
});
|
||||
}
|
||||
if (instanceType !== InstanceType.OnPrem && instanceType !== InstanceType.EnterpriseOnPremOffline) {
|
||||
const { data } = await licenseServerCloudApi.request.get(
|
||||
`/api/license-server/v1/customers/${organization.customerId}/cloud-plan/table`
|
||||
);
|
||||
return data;
|
||||
}
|
||||
|
||||
const mappedRows = await Promise.all(
|
||||
Object.values(BillingPlanRows).map(async ({ name, field }: { name: string; field: string }) => {
|
||||
const allowed = onPremFeatures[field as keyof TFeatureSet];
|
||||
let used = "-";
|
||||
|
||||
if (field === BillingPlanRows.MemberLimit.field) {
|
||||
const orgMemberships = await orgDAL.countAllOrgMembers(orgId);
|
||||
used = orgMemberships.toString();
|
||||
} else if (field === BillingPlanRows.WorkspaceLimit.field) {
|
||||
const projects = await projectDAL.find({ orgId });
|
||||
used = projects.length.toString();
|
||||
} else if (field === BillingPlanRows.IdentityLimit.field) {
|
||||
const identities = await identityOrgMembershipDAL.countAllOrgIdentities({ orgId });
|
||||
used = identities.toString();
|
||||
}
|
||||
|
||||
return {
|
||||
name,
|
||||
allowed,
|
||||
used
|
||||
};
|
||||
})
|
||||
);
|
||||
|
||||
return {
|
||||
head: Object.values(BillingPlanTableHead),
|
||||
rows: mappedRows
|
||||
};
|
||||
};
|
||||
|
||||
const getOrgBillingDetails = async ({ orgId, actor, actorId, actorAuthMethod, actorOrgId }: TGetOrgBillInfoDTO) => {
|
||||
|
@ -44,6 +44,28 @@ export enum OrgPermissionGatewayActions {
|
||||
DeleteGateways = "delete-gateways"
|
||||
}
|
||||
|
||||
export enum OrgPermissionIdentityActions {
|
||||
Read = "read",
|
||||
Create = "create",
|
||||
Edit = "edit",
|
||||
Delete = "delete",
|
||||
GrantPrivileges = "grant-privileges",
|
||||
RevokeAuth = "revoke-auth",
|
||||
CreateToken = "create-token",
|
||||
GetToken = "get-token",
|
||||
DeleteToken = "delete-token"
|
||||
}
|
||||
|
||||
export enum OrgPermissionGroupActions {
|
||||
Read = "read",
|
||||
Create = "create",
|
||||
Edit = "edit",
|
||||
Delete = "delete",
|
||||
GrantPrivileges = "grant-privileges",
|
||||
AddMembers = "add-members",
|
||||
RemoveMembers = "remove-members"
|
||||
}
|
||||
|
||||
export enum OrgPermissionSubjects {
|
||||
Workspace = "workspace",
|
||||
Role = "role",
|
||||
@ -80,10 +102,10 @@ export type OrgPermissionSet =
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.Sso]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.Scim]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.Ldap]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.Groups]
|
||||
| [OrgPermissionGroupActions, OrgPermissionSubjects.Groups]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.SecretScanning]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.Billing]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.Identity]
|
||||
| [OrgPermissionIdentityActions, OrgPermissionSubjects.Identity]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.Kms]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.AuditLogs]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.ProjectTemplates]
|
||||
@ -256,20 +278,28 @@ const buildAdminPermission = () => {
|
||||
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Ldap);
|
||||
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Ldap);
|
||||
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
|
||||
can(OrgPermissionActions.Create, OrgPermissionSubjects.Groups);
|
||||
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Groups);
|
||||
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Groups);
|
||||
can(OrgPermissionGroupActions.Read, OrgPermissionSubjects.Groups);
|
||||
can(OrgPermissionGroupActions.Create, OrgPermissionSubjects.Groups);
|
||||
can(OrgPermissionGroupActions.Edit, OrgPermissionSubjects.Groups);
|
||||
can(OrgPermissionGroupActions.Delete, OrgPermissionSubjects.Groups);
|
||||
can(OrgPermissionGroupActions.GrantPrivileges, OrgPermissionSubjects.Groups);
|
||||
can(OrgPermissionGroupActions.AddMembers, OrgPermissionSubjects.Groups);
|
||||
can(OrgPermissionGroupActions.RemoveMembers, OrgPermissionSubjects.Groups);
|
||||
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Billing);
|
||||
can(OrgPermissionActions.Create, OrgPermissionSubjects.Billing);
|
||||
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Billing);
|
||||
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Billing);
|
||||
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Identity);
|
||||
can(OrgPermissionActions.Create, OrgPermissionSubjects.Identity);
|
||||
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
|
||||
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Identity);
|
||||
can(OrgPermissionIdentityActions.Read, OrgPermissionSubjects.Identity);
|
||||
can(OrgPermissionIdentityActions.Create, OrgPermissionSubjects.Identity);
|
||||
can(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
|
||||
can(OrgPermissionIdentityActions.Delete, OrgPermissionSubjects.Identity);
|
||||
can(OrgPermissionIdentityActions.GrantPrivileges, OrgPermissionSubjects.Identity);
|
||||
can(OrgPermissionIdentityActions.RevokeAuth, OrgPermissionSubjects.Identity);
|
||||
can(OrgPermissionIdentityActions.CreateToken, OrgPermissionSubjects.Identity);
|
||||
can(OrgPermissionIdentityActions.GetToken, OrgPermissionSubjects.Identity);
|
||||
can(OrgPermissionIdentityActions.DeleteToken, OrgPermissionSubjects.Identity);
|
||||
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Kms);
|
||||
can(OrgPermissionActions.Create, OrgPermissionSubjects.Kms);
|
||||
@ -316,7 +346,7 @@ const buildMemberPermission = () => {
|
||||
|
||||
can(OrgPermissionActions.Create, OrgPermissionSubjects.Workspace);
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Member);
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
|
||||
can(OrgPermissionGroupActions.Read, OrgPermissionSubjects.Groups);
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Role);
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Billing);
|
||||
@ -327,10 +357,10 @@ const buildMemberPermission = () => {
|
||||
can(OrgPermissionActions.Edit, OrgPermissionSubjects.SecretScanning);
|
||||
can(OrgPermissionActions.Delete, OrgPermissionSubjects.SecretScanning);
|
||||
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Identity);
|
||||
can(OrgPermissionActions.Create, OrgPermissionSubjects.Identity);
|
||||
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
|
||||
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Identity);
|
||||
can(OrgPermissionIdentityActions.Read, OrgPermissionSubjects.Identity);
|
||||
can(OrgPermissionIdentityActions.Create, OrgPermissionSubjects.Identity);
|
||||
can(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
|
||||
can(OrgPermissionIdentityActions.Delete, OrgPermissionSubjects.Identity);
|
||||
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
|
||||
|
||||
|
@ -49,6 +49,7 @@ export const permissionDALFactory = (db: TDbClient) => {
|
||||
.join(TableName.Organization, `${TableName.Organization}.id`, `${TableName.OrgMembership}.orgId`)
|
||||
.select(
|
||||
selectAllTableCols(TableName.OrgMembership),
|
||||
db.ref("shouldUseNewPrivilegeSystem").withSchema(TableName.Organization),
|
||||
db.ref("slug").withSchema(TableName.OrgRoles).withSchema(TableName.OrgRoles).as("customRoleSlug"),
|
||||
db.ref("permissions").withSchema(TableName.OrgRoles),
|
||||
db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"),
|
||||
@ -70,7 +71,8 @@ export const permissionDALFactory = (db: TDbClient) => {
|
||||
OrgMembershipsSchema.extend({
|
||||
permissions: z.unknown(),
|
||||
orgAuthEnforced: z.boolean().optional().nullable(),
|
||||
customRoleSlug: z.string().optional().nullable()
|
||||
customRoleSlug: z.string().optional().nullable(),
|
||||
shouldUseNewPrivilegeSystem: z.boolean()
|
||||
}).parse(el),
|
||||
childrenMapper: [
|
||||
{
|
||||
@ -118,7 +120,9 @@ export const permissionDALFactory = (db: TDbClient) => {
|
||||
.select(selectAllTableCols(TableName.IdentityOrgMembership))
|
||||
.select(db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"))
|
||||
.select("permissions")
|
||||
.select(db.ref("shouldUseNewPrivilegeSystem").withSchema(TableName.Organization))
|
||||
.first();
|
||||
|
||||
return membership;
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "GetOrgIdentityPermission" });
|
||||
@ -668,7 +672,8 @@ export const permissionDALFactory = (db: TDbClient) => {
|
||||
db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"),
|
||||
db.ref("orgId").withSchema(TableName.Project),
|
||||
db.ref("type").withSchema(TableName.Project).as("projectType"),
|
||||
db.ref("id").withSchema(TableName.Project).as("projectId")
|
||||
db.ref("id").withSchema(TableName.Project).as("projectId"),
|
||||
db.ref("shouldUseNewPrivilegeSystem").withSchema(TableName.Organization)
|
||||
);
|
||||
|
||||
const [userPermission] = sqlNestRelationships({
|
||||
@ -684,7 +689,8 @@ export const permissionDALFactory = (db: TDbClient) => {
|
||||
groupMembershipCreatedAt,
|
||||
groupMembershipUpdatedAt,
|
||||
membershipUpdatedAt,
|
||||
projectType
|
||||
projectType,
|
||||
shouldUseNewPrivilegeSystem
|
||||
}) => ({
|
||||
orgId,
|
||||
orgAuthEnforced,
|
||||
@ -694,7 +700,8 @@ export const permissionDALFactory = (db: TDbClient) => {
|
||||
projectType,
|
||||
id: membershipId || groupMembershipId,
|
||||
createdAt: membershipCreatedAt || groupMembershipCreatedAt,
|
||||
updatedAt: membershipUpdatedAt || groupMembershipUpdatedAt
|
||||
updatedAt: membershipUpdatedAt || groupMembershipUpdatedAt,
|
||||
shouldUseNewPrivilegeSystem
|
||||
}),
|
||||
childrenMapper: [
|
||||
{
|
||||
@ -995,6 +1002,7 @@ export const permissionDALFactory = (db: TDbClient) => {
|
||||
`${TableName.IdentityProjectMembership}.projectId`,
|
||||
`${TableName.Project}.id`
|
||||
)
|
||||
.join(TableName.Organization, `${TableName.Project}.orgId`, `${TableName.Organization}.id`)
|
||||
.leftJoin(TableName.IdentityMetadata, (queryBuilder) => {
|
||||
void queryBuilder
|
||||
.on(`${TableName.Identity}.id`, `${TableName.IdentityMetadata}.identityId`)
|
||||
@ -1012,6 +1020,7 @@ export const permissionDALFactory = (db: TDbClient) => {
|
||||
db.ref("updatedAt").withSchema(TableName.IdentityProjectMembership).as("membershipUpdatedAt"),
|
||||
db.ref("slug").withSchema(TableName.ProjectRoles).as("customRoleSlug"),
|
||||
db.ref("permissions").withSchema(TableName.ProjectRoles),
|
||||
db.ref("shouldUseNewPrivilegeSystem").withSchema(TableName.Organization),
|
||||
db.ref("id").withSchema(TableName.IdentityProjectAdditionalPrivilege).as("identityApId"),
|
||||
db.ref("permissions").withSchema(TableName.IdentityProjectAdditionalPrivilege).as("identityApPermissions"),
|
||||
db
|
||||
@ -1045,7 +1054,8 @@ export const permissionDALFactory = (db: TDbClient) => {
|
||||
membershipUpdatedAt,
|
||||
orgId,
|
||||
identityName,
|
||||
projectType
|
||||
projectType,
|
||||
shouldUseNewPrivilegeSystem
|
||||
}) => ({
|
||||
id: membershipId,
|
||||
identityId,
|
||||
@ -1055,6 +1065,7 @@ export const permissionDALFactory = (db: TDbClient) => {
|
||||
updatedAt: membershipUpdatedAt,
|
||||
orgId,
|
||||
projectType,
|
||||
shouldUseNewPrivilegeSystem,
|
||||
// just a prefilled value
|
||||
orgAuthEnforced: false
|
||||
}),
|
||||
|
@@ -3,9 +3,11 @@ import { ForbiddenError, MongoAbility, PureAbility, subject } from "@casl/abilit
import { z } from "zod";

import { TOrganizations } from "@app/db/schemas";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, UnauthorizedError } from "@app/lib/errors";
import { ActorAuthMethod, AuthMethod } from "@app/services/auth/auth-type";

import { OrgPermissionSet } from "./org-permission";
import {
  ProjectPermissionSecretActions,
  ProjectPermissionSet,
@@ -145,4 +147,57 @@ const escapeHandlebarsMissingDict = (obj: Record<string, string>, key: string) =
  return new Proxy(obj, handler);
};

export { escapeHandlebarsMissingDict, isAuthMethodSaml, validateOrgSSO };
// This function serves as a transition layer between the old and new privilege management system
// the old privilege management system is based on the actor having more privileges than the managed permission
// the new privilege management system is based on the actor having the appropriate permission to perform the privilege change,
// regardless of the actor's privilege level.
const validatePrivilegeChangeOperation = (
  shouldUseNewPrivilegeSystem: boolean,
  opAction: OrgPermissionSet[0] | ProjectPermissionSet[0],
  opSubject: OrgPermissionSet[1] | ProjectPermissionSet[1],
  actorPermission: MongoAbility,
  managedPermission: MongoAbility
) => {
  if (shouldUseNewPrivilegeSystem) {
    if (actorPermission.can(opAction, opSubject)) {
      return {
        isValid: true,
        missingPermissions: []
      };
    }

    return {
      isValid: false,
      missingPermissions: [
        {
          action: opAction,
          subject: opSubject
        }
      ]
    };
  }

  // if not, we check if the actor is indeed more privileged than the managed permission - this is the old system
  return validatePermissionBoundary(actorPermission, managedPermission);
};

const constructPermissionErrorMessage = (
  baseMessage: string,
  shouldUseNewPrivilegeSystem: boolean,
  opAction: OrgPermissionSet[0] | ProjectPermissionSet[0],
  opSubject: OrgPermissionSet[1] | ProjectPermissionSet[1]
) => {
  return `${baseMessage}${
    shouldUseNewPrivilegeSystem
      ? `. Actor is missing permission ${opAction as string} on ${opSubject as string}`
      : ". Actor privilege level is not high enough to perform this action"
  }`;
};

export {
  constructPermissionErrorMessage,
  escapeHandlebarsMissingDict,
  isAuthMethodSaml,
  validateOrgSSO,
  validatePrivilegeChangeOperation
};
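Taken together, validatePrivilegeChangeOperation and constructPermissionErrorMessage replace the bare validatePermissionBoundary checks at each call site. A condensed sketch of that call-site pattern (the wrapper name assertCanGrantGroupPrivileges and its message are illustrative; the imports mirror the ones added in the group service above):

import { MongoAbility } from "@casl/ability";
import { PermissionBoundaryError } from "@app/lib/errors";
import { OrgPermissionGroupActions, OrgPermissionSubjects } from "../permission/org-permission";
import { constructPermissionErrorMessage, validatePrivilegeChangeOperation } from "../permission/permission-fns";

// Illustrative guard mirroring the pattern used by createGroup/updateGroup in this change set.
const assertCanGrantGroupPrivileges = (
  membership: { shouldUseNewPrivilegeSystem: boolean },
  permission: MongoAbility,
  rolePermission: MongoAbility
) => {
  const permissionBoundary = validatePrivilegeChangeOperation(
    membership.shouldUseNewPrivilegeSystem,
    OrgPermissionGroupActions.GrantPrivileges,
    OrgPermissionSubjects.Groups,
    permission,
    rolePermission
  );

  if (!permissionBoundary.isValid)
    throw new PermissionBoundaryError({
      message: constructPermissionErrorMessage(
        "Failed to change group privileges",
        membership.shouldUseNewPrivilegeSystem,
        OrgPermissionGroupActions.GrantPrivileges,
        OrgPermissionSubjects.Groups
      ),
      details: { missingPermissions: permissionBoundary.missingPermissions }
    });
};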
@ -244,22 +244,20 @@ export const permissionServiceFactory = ({
|
||||
|
||||
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
|
||||
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
|
||||
const metadataKeyValuePair = escapeHandlebarsMissingDict(
|
||||
objectify(
|
||||
const unescapedMetadata = objectify(
|
||||
userProjectPermission.metadata,
|
||||
(i) => i.key,
|
||||
(i) => i.value
|
||||
),
|
||||
"identity.metadata"
|
||||
);
|
||||
const templateValue = {
|
||||
const metadataKeyValuePair = escapeHandlebarsMissingDict(unescapedMetadata, "identity.metadata");
|
||||
requestContext.set("identityPermissionMetadata", { metadata: unescapedMetadata });
|
||||
const interpolateRules = templatedRules(
|
||||
{
|
||||
identity: {
|
||||
id: userProjectPermission.userId,
|
||||
username: userProjectPermission.username,
|
||||
metadata: metadataKeyValuePair
|
||||
};
|
||||
const interpolateRules = templatedRules(
|
||||
{
|
||||
identity: templateValue
|
||||
}
|
||||
},
|
||||
{ data: false }
|
||||
);
|
||||
@ -331,15 +329,16 @@ export const permissionServiceFactory = ({
|
||||
? escapeHandlebarsMissingDict(unescapedIdentityAuthInfo as never, "identity.auth")
|
||||
: {};
|
||||
const metadataKeyValuePair = escapeHandlebarsMissingDict(unescapedMetadata, "identity.metadata");
|
||||
const templateValue = {
|
||||
|
||||
requestContext.set("identityPermissionMetadata", { metadata: unescapedMetadata, auth: unescapedIdentityAuthInfo });
|
||||
const interpolateRules = templatedRules(
|
||||
{
|
||||
identity: {
|
||||
id: identityProjectPermission.identityId,
|
||||
username: identityProjectPermission.username,
|
||||
metadata: metadataKeyValuePair,
|
||||
auth: identityAuthInfo
|
||||
};
|
||||
const interpolateRules = templatedRules(
|
||||
{
|
||||
identity: templateValue
|
||||
}
|
||||
},
|
||||
{ data: false }
|
||||
);
|
||||
@ -398,14 +397,18 @@ export const permissionServiceFactory = ({
|
||||
const scopes = ServiceTokenScopes.parse(serviceToken.scopes || []);
|
||||
return {
|
||||
permission: buildServiceTokenProjectPermission(scopes, serviceToken.permissions),
|
||||
membership: undefined
|
||||
membership: {
|
||||
shouldUseNewPrivilegeSystem: true
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
type TProjectPermissionRT<T extends ActorType> = T extends ActorType.SERVICE
|
||||
? {
|
||||
permission: MongoAbility<ProjectPermissionSet, MongoQuery>;
|
||||
membership: undefined;
|
||||
membership: {
|
||||
shouldUseNewPrivilegeSystem: boolean;
|
||||
};
|
||||
hasRole: (arg: string) => boolean;
|
||||
} // service token doesn't have both membership and roles
|
||||
: {
|
||||
@ -414,6 +417,7 @@ export const permissionServiceFactory = ({
|
||||
orgAuthEnforced: boolean | null | undefined;
|
||||
orgId: string;
|
||||
roles: Array<{ role: string }>;
|
||||
shouldUseNewPrivilegeSystem: boolean;
|
||||
};
|
||||
hasRole: (role: string) => boolean;
|
||||
};
|
||||
@ -440,14 +444,13 @@ export const permissionServiceFactory = ({
|
||||
),
|
||||
"identity.metadata"
|
||||
);
|
||||
const templateValue = {
|
||||
const interpolateRules = templatedRules(
|
||||
{
|
||||
identity: {
|
||||
id: userProjectPermission.userId,
|
||||
username: userProjectPermission.username,
|
||||
metadata: metadataKeyValuePair
|
||||
};
|
||||
const interpolateRules = templatedRules(
|
||||
{
|
||||
identity: templateValue
|
||||
}
|
||||
},
|
||||
{ data: false }
|
||||
);
|
||||
@ -487,14 +490,13 @@ export const permissionServiceFactory = ({
|
||||
),
|
||||
"identity.metadata"
|
||||
);
|
||||
const templateValue = {
|
||||
const interpolateRules = templatedRules(
|
||||
{
|
||||
identity: {
|
||||
id: identityProjectPermission.identityId,
|
||||
username: identityProjectPermission.username,
|
||||
metadata: metadataKeyValuePair
|
||||
};
|
||||
const interpolateRules = templatedRules(
|
||||
{
|
||||
identity: templateValue
|
||||
}
|
||||
},
|
||||
{ data: false }
|
||||
);
|
||||
|
@@ -43,6 +43,30 @@ export enum ProjectPermissionDynamicSecretActions {
  Lease = "lease"
}

export enum ProjectPermissionIdentityActions {
  Read = "read",
  Create = "create",
  Edit = "edit",
  Delete = "delete",
  GrantPrivileges = "grant-privileges"
}

export enum ProjectPermissionMemberActions {
  Read = "read",
  Create = "create",
  Edit = "edit",
  Delete = "delete",
  GrantPrivileges = "grant-privileges"
}

export enum ProjectPermissionGroupActions {
  Read = "read",
  Create = "create",
  Edit = "edit",
  Delete = "delete",
  GrantPrivileges = "grant-privileges"
}

export enum ProjectPermissionSecretSyncActions {
  Read = "read",
  Create = "create",
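These fine-grained action enums plug into the same CASL machinery as before; only the action strings get more specific. A minimal stand-alone illustration of that idea (the Identity subject string and the rules here are examples, not the project's schema):

import { AbilityBuilder, createMongoAbility } from "@casl/ability";

enum IdentityActions {
  Read = "read",
  Edit = "edit",
  GrantPrivileges = "grant-privileges"
}

const { can, build } = new AbilityBuilder(createMongoAbility);
can([IdentityActions.Read, IdentityActions.Edit], "Identity");
// GrantPrivileges is deliberately not granted, so privilege-changing operations stay forbidden.
const ability = build();

ability.can(IdentityActions.Edit, "Identity"); // true
ability.can(IdentityActions.GrantPrivileges, "Identity"); // false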
@ -150,8 +174,8 @@ export type ProjectPermissionSet =
|
||||
]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.Role]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.Tags]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.Member]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.Groups]
|
||||
| [ProjectPermissionMemberActions, ProjectPermissionSub.Member]
|
||||
| [ProjectPermissionGroupActions, ProjectPermissionSub.Groups]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.Integrations]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.Webhooks]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.AuditLogs]
|
||||
@ -162,7 +186,7 @@ export type ProjectPermissionSet =
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.SecretApproval]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.SecretRotation]
|
||||
| [
|
||||
ProjectPermissionActions,
|
||||
ProjectPermissionIdentityActions,
|
||||
ProjectPermissionSub.Identity | (ForcedSubject<ProjectPermissionSub.Identity> & IdentityManagementSubjectFields)
|
||||
]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.CertificateAuthorities]
|
||||
@ -290,13 +314,13 @@ const GeneralPermissionSchema = [
|
||||
}),
|
||||
z.object({
|
||||
subject: z.literal(ProjectPermissionSub.Member).describe("The entity this permission pertains to."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionMemberActions).describe(
|
||||
"Describe what action an entity can take."
|
||||
)
|
||||
}),
|
||||
z.object({
|
||||
subject: z.literal(ProjectPermissionSub.Groups).describe("The entity this permission pertains to."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionGroupActions).describe(
|
||||
"Describe what action an entity can take."
|
||||
)
|
||||
}),
|
||||
@ -510,7 +534,7 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
|
||||
z.object({
|
||||
subject: z.literal(ProjectPermissionSub.Identity).describe("The entity this permission pertains to."),
|
||||
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionIdentityActions).describe(
|
||||
"Describe what action an entity can take."
|
||||
),
|
||||
conditions: IdentityManagementConditionSchema.describe(
|
||||
@ -531,12 +555,9 @@ const buildAdminPermissionRules = () => {
|
||||
ProjectPermissionSub.SecretImports,
|
||||
ProjectPermissionSub.SecretApproval,
|
||||
ProjectPermissionSub.SecretRotation,
|
||||
ProjectPermissionSub.Member,
|
||||
ProjectPermissionSub.Groups,
|
||||
ProjectPermissionSub.Role,
|
||||
ProjectPermissionSub.Integrations,
|
||||
ProjectPermissionSub.Webhooks,
|
||||
ProjectPermissionSub.Identity,
|
||||
ProjectPermissionSub.ServiceTokens,
|
||||
ProjectPermissionSub.Settings,
|
||||
ProjectPermissionSub.Environments,
|
||||
@ -563,6 +584,39 @@ const buildAdminPermissionRules = () => {
|
||||
);
|
||||
});
|
||||
|
||||
can(
|
||||
[
|
||||
ProjectPermissionMemberActions.Create,
|
||||
ProjectPermissionMemberActions.Edit,
|
||||
ProjectPermissionMemberActions.Delete,
|
||||
ProjectPermissionMemberActions.Read,
|
||||
ProjectPermissionMemberActions.GrantPrivileges
|
||||
],
|
||||
ProjectPermissionSub.Member
|
||||
);
|
||||
|
||||
can(
|
||||
[
|
||||
ProjectPermissionGroupActions.Create,
|
||||
ProjectPermissionGroupActions.Edit,
|
||||
ProjectPermissionGroupActions.Delete,
|
||||
ProjectPermissionGroupActions.Read,
|
||||
ProjectPermissionGroupActions.GrantPrivileges
|
||||
],
|
||||
ProjectPermissionSub.Groups
|
||||
);
|
||||
|
||||
can(
|
||||
[
|
||||
ProjectPermissionIdentityActions.Create,
|
||||
ProjectPermissionIdentityActions.Edit,
|
||||
ProjectPermissionIdentityActions.Delete,
|
||||
ProjectPermissionIdentityActions.Read,
|
||||
ProjectPermissionIdentityActions.GrantPrivileges
|
||||
],
|
||||
ProjectPermissionSub.Identity
|
||||
);
|
||||
|
||||
can(
|
||||
[
|
||||
ProjectPermissionSecretActions.DescribeAndReadValue,
|
||||
@ -677,9 +731,9 @@ const buildMemberPermissionRules = () => {
|
||||
|
||||
can([ProjectPermissionActions.Read, ProjectPermissionActions.Create], ProjectPermissionSub.SecretRollback);
|
||||
|
||||
can([ProjectPermissionActions.Read, ProjectPermissionActions.Create], ProjectPermissionSub.Member);
|
||||
can([ProjectPermissionMemberActions.Read, ProjectPermissionMemberActions.Create], ProjectPermissionSub.Member);
|
||||
|
||||
can([ProjectPermissionActions.Read], ProjectPermissionSub.Groups);
|
||||
can([ProjectPermissionGroupActions.Read], ProjectPermissionSub.Groups);
|
||||
|
||||
can(
|
||||
[
|
||||
@ -703,10 +757,10 @@ const buildMemberPermissionRules = () => {
|
||||
|
||||
can(
|
||||
[
|
||||
ProjectPermissionActions.Read,
|
||||
ProjectPermissionActions.Edit,
|
||||
ProjectPermissionActions.Create,
|
||||
ProjectPermissionActions.Delete
|
||||
ProjectPermissionIdentityActions.Read,
|
||||
ProjectPermissionIdentityActions.Edit,
|
||||
ProjectPermissionIdentityActions.Create,
|
||||
ProjectPermissionIdentityActions.Delete
|
||||
],
|
||||
ProjectPermissionSub.Identity
|
||||
);
|
||||
@ -820,12 +874,12 @@ const buildViewerPermissionRules = () => {
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRotation);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.Member);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.Groups);
|
||||
can(ProjectPermissionMemberActions.Read, ProjectPermissionSub.Member);
|
||||
can(ProjectPermissionGroupActions.Read, ProjectPermissionSub.Groups);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.Role);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.Webhooks);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);
|
||||
can(ProjectPermissionIdentityActions.Read, ProjectPermissionSub.Identity);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.ServiceTokens);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.Settings);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.Environments);
|
||||
|
@ -2,15 +2,20 @@ import { ForbiddenError, MongoAbility, RawRuleOf } from "@casl/ability";
|
||||
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
|
||||
|
||||
import { ActionProjectType, TableName } from "@app/db/schemas";
|
||||
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
|
||||
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
|
||||
|
||||
import { constructPermissionErrorMessage, validatePrivilegeChangeOperation } from "../permission/permission-fns";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSet, ProjectPermissionSub } from "../permission/project-permission";
|
||||
import {
|
||||
ProjectPermissionMemberActions,
|
||||
ProjectPermissionSet,
|
||||
ProjectPermissionSub
|
||||
} from "../permission/project-permission";
|
||||
import { TProjectUserAdditionalPrivilegeDALFactory } from "./project-user-additional-privilege-dal";
|
||||
import {
|
||||
ProjectUserAdditionalPrivilegeTemporaryMode,
|
||||
@ -63,8 +68,8 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Member);
|
||||
const { permission: targetUserPermission } = await permissionService.getProjectPermission({
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Edit, ProjectPermissionSub.Member);
|
||||
const { permission: targetUserPermission, membership } = await permissionService.getProjectPermission({
|
||||
actor: ActorType.USER,
|
||||
actorId: projectMembership.userId,
|
||||
projectId: projectMembership.projectId,
|
||||
@ -76,11 +81,21 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
|
||||
// we need to validate that the privilege given is not higher than the assigning users permission
|
||||
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
|
||||
targetUserPermission.update(targetUserPermission.rules.concat(customPermission));
|
||||
const permissionBoundary = validatePermissionBoundary(permission, targetUserPermission);
|
||||
const permissionBoundary = validatePrivilegeChangeOperation(
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
ProjectPermissionMemberActions.GrantPrivileges,
|
||||
ProjectPermissionSub.Member,
|
||||
permission,
|
||||
targetUserPermission
|
||||
);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update more privileged user",
|
||||
throw new PermissionBoundaryError({
|
||||
message: constructPermissionErrorMessage(
|
||||
"Failed to update more privileged user",
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
ProjectPermissionMemberActions.GrantPrivileges,
|
||||
ProjectPermissionSub.Member
|
||||
),
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
@ -92,6 +107,10 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
|
||||
if (existingSlug)
|
||||
throw new BadRequestError({ message: `Additional privilege with provided slug ${slug} already exists` });
|
||||
|
||||
validateHandlebarTemplate("User Additional Privilege Create", JSON.stringify(customPermission || []), {
|
||||
allowedExpressions: (val) => val.includes("identity.")
|
||||
});
|
||||
|
||||
const packedPermission = JSON.stringify(packRules(customPermission));
|
||||
if (!dto.isTemporary) {
|
||||
const additionalPrivilege = await projectUserAdditionalPrivilegeDAL.create({
|
||||
@ -146,7 +165,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
|
||||
message: `Project membership for user with ID '${userPrivilege.userId}' not found in project with ID '${userPrivilege.projectId}'`
|
||||
});
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
const { permission, membership } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
projectId: projectMembership.projectId,
|
||||
@ -154,7 +173,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Member);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Edit, ProjectPermissionSub.Member);
|
||||
const { permission: targetUserPermission } = await permissionService.getProjectPermission({
|
||||
actor: ActorType.USER,
|
||||
actorId: projectMembership.userId,
|
||||
@ -167,11 +186,21 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
|
||||
// we need to validate that the privilege given is not higher than the assigning users permission
|
||||
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
|
||||
targetUserPermission.update(targetUserPermission.rules.concat(dto.permissions || []));
|
||||
const permissionBoundary = validatePermissionBoundary(permission, targetUserPermission);
|
||||
const permissionBoundary = validatePrivilegeChangeOperation(
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
ProjectPermissionMemberActions.GrantPrivileges,
|
||||
ProjectPermissionSub.Member,
|
||||
permission,
|
||||
targetUserPermission
|
||||
);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update more privileged identity",
|
||||
throw new PermissionBoundaryError({
|
||||
message: constructPermissionErrorMessage(
|
||||
"Failed to update more privileged user",
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
ProjectPermissionMemberActions.GrantPrivileges,
|
||||
ProjectPermissionSub.Member
|
||||
),
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
@ -185,6 +214,10 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
|
||||
throw new BadRequestError({ message: `Additional privilege with provided slug ${dto.slug} already exists` });
|
||||
}
|
||||
|
||||
validateHandlebarTemplate("User Additional Privilege Update", JSON.stringify(dto.permissions || []), {
|
||||
allowedExpressions: (val) => val.includes("identity.")
|
||||
});
|
||||
|
||||
const isTemporary = typeof dto?.isTemporary !== "undefined" ? dto.isTemporary : userPrivilege.isTemporary;
|
||||
|
||||
const packedPermission = dto.permissions && JSON.stringify(packRules(dto.permissions));
|
||||
@ -244,7 +277,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Member);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Edit, ProjectPermissionSub.Member);
|
||||
|
||||
const deletedPrivilege = await projectUserAdditionalPrivilegeDAL.deleteById(userPrivilege.id);
|
||||
return {
|
||||
@ -281,7 +314,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Member);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Read, ProjectPermissionSub.Member);
|
||||
|
||||
return {
|
||||
...userPrivilege,
|
||||
@ -308,7 +341,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Member);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Read, ProjectPermissionSub.Member);
|
||||
|
||||
const userPrivileges = await projectUserAdditionalPrivilegeDAL.find(
|
||||
{
|
||||
|
@ -29,15 +29,9 @@ export const parseScimFilter = (filterToParse: string | undefined) => {
|
||||
attributeName = "name";
|
||||
}
|
||||
|
||||
return { [attributeName]: parsedValue.replace(/"/g, "") };
|
||||
return { [attributeName]: parsedValue.replaceAll('"', "") };
|
||||
};
|
||||
|
||||
export function extractScimValueFromPath(path: string): string | null {
|
||||
const regex = /members\[value eq "([^"]+)"\]/;
|
||||
const match = path.match(regex);
|
||||
return match ? match[1] : null;
|
||||
}
|
||||
|
||||
export const buildScimUser = ({
|
||||
orgMembershipId,
|
||||
username,
|
||||
|
@ -62,7 +62,8 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
projectId,
|
||||
secretPath,
|
||||
environment,
|
||||
enforcementLevel
|
||||
enforcementLevel,
|
||||
allowedSelfApprovals
|
||||
}: TCreateSapDTO) => {
|
||||
const groupApprovers = approvers
|
||||
?.filter((approver) => approver.type === ApproverType.Group)
|
||||
@ -113,7 +114,8 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
approvals,
|
||||
secretPath,
|
||||
name,
|
||||
enforcementLevel
|
||||
enforcementLevel,
|
||||
allowedSelfApprovals
|
||||
},
|
||||
tx
|
||||
);
|
||||
@ -172,7 +174,8 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
approvals,
|
||||
secretPolicyId,
|
||||
enforcementLevel
|
||||
enforcementLevel,
|
||||
allowedSelfApprovals
|
||||
}: TUpdateSapDTO) => {
|
||||
const groupApprovers = approvers
|
||||
?.filter((approver) => approver.type === ApproverType.Group)
|
||||
@ -218,7 +221,8 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
approvals,
|
||||
secretPath,
|
||||
name,
|
||||
enforcementLevel
|
||||
enforcementLevel,
|
||||
allowedSelfApprovals
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
@ -10,6 +10,7 @@ export type TCreateSapDTO = {
|
||||
projectId: string;
|
||||
name: string;
|
||||
enforcementLevel: EnforcementLevel;
|
||||
allowedSelfApprovals: boolean;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TUpdateSapDTO = {
|
||||
@ -19,6 +20,7 @@ export type TUpdateSapDTO = {
|
||||
approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; name?: string })[];
|
||||
name?: string;
|
||||
enforcementLevel?: EnforcementLevel;
|
||||
allowedSelfApprovals?: boolean;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TDeleteSapDTO = {
|
||||
|
@ -112,6 +112,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
tx.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
|
||||
tx.ref("envId").withSchema(TableName.SecretApprovalPolicy).as("policyEnvId"),
|
||||
tx.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
|
||||
tx.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
|
||||
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
|
||||
tx.ref("deletedAt").withSchema(TableName.SecretApprovalPolicy).as("policyDeletedAt")
|
||||
);
|
||||
@ -150,7 +151,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
secretPath: el.policySecretPath,
|
||||
enforcementLevel: el.policyEnforcementLevel,
|
||||
envId: el.policyEnvId,
|
||||
deletedAt: el.policyDeletedAt
|
||||
deletedAt: el.policyDeletedAt,
|
||||
allowedSelfApprovals: el.policyAllowedSelfApprovals
|
||||
}
|
||||
}),
|
||||
childrenMapper: [
|
||||
@ -336,6 +338,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
),
|
||||
db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
|
||||
db.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
|
||||
db.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
|
||||
db.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
|
||||
db.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover),
|
||||
db.ref("userId").withSchema(TableName.UserGroupMembership).as("approverGroupUserId"),
|
||||
@ -364,7 +367,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
name: el.policyName,
|
||||
approvals: el.policyApprovals,
|
||||
secretPath: el.policySecretPath,
|
||||
enforcementLevel: el.policyEnforcementLevel
|
||||
enforcementLevel: el.policyEnforcementLevel,
|
||||
allowedSelfApprovals: el.policyAllowedSelfApprovals
|
||||
},
|
||||
committerUser: {
|
||||
userId: el.committerUserId,
|
||||
@ -482,6 +486,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
`DENSE_RANK() OVER (partition by ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."id" DESC) as rank`
|
||||
),
|
||||
db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
|
||||
db.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
|
||||
db.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
|
||||
db.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
|
||||
db.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover),
|
||||
@ -511,7 +516,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
name: el.policyName,
|
||||
approvals: el.policyApprovals,
|
||||
secretPath: el.policySecretPath,
|
||||
enforcementLevel: el.policyEnforcementLevel
|
||||
enforcementLevel: el.policyEnforcementLevel,
|
||||
allowedSelfApprovals: el.policyAllowedSelfApprovals
|
||||
},
|
||||
committerUser: {
|
||||
userId: el.committerUserId,
|
||||
|
@ -352,6 +352,11 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
message: "The policy associated with this secret approval request has been deleted."
|
||||
});
|
||||
}
|
||||
if (!policy.allowedSelfApprovals && actorId === secretApprovalRequest.committerUserId) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to review secret approval request. Users are not authorized to review their own request."
|
||||
});
|
||||
}
|
||||
|
||||
const { hasRole } = await permissionService.getProjectPermission({
|
||||
actor: ActorType.USER,
|
||||
|
@ -8,23 +8,49 @@ import axios from "axios";
|
||||
import jmespath from "jmespath";
|
||||
import knex from "knex";
|
||||
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { getDbConnectionHost } from "@app/lib/knex";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { verifyHostInputValidity } from "../../dynamic-secret/dynamic-secret-fns";
|
||||
import { TAssignOp, TDbProviderClients, TDirectAssignOp, THttpProviderFunction } from "../templates/types";
|
||||
import { TSecretRotationData, TSecretRotationDbFn } from "./secret-rotation-queue-types";
|
||||
|
||||
const REGEX = /\${([^}]+)}/g;
|
||||
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
|
||||
|
||||
const replaceTemplateVariables = (str: string, getValue: (key: string) => unknown) => {
|
||||
// Use array to collect pieces and join at the end (more efficient for large strings)
|
||||
const parts: string[] = [];
|
||||
let pos = 0;
|
||||
|
||||
while (pos < str.length) {
|
||||
const start = str.indexOf("${", pos);
|
||||
if (start === -1) {
|
||||
parts.push(str.slice(pos));
|
||||
break;
|
||||
}
|
||||
|
||||
parts.push(str.slice(pos, start));
|
||||
const end = str.indexOf("}", start + 2);
|
||||
|
||||
if (end === -1) {
|
||||
parts.push(str.slice(start));
|
||||
break;
|
||||
}
|
||||
|
||||
const varName = str.slice(start + 2, end);
|
||||
parts.push(String(getValue(varName)));
|
||||
pos = end + 1;
|
||||
}
|
||||
|
||||
return parts.join("");
|
||||
};
|
||||
|
||||
export const interpolate = (data: any, getValue: (key: string) => unknown) => {
|
||||
if (!data) return;
|
||||
|
||||
if (typeof data === "number") return data;
|
||||
|
||||
if (typeof data === "string") {
|
||||
return data.replace(REGEX, (_a, b) => getValue(b) as string);
|
||||
return replaceTemplateVariables(data, getValue);
|
||||
}
|
||||
|
||||
if (typeof data === "object" && Array.isArray(data)) {
|
||||
@ -88,32 +114,14 @@ export const secretRotationDbFn = async ({
|
||||
variables,
|
||||
options
|
||||
}: TSecretRotationDbFn) => {
|
||||
const appCfg = getConfig();
|
||||
|
||||
const ssl = ca ? { rejectUnauthorized: false, ca } : undefined;
|
||||
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not
|
||||
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
|
||||
|
||||
if (
|
||||
isCloud &&
|
||||
// internal ips
|
||||
(host === "host.docker.internal" || host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
|
||||
)
|
||||
throw new Error("Invalid db host");
|
||||
if (
|
||||
host === "localhost" ||
|
||||
host === "127.0.0.1" ||
|
||||
// database infisical uses
|
||||
dbHost === host
|
||||
)
|
||||
throw new Error("Invalid db host");
|
||||
|
||||
const [hostIp] = await verifyHostInputValidity(host);
|
||||
const db = knex({
|
||||
client,
|
||||
connection: {
|
||||
database,
|
||||
port,
|
||||
host,
|
||||
host: hostIp,
|
||||
user: username,
|
||||
password,
|
||||
connectionTimeoutMillis: EXTERNAL_REQUEST_TIMEOUT,
|
||||
|
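A minimal usage sketch for the reworked interpolation (illustrative only, not part of the diff; the import path and variable names are assumed). The scanner-based replaceTemplateVariables resolves each ${...} placeholder through the supplied lookup instead of a global regex replace, which the inline comment notes is more efficient for large strings.

import { interpolate } from "./secret-rotation-queue-fn"; // assumed module path

const variables: Record<string, unknown> = { username: "rotated_user", password: "s3cr3t" };

// Each ${...} placeholder is resolved via the lookup callback; unknown keys stringify to "undefined".
const rendered = interpolate("ALTER USER ${username} WITH PASSWORD '${password}'", (key) => variables[key]);
// -> "ALTER USER rotated_user WITH PASSWORD 's3cr3t'"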
@ -8,7 +8,18 @@ type GetFullFolderPath = {
|
||||
|
||||
export const getFullFolderPath = async ({ folderDAL, folderId, envId }: GetFullFolderPath): Promise<string> => {
|
||||
// Helper function to remove duplicate slashes
|
||||
const removeDuplicateSlashes = (path: string) => path.replace(/\/{2,}/g, "/");
|
||||
const removeDuplicateSlashes = (path: string) => {
|
||||
const chars = [];
|
||||
let lastWasSlash = false;
|
||||
|
||||
for (let i = 0; i < path.length; i += 1) {
|
||||
const char = path[i];
|
||||
if (char !== "/" || !lastWasSlash) chars.push(char);
|
||||
lastWasSlash = char === "/";
|
||||
}
|
||||
|
||||
return chars.join("");
|
||||
};
|
||||
|
||||
// Fetch all folders at once based on environment ID to avoid multiple queries
|
||||
const folders = await folderDAL.find({ envId });
|
||||
|
@ -1,14 +1,34 @@
|
||||
import { isIP } from "net";
|
||||
|
||||
import { isFQDN } from "@app/lib/validator/validate-url";
|
||||
|
||||
// Validates usernames or wildcard (*)
|
||||
export const isValidUserPattern = (value: string): boolean => {
|
||||
// Matches valid Linux usernames or a wildcard (*)
|
||||
const userRegex = /^(?:\*|[a-z_][a-z0-9_-]{0,31})$/;
|
||||
// Length check before regex to prevent ReDoS
|
||||
if (typeof value !== "string") return false;
|
||||
if (value.length > 32) return false; // Maximum Linux username length
|
||||
if (value === "*") return true; // Handle wildcard separately
|
||||
|
||||
// Simpler, more specific pattern for usernames
|
||||
const userRegex = /^[a-z_][a-z0-9_-]*$/i;
|
||||
return userRegex.test(value);
|
||||
};
|
||||
|
||||
// Validates hostnames, wildcard domains, or IP addresses
|
||||
export const isValidHostPattern = (value: string): boolean => {
|
||||
// Matches FQDNs, wildcard domains (*.example.com), IPv4, and IPv6 addresses
|
||||
const hostRegex =
|
||||
/^(?:\*|\*\.[a-z0-9-]+(?:\.[a-z0-9-]+)*|[a-z0-9-]+(?:\.[a-z0-9-]+)*|\d{1,3}(\.\d{1,3}){3}|([a-fA-F0-9:]+:+)+[a-fA-F0-9]+(?:%[a-zA-Z0-9]+)?)$/;
|
||||
return hostRegex.test(value);
|
||||
// Input validation
|
||||
if (typeof value !== "string") return false;
|
||||
|
||||
// Length check
|
||||
if (value.length > 255) return false;
|
||||
|
||||
// Handle the wildcard case separately
|
||||
if (value === "*") return true;
|
||||
|
||||
// Check for IP addresses using Node.js built-in functions
|
||||
if (isIP(value)) return true;
|
||||
|
||||
return isFQDN(value, {
|
||||
allow_wildcard: true
|
||||
});
|
||||
};
|
||||
|
@ -8,6 +8,7 @@ import { promisify } from "util";
|
||||
import { TSshCertificateTemplates } from "@app/db/schemas";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";
|
||||
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
|
||||
|
||||
import {
|
||||
@ -18,6 +19,7 @@ import { SshCertType, TCreateSshCertDTO } from "./ssh-certificate-authority-type
|
||||
|
||||
const execFileAsync = promisify(execFile);
|
||||
|
||||
const EXEC_TIMEOUT_MS = 10000; // 10 seconds
|
||||
/* eslint-disable no-bitwise */
|
||||
export const createSshCertSerialNumber = () => {
|
||||
const randomBytes = crypto.randomBytes(8); // 8 bytes = 64 bits
|
||||
@ -64,7 +66,9 @@ export const createSshKeyPair = async (keyAlgorithm: CertKeyAlgorithm) => {
|
||||
// Generate the SSH key pair
|
||||
// The "-N ''" sets an empty passphrase
|
||||
// The keys are created in the temporary directory
|
||||
await execFileAsync("ssh-keygen", ["-t", keyType, "-b", keyBits, "-f", privateKeyFile, "-N", ""]);
|
||||
await execFileAsync("ssh-keygen", ["-t", keyType, "-b", keyBits, "-f", privateKeyFile, "-N", ""], {
|
||||
timeout: EXEC_TIMEOUT_MS
|
||||
});
|
||||
|
||||
// Read the generated keys
|
||||
const publicKey = await fs.readFile(publicKeyFile, "utf8");
|
||||
@ -87,7 +91,10 @@ export const getSshPublicKey = async (privateKey: string) => {
|
||||
await fs.writeFile(privateKeyFile, privateKey, { mode: 0o600 });
|
||||
|
||||
// Run ssh-keygen to extract the public key
|
||||
const { stdout } = await execFileAsync("ssh-keygen", ["-y", "-f", privateKeyFile], { encoding: "utf8" });
|
||||
const { stdout } = await execFileAsync("ssh-keygen", ["-y", "-f", privateKeyFile], {
|
||||
encoding: "utf8",
|
||||
timeout: EXEC_TIMEOUT_MS
|
||||
});
|
||||
return stdout.trim();
|
||||
} finally {
|
||||
// Ensure that files and the temporary directory are cleaned up
|
||||
@ -143,7 +150,14 @@ export const validateSshCertificatePrincipals = (
|
||||
}
|
||||
|
||||
// restrict allowed characters to letters, digits, dot, underscore, and hyphen
|
||||
if (!/^[A-Za-z0-9._-]+$/.test(sanitized)) {
|
||||
if (
|
||||
!characterValidator([
|
||||
CharacterType.AlphaNumeric,
|
||||
CharacterType.Period,
|
||||
CharacterType.Underscore,
|
||||
CharacterType.Hyphen
|
||||
])(sanitized)
|
||||
) {
|
||||
throw new BadRequestError({
|
||||
message: `Principal '${sanitized}' contains invalid characters. Allowed: alphanumeric, '.', '_', '-'.`
|
||||
});
|
||||
@ -266,8 +280,8 @@ export const validateSshCertificateTtl = (template: TSshCertificateTemplates, tt
|
||||
* that it only contains alphanumeric characters with no spaces.
|
||||
*/
|
||||
export const validateSshCertificateKeyId = (keyId: string) => {
|
||||
const regex = /^[A-Za-z0-9-]+$/;
|
||||
if (!regex.test(keyId)) {
|
||||
const regex = characterValidator([CharacterType.AlphaNumeric, CharacterType.Hyphen]);
|
||||
if (!regex(keyId)) {
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to validate Key ID because it can only contain alphanumeric characters and hyphens, with no spaces."
|
||||
@ -298,7 +312,7 @@ const validateSshPublicKey = async (publicKey: string) => {
|
||||
|
||||
try {
|
||||
await fs.writeFile(pubKeyFile, publicKey, { mode: 0o600 });
|
||||
await execFileAsync("ssh-keygen", ["-l", "-f", pubKeyFile]);
|
||||
await execFileAsync("ssh-keygen", ["-l", "-f", pubKeyFile], { timeout: EXEC_TIMEOUT_MS });
|
||||
} catch (error) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to validate SSH public key format: could not be parsed."
|
||||
@ -363,7 +377,7 @@ export const createSshCert = async ({
|
||||
await fs.writeFile(privateKeyFile, caPrivateKey, { mode: 0o600 });
|
||||
|
||||
// Execute the signing process
|
||||
await execFileAsync("ssh-keygen", sshKeygenArgs, { encoding: "utf8" });
|
||||
await execFileAsync("ssh-keygen", sshKeygenArgs, { encoding: "utf8", timeout: EXEC_TIMEOUT_MS });
|
||||
|
||||
// Read the signed public key from the generated cert file
|
||||
const signedPublicKey = await fs.readFile(signedPublicKeyFile, "utf8");
|
||||
|
@ -244,7 +244,7 @@ export const KUBERNETES_AUTH = {
|
||||
kubernetesHost: "The host string, host:port pair, or URL to the base of the Kubernetes API server.",
|
||||
caCert: "The PEM-encoded CA cert for the Kubernetes API server.",
|
||||
tokenReviewerJwt:
|
||||
"The long-lived service account JWT token for Infisical to access the TokenReview API to validate other service account JWT tokens submitted by applications/pods.",
|
||||
"Optional JWT token for accessing Kubernetes TokenReview API. If provided, this long-lived token will be used to validate service account tokens during authentication. If omitted, the client's own JWT will be used instead, which requires the client to have the system:auth-delegator ClusterRole binding.",
|
||||
allowedNamespaces:
|
||||
"The comma-separated list of trusted namespaces that service accounts must belong to authenticate with Infisical.",
|
||||
allowedNames: "The comma-separated list of trusted service account names that can authenticate with Infisical.",
|
||||
@ -260,7 +260,7 @@ export const KUBERNETES_AUTH = {
|
||||
kubernetesHost: "The new host string, host:port pair, or URL to the base of the Kubernetes API server.",
|
||||
caCert: "The new PEM-encoded CA cert for the Kubernetes API server.",
|
||||
tokenReviewerJwt:
|
||||
"The new long-lived service account JWT token for Infisical to access the TokenReview API to validate other service account JWT tokens submitted by applications/pods.",
|
||||
"Optional JWT token for accessing Kubernetes TokenReview API. If provided, this long-lived token will be used to validate service account tokens during authentication. If omitted, the client's own JWT will be used instead, which requires the client to have the system:auth-delegator ClusterRole binding.",
|
||||
allowedNamespaces:
|
||||
"The new comma-separated list of trusted namespaces that service accounts must belong to authenticate with Infisical.",
|
||||
allowedNames: "The new comma-separated list of trusted service account names that can authenticate with Infisical.",
|
||||
@ -631,7 +631,8 @@ export const FOLDERS = {
|
||||
workspaceId: "The ID of the project to list folders from.",
|
||||
environment: "The slug of the environment to list folders from.",
|
||||
path: "The path to list folders from.",
|
||||
directory: "The directory to list folders from. (Deprecated in favor of path)"
|
||||
directory: "The directory to list folders from. (Deprecated in favor of path)",
|
||||
recursive: "Whether or not to fetch all folders from the specified base path, and all of its subdirectories."
|
||||
},
|
||||
GET_BY_ID: {
|
||||
folderId: "The ID of the folder to get details."
|
||||
@ -815,7 +816,8 @@ export const DASHBOARD = {
|
||||
search: "The text string to filter secret keys and folder names by.",
|
||||
includeSecrets: "Whether to include project secrets in the response.",
|
||||
includeFolders: "Whether to include project folders in the response.",
|
||||
includeDynamicSecrets: "Whether to include dynamic project secrets in the response."
|
||||
includeDynamicSecrets: "Whether to include dynamic project secrets in the response.",
|
||||
includeImports: "Whether to include project secret imports in the response."
|
||||
},
|
||||
SECRET_DETAILS_LIST: {
|
||||
projectId: "The ID of the project to list secrets/folders from.",
|
||||
|
@ -28,8 +28,8 @@ export const createDigestAuthRequestInterceptor = (
|
||||
nc += 1;
|
||||
const nonceCount = nc.toString(16).padStart(8, "0");
|
||||
const cnonce = crypto.randomBytes(24).toString("hex");
|
||||
const realm = authDetails.find((el) => el[0].toLowerCase().indexOf("realm") > -1)?.[1].replace(/"/g, "");
|
||||
const nonce = authDetails.find((el) => el[0].toLowerCase().indexOf("nonce") > -1)?.[1].replace(/"/g, "");
|
||||
const realm = authDetails.find((el) => el[0].toLowerCase().indexOf("realm") > -1)?.[1]?.replaceAll('"', "") || "";
|
||||
const nonce = authDetails.find((el) => el[0].toLowerCase().indexOf("nonce") > -1)?.[1]?.replaceAll('"', "") || "";
|
||||
const ha1 = crypto.createHash("md5").update(`${username}:${realm}:${password}`).digest("hex");
|
||||
const path = opts.url;
|
||||
|
||||
|
@ -1,26 +1,35 @@
|
||||
// Credit: https://github.com/miguelmota/is-base64
|
||||
export const isBase64 = (
|
||||
v: string,
|
||||
opts = { allowEmpty: false, mimeRequired: false, allowMime: true, paddingRequired: false }
|
||||
) => {
|
||||
if (opts.allowEmpty === false && v === "") {
|
||||
return false;
|
||||
type Base64Options = {
|
||||
urlSafe?: boolean;
|
||||
padding?: boolean;
|
||||
};
|
||||
|
||||
const base64WithPadding = /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=|[A-Za-z0-9+/]{4})$/;
|
||||
const base64WithoutPadding = /^[A-Za-z0-9+/]+$/;
|
||||
const base64UrlWithPadding = /^(?:[A-Za-z0-9_-]{4})*(?:[A-Za-z0-9_-]{2}==|[A-Za-z0-9_-]{3}=|[A-Za-z0-9_-]{4})$/;
|
||||
const base64UrlWithoutPadding = /^[A-Za-z0-9_-]+$/;
|
||||
|
||||
export const isBase64 = (str: string, options: Base64Options = {}): boolean => {
|
||||
if (typeof str !== "string") {
|
||||
throw new TypeError("Expected a string");
|
||||
}
|
||||
|
||||
let regex = "(?:[A-Za-z0-9+\\/]{4})*(?:[A-Za-z0-9+\\/]{2}==|[A-Za-z0-9+/]{3}=)?";
|
||||
const mimeRegex = "(data:\\w+\\/[a-zA-Z\\+\\-\\.]+;base64,)";
|
||||
// Default padding to true unless urlSafe is true
|
||||
const opts: Base64Options = {
|
||||
urlSafe: false,
|
||||
padding: options.urlSafe === undefined ? true : !options.urlSafe,
|
||||
...options
|
||||
};
|
||||
|
||||
if (opts.mimeRequired === true) {
|
||||
regex = mimeRegex + regex;
|
||||
} else if (opts.allowMime === true) {
|
||||
regex = `${mimeRegex}?${regex}`;
|
||||
if (str === "") return true;
|
||||
|
||||
let regex;
|
||||
if (opts.urlSafe) {
|
||||
regex = opts.padding ? base64UrlWithPadding : base64UrlWithoutPadding;
|
||||
} else {
|
||||
regex = opts.padding ? base64WithPadding : base64WithoutPadding;
|
||||
}
|
||||
|
||||
if (opts.paddingRequired === false) {
|
||||
regex = "(?:[A-Za-z0-9+\\/]{4})*(?:[A-Za-z0-9+\\/]{2}(==)?|[A-Za-z0-9+\\/]{3}=?)?";
|
||||
}
|
||||
|
||||
return new RegExp(`^${regex}$`, "gi").test(v);
|
||||
return (!opts.padding || str.length % 4 === 0) && regex.test(str);
|
||||
};
|
||||
|
||||
export const getBase64SizeInBytes = (base64String: string) => {
|
||||
|
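A quick sketch of how the rewritten isBase64 behaves with its new options (illustrative only, not part of the diff; the sample strings are arbitrary):

isBase64("aGVsbG8gd29ybGQ=");                    // true  - standard alphabet, padded
isBase64("aGVsbG8gd29ybGQ");                     // false - padding is required by default
isBase64("aGVsbG8gd29ybGQ", { padding: false }); // true
isBase64("aGVsbG8td29ybGQ", { urlSafe: true });  // true  - padding defaults to false when urlSafe is set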
42  backend/src/lib/certificates/extract-certificate.test.ts  Normal file
@ -0,0 +1,42 @@
|
||||
import { extractX509CertFromChain } from "./extract-certificate";
|
||||
|
||||
describe("Extract Certificate Payload", () => {
|
||||
test("Single chain", () => {
|
||||
const payload = `-----BEGIN CERTIFICATE-----
|
||||
MIIEZzCCA0+gAwIBAgIUDk9+HZcMHppiNy0TvoBg8/aMEqIwDQYJKoZIhvcNAQEL
|
||||
BQAwDTELMAkGA1UEChMCUEgwHhcNMjQxMDI1MTU0MjAzWhcNMjUxMDI1MjE0MjAz
|
||||
-----END CERTIFICATE-----`;
|
||||
const result = extractX509CertFromChain(payload);
|
||||
expect(result).toBeDefined();
|
||||
expect(result?.length).toBe(1);
|
||||
expect(result?.[0]).toEqual(payload);
|
||||
});
|
||||
|
||||
test("Multiple chain", () => {
|
||||
const payload = `-----BEGIN CERTIFICATE-----
|
||||
MIIEZzCCA0+gAwIBAgIUDk9+HZcMHppiNy0TvoBg8/aMEqIwDQYJKoZIhvcNAQEL
|
||||
BQAwDTELMAkGA1UEChMCUEgwHhcNMjQxMDI1MTU0MjAzWhcNMjUxMDI1MjE0MjAz
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIEZzCCA0+gAwIBAgIUDk9+HZcMHppiNy0TvoBg8/aMEqIwDQYJKoZIhvcNAQEL
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIEZzCCA0+gAwIBAgIUDk9+HZcMHppiNy0TvoBg8/aMEqIwDQYJKoZIhvcNAQEL
|
||||
-----END CERTIFICATE-----`;
|
||||
const result = extractX509CertFromChain(payload);
|
||||
expect(result).toBeDefined();
|
||||
expect(result?.length).toBe(3);
|
||||
expect(result).toEqual([
|
||||
`-----BEGIN CERTIFICATE-----
|
||||
MIIEZzCCA0+gAwIBAgIUDk9+HZcMHppiNy0TvoBg8/aMEqIwDQYJKoZIhvcNAQEL
|
||||
BQAwDTELMAkGA1UEChMCUEgwHhcNMjQxMDI1MTU0MjAzWhcNMjUxMDI1MjE0MjAz
|
||||
-----END CERTIFICATE-----`,
|
||||
`-----BEGIN CERTIFICATE-----
|
||||
MIIEZzCCA0+gAwIBAgIUDk9+HZcMHppiNy0TvoBg8/aMEqIwDQYJKoZIhvcNAQEL
|
||||
-----END CERTIFICATE-----`,
|
||||
`-----BEGIN CERTIFICATE-----
|
||||
MIIEZzCCA0+gAwIBAgIUDk9+HZcMHppiNy0TvoBg8/aMEqIwDQYJKoZIhvcNAQEL
|
||||
-----END CERTIFICATE-----`
|
||||
]);
|
||||
});
|
||||
});
|
51  backend/src/lib/certificates/extract-certificate.ts  Normal file
@ -0,0 +1,51 @@
|
||||
import { BadRequestError } from "../errors";
|
||||
|
||||
export const extractX509CertFromChain = (certificateChain: string): string[] => {
|
||||
if (!certificateChain) {
|
||||
throw new BadRequestError({
|
||||
message: "Certificate chain is empty or undefined"
|
||||
});
|
||||
}
|
||||
|
||||
const certificates: string[] = [];
|
||||
let currentPosition = 0;
|
||||
const chainLength = certificateChain.length;
|
||||
|
||||
while (currentPosition < chainLength) {
|
||||
// Find the start of a certificate
|
||||
const beginMarker = "-----BEGIN CERTIFICATE-----";
|
||||
const startIndex = certificateChain.indexOf(beginMarker, currentPosition);
|
||||
|
||||
if (startIndex === -1) {
|
||||
break; // No more certificates found
|
||||
}
|
||||
|
||||
// Find the end of the certificate
|
||||
const endMarker = "-----END CERTIFICATE-----";
|
||||
const endIndex = certificateChain.indexOf(endMarker, startIndex);
|
||||
|
||||
if (endIndex === -1) {
|
||||
throw new BadRequestError({
|
||||
message: "Malformed certificate chain: Found BEGIN marker without matching END marker"
|
||||
});
|
||||
}
|
||||
|
||||
// Extract the complete certificate including markers
|
||||
const completeEndIndex = endIndex + endMarker.length;
|
||||
const certificate = certificateChain.substring(startIndex, completeEndIndex);
|
||||
|
||||
// Add the extracted certificate to our results
|
||||
certificates.push(certificate);
|
||||
|
||||
// Move position to after this certificate
|
||||
currentPosition = completeEndIndex;
|
||||
}
|
||||
|
||||
if (certificates.length === 0) {
|
||||
throw new BadRequestError({
|
||||
message: "No valid certificates found in the chain"
|
||||
});
|
||||
}
|
||||
|
||||
return certificates;
|
||||
};
|
@ -56,6 +56,7 @@ const envSchema = z
|
||||
// TODO(akhilmhdh): will be changed to one
|
||||
ENCRYPTION_KEY: zpStr(z.string().optional()),
|
||||
ROOT_ENCRYPTION_KEY: zpStr(z.string().optional()),
|
||||
QUEUE_WORKERS_ENABLED: zodStrBool.default("true"),
|
||||
HTTPS_ENABLED: zodStrBool,
|
||||
// smtp options
|
||||
SMTP_HOST: zpStr(z.string().optional()),
|
||||
|
@ -68,6 +68,23 @@ export class ForbiddenRequestError extends Error {
|
||||
}
|
||||
}
|
||||
|
||||
export class PermissionBoundaryError extends ForbiddenRequestError {
|
||||
constructor({
|
||||
message,
|
||||
name,
|
||||
error,
|
||||
details
|
||||
}: {
|
||||
message?: string;
|
||||
name?: string;
|
||||
error?: unknown;
|
||||
details?: unknown;
|
||||
}) {
|
||||
super({ message, name, error, details });
|
||||
this.name = "PermissionBoundaryError";
|
||||
}
|
||||
}
|
||||
|
||||
export class BadRequestError extends Error {
|
||||
name: string;
|
||||
|
||||
|
@ -93,6 +93,7 @@ export const pingGatewayAndVerify = async ({
|
||||
let lastError: Error | null = null;
|
||||
const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
|
||||
throw new BadRequestError({
|
||||
message: (err as Error)?.message,
|
||||
error: err as Error
|
||||
});
|
||||
});
|
||||
|
@ -107,12 +107,6 @@ export const isValidIp = (ip: string) => {
|
||||
return net.isIPv4(ip) || net.isIPv6(ip);
|
||||
};
|
||||
|
||||
export const isValidHostname = (name: string) => {
|
||||
const hostnameRegex = /^(?!:\/\/)(\*\.)?([a-zA-Z0-9-_]{1,63}\.?)+(?!:\/\/)([a-zA-Z]{2,63})$/;
|
||||
|
||||
return hostnameRegex.test(name);
|
||||
};
|
||||
|
||||
export type TIp = {
|
||||
ipAddress: string;
|
||||
type: IPType;
|
||||
|
61  backend/src/lib/ip/ipRange.ts  Normal file
@ -0,0 +1,61 @@
|
||||
import { BlockList } from "node:net";
|
||||
|
||||
import { BadRequestError } from "../errors";
|
||||
// Define BlockList instances for each range type
|
||||
const ipv4RangeLists: Record<string, BlockList> = {
|
||||
unspecified: new BlockList(),
|
||||
broadcast: new BlockList(),
|
||||
multicast: new BlockList(),
|
||||
linkLocal: new BlockList(),
|
||||
loopback: new BlockList(),
|
||||
carrierGradeNat: new BlockList(),
|
||||
private: new BlockList(),
|
||||
reserved: new BlockList()
|
||||
};
|
||||
|
||||
// Add IPv4 CIDR ranges to each BlockList
|
||||
ipv4RangeLists.unspecified.addSubnet("0.0.0.0", 8);
|
||||
ipv4RangeLists.broadcast.addAddress("255.255.255.255");
|
||||
ipv4RangeLists.multicast.addSubnet("224.0.0.0", 4);
|
||||
ipv4RangeLists.linkLocal.addSubnet("169.254.0.0", 16);
|
||||
ipv4RangeLists.loopback.addSubnet("127.0.0.0", 8);
|
||||
ipv4RangeLists.carrierGradeNat.addSubnet("100.64.0.0", 10);
|
||||
|
||||
// IPv4 Private ranges
|
||||
ipv4RangeLists.private.addSubnet("10.0.0.0", 8);
|
||||
ipv4RangeLists.private.addSubnet("172.16.0.0", 12);
|
||||
ipv4RangeLists.private.addSubnet("192.168.0.0", 16);
|
||||
|
||||
// IPv4 Reserved ranges
|
||||
ipv4RangeLists.reserved.addSubnet("192.0.0.0", 24);
|
||||
ipv4RangeLists.reserved.addSubnet("192.0.2.0", 24);
|
||||
ipv4RangeLists.reserved.addSubnet("192.88.99.0", 24);
|
||||
ipv4RangeLists.reserved.addSubnet("198.18.0.0", 15);
|
||||
ipv4RangeLists.reserved.addSubnet("198.51.100.0", 24);
|
||||
ipv4RangeLists.reserved.addSubnet("203.0.113.0", 24);
|
||||
ipv4RangeLists.reserved.addSubnet("240.0.0.0", 4);
|
||||
|
||||
/**
|
||||
* Checks if an IP address (IPv4) is private or public
|
||||
* inspired by: https://github.com/whitequark/ipaddr.js/blob/main/lib/ipaddr.js
|
||||
*/
|
||||
export const getIpRange = (ip: string): string => {
|
||||
try {
|
||||
const rangeLists = ipv4RangeLists;
|
||||
// Check each range type
|
||||
for (const rangeName in rangeLists) {
|
||||
if (Object.hasOwn(rangeLists, rangeName)) {
|
||||
if (rangeLists[rangeName].check(ip)) {
|
||||
return rangeName;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If no range matched, it's a public address
|
||||
return "unicast";
|
||||
} catch (error) {
|
||||
throw new BadRequestError({ message: "Invalid IP address", error });
|
||||
}
|
||||
};
|
||||
|
||||
export const isPrivateIp = (ip: string) => getIpRange(ip) !== "unicast";
|
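A small usage sketch for the new BlockList-based range helpers (illustrative only, not part of the diff; the addresses are arbitrary examples):

getIpRange("10.1.2.3");      // "private"
getIpRange("127.0.0.1");     // "loopback"
getIpRange("169.254.10.20"); // "linkLocal"
getIpRange("8.8.8.8");       // "unicast" - no reserved range matched, treated as public
isPrivateIp("192.168.1.50"); // true
isPrivateIp("1.1.1.1");      // false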
21  backend/src/lib/template/validate-handlebars.ts  Normal file
@ -0,0 +1,21 @@
|
||||
import handlebars from "handlebars";
|
||||
|
||||
import { BadRequestError } from "../errors";
|
||||
import { logger } from "../logger";
|
||||
|
||||
type SanitizationArg = {
|
||||
allowedExpressions?: (arg: string) => boolean;
|
||||
};
|
||||
|
||||
export const validateHandlebarTemplate = (templateName: string, template: string, dto: SanitizationArg) => {
|
||||
const parsedAst = handlebars.parse(template);
|
||||
parsedAst.body.forEach((el) => {
|
||||
if (el.type === "ContentStatement") return;
|
||||
if (el.type === "MustacheStatement" && "path" in el) {
|
||||
const { path } = el as { type: "MustacheStatement"; path: { type: "PathExpression"; original: string } };
|
||||
if (path.type === "PathExpression" && dto?.allowedExpressions?.(path.original)) return;
|
||||
}
|
||||
logger.error(el, "Template sanitization failed");
|
||||
throw new BadRequestError({ message: `Template sanitization failed: ${templateName}` });
|
||||
});
|
||||
};
|
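A minimal sketch of the new handlebars guard (illustrative only, not part of the diff; the rule content is made up). Only mustache expressions accepted by allowedExpressions survive; anything else is logged and rejected with BadRequestError, mirroring how the project-user-additional-privilege service calls it earlier in this diff.

const rules = JSON.stringify([
  { subject: "secrets", conditions: { environment: "{{identity.metadata.env}}" } }
]);

// Passes: the only path expression is "identity.metadata.env".
validateHandlebarTemplate("Privilege rules", rules, {
  allowedExpressions: (val) => val.includes("identity.")
});

// A template containing any other expression, e.g. "{{lookup this 'constructor'}}", would throw.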
@ -1,5 +1,11 @@
|
||||
import { CharacterType, characterValidator } from "./validate-string";
|
||||
|
||||
// regex to allow only alphanumeric, dash, underscore
|
||||
export const isValidFolderName = (name: string) => /^[a-zA-Z0-9-_]+$/.test(name);
|
||||
export const isValidFolderName = characterValidator([
|
||||
CharacterType.AlphaNumeric,
|
||||
CharacterType.Hyphen,
|
||||
CharacterType.Underscore
|
||||
]);
|
||||
|
||||
export const isValidSecretPath = (path: string) =>
|
||||
path
|
||||
|
23  backend/src/lib/validator/validate-string.test.ts  Normal file
@ -0,0 +1,23 @@
|
||||
import { CharacterType, characterValidator } from "./validate-string";
|
||||
|
||||
describe("validate-string", () => {
|
||||
test("Check alphabets", () => {
|
||||
expect(characterValidator([CharacterType.Alphabets])("hello")).toBeTruthy();
|
||||
expect(characterValidator([CharacterType.Alphabets])("hello world")).toBeFalsy();
|
||||
expect(characterValidator([CharacterType.Alphabets, CharacterType.Spaces])("hello world")).toBeTruthy();
|
||||
});
|
||||
|
||||
test("Check numbers", () => {
|
||||
expect(characterValidator([CharacterType.Numbers])("1234567890")).toBeTruthy();
|
||||
expect(characterValidator([CharacterType.AlphaNumeric])("helloWORLD1234567890")).toBeTruthy();
|
||||
expect(characterValidator([CharacterType.AlphaNumeric])("helloWORLD1234567890-")).toBeFalsy();
|
||||
});
|
||||
|
||||
test("Check special characters", () => {
|
||||
expect(characterValidator([CharacterType.AlphaNumeric, CharacterType.Hyphen])("Hello-World")).toBeTruthy();
|
||||
expect(characterValidator([CharacterType.AlphaNumeric, CharacterType.Plus])("Hello+World")).toBeTruthy();
|
||||
expect(characterValidator([CharacterType.AlphaNumeric, CharacterType.Underscore])("Hello_World")).toBeTruthy();
|
||||
expect(characterValidator([CharacterType.AlphaNumeric, CharacterType.Colon])("Hello:World")).toBeTruthy();
|
||||
expect(characterValidator([CharacterType.AlphaNumeric, CharacterType.Underscore])("Hello World")).toBeFalsy();
|
||||
});
|
||||
});
|
101  backend/src/lib/validator/validate-string.ts  Normal file
@ -0,0 +1,101 @@
|
||||
export enum CharacterType {
|
||||
Alphabets = "alphabets",
|
||||
Numbers = "numbers",
|
||||
AlphaNumeric = "alpha-numeric",
|
||||
Spaces = "spaces",
|
||||
SpecialCharacters = "specialCharacters",
|
||||
Punctuation = "punctuation",
|
||||
Period = "period", // .
|
||||
Underscore = "underscore", // _
|
||||
Colon = "colon", // :
|
||||
ForwardSlash = "forwardSlash", // /
|
||||
Equals = "equals", // =
|
||||
Plus = "plus", // +
|
||||
Hyphen = "hyphen", // -
|
||||
At = "at", // @
|
||||
// Additional individual characters that might be useful
|
||||
Asterisk = "asterisk", // *
|
||||
Ampersand = "ampersand", // &
|
||||
Question = "question", // ?
|
||||
Hash = "hash", // #
|
||||
Percent = "percent", // %
|
||||
Dollar = "dollar", // $
|
||||
Caret = "caret", // ^
|
||||
Backtick = "backtick", // `
|
||||
Pipe = "pipe", // |
|
||||
Backslash = "backslash", // \
|
||||
OpenParen = "openParen", // (
|
||||
CloseParen = "closeParen", // )
|
||||
OpenBracket = "openBracket", // [
|
||||
CloseBracket = "closeBracket", // ]
|
||||
OpenBrace = "openBrace", // {
|
||||
CloseBrace = "closeBrace", // }
|
||||
LessThan = "lessThan", // <
|
||||
GreaterThan = "greaterThan", // >
|
||||
SingleQuote = "singleQuote", // '
|
||||
DoubleQuote = "doubleQuote", // "
|
||||
Comma = "comma", // ,
|
||||
Semicolon = "semicolon", // ;
|
||||
Exclamation = "exclamation" // !
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates if a string contains only specific types of characters
|
||||
*/
|
||||
export const characterValidator = (allowedCharacters: CharacterType[]) => {
|
||||
// Create a regex pattern based on allowed character types
|
||||
const patternMap: Record<CharacterType, string> = {
|
||||
[CharacterType.Alphabets]: "a-zA-Z",
|
||||
[CharacterType.Numbers]: "0-9",
|
||||
[CharacterType.AlphaNumeric]: "a-zA-Z0-9",
|
||||
[CharacterType.Spaces]: "\\s",
|
||||
[CharacterType.SpecialCharacters]: "!@#$%^&*()_+\\-=\\[\\]{}|;:'\",.<>/?\\\\",
|
||||
[CharacterType.Punctuation]: "\\.\\,\\;\\:\\!\\?",
|
||||
[CharacterType.Colon]: "\\:",
|
||||
[CharacterType.ForwardSlash]: "\\/",
|
||||
[CharacterType.Underscore]: "_",
|
||||
[CharacterType.Hyphen]: "\\-",
|
||||
[CharacterType.Period]: "\\.",
|
||||
[CharacterType.Equals]: "=",
|
||||
[CharacterType.Plus]: "\\+",
|
||||
[CharacterType.At]: "@",
|
||||
[CharacterType.Asterisk]: "\\*",
|
||||
[CharacterType.Ampersand]: "&",
|
||||
[CharacterType.Question]: "\\?",
|
||||
[CharacterType.Hash]: "#",
|
||||
[CharacterType.Percent]: "%",
|
||||
[CharacterType.Dollar]: "\\$",
|
||||
[CharacterType.Caret]: "\\^",
|
||||
[CharacterType.Backtick]: "`",
|
||||
[CharacterType.Pipe]: "\\|",
|
||||
[CharacterType.Backslash]: "\\\\",
|
||||
[CharacterType.OpenParen]: "\\(",
|
||||
[CharacterType.CloseParen]: "\\)",
|
||||
[CharacterType.OpenBracket]: "\\[",
|
||||
[CharacterType.CloseBracket]: "\\]",
|
||||
[CharacterType.OpenBrace]: "\\{",
|
||||
[CharacterType.CloseBrace]: "\\}",
|
||||
[CharacterType.LessThan]: "<",
|
||||
[CharacterType.GreaterThan]: ">",
|
||||
[CharacterType.SingleQuote]: "'",
|
||||
[CharacterType.DoubleQuote]: '\\"',
|
||||
[CharacterType.Comma]: ",",
|
||||
[CharacterType.Semicolon]: ";",
|
||||
[CharacterType.Exclamation]: "!"
|
||||
};
|
||||
|
||||
// Combine patterns from allowed characters
|
||||
const combinedPattern = allowedCharacters.map((char) => patternMap[char]).join("");
|
||||
|
||||
// Create a regex that matches only the allowed characters
|
||||
const regex = new RegExp(`^[${combinedPattern}]+$`);
|
||||
|
||||
/**
|
||||
* Validates if the input string contains only the allowed character types
|
||||
* @param input String to validate
|
||||
* @returns Boolean indicating if the string is valid
|
||||
*/
|
||||
return function validate(input: string): boolean {
|
||||
return regex.test(input);
|
||||
};
|
||||
};
|
15  backend/src/lib/validator/validate-url.test.ts  Normal file
@ -0,0 +1,15 @@
|
||||
import { isFQDN } from "./validate-url";
|
||||
|
||||
describe("isFQDN", () => {
|
||||
test("Non wildcard", () => {
|
||||
expect(isFQDN("www.example.com")).toBeTruthy();
|
||||
});
|
||||
|
||||
test("Wildcard", () => {
|
||||
expect(isFQDN("*.example.com", { allow_wildcard: true })).toBeTruthy();
|
||||
});
|
||||
|
||||
test("Wildcard FQDN fails on option allow_wildcard false", () => {
|
||||
expect(isFQDN("*.example.com")).toBeFalsy();
|
||||
});
|
||||
});
|
@ -1,18 +1,117 @@
|
||||
import { getConfig } from "../config/env";
|
||||
import dns from "node:dns/promises";
|
||||
|
||||
import { isIPv4 } from "net";
|
||||
|
||||
import { BadRequestError } from "../errors";
|
||||
import { isPrivateIp } from "../ip/ipRange";
|
||||
|
||||
export const blockLocalAndPrivateIpAddresses = (url: string) => {
|
||||
export const blockLocalAndPrivateIpAddresses = async (url: string) => {
|
||||
const validUrl = new URL(url);
|
||||
const appCfg = getConfig();
|
||||
// on cloud local ips are not allowed
|
||||
if (
|
||||
appCfg.isCloud &&
|
||||
(validUrl.host === "host.docker.internal" ||
|
||||
validUrl.host.match(/^10\.\d+\.\d+\.\d+/) ||
|
||||
validUrl.host.match(/^192\.168\.\d+\.\d+/))
|
||||
)
|
||||
const inputHostIps: string[] = [];
|
||||
if (isIPv4(validUrl.host)) {
|
||||
inputHostIps.push(validUrl.host);
|
||||
} else {
|
||||
if (validUrl.host === "localhost" || validUrl.host === "host.docker.internal") {
|
||||
throw new BadRequestError({ message: "Local IPs not allowed as URL" });
|
||||
|
||||
if (validUrl.host === "localhost" || validUrl.host === "127.0.0.1")
|
||||
throw new BadRequestError({ message: "Localhost not allowed" });
|
||||
}
|
||||
const resolvedIps = await dns.resolve4(validUrl.host);
|
||||
inputHostIps.push(...resolvedIps);
|
||||
}
|
||||
const isInternalIp = inputHostIps.some((el) => isPrivateIp(el));
|
||||
if (isInternalIp) throw new BadRequestError({ message: "Local IPs not allowed as URL" });
|
||||
};
|
||||
|
||||
type FQDNOptions = {
require_tld?: boolean;
allow_underscores?: boolean;
allow_trailing_dot?: boolean;
allow_numeric_tld?: boolean;
allow_wildcard?: boolean;
ignore_max_length?: boolean;
};

const defaultFqdnOptions: FQDNOptions = {
require_tld: true,
allow_underscores: false,
allow_trailing_dot: false,
allow_numeric_tld: false,
allow_wildcard: false,
ignore_max_length: false
};

// credits: https://github.com/validatorjs/validator.js/blob/f5da7fb6ed59b94695e6fcb2e970c80029509919/src/lib/isFQDN.js#L13
export const isFQDN = (str: string, options: FQDNOptions = {}): boolean => {
if (typeof str !== "string") {
throw new TypeError("Expected a string");
}

// Apply default options
const opts: FQDNOptions = {
...defaultFqdnOptions,
...options
};

let testStr = str;
/* Remove the optional trailing dot before checking validity */
if (opts.allow_trailing_dot && str[str.length - 1] === ".") {
testStr = testStr.substring(0, str.length - 1);
}

/* Remove the optional wildcard before checking validity */
if (opts.allow_wildcard === true && str.indexOf("*.") === 0) {
testStr = testStr.substring(2);
}

const parts = testStr.split(".");
const tld = parts[parts.length - 1];

if (opts.require_tld) {
// disallow fqdns without tld
if (parts.length < 2) {
return false;
}

if (
!opts.allow_numeric_tld &&
!/^([a-z\u00A1-\u00A8\u00AA-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]{2,}|xn[a-z0-9-]{2,})$/i.test(tld)
) {
return false;
}

// disallow spaces
if (/\s/.test(tld)) {
return false;
}
}

// reject numeric TLDs
if (!opts.allow_numeric_tld && /^\d+$/.test(tld)) {
return false;
}

return parts.every((part) => {
if (part.length > 63 && !opts.ignore_max_length) {
return false;
}

if (!/^[a-z_\u00a1-\uffff0-9-]+$/i.test(part)) {
return false;
}

// disallow full-width chars
if (/[\uff01-\uff5e]/.test(part)) {
return false;
}

// disallow parts starting or ending with hyphen
if (/^-|-$/.test(part)) {
return false;
}

if (!opts.allow_underscores && /_/.test(part)) {
return false;
}

return true;
});
};

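A minimal usage sketch for the isFQDN helper above, assuming it is exported from "@app/lib/validator/validate-url" as the certificate-authority import later in this diff suggests; the hostnames are illustrative only.

import { isFQDN } from "@app/lib/validator/validate-url";

isFQDN("example.com");                                  // true
isFQDN("localhost");                                    // false: no TLD by default
isFQDN("example.com.");                                 // false: trailing dot not allowed by default
isFQDN("example.com.", { allow_trailing_dot: true });   // true
isFQDN("*.infisical.com");                              // false: the wildcard label fails the per-part check
isFQDN("*.infisical.com", { allow_wildcard: true });    // true: the leading "*." is stripped first
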
@ -272,10 +272,13 @@ export const queueServiceFactory = (
connection
});

const appCfg = getConfig();
if (appCfg.QUEUE_WORKERS_ENABLED) {
workerContainer[name] = new Worker<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>(name, jobFn, {
...queueSettings,
connection
});
}
};

const startPg = async <T extends QueueName>(
@ -307,6 +310,11 @@ export const queueServiceFactory = (
event: U,
listener: WorkerListener<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>[U]
) => {
const appCfg = getConfig();
if (!appCfg.QUEUE_WORKERS_ENABLED) {
return;
}

const worker = workerContainer[name];
worker.on(event, listener);
};

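A sketch of the gating pattern introduced above: only instances started with queue workers enabled create BullMQ workers and attach listeners, while every instance can still enqueue jobs. The queue name, processor, and Redis connection are placeholders, not values from the diff.

import { Queue, Worker } from "bullmq";

const connection = { host: "localhost", port: 6379 }; // assumed local Redis
const queue = new Queue("sample-jobs", { connection });

void queue.add("sample-job", {}); // enqueuing works on every instance

const workersEnabled = process.env.QUEUE_WORKERS_ENABLED !== "false";

// Worker creation is skipped entirely on web-only instances.
const worker = workersEnabled
  ? new Worker("sample-jobs", async (job) => console.log("processing", job.name), { connection })
  : undefined;

// Mirrors the guarded listen(): attaching listeners becomes a no-op when workers are disabled.
worker?.on("completed", (job) => console.log("done", job.id));
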
@ -1,6 +1,8 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";

import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";

interface SlugSchemaInputs {
min?: number;
max?: number;
@ -27,4 +29,13 @@ export const GenericResourceNameSchema = z
.trim()
.min(1, { message: "Name must be at least 1 character" })
.max(64, { message: "Name must be 64 or fewer characters" })
.regex(/^[a-zA-Z0-9\-_\s]+$/, "Name can only contain alphanumeric characters, dashes, underscores, and spaces");
.refine(
(val) =>
characterValidator([
CharacterType.AlphaNumeric,
CharacterType.Hyphen,
CharacterType.Underscore,
CharacterType.Spaces
])(val),
"Name can only contain alphanumeric characters, dashes, underscores, and spaces"
);

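A quick behavior sketch for the refined GenericResourceNameSchema above (imported elsewhere in this diff from "@app/server/lib/schemas"), assuming characterValidator builds a predicate that accepts only the listed character classes:

GenericResourceNameSchema.parse("my-project_01"); // ok: alphanumerics, dashes, underscores
GenericResourceNameSchema.parse("My Project");    // ok: spaces are allowed
GenericResourceNameSchema.parse("prod/payments"); // throws: "/" is outside the allowed classes
GenericResourceNameSchema.parse("   ");           // throws: trims to empty and fails min(1)
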
@ -13,6 +13,7 @@ import {
InternalServerError,
NotFoundError,
OidcAuthError,
PermissionBoundaryError,
RateLimitError,
ScimRequestError,
UnauthorizedError
@ -117,7 +118,7 @@ export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider
conditions: el.conditions
}))
});
} else if (error instanceof ForbiddenRequestError) {
} else if (error instanceof ForbiddenRequestError || error instanceof PermissionBoundaryError) {
void res.status(HttpStatusCodes.Forbidden).send({
reqId: req.id,
statusCode: HttpStatusCodes.Forbidden,

@ -65,7 +65,7 @@ export const registerSecretScannerGhApp = async (server: FastifyZodProvider) =>
payload: JSON.stringify(req.body),
signature: signatureSHA256
});
void res.send("ok");
return res.send("ok");
}
});
}

@ -34,7 +34,7 @@ export const registerServeUI = async (
TELEMETRY_CAPTURING_ENABLED: appCfg.TELEMETRY_ENABLED
};
const js = `window.__INFISICAL_RUNTIME_ENV__ = Object.freeze(${JSON.stringify(config)});`;
void res.send(js);
return res.send(js);
}
});

@ -57,7 +57,7 @@ export const registerServeUI = async (
reply.callNotFound();
return;
}
void reply.sendFile("index.html");
return reply.sendFile("index.html");
}
});
}

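The void-to-return changes above and in the secret scanner route follow the Fastify convention of returning the reply (or a value) from the handler rather than discarding the result of send. A small sketch of the two styles, with placeholder routes:

import Fastify from "fastify";

const app = Fastify();

app.get("/old-style", (_req, reply) => {
  void reply.send("ok"); // the response is sent, but the handler resolves with undefined
});

app.get("/new-style", (_req, reply) => {
  return reply.send("ok"); // returning the reply makes the handler's completion explicit
});
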
@ -413,7 +413,14 @@ export const registerRoutes = async (
serviceTokenDAL,
projectDAL
});
const licenseService = licenseServiceFactory({ permissionService, orgDAL, licenseDAL, keyStore });
const licenseService = licenseServiceFactory({
permissionService,
orgDAL,
licenseDAL,
keyStore,
identityOrgMembershipDAL,
projectDAL
});

const hsmService = hsmServiceFactory({
hsmModule,

@ -70,6 +70,19 @@ export const DefaultResponseErrorsSchema = {
})
};

export const booleanSchema = z
.union([z.boolean(), z.string().trim()])
.transform((value) => {
if (typeof value === "string") {
// ie if not empty, 0 or false, return true
return Boolean(value) && Number(value) !== 0 && value.toLowerCase() !== "false";
}

return value;
})
.optional()
.default(true);

export const sapPubSchema = SecretApprovalPoliciesSchema.merge(
z.object({
environment: z.object({

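A sketch of how the shared booleanSchema above (exported from "@app/server/routes/sanitizedSchemas") coerces typical querystring inputs; the values are illustrative and not part of the diff:

booleanSchema.parse(undefined); // true  (defaults to true)
booleanSchema.parse(true);      // true
booleanSchema.parse("true");    // true
booleanSchema.parse("1");       // true
booleanSchema.parse("false");   // false
booleanSchema.parse("0");       // false
booleanSchema.parse("");        // false (empty string is falsy)
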
@ -16,7 +16,12 @@ import { secretsLimit } from "@app/server/config/rateLimiter";
|
||||
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
|
||||
import { getUserAgentType } from "@app/server/plugins/audit-log";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { SanitizedDynamicSecretSchema, SanitizedTagSchema, secretRawSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
import {
|
||||
booleanSchema,
|
||||
SanitizedDynamicSecretSchema,
|
||||
SanitizedTagSchema,
|
||||
secretRawSchema
|
||||
} from "@app/server/routes/sanitizedSchemas";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";
|
||||
import { SecretsOrderBy } from "@app/services/secret/secret-types";
|
||||
@ -24,20 +29,6 @@ import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
|
||||
|
||||
const MAX_DEEP_SEARCH_LIMIT = 500; // arbitrary limit to prevent excessive results
|
||||
|
||||
// handle querystring boolean values
|
||||
const booleanSchema = z
|
||||
.union([z.boolean(), z.string().trim()])
|
||||
.transform((value) => {
|
||||
if (typeof value === "string") {
|
||||
// ie if not empty, 0 or false, return true
|
||||
return Boolean(value) && Number(value) !== 0 && value.toLowerCase() !== "false";
|
||||
}
|
||||
|
||||
return value;
|
||||
})
|
||||
.optional()
|
||||
.default(true);
|
||||
|
||||
const parseSecretPathSearch = (search?: string) => {
|
||||
if (!search)
|
||||
return {
|
||||
@ -109,6 +100,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
||||
search: z.string().trim().describe(DASHBOARD.SECRET_OVERVIEW_LIST.search).optional(),
|
||||
includeSecrets: booleanSchema.describe(DASHBOARD.SECRET_OVERVIEW_LIST.includeSecrets),
|
||||
includeFolders: booleanSchema.describe(DASHBOARD.SECRET_OVERVIEW_LIST.includeFolders),
|
||||
includeImports: booleanSchema.describe(DASHBOARD.SECRET_OVERVIEW_LIST.includeImports),
|
||||
includeDynamicSecrets: booleanSchema.describe(DASHBOARD.SECRET_OVERVIEW_LIST.includeDynamicSecrets)
|
||||
}),
|
||||
response: {
|
||||
@ -124,9 +116,17 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
||||
})
|
||||
.array()
|
||||
.optional(),
|
||||
imports: SecretImportsSchema.omit({ importEnv: true })
|
||||
.extend({
|
||||
importEnv: z.object({ name: z.string(), slug: z.string(), id: z.string() }),
|
||||
environment: z.string()
|
||||
})
|
||||
.array()
|
||||
.optional(),
|
||||
totalFolderCount: z.number().optional(),
|
||||
totalDynamicSecretCount: z.number().optional(),
|
||||
totalSecretCount: z.number().optional(),
|
||||
totalImportCount: z.number().optional(),
|
||||
totalCount: z.number()
|
||||
})
|
||||
}
|
||||
@ -143,6 +143,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
||||
orderDirection,
|
||||
includeFolders,
|
||||
includeSecrets,
|
||||
includeImports,
|
||||
includeDynamicSecrets
|
||||
} = req.query;
|
||||
|
||||
@ -159,6 +160,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
||||
let remainingLimit = limit;
|
||||
let adjustedOffset = offset;
|
||||
|
||||
let imports: Awaited<ReturnType<typeof server.services.secretImport.getImportsMultiEnv>> | undefined;
|
||||
let folders: Awaited<ReturnType<typeof server.services.folder.getFoldersMultiEnv>> | undefined;
|
||||
let secrets: Awaited<ReturnType<typeof server.services.secret.getSecretsRawMultiEnv>> | undefined;
|
||||
let dynamicSecrets:
|
||||
@ -168,6 +170,53 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
||||
let totalFolderCount: number | undefined;
|
||||
let totalDynamicSecretCount: number | undefined;
|
||||
let totalSecretCount: number | undefined;
|
||||
let totalImportCount: number | undefined;
|
||||
|
||||
if (includeImports) {
|
||||
totalImportCount = await server.services.secretImport.getProjectImportMultiEnvCount({
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
projectId,
|
||||
environments,
|
||||
path: secretPath,
|
||||
search
|
||||
});
|
||||
|
||||
if (remainingLimit > 0 && totalImportCount > adjustedOffset) {
|
||||
imports = await server.services.secretImport.getImportsMultiEnv({
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
projectId,
|
||||
environments,
|
||||
path: secretPath,
|
||||
search,
|
||||
limit: remainingLimit,
|
||||
offset: adjustedOffset
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: req.query.projectId,
|
||||
event: {
|
||||
type: EventType.GET_SECRET_IMPORTS,
|
||||
metadata: {
|
||||
environment: environments.join(","),
|
||||
folderId: imports?.[0]?.folderId,
|
||||
numberOfImports: imports.length
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
remainingLimit -= imports.length;
|
||||
adjustedOffset = 0;
|
||||
} else {
|
||||
adjustedOffset = Math.max(0, adjustedOffset - totalImportCount);
|
||||
}
|
||||
}
|
||||
|
||||
if (includeFolders) {
|
||||
// this is the unique count, ie duplicate folders across envs only count as 1
|
||||
@ -345,10 +394,13 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
||||
folders,
|
||||
dynamicSecrets,
|
||||
secrets,
|
||||
imports,
|
||||
totalFolderCount,
|
||||
totalDynamicSecretCount,
|
||||
totalImportCount,
|
||||
totalSecretCount,
|
||||
totalCount: (totalFolderCount ?? 0) + (totalDynamicSecretCount ?? 0) + (totalSecretCount ?? 0)
|
||||
totalCount:
|
||||
(totalFolderCount ?? 0) + (totalDynamicSecretCount ?? 0) + (totalSecretCount ?? 0) + (totalImportCount ?? 0)
|
||||
};
|
||||
}
|
||||
});
|
||||
|
@ -24,7 +24,7 @@ const IdentityKubernetesAuthResponseSchema = IdentityKubernetesAuthsSchema.pick(
|
||||
allowedAudience: true
|
||||
}).extend({
|
||||
caCert: z.string(),
|
||||
tokenReviewerJwt: z.string()
|
||||
tokenReviewerJwt: z.string().optional().nullable()
|
||||
});
|
||||
|
||||
export const registerIdentityKubernetesRouter = async (server: FastifyZodProvider) => {
|
||||
@ -98,7 +98,7 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
|
||||
.object({
|
||||
kubernetesHost: z.string().trim().min(1).describe(KUBERNETES_AUTH.ATTACH.kubernetesHost),
|
||||
caCert: z.string().trim().default("").describe(KUBERNETES_AUTH.ATTACH.caCert),
|
||||
tokenReviewerJwt: z.string().trim().min(1).describe(KUBERNETES_AUTH.ATTACH.tokenReviewerJwt),
|
||||
tokenReviewerJwt: z.string().trim().optional().describe(KUBERNETES_AUTH.ATTACH.tokenReviewerJwt),
|
||||
allowedNamespaces: z.string().describe(KUBERNETES_AUTH.ATTACH.allowedNamespaces), // TODO: validation
|
||||
allowedNames: z.string().describe(KUBERNETES_AUTH.ATTACH.allowedNames),
|
||||
allowedAudience: z.string().describe(KUBERNETES_AUTH.ATTACH.allowedAudience),
|
||||
@ -195,7 +195,7 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
|
||||
.object({
|
||||
kubernetesHost: z.string().trim().min(1).optional().describe(KUBERNETES_AUTH.UPDATE.kubernetesHost),
|
||||
caCert: z.string().trim().optional().describe(KUBERNETES_AUTH.UPDATE.caCert),
|
||||
tokenReviewerJwt: z.string().trim().min(1).optional().describe(KUBERNETES_AUTH.UPDATE.tokenReviewerJwt),
|
||||
tokenReviewerJwt: z.string().trim().nullable().optional().describe(KUBERNETES_AUTH.UPDATE.tokenReviewerJwt),
|
||||
allowedNamespaces: z.string().optional().describe(KUBERNETES_AUTH.UPDATE.allowedNamespaces), // TODO: validation
|
||||
allowedNames: z.string().optional().describe(KUBERNETES_AUTH.UPDATE.allowedNames),
|
||||
allowedAudience: z.string().optional().describe(KUBERNETES_AUTH.UPDATE.allowedAudience),
|
||||
|
@ -32,6 +32,8 @@ const IdentityOidcAuthResponseSchema = IdentityOidcAuthsSchema.pick({
|
||||
caCert: z.string()
|
||||
});
|
||||
|
||||
const MAX_OIDC_CLAIM_SIZE = 32_768;
|
||||
|
||||
export const registerIdentityOidcAuthRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "POST",
|
||||
@ -55,7 +57,7 @@ export const registerIdentityOidcAuthRouter = async (server: FastifyZodProvider)
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { identityOidcAuth, accessToken, identityAccessToken, identityMembershipOrg } =
|
||||
const { identityOidcAuth, accessToken, identityAccessToken, identityMembershipOrg, oidcTokenData } =
|
||||
await server.services.identityOidcAuth.login({
|
||||
identityId: req.body.identityId,
|
||||
jwt: req.body.jwt
|
||||
@ -69,7 +71,11 @@ export const registerIdentityOidcAuthRouter = async (server: FastifyZodProvider)
|
||||
metadata: {
|
||||
identityId: identityOidcAuth.identityId,
|
||||
identityAccessTokenId: identityAccessToken.id,
|
||||
identityOidcAuthId: identityOidcAuth.id
|
||||
identityOidcAuthId: identityOidcAuth.id,
|
||||
oidcClaimsReceived:
|
||||
Buffer.from(JSON.stringify(oidcTokenData), "utf8").byteLength < MAX_OIDC_CLAIM_SIZE
|
||||
? oidcTokenData
|
||||
: { payload: "Error: Payload exceeds 32KB, provided oidc claim not recorded in audit log." }
|
||||
}
|
||||
}
|
||||
});
|
||||
|
@ -4,7 +4,6 @@ import {
|
||||
AuditLogsSchema,
|
||||
GroupsSchema,
|
||||
IncidentContactsSchema,
|
||||
OrganizationsSchema,
|
||||
OrgMembershipsSchema,
|
||||
OrgRolesSchema,
|
||||
UsersSchema
|
||||
@ -57,7 +56,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
organization: OrganizationsSchema
|
||||
organization: sanitizedOrganizationSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
@ -263,7 +262,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
|
||||
response: {
|
||||
200: z.object({
|
||||
message: z.string(),
|
||||
organization: OrganizationsSchema
|
||||
organization: sanitizedOrganizationSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
|
@ -9,6 +9,8 @@ import { readLimit, secretsLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
import { booleanSchema } from "../sanitizedSchemas";
|
||||
|
||||
export const registerSecretFolderRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
url: "/",
|
||||
@ -347,11 +349,14 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
|
||||
.default("/")
|
||||
.transform(prefixWithSlash)
|
||||
.transform(removeTrailingSlash)
|
||||
.describe(FOLDERS.LIST.directory)
|
||||
.describe(FOLDERS.LIST.directory),
|
||||
recursive: booleanSchema.default(false).describe(FOLDERS.LIST.recursive)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
folders: SecretFoldersSchema.array()
|
||||
folders: SecretFoldersSchema.extend({
|
||||
relativePath: z.string().optional()
|
||||
}).array()
|
||||
})
|
||||
}
|
||||
},
|
||||
|
@ -1,7 +1,6 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import {
|
||||
OrganizationsSchema,
|
||||
OrgMembershipsSchema,
|
||||
ProjectMembershipsSchema,
|
||||
ProjectsSchema,
|
||||
@ -15,6 +14,7 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { GenericResourceNameSchema } from "@app/server/lib/schemas";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
|
||||
import { sanitizedOrganizationSchema } from "@app/services/org/org-schema";
|
||||
|
||||
export const registerOrgRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
@ -335,7 +335,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
organization: OrganizationsSchema
|
||||
organization: sanitizedOrganizationSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
@ -365,7 +365,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
organization: OrganizationsSchema,
|
||||
organization: sanitizedOrganizationSchema,
|
||||
accessToken: z.string()
|
||||
})
|
||||
}
|
||||
@ -396,4 +396,30 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
|
||||
return { organization, accessToken: tokens.accessToken };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/privilege-system-upgrade",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
response: {
|
||||
200: z.object({
|
||||
organization: sanitizedOrganizationSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const organization = await server.services.org.upgradePrivilegeSystem({
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
orgId: req.permission.orgId
|
||||
});
|
||||
|
||||
return { organization };
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@ -7,9 +7,11 @@ import { z } from "zod";
|
||||
import { ActionProjectType, ProjectType, TCertificateAuthorities, TCertificateTemplates } from "@app/db/schemas";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { extractX509CertFromChain } from "@app/lib/certificates/extract-certificate";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { isFQDN } from "@app/lib/validator/validate-url";
|
||||
import { TCertificateBodyDALFactory } from "@app/services/certificate/certificate-body-dal";
|
||||
import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
@ -58,7 +60,6 @@ import {
|
||||
TSignIntermediateDTO,
|
||||
TUpdateCaDTO
|
||||
} from "./certificate-authority-types";
|
||||
import { hostnameRegex } from "./certificate-authority-validators";
|
||||
|
||||
type TCertificateAuthorityServiceFactoryDep = {
|
||||
certificateAuthorityDAL: Pick<
|
||||
@ -1017,9 +1018,7 @@ export const certificateAuthorityServiceFactory = ({
|
||||
const maxPathLength = certObj.getExtension(x509.BasicConstraintsExtension)?.pathLength;
|
||||
|
||||
// validate imported certificate and certificate chain
|
||||
const certificates = certificateChain
|
||||
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
|
||||
?.map((cert) => new x509.X509Certificate(cert));
|
||||
const certificates = extractX509CertFromChain(certificateChain)?.map((cert) => new x509.X509Certificate(cert));
|
||||
|
||||
if (!certificates) throw new BadRequestError({ message: "Failed to parse certificate chain" });
|
||||
|
||||
@ -1325,7 +1324,7 @@ export const certificateAuthorityServiceFactory = ({
|
||||
}
|
||||
|
||||
// check if the altName is a valid hostname
|
||||
if (hostnameRegex.test(altName)) {
|
||||
if (isFQDN(altName, { allow_wildcard: true })) {
|
||||
return {
|
||||
type: "dns",
|
||||
value: altName
|
||||
@ -1702,7 +1701,7 @@ export const certificateAuthorityServiceFactory = ({
|
||||
}
|
||||
|
||||
// check if the altName is a valid hostname
|
||||
if (hostnameRegex.test(altName)) {
|
||||
if (isFQDN(altName, { allow_wildcard: true })) {
|
||||
return {
|
||||
type: "dns",
|
||||
value: altName
|
||||
|
@ -1,6 +1,7 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { isValidIp } from "@app/lib/ip";
|
||||
import { isFQDN } from "@app/lib/validator/validate-url";
|
||||
|
||||
const isValidDate = (dateString: string) => {
|
||||
const date = new Date(dateString);
|
||||
@ -9,7 +10,6 @@ const isValidDate = (dateString: string) => {
|
||||
|
||||
export const validateCaDateField = z.string().trim().refine(isValidDate, { message: "Invalid date format" });
|
||||
|
||||
export const hostnameRegex = /^(?!:\/\/)(\*\.)?([a-zA-Z0-9-_]{1,63}\.?)+(?!:\/\/)([a-zA-Z]{2,63})$/;
|
||||
export const validateAltNamesField = z
|
||||
.string()
|
||||
.trim()
|
||||
@ -27,7 +27,7 @@ export const validateAltNamesField = z
|
||||
if (data === "") return true;
|
||||
// Split and validate each alt name
|
||||
return data.split(", ").every((name) => {
|
||||
return hostnameRegex.test(name) || z.string().email().safeParse(name).success || isValidIp(name);
|
||||
return isFQDN(name, { allow_wildcard: true }) || z.string().email().safeParse(name).success || isValidIp(name);
|
||||
});
|
||||
},
|
||||
{
|
||||
|
@ -11,6 +11,7 @@ export const validateCertificateDetailsAgainstTemplate = (
|
||||
},
|
||||
template: TCertificateTemplates
|
||||
) => {
|
||||
// these are validated in router using validateTemplateRegexField
|
||||
const commonNameRegex = new RegExp(template.commonName);
|
||||
if (!commonNameRegex.test(cert.commonName)) {
|
||||
throw new BadRequestError({
|
||||
|
@ -6,6 +6,7 @@ import { ActionProjectType, TCertificateTemplateEstConfigsUpdate } from "@app/db
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { extractX509CertFromChain } from "@app/lib/certificates/extract-certificate";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
|
||||
@ -281,9 +282,7 @@ export const certificateTemplateServiceFactory = ({
|
||||
});
|
||||
|
||||
// validate CA chain
|
||||
const certificates = caChain
|
||||
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
|
||||
?.map((cert) => new x509.X509Certificate(cert));
|
||||
const certificates = extractX509CertFromChain(caChain)?.map((cert) => new x509.X509Certificate(cert));
|
||||
|
||||
if (!certificates) {
|
||||
throw new BadRequestError({ message: "Failed to parse certificate chain" });
|
||||
@ -379,9 +378,7 @@ export const certificateTemplateServiceFactory = ({
|
||||
};
|
||||
|
||||
if (caChain) {
|
||||
const certificates = caChain
|
||||
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
|
||||
?.map((cert) => new x509.X509Certificate(cert));
|
||||
const certificates = extractX509CertFromChain(caChain)?.map((cert) => new x509.X509Certificate(cert));
|
||||
|
||||
if (!certificates) {
|
||||
throw new BadRequestError({ message: "Failed to parse certificate chain" });
|
||||
|
@ -1,13 +1,27 @@
|
||||
import safe from "safe-regex";
|
||||
import z from "zod";
|
||||
|
||||
import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";
|
||||
|
||||
export const validateTemplateRegexField = z
|
||||
.string()
|
||||
.min(1)
|
||||
.max(100)
|
||||
.regex(/^[a-zA-Z0-9 *@\-\\.\\]+$/, {
|
||||
.refine(
|
||||
(val) =>
|
||||
characterValidator([
|
||||
CharacterType.AlphaNumeric,
|
||||
CharacterType.Spaces, // (space)
|
||||
CharacterType.Asterisk, // *
|
||||
CharacterType.At, // @
|
||||
CharacterType.Hyphen, // -
|
||||
CharacterType.Period, // .
|
||||
CharacterType.Backslash // \
|
||||
])(val),
|
||||
{
|
||||
message: "Invalid pattern: only alphanumeric characters, spaces, *, ., @, -, and \\ are allowed."
|
||||
})
|
||||
}
|
||||
)
|
||||
// we ensure that the inputted pattern is computationally safe by limiting star height to 1
|
||||
.refine((v) => safe(v), {
|
||||
message: "Unsafe REGEX pattern"
|
||||
|
@ -1,12 +1,15 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
|
||||
import { ActionProjectType, ProjectMembershipRole, SecretKeyEncoding, TGroups } from "@app/db/schemas";
|
||||
import {
|
||||
constructPermissionErrorMessage,
|
||||
validatePrivilegeChangeOperation
|
||||
} from "@app/ee/services/permission/permission-fns";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
|
||||
import { ProjectPermissionGroupActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { decryptAsymmetric, encryptAsymmetric } from "@app/lib/crypto";
|
||||
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
|
||||
import { groupBy } from "@app/lib/fn";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { isUuidV4 } from "@app/lib/validator";
|
||||
@ -70,7 +73,7 @@ export const groupProjectServiceFactory = ({
|
||||
if (!project) throw new NotFoundError({ message: `Failed to find project with ID ${projectId}` });
|
||||
if (project.version < 2) throw new BadRequestError({ message: `Failed to add group to E2EE project` });
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
const { permission, membership } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
@ -78,7 +81,7 @@ export const groupProjectServiceFactory = ({
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.Groups);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionGroupActions.Create, ProjectPermissionSub.Groups);
|
||||
|
||||
let group: TGroups | null = null;
|
||||
if (isUuidV4(groupIdOrName)) {
|
||||
@ -102,11 +105,21 @@ export const groupProjectServiceFactory = ({
|
||||
project.id
|
||||
);
|
||||
|
||||
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
|
||||
const permissionBoundary = validatePrivilegeChangeOperation(
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
ProjectPermissionGroupActions.GrantPrivileges,
|
||||
ProjectPermissionSub.Groups,
|
||||
permission,
|
||||
rolePermission
|
||||
);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to assign group to a more privileged role",
|
||||
throw new PermissionBoundaryError({
|
||||
message: constructPermissionErrorMessage(
|
||||
"Failed to assign group to role",
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
ProjectPermissionGroupActions.GrantPrivileges,
|
||||
ProjectPermissionSub.Groups
|
||||
),
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
}
|
||||
@ -248,7 +261,7 @@ export const groupProjectServiceFactory = ({
|
||||
|
||||
if (!project) throw new NotFoundError({ message: `Failed to find project with ID ${projectId}` });
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
const { permission, membership } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
@ -256,7 +269,7 @@ export const groupProjectServiceFactory = ({
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Groups);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionGroupActions.Edit, ProjectPermissionSub.Groups);
|
||||
|
||||
const group = await groupDAL.findOne({ orgId: actorOrgId, id: groupId });
|
||||
if (!group) throw new NotFoundError({ message: `Failed to find group with ID ${groupId}` });
|
||||
@ -269,11 +282,21 @@ export const groupProjectServiceFactory = ({
|
||||
requestedRoleChange,
|
||||
project.id
|
||||
);
|
||||
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
|
||||
const permissionBoundary = validatePrivilegeChangeOperation(
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
ProjectPermissionGroupActions.GrantPrivileges,
|
||||
ProjectPermissionSub.Groups,
|
||||
permission,
|
||||
rolePermission
|
||||
);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to assign group to a more privileged role",
|
||||
throw new PermissionBoundaryError({
|
||||
message: constructPermissionErrorMessage(
|
||||
"Failed to assign group to role",
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
ProjectPermissionGroupActions.GrantPrivileges,
|
||||
ProjectPermissionSub.Groups
|
||||
),
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
}
|
||||
@ -360,7 +383,7 @@ export const groupProjectServiceFactory = ({
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.Groups);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionGroupActions.Delete, ProjectPermissionSub.Groups);
|
||||
|
||||
const deletedProjectGroup = await groupProjectDAL.transaction(async (tx) => {
|
||||
const groupMembers = await userGroupMembershipDAL.findGroupMembersNotInProject(group.id, project.id, tx);
|
||||
@ -405,7 +428,7 @@ export const groupProjectServiceFactory = ({
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Groups);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionGroupActions.Read, ProjectPermissionSub.Groups);
|
||||
|
||||
const groupMemberships = await groupProjectDAL.findByProjectId(project.id);
|
||||
return groupMemberships;
|
||||
@ -433,7 +456,7 @@ export const groupProjectServiceFactory = ({
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Groups);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionGroupActions.Read, ProjectPermissionSub.Groups);
|
||||
|
||||
const [groupMembership] = await groupProjectDAL.findByProjectId(project.id, {
|
||||
groupId
|
||||
|
@ -5,11 +5,14 @@ import jwt from "jsonwebtoken";
|
||||
|
||||
import { IdentityAuthMethod } from "@app/db/schemas";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
|
||||
import { OrgPermissionIdentityActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
|
||||
import {
|
||||
constructPermissionErrorMessage,
|
||||
validatePrivilegeChangeOperation
|
||||
} from "@app/ee/services/permission/permission-fns";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { BadRequestError, NotFoundError, PermissionBoundaryError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
|
||||
|
||||
import { ActorType, AuthTokenType } from "../auth/auth-type";
|
||||
@ -39,6 +42,31 @@ type TIdentityAwsAuthServiceFactoryDep = {
|
||||
|
||||
export type TIdentityAwsAuthServiceFactory = ReturnType<typeof identityAwsAuthServiceFactory>;
|
||||
|
||||
const awsRegionFromHeader = (authorizationHeader: string): string | null => {
|
||||
// https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-auth-using-authorization-header.html
|
||||
// The Authorization header takes the following form.
|
||||
// Authorization: AWS4-HMAC-SHA256
|
||||
// Credential=AKIAIOSFODNN7EXAMPLE/20230719/us-east-1/sts/aws4_request,
|
||||
// SignedHeaders=content-length;content-type;host;x-amz-date,
|
||||
// Signature=fe5f80f77d5fa3beca038a248ff027d0445342fe2855ddc963176630326f1024
|
||||
//
|
||||
// The credential is in the form of "<your-access-key-id>/<date>/<aws-region>/<aws-service>/aws4_request"
|
||||
try {
|
||||
const fields = authorizationHeader.split(" ");
|
||||
for (const field of fields) {
|
||||
if (field.startsWith("Credential=")) {
|
||||
const parts = field.split("/");
|
||||
if (parts.length >= 3) {
|
||||
return parts[2];
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
export const identityAwsAuthServiceFactory = ({
|
||||
identityAccessTokenDAL,
|
||||
identityAwsAuthDAL,
|
||||
@ -57,6 +85,9 @@ export const identityAwsAuthServiceFactory = ({
|
||||
const headers: TAwsGetCallerIdentityHeaders = JSON.parse(Buffer.from(iamRequestHeaders, "base64").toString());
|
||||
const body: string = Buffer.from(iamRequestBody, "base64").toString();
|
||||
|
||||
const region = headers.Authorization ? awsRegionFromHeader(headers.Authorization) : null;
|
||||
const url = region ? `https://sts.${region}.amazonaws.com` : identityAwsAuth.stsEndpoint;
|
||||
|
||||
const {
|
||||
data: {
|
||||
GetCallerIdentityResponse: {
|
||||
@ -65,7 +96,7 @@ export const identityAwsAuthServiceFactory = ({
|
||||
}
|
||||
}: { data: TGetCallerIdentityResponse } = await axios({
|
||||
method: iamHttpRequestMethod,
|
||||
url: headers?.Host ? `https://${headers.Host}` : identityAwsAuth.stsEndpoint,
|
||||
url,
|
||||
headers,
|
||||
data: body
|
||||
});
|
||||
@ -93,7 +124,8 @@ export const identityAwsAuthServiceFactory = ({
|
||||
.some((principalArn) => {
|
||||
// convert wildcard ARN to a regular expression: "arn:aws:iam::123456789012:*" -> "^arn:aws:iam::123456789012:.*$"
|
||||
// considers exact matches + wildcard matches
|
||||
const regex = new RegExp(`^${principalArn.replace(/\*/g, ".*")}$`);
|
||||
// heavily validated in router
|
||||
const regex = new RegExp(`^${principalArn.replaceAll("*", ".*")}$`);
|
||||
return regex.test(extractPrincipalArn(Arn));
|
||||
});
|
||||
|
||||
@ -175,7 +207,7 @@ export const identityAwsAuthServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Identity);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Create, OrgPermissionSubjects.Identity);
|
||||
|
||||
const plan = await licenseService.getPlan(identityMembershipOrg.orgId);
|
||||
const reformattedAccessTokenTrustedIps = accessTokenTrustedIps.map((accessTokenTrustedIp) => {
|
||||
@ -254,7 +286,7 @@ export const identityAwsAuthServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
|
||||
|
||||
const plan = await licenseService.getPlan(identityMembershipOrg.orgId);
|
||||
const reformattedAccessTokenTrustedIps = accessTokenTrustedIps?.map((accessTokenTrustedIp) => {
|
||||
@ -308,7 +340,7 @@ export const identityAwsAuthServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Identity);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Read, OrgPermissionSubjects.Identity);
|
||||
return { ...awsIdentityAuth, orgId: identityMembershipOrg.orgId };
|
||||
};
|
||||
|
||||
@ -326,14 +358,14 @@ export const identityAwsAuthServiceFactory = ({
|
||||
message: "The identity does not have aws auth"
|
||||
});
|
||||
}
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
const { permission, membership } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
identityMembershipOrg.orgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
|
||||
|
||||
const { permission: rolePermission } = await permissionService.getOrgPermission(
|
||||
ActorType.IDENTITY,
|
||||
@ -343,11 +375,22 @@ export const identityAwsAuthServiceFactory = ({
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
|
||||
const permissionBoundary = validatePrivilegeChangeOperation(
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
OrgPermissionIdentityActions.RevokeAuth,
|
||||
OrgPermissionSubjects.Identity,
|
||||
permission,
|
||||
rolePermission
|
||||
);
|
||||
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to revoke aws auth of identity with more privileged role",
|
||||
throw new PermissionBoundaryError({
|
||||
message: constructPermissionErrorMessage(
|
||||
"Failed to revoke aws auth of identity with more privileged role",
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
OrgPermissionIdentityActions.RevokeAuth,
|
||||
OrgPermissionSubjects.Identity
|
||||
),
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
|
@ -1,6 +1,8 @@
|
||||
import safe from "safe-regex";
|
||||
import { z } from "zod";
|
||||
|
||||
const twelveDigitRegex = /^\d{12}$/;
|
||||
// akhilmhdh: change this to a normal function later. Checked no redosable at the moment
|
||||
const arnRegex = /^arn:aws:iam::\d{12}:(user\/[a-zA-Z0-9_.@+*/-]+|role\/[a-zA-Z0-9_.@+*/-]+|\*)$/;
|
||||
|
||||
export const validateAccountIds = z
|
||||
@ -42,7 +44,8 @@ export const validatePrincipalArns = z
|
||||
// Split the string by commas to check each supposed ARN
|
||||
const arns = data.split(",");
|
||||
// Return true only if every item matches one of the allowed ARN formats
|
||||
return arns.every((arn) => arnRegex.test(arn.trim()));
|
||||
// and checks whether the provided regex is safe
|
||||
return arns.map((el) => el.trim()).every((arn) => safe(`^${arn.replaceAll("*", ".*")}$`) && arnRegex.test(arn));
|
||||
},
|
||||
{
|
||||
message:
|
||||
|
@ -3,11 +3,14 @@ import jwt from "jsonwebtoken";
|
||||
|
||||
import { IdentityAuthMethod } from "@app/db/schemas";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
|
||||
import { OrgPermissionIdentityActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
|
||||
import {
|
||||
constructPermissionErrorMessage,
|
||||
validatePrivilegeChangeOperation
|
||||
} from "@app/ee/services/permission/permission-fns";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { BadRequestError, NotFoundError, PermissionBoundaryError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
|
||||
|
||||
import { ActorType, AuthTokenType } from "../auth/auth-type";
|
||||
@ -147,7 +150,7 @@ export const identityAzureAuthServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Identity);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Create, OrgPermissionSubjects.Identity);
|
||||
|
||||
const plan = await licenseService.getPlan(identityMembershipOrg.orgId);
|
||||
const reformattedAccessTokenTrustedIps = accessTokenTrustedIps.map((accessTokenTrustedIp) => {
|
||||
@ -225,7 +228,7 @@ export const identityAzureAuthServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
|
||||
|
||||
const plan = await licenseService.getPlan(identityMembershipOrg.orgId);
|
||||
const reformattedAccessTokenTrustedIps = accessTokenTrustedIps?.map((accessTokenTrustedIp) => {
|
||||
@ -281,7 +284,7 @@ export const identityAzureAuthServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Identity);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Read, OrgPermissionSubjects.Identity);
|
||||
|
||||
return { ...identityAzureAuth, orgId: identityMembershipOrg.orgId };
|
||||
};
|
||||
@ -300,14 +303,14 @@ export const identityAzureAuthServiceFactory = ({
|
||||
message: "The identity does not have azure auth"
|
||||
});
|
||||
}
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
const { permission, membership } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
identityMembershipOrg.orgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
|
||||
|
||||
const { permission: rolePermission } = await permissionService.getOrgPermission(
|
||||
ActorType.IDENTITY,
|
||||
@ -316,11 +319,21 @@ export const identityAzureAuthServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
|
||||
const permissionBoundary = validatePrivilegeChangeOperation(
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
OrgPermissionIdentityActions.RevokeAuth,
|
||||
OrgPermissionSubjects.Identity,
|
||||
permission,
|
||||
rolePermission
|
||||
);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to revoke azure auth of identity with more privileged role",
|
||||
throw new PermissionBoundaryError({
|
||||
message: constructPermissionErrorMessage(
|
||||
"Failed to revoke azure auth of identity with more privileged role",
|
||||
membership.shouldUseNewPrivilegeSystem,
|
||||
OrgPermissionIdentityActions.RevokeAuth,
|
||||
OrgPermissionSubjects.Identity
|
||||
),
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
|