Compare commits

..

8 Commits

SHA1 Message Date
f58ed1fbfb Update values.yaml 2025-03-21 03:58:50 +04:00
e0c2851a4f finished helm 2025-03-21 03:44:42 +04:00
fe44fa6a7e fixed values.yaml 2025-03-21 03:01:41 +04:00
3a3d8271bf fixed deployment.yaml 2025-03-21 02:49:20 +04:00
c40ca9b4c5 Fixed metrics-reader-rbac.yaml and proxy-rbac.yaml 2025-03-21 02:29:25 +04:00
ea3fe21955 no change needed for metrics-service.yaml 2025-03-21 02:26:34 +04:00
ae34a2f7fc fixed manager-rbac.yaml 2025-03-21 02:24:59 +04:00
c80f34c929 fixed crds 2025-03-21 02:24:38 +04:00
351 changed files with 3210 additions and 11778 deletions

View File

@ -1,27 +0,0 @@
name: Release K8 Operator Helm Chart
on:
workflow_dispatch:
jobs:
release-helm:
name: Release Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to CloudSmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

View File

@ -1,107 +1,74 @@
name: Release K8 Operator Docker Image
name: Release image + Helm chart K8s Operator
on:
push:
tags:
- "infisical-k8-operator/v*.*.*"
permissions:
contents: write
pull-requests: write
push:
tags:
- "infisical-k8-operator/v*.*.*"
jobs:
release-image:
name: Generate Helm Chart PR
runs-on: ubuntu-latest
outputs:
pr_number: ${{ steps.create-pr.outputs.pull-request-number }}
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"
release:
runs-on: ubuntu-latest
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"
- name: Checkout code
uses: actions/checkout@v2
with:
token: ${{ secrets.GITHUB_TOKEN }}
fetch-depth: 0
- name: Checkout code
uses: actions/checkout@v2
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
# Dependency for helm generation
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Generate Helm Chart
run: sh k8-operator/scripts/generate-helm.sh
- name: Update Helm Chart Version
run: sh k8-operator/scripts/update-version.sh ${{ steps.extract_version.outputs.version }}
# Dependency for helm generation
- name: Install Go
uses: actions/setup-go@v4
with:
go-version: 1.21
- name: 🔧 Set up QEMU
uses: docker/setup-qemu-action@v1
# Install binaries for helm generation
- name: Install dependencies
working-directory: k8-operator
run: |
make helmify
make kustomize
make controller-gen
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Generate Helm Chart
working-directory: k8-operator
run: make helm
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Update Helm Chart Version
run: ./k8-operator/scripts/update-version.sh ${{ steps.extract_version.outputs.version }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
context: k8-operator
push: true
platforms: linux/amd64,linux/arm64
tags: |
infisical/kubernetes-operator:latest
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Debug - Check file changes
run: |
echo "Current git status:"
git status
echo ""
echo "Modified files:"
git diff --name-only
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
# If there is no diff, exit with error. Version should always be changed, so if there is no diff, something is wrong and we should exit.
if [ -z "$(git diff --name-only)" ]; then
echo "No helm changes or version changes. Invalid release detected, Exiting."
exit 1
fi
- name: Create Helm Chart PR
id: create-pr
uses: peter-evans/create-pull-request@v5
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
committer: GitHub <noreply@github.com>
author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>
branch: helm-update-${{ steps.extract_version.outputs.version }}
delete-branch: true
title: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
body: |
This PR updates the Helm chart to version `${{ steps.extract_version.outputs.version }}`.
Additionally the helm chart has been updated to match the latest operator code changes.
Associated Release Workflow: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
Once you have approved this PR, you can trigger the helm release workflow manually.
base: main
- name: 🔧 Set up QEMU
uses: docker/setup-qemu-action@v1
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
context: k8-operator
push: true
platforms: linux/amd64,linux/arm64
tags: |
infisical/kubernetes-operator:latest
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
- name: Configure Git
run: |
git config --local user.email "github-actions[bot]@users.noreply.github.com"
git config --local user.name "github-actions[bot]"
- name: Commit and Push Helm Changes
run: |
git add .
git commit -m "Update Helm chart to version ${{ steps.extract_version.outputs.version }}" || echo "No changes to commit"
git push

View File

@ -8,9 +8,3 @@ frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/S
docs/mint.json:generic-api-key:651
backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134
docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:104
docs/cli/commands/bootstrap.mdx:jwt:86
docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:102
docs/self-hosting/guides/automated-bootstrapping.mdx:jwt:74
frontend/src/pages/secret-manager/SecretDashboardPage/components/SecretListView/SecretDetailSidebar.tsx:generic-api-key:72
k8-operator/config/samples/crd/pushsecret/source-secret-with-templating.yaml:private-key:11
k8-operator/config/samples/crd/pushsecret/push-secret-with-template.yaml:private-key:52

View File

@ -16,7 +16,7 @@ const createAuditLogPartition = async (knex: Knex, startDate: Date, endDate: Dat
const startDateStr = formatPartitionDate(startDate);
const endDateStr = formatPartitionDate(endDate);
const partitionName = `${TableName.AuditLog}_${startDateStr.replaceAll("-", "")}_${endDateStr.replaceAll("-", "")}`;
const partitionName = `${TableName.AuditLog}_${startDateStr.replace(/-/g, "")}_${endDateStr.replace(/-/g, "")}`;
await knex.schema.raw(
`CREATE TABLE ${partitionName} PARTITION OF ${TableName.AuditLog} FOR VALUES FROM ('${startDateStr}') TO ('${endDateStr}')`
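This hunk only swaps between String.prototype.replaceAll and an equivalent global-regex replace when building the partition name; the two forms produce identical output, and the regex form also works on lib targets that predate replaceAll (ES2021). A minimal standalone sketch of the equivalence — the formatPartitionDate helper and the "audit_logs" prefix below are stand-ins, not taken from the diff:

// Illustrative sketch, not part of the diff: both forms yield the same partition name.
const formatPartitionDate = (d: Date): string => d.toISOString().slice(0, 10); // stand-in, e.g. "2025-03-01"

const startDateStr = formatPartitionDate(new Date("2025-03-01"));
const endDateStr = formatPartitionDate(new Date("2025-04-01"));

const withReplaceAll = `audit_logs_${startDateStr.replaceAll("-", "")}_${endDateStr.replaceAll("-", "")}`;
const withRegexReplace = `audit_logs_${startDateStr.replace(/-/g, "")}_${endDateStr.replace(/-/g, "")}`;

console.log(withReplaceAll === withRegexReplace); // true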

View File

@ -1,31 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.Organization, "shouldUseNewPrivilegeSystem"))) {
await knex.schema.alterTable(TableName.Organization, (t) => {
t.boolean("shouldUseNewPrivilegeSystem");
t.string("privilegeUpgradeInitiatedByUsername");
t.dateTime("privilegeUpgradeInitiatedAt");
});
await knex(TableName.Organization).update({
shouldUseNewPrivilegeSystem: false
});
await knex.schema.alterTable(TableName.Organization, (t) => {
t.boolean("shouldUseNewPrivilegeSystem").defaultTo(true).notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.Organization, "shouldUseNewPrivilegeSystem")) {
await knex.schema.alterTable(TableName.Organization, (t) => {
t.dropColumn("shouldUseNewPrivilegeSystem");
t.dropColumn("privilegeUpgradeInitiatedByUsername");
t.dropColumn("privilegeUpgradeInitiatedAt");
});
}
}

View File

@ -1,23 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesParentColumExist = await knex.schema.hasColumn(TableName.SecretFolder, "parentId");
const doesNameColumnExist = await knex.schema.hasColumn(TableName.SecretFolder, "name");
if (doesParentColumExist && doesNameColumnExist) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.index(["parentId", "name"]);
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesParentColumExist = await knex.schema.hasColumn(TableName.SecretFolder, "parentId");
const doesNameColumnExist = await knex.schema.hasColumn(TableName.SecretFolder, "name");
if (doesParentColumExist && doesNameColumnExist) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.dropIndex(["parentId", "name"]);
});
}
}

View File

@ -1,19 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasReviewerJwtCol = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedKubernetesTokenReviewerJwt"
);
if (hasReviewerJwtCol) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
t.binary("encryptedKubernetesTokenReviewerJwt").nullable().alter();
});
}
}
export async function down(): Promise<void> {
// we can't make it back to non nullable, it will fail
}

View File

@ -1,29 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas/models";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "allowedSelfApprovals"))) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.boolean("allowedSelfApprovals").notNullable().defaultTo(true);
});
}
if (!(await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "allowedSelfApprovals"))) {
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.boolean("allowedSelfApprovals").notNullable().defaultTo(true);
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "allowedSelfApprovals")) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.dropColumn("allowedSelfApprovals");
});
}
if (await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "allowedSelfApprovals")) {
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.dropColumn("allowedSelfApprovals");
});
}
}

View File

@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasCol = await knex.schema.hasColumn(TableName.SecretFolder, "lastSecretModified");
if (!hasCol) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.datetime("lastSecretModified");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasCol = await knex.schema.hasColumn(TableName.SecretFolder, "lastSecretModified");
if (hasCol) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.dropColumn("lastSecretModified");
});
}
}

View File

@ -16,8 +16,7 @@ export const AccessApprovalPoliciesSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
enforcementLevel: z.string().default("hard"),
deletedAt: z.date().nullable().optional(),
allowedSelfApprovals: z.boolean().default(true)
deletedAt: z.date().nullable().optional()
});
export type TAccessApprovalPolicies = z.infer<typeof AccessApprovalPoliciesSchema>;

View File

@ -28,7 +28,7 @@ export const IdentityKubernetesAuthsSchema = z.object({
allowedNamespaces: z.string(),
allowedNames: z.string(),
allowedAudience: z.string(),
encryptedKubernetesTokenReviewerJwt: zodBuffer.nullable().optional(),
encryptedKubernetesTokenReviewerJwt: zodBuffer,
encryptedKubernetesCaCertificate: zodBuffer.nullable().optional()
});

View File

@ -233,8 +233,3 @@ export enum ActionProjectType {
// project operations that happen on all types
Any = "any"
}
export enum SortDirection {
ASC = "asc",
DESC = "desc"
}

View File

@ -23,9 +23,6 @@ export const OrganizationsSchema = z.object({
defaultMembershipRole: z.string().default("member"),
enforceMfa: z.boolean().default(false),
selectedMfaMethod: z.string().nullable().optional(),
shouldUseNewPrivilegeSystem: z.boolean().default(true),
privilegeUpgradeInitiatedByUsername: z.string().nullable().optional(),
privilegeUpgradeInitiatedAt: z.date().nullable().optional(),
allowSecretSharingOutsideOrganization: z.boolean().default(true).nullable().optional()
});

View File

@ -16,8 +16,7 @@ export const SecretApprovalPoliciesSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
enforcementLevel: z.string().default("hard"),
deletedAt: z.date().nullable().optional(),
allowedSelfApprovals: z.boolean().default(true)
deletedAt: z.date().nullable().optional()
});
export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;

View File

@ -16,8 +16,7 @@ export const SecretFoldersSchema = z.object({
envId: z.string().uuid(),
parentId: z.string().uuid().nullable().optional(),
isReserved: z.boolean().default(false).nullable().optional(),
description: z.string().nullable().optional(),
lastSecretModified: z.date().nullable().optional()
description: z.string().nullable().optional()
});
export type TSecretFolders = z.infer<typeof SecretFoldersSchema>;

View File

@ -16,7 +16,7 @@ export const registerCertificateEstRouter = async (server: FastifyZodProvider) =
// for CSRs sent in PEM, we leave them as is
// for CSRs sent in base64, we preprocess them to remove new lines and spaces
if (!csrBody.includes("BEGIN CERTIFICATE REQUEST")) {
csrBody = csrBody.replaceAll("\n", "").replaceAll(" ", "");
csrBody = csrBody.replace(/\n/g, "").replace(/ /g, "");
}
done(null, csrBody);
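A small self-contained sketch of the normalization the route comment above describes: PEM bodies pass through untouched, raw base64 bodies have newlines and spaces stripped. The normalizeCsrBody helper and the sample input are hypothetical; only the branching logic mirrors the hunk:

// Illustrative sketch, not part of the diff.
const normalizeCsrBody = (csrBody: string): string =>
  csrBody.includes("BEGIN CERTIFICATE REQUEST")
    ? csrBody // PEM: leave as-is
    : csrBody.replace(/\n/g, "").replace(/ /g, ""); // base64: strip newlines and spaces

console.log(normalizeCsrBody("TUlJQ1d6Q0NB\nVUVDQVFB dzZq")); // "TUlJQ1d6Q0NBVUVDQVFBdzZq"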

View File

@ -29,8 +29,7 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
.array()
.min(1, { message: "At least one approver should be provided" }),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
}),
response: {
200: z.object({
@ -148,8 +147,7 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
.array()
.min(1, { message: "At least one approver should be provided" }),
approvals: z.number().min(1).optional(),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
}),
response: {
200: z.object({

View File

@ -110,8 +110,7 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
secretPath: z.string().nullish(),
envId: z.string(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish(),
allowedSelfApprovals: z.boolean()
deletedAt: z.date().nullish()
}),
reviewers: z
.object({

View File

@ -61,8 +61,8 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
if (ldapConfig.groupSearchBase) {
const groupFilter = "(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))";
const groupSearchFilter = (ldapConfig.groupSearchFilter || groupFilter)
.replaceAll("{{.Username}}", user.uid)
.replaceAll("{{.UserDN}}", user.dn);
.replace(/{{\.Username}}/g, user.uid)
.replace(/{{\.UserDN}}/g, user.dn);
if (!isValidLdapFilter(groupSearchFilter)) {
throw new Error("Generated LDAP search filter is invalid.");

View File

@ -35,8 +35,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
.array()
.min(1, { message: "At least one approver should be provided" }),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
}),
response: {
200: z.object({
@ -86,8 +85,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
.nullable()
.transform((val) => (val ? removeTrailingSlash(val) : val))
.transform((val) => (val === "" ? "/" : val)),
enforcementLevel: z.nativeEnum(EnforcementLevel).optional(),
allowedSelfApprovals: z.boolean().default(true)
enforcementLevel: z.nativeEnum(EnforcementLevel).optional()
}),
response: {
200: z.object({

View File

@ -49,8 +49,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish(),
allowedSelfApprovals: z.boolean()
deletedAt: z.date().nullish()
}),
committerUser: approvalRequestUser,
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
@ -268,8 +267,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
approvers: approvalRequestUser.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish(),
allowedSelfApprovals: z.boolean()
deletedAt: z.date().nullish()
}),
environment: z.string(),
statusChangedByUser: approvalRequestUser.optional(),

View File

@ -5,11 +5,9 @@ import { SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-type
import { SSH_CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
export const registerSshCertRouter = async (server: FastifyZodProvider) => {
server.route({
@ -75,16 +73,6 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SignSshKey,
distinctId: getTelemetryDistinctId(req),
properties: {
certificateTemplateId: req.body.certificateTemplateId,
principals: req.body.principals,
...req.auditLogInfo
}
});
return {
serialNumber,
signedKey: signedPublicKey
@ -164,16 +152,6 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.IssueSshCreds,
distinctId: getTelemetryDistinctId(req),
properties: {
certificateTemplateId: req.body.certificateTemplateId,
principals: req.body.principals,
...req.auditLogInfo
}
});
return {
serialNumber,
signedKey: signedPublicKey,

View File

@ -65,8 +65,7 @@ export const accessApprovalPolicyServiceFactory = ({
approvers,
projectSlug,
environment,
enforcementLevel,
allowedSelfApprovals
enforcementLevel
}: TCreateAccessApprovalPolicy) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
@ -154,8 +153,7 @@ export const accessApprovalPolicyServiceFactory = ({
approvals,
secretPath,
name,
enforcementLevel,
allowedSelfApprovals
enforcementLevel
},
tx
);
@ -218,8 +216,7 @@ export const accessApprovalPolicyServiceFactory = ({
actorOrgId,
actorAuthMethod,
approvals,
enforcementLevel,
allowedSelfApprovals
enforcementLevel
}: TUpdateAccessApprovalPolicy) => {
const groupApprovers = approvers
.filter((approver) => approver.type === ApproverType.Group)
@ -265,8 +262,7 @@ export const accessApprovalPolicyServiceFactory = ({
approvals,
secretPath,
name,
enforcementLevel,
allowedSelfApprovals
enforcementLevel
},
tx
);

View File

@ -26,7 +26,6 @@ export type TCreateAccessApprovalPolicy = {
projectSlug: string;
name: string;
enforcementLevel: EnforcementLevel;
allowedSelfApprovals: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TUpdateAccessApprovalPolicy = {
@ -36,7 +35,6 @@ export type TUpdateAccessApprovalPolicy = {
secretPath?: string;
name?: string;
enforcementLevel?: EnforcementLevel;
allowedSelfApprovals: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TDeleteAccessApprovalPolicy = {

View File

@ -61,7 +61,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
db.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
db.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
db.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
db.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId"),
db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
)
@ -120,7 +119,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
approvals: doc.policyApprovals,
secretPath: doc.policySecretPath,
enforcementLevel: doc.policyEnforcementLevel,
allowedSelfApprovals: doc.policyAllowedSelfApprovals,
envId: doc.policyEnvId,
deletedAt: doc.policyDeletedAt
},
@ -256,7 +254,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("slug").withSchema(TableName.Environment).as("environment"),
tx.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
tx.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
tx.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
tx.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
);
@ -278,7 +275,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
approvals: el.policyApprovals,
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel,
allowedSelfApprovals: el.policyAllowedSelfApprovals,
deletedAt: el.policyDeletedAt
},
requestedByUser: {

View File

@ -320,11 +320,6 @@ export const accessApprovalRequestServiceFactory = ({
message: "The policy associated with this access request has been deleted."
});
}
if (!policy.allowedSelfApprovals && actorId === accessApprovalRequest.requestedByUserId) {
throw new BadRequestError({
message: "Failed to review access approval request. Users are not authorized to review their own request."
});
}
const { membership, hasRole } = await permissionService.getProjectPermission({
actor,

View File

@ -45,6 +45,7 @@ export const auditLogStreamServiceFactory = ({
}: TCreateAuditLogStreamDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" });
const appCfg = getConfig();
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.auditLogStreams) {
throw new BadRequestError({
@ -61,8 +62,9 @@ export const auditLogStreamServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Settings);
const appCfg = getConfig();
if (appCfg.isCloud) await blockLocalAndPrivateIpAddresses(url);
if (appCfg.isCloud) {
blockLocalAndPrivateIpAddresses(url);
}
const totalStreams = await auditLogStreamDAL.find({ orgId: actorOrgId });
if (totalStreams.length >= plan.auditLogStreamLimit) {
@ -133,8 +135,9 @@ export const auditLogStreamServiceFactory = ({
const { orgId } = logStream;
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings);
const appCfg = getConfig();
if (url && appCfg.isCloud) await blockLocalAndPrivateIpAddresses(url);
if (url && appCfg.isCloud) blockLocalAndPrivateIpAddresses(url);
// testing connection first
const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json" };

View File

@ -9,14 +9,13 @@ import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";
import { EventType, filterableSecretEvents } from "./audit-log-types";
import { EventType } from "./audit-log-types";
export type TAuditLogDALFactory = ReturnType<typeof auditLogDALFactory>;
type TFindQuery = {
actor?: string;
projectId?: string;
environment?: string;
orgId?: string;
eventType?: string;
startDate?: string;
@ -33,7 +32,6 @@ export const auditLogDALFactory = (db: TDbClient) => {
{
orgId,
projectId,
environment,
userAgentType,
startDate,
endDate,
@ -42,14 +40,12 @@ export const auditLogDALFactory = (db: TDbClient) => {
actorId,
actorType,
secretPath,
secretKey,
eventType,
eventMetadata
}: Omit<TFindQuery, "actor" | "eventType"> & {
actorId?: string;
actorType?: ActorType;
secretPath?: string;
secretKey?: string;
eventType?: EventType[];
eventMetadata?: Record<string, string>;
},
@ -94,29 +90,8 @@ export const auditLogDALFactory = (db: TDbClient) => {
});
}
const eventIsSecretType = !eventType?.length || eventType.some((event) => filterableSecretEvents.includes(event));
// We only want to filter for environment/secretPath/secretKey if the user is either checking for all event types
// ? Note(daniel): use the `eventMetadata" @> ?::jsonb` approach to properly use our GIN index
if (projectId && eventIsSecretType) {
if (environment || secretPath) {
// Handle both environment and secret path together to only use the GIN index once
void sqlQuery.whereRaw(`"eventMetadata" @> ?::jsonb`, [
JSON.stringify({
...(environment && { environment }),
...(secretPath && { secretPath })
})
]);
}
// Handle secret key separately to include the OR condition
if (secretKey) {
void sqlQuery.whereRaw(
`("eventMetadata" @> ?::jsonb
OR "eventMetadata"->'secrets' @> ?::jsonb)`,
[JSON.stringify({ secretKey }), JSON.stringify([{ secretKey }])]
);
}
if (projectId && secretPath) {
void sqlQuery.whereRaw(`"eventMetadata" @> jsonb_build_object('secretPath', ?::text)`, [secretPath]);
}
// Filter by actor type

View File

@ -63,8 +63,6 @@ export const auditLogServiceFactory = ({
actorType: filter.actorType,
eventMetadata: filter.eventMetadata,
secretPath: filter.secretPath,
secretKey: filter.secretKey,
environment: filter.environment,
...(filter.projectId ? { projectId: filter.projectId } : { orgId: actorOrgId })
});

View File

@ -33,11 +33,9 @@ export type TListProjectAuditLogDTO = {
endDate?: string;
startDate?: string;
projectId?: string;
environment?: string;
auditLogActorId?: string;
actorType?: ActorType;
secretPath?: string;
secretKey?: string;
eventMetadata?: Record<string, string>;
};
} & Omit<TProjectPermission, "projectId">;
@ -285,21 +283,9 @@ export enum EventType {
KMIP_OPERATION_ACTIVATE = "kmip-operation-activate",
KMIP_OPERATION_REVOKE = "kmip-operation-revoke",
KMIP_OPERATION_LOCATE = "kmip-operation-locate",
KMIP_OPERATION_REGISTER = "kmip-operation-register",
PROJECT_ACCESS_REQUEST = "project-access-request"
KMIP_OPERATION_REGISTER = "kmip-operation-register"
}
export const filterableSecretEvents: EventType[] = [
EventType.GET_SECRET,
EventType.DELETE_SECRETS,
EventType.CREATE_SECRETS,
EventType.UPDATE_SECRETS,
EventType.CREATE_SECRET,
EventType.UPDATE_SECRET,
EventType.DELETE_SECRET
];
interface UserActorMetadata {
userId: string;
email?: string | null;
@ -982,7 +968,6 @@ interface LoginIdentityOidcAuthEvent {
identityId: string;
identityOidcAuthId: string;
identityAccessTokenId: string;
oidcClaimsReceived: Record<string, unknown>;
};
}
@ -2279,15 +2264,6 @@ interface KmipOperationRegisterEvent {
};
}
interface ProjectAccessRequestEvent {
type: EventType.PROJECT_ACCESS_REQUEST;
metadata: {
projectId: string;
requesterId: string;
requesterEmail: string;
};
}
interface SetupKmipEvent {
type: EventType.SETUP_KMIP;
metadata: {
@ -2522,6 +2498,5 @@ export type Event =
| KmipOperationRevokeEvent
| KmipOperationLocateEvent
| KmipOperationRegisterEvent
| ProjectAccessRequestEvent
| CreateSecretRequestEvent
| SecretApprovalRequestReview;

View File

@ -1,6 +1,5 @@
import * as x509 from "@peculiar/x509";
import { extractX509CertFromChain } from "@app/lib/certificates/extract-certificate";
import { BadRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { isCertChainValid } from "@app/services/certificate/certificate-fns";
import { TCertificateAuthorityCertDALFactory } from "@app/services/certificate-authority/certificate-authority-cert-dal";
@ -68,7 +67,9 @@ export const certificateEstServiceFactory = ({
const certTemplate = await certificateTemplateDAL.findById(certificateTemplateId);
const leafCertificate = extractX509CertFromChain(decodeURIComponent(sslClientCert))?.[0];
const leafCertificate = decodeURIComponent(sslClientCert).match(
/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g
)?.[0];
if (!leafCertificate) {
throw new UnauthorizedError({ message: "Missing client certificate" });
@ -87,7 +88,10 @@ export const certificateEstServiceFactory = ({
const verifiedChains = await Promise.all(
caCertChains.map((chain) => {
const caCert = new x509.X509Certificate(chain.certificate);
const caChain = extractX509CertFromChain(chain.certificateChain)?.map((c) => new x509.X509Certificate(c)) || [];
const caChain =
chain.certificateChain
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
?.map((c) => new x509.X509Certificate(c)) || [];
return isCertChainValid([cert, caCert, ...caChain]);
})
@ -168,15 +172,19 @@ export const certificateEstServiceFactory = ({
}
if (!estConfig.disableBootstrapCertValidation) {
const caCerts = extractX509CertFromChain(estConfig.caChain)?.map((cert) => {
return new x509.X509Certificate(cert);
});
const caCerts = estConfig.caChain
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
?.map((cert) => {
return new x509.X509Certificate(cert);
});
if (!caCerts) {
throw new BadRequestError({ message: "Failed to parse certificate chain" });
}
const leafCertificate = extractX509CertFromChain(decodeURIComponent(sslClientCert))?.[0];
const leafCertificate = decodeURIComponent(sslClientCert).match(
/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g
)?.[0];
if (!leafCertificate) {
throw new BadRequestError({ message: "Missing client certificate" });
@ -242,7 +250,13 @@ export const certificateEstServiceFactory = ({
kmsService
});
const certificates = extractX509CertFromChain(caCertChain).map((cert) => new x509.X509Certificate(cert));
const certificates = caCertChain
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
?.map((cert) => new x509.X509Certificate(cert));
if (!certificates) {
throw new BadRequestError({ message: "Failed to parse certificate chain" });
}
const caCertificate = new x509.X509Certificate(caCert);
return convertRawCertsToPkcs7([caCertificate.rawData, ...certificates.map((cert) => cert.rawData)]);
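A compact sketch of the regex-based PEM-splitting pattern that appears throughout this hunk: the regex pulls each certificate block out of a chain, and @peculiar/x509 parses each one. The extractPemCerts helper name is mine; the regex and the X509Certificate constructor usage come from the code above:

// Illustrative sketch, not part of the diff.
import * as x509 from "@peculiar/x509";

const extractPemCerts = (chainPem: string): x509.X509Certificate[] =>
  (chainPem.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g) ?? []).map(
    (pem) => new x509.X509Certificate(pem)
  );

// const [leafCert, ...issuers] = extractPemCerts(caCertChainPem);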

View File

@ -183,7 +183,7 @@ export const dynamicSecretLeaseServiceFactory = ({
});
const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
if (!dynamicSecretLease || dynamicSecretLease.dynamicSecret.folderId !== folder.id) {
if (!dynamicSecretLease) {
throw new NotFoundError({ message: `Dynamic secret lease with ID '${leaseId}' not found` });
}
@ -256,7 +256,7 @@ export const dynamicSecretLeaseServiceFactory = ({
});
const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
if (!dynamicSecretLease || dynamicSecretLease.dynamicSecret.folderId !== folder.id)
if (!dynamicSecretLease)
throw new NotFoundError({ message: `Dynamic secret lease with ID '${leaseId}' not found` });
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;

View File

@ -1,51 +1,31 @@
import dns from "node:dns/promises";
import net from "node:net";
import crypto from "node:crypto";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { isPrivateIp } from "@app/lib/ip/ipRange";
import { getDbConnectionHost } from "@app/lib/knex";
export const verifyHostInputValidity = async (host: string, isGateway = false) => {
export const verifyHostInputValidity = (host: string, isGateway = false) => {
const appCfg = getConfig();
// if (appCfg.NODE_ENV === "development") return ["host.docker.internal"]; // incase you want to remove this check in dev
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
// no need for validation when it's dev
if (appCfg.NODE_ENV === "development") return;
const reservedHosts = [appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI)].concat(
(appCfg.DB_READ_REPLICAS || []).map((el) => getDbConnectionHost(el.DB_CONNECTION_URI)),
getDbConnectionHost(appCfg.REDIS_URL)
);
if (host === "host.docker.internal") throw new BadRequestError({ message: "Invalid db host" });
// get host db ip
const exclusiveIps: string[] = [];
for await (const el of reservedHosts) {
if (el) {
if (net.isIPv4(el)) {
exclusiveIps.push(el);
} else {
const resolvedIps = await dns.resolve4(el);
exclusiveIps.push(...resolvedIps);
}
}
if (
appCfg.isCloud &&
!isGateway &&
// localhost
// internal ips
(host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
)
throw new BadRequestError({ message: "Invalid db host" });
if (
host === "localhost" ||
host === "127.0.0.1" ||
(dbHost?.length === host.length && crypto.timingSafeEqual(Buffer.from(dbHost || ""), Buffer.from(host)))
) {
throw new BadRequestError({ message: "Invalid db host" });
}
const normalizedHost = host.split(":")[0];
const inputHostIps: string[] = [];
if (net.isIPv4(host)) {
inputHostIps.push(host);
} else {
if (normalizedHost === "localhost" || normalizedHost === "host.docker.internal") {
throw new BadRequestError({ message: "Invalid db host" });
}
const resolvedIps = await dns.resolve4(host);
inputHostIps.push(...resolvedIps);
}
if (!isGateway) {
const isInternalIp = inputHostIps.some((el) => isPrivateIp(el));
if (isInternalIp) throw new BadRequestError({ message: "Invalid db host" });
}
const isAppUsedIps = inputHostIps.some((el) => exclusiveIps.includes(el));
if (isAppUsedIps) throw new BadRequestError({ message: "Invalid db host" });
return inputHostIps;
};
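A reduced sketch of the resolve-then-check approach that appears in this hunk: resolve the host to IPv4 addresses and reject obviously internal targets. The looksPrivate test below is a simplified stand-in for the project's isPrivateIp helper, and the error message mirrors the one above:

// Illustrative sketch, not part of the diff.
import dns from "node:dns/promises";
import net from "node:net";

// Simplified private-range check (stand-in for isPrivateIp).
const looksPrivate = (ip: string): boolean =>
  /^10\./.test(ip) || /^192\.168\./.test(ip) || /^127\./.test(ip) || /^172\.(1[6-9]|2\d|3[01])\./.test(ip);

export const resolveAndCheckHost = async (host: string): Promise<string[]> => {
  const ips = net.isIPv4(host) ? [host] : await dns.resolve4(host);
  if (ips.some(looksPrivate)) throw new Error("Invalid db host");
  return ips;
};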

View File

@ -13,7 +13,6 @@ import { customAlphabet } from "nanoid";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { DynamicSecretAwsElastiCacheSchema, TDynamicProviderFns } from "./models";
@ -145,14 +144,6 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
// We can't return the parsed statements here because we need to use the handlebars template to generate the username and password, before we can use the parsed statements.
CreateElastiCacheUserSchema.parse(JSON.parse(providerInputs.creationStatement));
DeleteElasticCacheUserSchema.parse(JSON.parse(providerInputs.revocationStatement));
validateHandlebarTemplate("AWS ElastiCache creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
});
if (providerInputs.revocationStatement) {
validateHandlebarTemplate("AWS ElastiCache revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
}
return providerInputs;
};
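The comment in this hunk explains that the creation/revocation statements are Handlebars templates whose placeholders must be rendered before the resulting JSON can be parsed. A tiny sketch of that render-then-parse ordering; the statement shape and values are hypothetical, only the ordering illustrates the comment:

// Illustrative sketch, not part of the diff.
import handlebars from "handlebars";

const creationStatement =
  `{"UserId":"{{username}}","Passwords":["{{password}}"],"AccessString":"on ~* +@all"}`; // hypothetical template

const renderCreationStatement = (template: string, username: string, password: string) =>
  JSON.parse(handlebars.compile(template)({ username, password })); // render first, parse second

console.log(renderCreationStatement(creationStatement, "inf-abc123", "s3cr3tPassw0rd"));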

View File

@ -3,10 +3,9 @@ import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";
const generatePassword = (size = 48) => {
@ -21,28 +20,14 @@ const generateUsername = () => {
export const CassandraProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretCassandraSchema.parseAsync(inputs);
const hostIps = await Promise.all(
providerInputs.host
.split(",")
.filter(Boolean)
.map((el) => verifyHostInputValidity(el).then((ip) => ip[0]))
);
validateHandlebarTemplate("Cassandra creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration", "keyspace"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("Cassandra renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration", "keyspace"].includes(val)
});
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
throw new BadRequestError({ message: "Invalid db host" });
}
validateHandlebarTemplate("Cassandra revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
return { ...providerInputs, hostIps };
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema> & { hostIps: string[] }) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
const client = new cassandra.Client({
sslOptions,
@ -55,7 +40,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
},
keyspace: providerInputs.keyspace,
localDataCenter: providerInputs?.localDataCenter,
contactPoints: providerInputs.hostIps
contactPoints: providerInputs.host.split(",").filter(Boolean)
});
return client;
};

View File

@ -19,14 +19,15 @@ const generateUsername = () => {
export const ElasticSearchProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretElasticSearchSchema.parseAsync(inputs);
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
return { ...providerInputs, hostIp };
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema> & { hostIp: string }) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
const connection = new ElasticSearchClient({
node: {
url: new URL(`${providerInputs.hostIp}:${providerInputs.port}`),
url: new URL(`${providerInputs.host}:${providerInputs.port}`),
...(providerInputs.ca && {
ssl: {
rejectUnauthorized: false,

View File

@ -19,15 +19,15 @@ const generateUsername = () => {
export const MongoDBProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretMongoDBSchema.parseAsync(inputs);
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
return { ...providerInputs, hostIp };
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema> & { hostIp: string }) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
const isSrv = !providerInputs.port;
const uri = isSrv
? `mongodb+srv://${providerInputs.hostIp}`
: `mongodb://${providerInputs.hostIp}:${providerInputs.port}`;
? `mongodb+srv://${providerInputs.host}`
: `mongodb://${providerInputs.host}:${providerInputs.port}`;
const client = new MongoClient(uri, {
auth: {
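For reference, the URI selection above in a self-contained form: when no port is configured, the host is treated as an SRV record. The buildMongoUri helper and the sample hosts are hypothetical:

// Illustrative sketch, not part of the diff.
const buildMongoUri = (host: string, port?: number): string =>
  port ? `mongodb://${host}:${port}` : `mongodb+srv://${host}`;

console.log(buildMongoUri("cluster0.example.mongodb.net")); // mongodb+srv://cluster0.example.mongodb.net
console.log(buildMongoUri("10.0.0.12", 27017)); // mongodb://10.0.0.12:27017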

View File

@ -3,6 +3,7 @@ import https from "https";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { removeTrailingSlash } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
@ -78,13 +79,14 @@ async function deleteRabbitMqUser({ axiosInstance, usernameToDelete }: TDeleteRa
export const RabbitMqProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretRabbitMqSchema.parseAsync(inputs);
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
return { ...providerInputs, hostIp };
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema> & { hostIp: string }) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
const axiosInstance = axios.create({
baseURL: `${providerInputs.hostIp}:${providerInputs.port}/api`,
baseURL: `${removeTrailingSlash(providerInputs.host)}:${providerInputs.port}/api`,
auth: {
username: providerInputs.username,
password: providerInputs.password

View File

@ -5,7 +5,6 @@ import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";
@ -52,28 +51,16 @@ const executeTransactions = async (connection: Redis, commands: string[]): Promi
export const RedisDatabaseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretRedisDBSchema.parseAsync(inputs);
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
validateHandlebarTemplate("Redis creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("Redis renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration"].includes(val)
});
}
validateHandlebarTemplate("Redis revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
return { ...providerInputs, hostIp };
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema> & { hostIp: string }) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
let connection: Redis | null = null;
try {
connection = new Redis({
username: providerInputs.username,
host: providerInputs.hostIp,
host: providerInputs.host,
port: providerInputs.port,
password: providerInputs.password,
...(providerInputs.ca && {

View File

@ -5,7 +5,6 @@ import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";
@ -28,25 +27,14 @@ export const SapAseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSapAseSchema.parseAsync(inputs);
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
validateHandlebarTemplate("SAP ASE creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password"].includes(val)
});
if (providerInputs.revocationStatement) {
validateHandlebarTemplate("SAP ASE revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
}
return { ...providerInputs, hostIp };
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
const $getClient = async (
providerInputs: z.infer<typeof DynamicSecretSapAseSchema> & { hostIp: string },
useMaster?: boolean
) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapAseSchema>, useMaster?: boolean) => {
const connectionString =
`DRIVER={FreeTDS};` +
`SERVER=${providerInputs.hostIp};` +
`SERVER=${providerInputs.host};` +
`PORT=${providerInputs.port};` +
`DATABASE=${useMaster ? "master" : providerInputs.database};` +
`UID=${providerInputs.username};` +
@ -95,7 +83,7 @@ export const SapAseProvider = (): TDynamicProviderFns => {
password
});
const queries = creationStatement.trim().replaceAll("\n", "").split(";").filter(Boolean);
const queries = creationStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);
for await (const query of queries) {
// If it's an adduser query, we need to first call sp_addlogin on the MASTER database.
@ -116,7 +104,7 @@ export const SapAseProvider = (): TDynamicProviderFns => {
username
});
const queries = revokeStatement.trim().replaceAll("\n", "").split(";").filter(Boolean);
const queries = revokeStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);
const client = await $getClient(providerInputs);
const masterClient = await $getClient(providerInputs, true);

View File

@ -11,7 +11,6 @@ import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapHanaSchema, TDynamicProviderFns } from "./models";
@ -29,24 +28,13 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSapHanaSchema.parseAsync(inputs);
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
validateHandlebarTemplate("SAP Hana creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("SAP Hana renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration"].includes(val)
});
}
validateHandlebarTemplate("SAP Hana revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
return { ...providerInputs, hostIp };
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema> & { hostIp: string }) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
const client = hdb.createClient({
host: providerInputs.hostIp,
host: providerInputs.host,
port: providerInputs.port,
user: providerInputs.username,
password: providerInputs.password,

View File

@ -5,7 +5,6 @@ import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";
@ -32,18 +31,6 @@ const getDaysToExpiry = (expiryDate: Date) => {
export const SnowflakeProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSnowflakeSchema.parseAsync(inputs);
validateHandlebarTemplate("Snowflake creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("Snowflake renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration"].includes(val)
});
}
validateHandlebarTemplate("Snowflake revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
return providerInputs;
};

View File

@ -5,7 +5,6 @@ import { z } from "zod";
import { withGatewayProxy } from "@app/lib/gateway";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
@ -118,21 +117,8 @@ type TSqlDatabaseProviderDTO = {
export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSqlDBSchema.parseAsync(inputs);
const [hostIp] = await verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.projectGatewayId));
validateHandlebarTemplate("SQL creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration", "database"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("SQL renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration", "database"].includes(val)
});
}
validateHandlebarTemplate("SQL revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username", "database"].includes(val)
});
return { ...providerInputs, hostIp };
verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.projectGatewayId));
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
@ -158,8 +144,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
}
: undefined
},
acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT,
pool: { min: 0, max: 7 }
acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT
});
return db;
};
@ -193,7 +178,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
let isConnected = false;
const gatewayCallback = async (host = providerInputs.hostIp, port = providerInputs.port) => {
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
const db = await $getClient({ ...providerInputs, port, host });
// oracle needs from keyword
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
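One side of this hunk configures a bounded connection pool on the knex client alongside the acquire timeout. A minimal knex configuration sketch showing that shape; the connection details and the EXTERNAL_REQUEST_TIMEOUT value are placeholders, not taken from the project:

// Illustrative sketch, not part of the diff.
import knex from "knex";

const EXTERNAL_REQUEST_TIMEOUT = 10_000; // placeholder; the real constant is defined elsewhere

const db = knex({
  client: "pg",
  connection: { host: "db.example.com", port: 5432, user: "app", password: "app", database: "app" },
  acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT,
  pool: { min: 0, max: 7 } // bounded pool, as in the hunk above
});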

View File

@ -3,7 +3,8 @@ import slugify from "@sindresorhus/slugify";
import { OrgMembershipRole, TOrgRoles } from "@app/db/schemas";
import { TOidcConfigDALFactory } from "@app/ee/services/oidc/oidc-config-dal";
import { BadRequestError, NotFoundError, PermissionBoundaryError, UnauthorizedError } from "@app/lib/errors";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
@ -13,8 +14,7 @@ import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionGroupActions, OrgPermissionSubjects } from "../permission/org-permission";
import { constructPermissionErrorMessage, validatePrivilegeChangeOperation } from "../permission/permission-fns";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { TGroupDALFactory } from "./group-dal";
import { addUsersToGroupByUserIds, removeUsersFromGroupByUserIds } from "./group-fns";
@ -67,14 +67,14 @@ export const groupServiceFactory = ({
const createGroup = async ({ name, slug, role, actor, actorId, actorAuthMethod, actorOrgId }: TCreateGroupDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
const { permission, membership } = await permissionService.getOrgPermission(
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Create, OrgPermissionSubjects.Groups);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Groups);
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.groups)
@ -87,26 +87,14 @@ export const groupServiceFactory = ({
actorOrgId
);
const isCustomRole = Boolean(customRole);
if (role !== OrgMembershipRole.NoAccess) {
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.GrantPrivileges,
OrgPermissionSubjects.Groups,
permission,
rolePermission
);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to create group",
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.GrantPrivileges,
OrgPermissionSubjects.Groups
),
details: { missingPermissions: permissionBoundary.missingPermissions }
});
}
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to create a more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const group = await groupDAL.transaction(async (tx) => {
const existingGroup = await groupDAL.findOne({ orgId: actorOrgId, name }, tx);
@ -145,15 +133,14 @@ export const groupServiceFactory = ({
}: TUpdateGroupDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
const { permission, membership } = await permissionService.getOrgPermission(
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Edit, OrgPermissionSubjects.Groups);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Groups);
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.groups)
@ -174,21 +161,11 @@ export const groupServiceFactory = ({
);
const isCustomRole = Boolean(customOrgRole);
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.GrantPrivileges,
OrgPermissionSubjects.Groups,
permission,
rolePermission
);
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update group",
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.GrantPrivileges,
OrgPermissionSubjects.Groups
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update a more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
if (isCustomRole) customRole = customOrgRole;
@ -238,7 +215,7 @@ export const groupServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Delete, OrgPermissionSubjects.Groups);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Delete, OrgPermissionSubjects.Groups);
const plan = await licenseService.getPlan(actorOrgId);
@ -265,7 +242,7 @@ export const groupServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Read, OrgPermissionSubjects.Groups);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
const group = await groupDAL.findById(id);
if (!group) {
@ -298,7 +275,7 @@ export const groupServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Read, OrgPermissionSubjects.Groups);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
const group = await groupDAL.findOne({
orgId: actorOrgId,
@ -326,14 +303,14 @@ export const groupServiceFactory = ({
const addUserToGroup = async ({ id, username, actor, actorId, actorAuthMethod, actorOrgId }: TAddUserToGroupDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
const { permission, membership } = await permissionService.getOrgPermission(
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Edit, OrgPermissionSubjects.Groups);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Groups);
// check if group with slug exists
const group = await groupDAL.findOne({
@ -361,22 +338,11 @@ export const groupServiceFactory = ({
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
// check if the user has privileges broader than or equal to the group's
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.AddMembers,
OrgPermissionSubjects.Groups,
permission,
groupRolePermission
);
const permissionBoundary = validatePermissionBoundary(permission, groupRolePermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to add user to more privileged group",
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.AddMembers,
OrgPermissionSubjects.Groups
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to add user to more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
@ -408,14 +374,14 @@ export const groupServiceFactory = ({
}: TRemoveUserFromGroupDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
const { permission, membership } = await permissionService.getOrgPermission(
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Edit, OrgPermissionSubjects.Groups);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Groups);
// check if group with slug exists
const group = await groupDAL.findOne({
@ -443,21 +409,11 @@ export const groupServiceFactory = ({
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
// check if the user has privileges broader than or equal to the group's
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.RemoveMembers,
OrgPermissionSubjects.Groups,
permission,
groupRolePermission
);
const permissionBoundary = validatePermissionBoundary(permission, groupRolePermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to delete user from more privileged group",
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.RemoveMembers,
OrgPermissionSubjects.Groups
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to delete user from more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
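The hunks above swap the newer validatePrivilegeChangeOperation / PermissionBoundaryError pair back to validatePermissionBoundary / ForbiddenRequestError. The real validatePermissionBoundary (from @app/lib/casl/boundary) is not shown in this diff; the TypeScript below is only a simplified sketch of what such a boundary check does (the actor must already hold every action/subject pair granted by the managed role); it ignores CASL rule conditions and inverted rules.

import { AbilityBuilder, createMongoAbility, MongoAbility } from "@casl/ability";

type BoundaryResult = { isValid: boolean; missingPermissions: { action: string; subject: string }[] };

// Simplified stand-in for validatePermissionBoundary: collect every action/subject pair
// granted by the managed permission that the actor itself cannot perform.
const checkBoundarySketch = (actor: MongoAbility, managed: MongoAbility): BoundaryResult => {
  const missingPermissions = managed.rules.flatMap((rule) => {
    const actions = Array.isArray(rule.action) ? rule.action : [rule.action];
    const subjects = rule.subject ? (Array.isArray(rule.subject) ? rule.subject : [rule.subject]) : [];
    return actions
      .flatMap((action) => subjects.map((subject) => ({ action, subject: String(subject) })))
      .filter(({ action, subject }) => !actor.can(action, subject));
  });
  return { isValid: missingPermissions.length === 0, missingPermissions };
};

// Usage: an actor that can only read Groups may not manage a role that can also edit them.
const actor = createMongoAbility([{ action: "read", subject: "Groups" }]);
const { can, rules } = new AbilityBuilder(createMongoAbility);
can("read", "Groups");
can("edit", "Groups");
const managedRolePermission = createMongoAbility(rules);
console.log(checkBoundarySketch(actor, managedRolePermission));
// -> { isValid: false, missingPermissions: [ { action: "edit", subject: "Groups" } ] }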

View File

@ -2,17 +2,16 @@ import { ForbiddenError, subject } from "@casl/ability";
import { packRules } from "@casl/ability/extra";
import { ActionProjectType, TableName } from "@app/db/schemas";
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { unpackPermissions } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { constructPermissionErrorMessage, validatePrivilegeChangeOperation } from "../permission/permission-fns";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { ProjectPermissionIdentityActions, ProjectPermissionSub } from "../permission/project-permission";
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
import { TIdentityProjectAdditionalPrivilegeV2DALFactory } from "./identity-project-additional-privilege-v2-dal";
import {
IdentityProjectAdditionalPrivilegeTemporaryMode,
@ -65,10 +64,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Edit,
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId })
);
const { permission: targetIdentityPermission, membership } = await permissionService.getProjectPermission({
const { permission: targetIdentityPermission } = await permissionService.getProjectPermission({
actor: ActorType.IDENTITY,
actorId: identityId,
projectId: identityProjectMembership.projectId,
@ -80,26 +79,13 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning user's permission
// @ts-expect-error expected: one rule definition is very precise while the other is broader; both are valid CASL rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission));
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity,
permission,
targetIdentityPermission
);
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update more privileged identity",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
validateHandlebarTemplate("Identity Additional Privilege Create", JSON.stringify(customPermission || []), {
allowedExpressions: (val) => val.includes("identity.")
});
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,
@ -164,10 +150,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Edit,
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
const { permission: targetIdentityPermission, membership } = await permissionService.getProjectPermission({
const { permission: targetIdentityPermission } = await permissionService.getProjectPermission({
actor: ActorType.IDENTITY,
actorId: identityProjectMembership.identityId,
projectId: identityProjectMembership.projectId,
@ -179,28 +165,14 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning user's permission
// @ts-expect-error expected: one rule definition is very precise while the other is broader; both are valid CASL rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || []));
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity,
permission,
targetIdentityPermission
);
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update more privileged identity",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
validateHandlebarTemplate("Identity Additional Privilege Update", JSON.stringify(data.permissions || []), {
allowedExpressions: (val) => val.includes("identity.")
});
if (data?.slug) {
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
slug: data.slug,
@ -255,7 +227,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
message: `Failed to find identity with membership ${identityPrivilege.projectMembershipId}`
});
const { permission, membership } = await permissionService.getProjectPermission({
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: identityProjectMembership.projectId,
@ -264,7 +236,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Edit,
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
const { permission: identityRolePermission } = await permissionService.getProjectPermission({
@ -275,21 +247,11 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity,
permission,
identityRolePermission
);
const permissionBoundary = validatePermissionBoundary(permission, identityRolePermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update more privileged identity",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
@ -325,7 +287,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Read,
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
@ -360,7 +322,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Read,
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
@ -396,7 +358,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Read,
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
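A recurring step in this file and the next: before the boundary check, the target identity's CASL ability is extended in memory with the requested custom permission, so the comparison covers the identity's current rules plus what is about to be granted. A minimal sketch of that step follows; previewGrant is an illustrative helper name, not part of the codebase.

import { createMongoAbility, MongoAbility, RawRuleOf } from "@casl/ability";

// Build the "preview" ability the boundary check runs against: existing rules + requested rules.
const previewGrant = (
  targetRules: RawRuleOf<MongoAbility>[],
  requestedRules: RawRuleOf<MongoAbility>[]
): MongoAbility => {
  const target = createMongoAbility(targetRules);
  // update() replaces the rule set, so concatenating keeps the existing rules and appends
  // the requested ones, mirroring targetIdentityPermission.update(...rules.concat(...)) above.
  target.update([...target.rules, ...requestedRules]);
  return target;
};

// Usage: an identity that can only read Secret, previewed with an extra edit rule.
const preview = previewGrant([{ action: "read", subject: "Secret" }], [{ action: "edit", subject: "Secret" }]);
console.log(preview.can("edit", "Secret")); // true; the boundary check then runs against this preview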

View File

@ -2,21 +2,16 @@ import { ForbiddenError, MongoAbility, RawRuleOf, subject } from "@casl/ability"
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import { ActionProjectType } from "@app/db/schemas";
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { constructPermissionErrorMessage, validatePrivilegeChangeOperation } from "../permission/permission-fns";
import { TPermissionServiceFactory } from "../permission/permission-service";
import {
ProjectPermissionIdentityActions,
ProjectPermissionSet,
ProjectPermissionSub
} from "../permission/project-permission";
import { ProjectPermissionActions, ProjectPermissionSet, ProjectPermissionSub } from "../permission/project-permission";
import { TIdentityProjectAdditionalPrivilegeDALFactory } from "./identity-project-additional-privilege-dal";
import {
IdentityProjectAdditionalPrivilegeTemporaryMode,
@ -68,7 +63,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
if (!identityProjectMembership)
throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` });
const { permission, membership } = await permissionService.getProjectPermission({
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: identityProjectMembership.projectId,
@ -76,9 +71,8 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Edit,
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId })
);
@ -94,21 +88,11 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning user's permission
// @ts-expect-error expected: one rule definition is very precise while the other is broader; both are valid CASL rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission));
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity,
permission,
targetIdentityPermission
);
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update more privileged identity",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
@ -118,10 +102,6 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
});
if (existingSlug) throw new BadRequestError({ message: "Additional privilege of provided slug exist" });
validateHandlebarTemplate("Identity Additional Privilege Create", JSON.stringify(customPermission || []), {
allowedExpressions: (val) => val.includes("identity.")
});
const packedPermission = JSON.stringify(packRules(customPermission));
if (!dto.isTemporary) {
const additionalPrivilege = await identityProjectAdditionalPrivilegeDAL.create({
@ -170,7 +150,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
if (!identityProjectMembership)
throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` });
const { permission, membership } = await permissionService.getProjectPermission({
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: identityProjectMembership.projectId,
@ -180,7 +160,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Edit,
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId })
);
@ -196,21 +176,11 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning user's permission
// @ts-expect-error expected: one rule definition is very precise while the other is broader; both are valid CASL rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || []));
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity,
permission,
targetIdentityPermission
);
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update more privileged identity",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
@ -233,9 +203,6 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
}
const isTemporary = typeof data?.isTemporary !== "undefined" ? data.isTemporary : identityPrivilege.isTemporary;
validateHandlebarTemplate("Identity Additional Privilege Update", JSON.stringify(data.permissions || []), {
allowedExpressions: (val) => val.includes("identity.")
});
const packedPermission = data.permissions ? JSON.stringify(packRules(data.permissions)) : undefined;
if (isTemporary) {
@ -288,7 +255,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
if (!identityProjectMembership)
throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` });
const { permission, membership } = await permissionService.getProjectPermission({
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: identityProjectMembership.projectId,
@ -297,7 +264,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Edit,
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId })
);
@ -309,21 +276,11 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity,
permission,
identityRolePermission
);
const permissionBoundary = validatePermissionBoundary(permission, identityRolePermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to edit more privileged identity",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to edit more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
@ -370,7 +327,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Read,
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId })
);
@ -414,7 +371,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Read,
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId })
);

View File

@ -4,9 +4,8 @@ import crypto, { KeyObject } from "crypto";
import { ActionProjectType } from "@app/db/schemas";
import { BadRequestError, InternalServerError, NotFoundError } from "@app/lib/errors";
import { isValidIp } from "@app/lib/ip";
import { isValidHostname, isValidIp } from "@app/lib/ip";
import { ms } from "@app/lib/ms";
import { isFQDN } from "@app/lib/validator/validate-url";
import { constructPemChainFromCerts } from "@app/services/certificate/certificate-fns";
import { CertExtendedKeyUsage, CertKeyAlgorithm, CertKeyUsage } from "@app/services/certificate/certificate-types";
import {
@ -666,7 +665,7 @@ export const kmipServiceFactory = ({
.split(",")
.map((name) => name.trim())
.map((altName) => {
if (isFQDN(altName, { allow_wildcard: true })) {
if (isValidHostname(altName)) {
return {
type: "dns",
value: altName

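The change above validates each comma-separated subject alternative name with isValidHostname instead of isFQDN with wildcards. A self-contained sketch of that parsing step is below; isValidHostnameSketch is a crude stand-in for the real helper in @app/lib/ip, and the IP branch and the error case are assumptions, since the hunk only shows the DNS branch.

import net from "node:net";

type AltName = { type: "dns" | "ip"; value: string };

// Crude hostname check used only for this sketch; the real isValidHostname may differ.
const isValidHostnameSketch = (name: string): boolean =>
  /^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?(\.[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?)*$/.test(name);

const parseAltNames = (altNames: string): AltName[] =>
  altNames
    .split(",")
    .map((name) => name.trim())
    .filter(Boolean)
    .map((value) => {
      if (net.isIP(value)) return { type: "ip" as const, value }; // assumed branch, not in the hunk
      if (isValidHostnameSketch(value)) return { type: "dns" as const, value };
      throw new Error(`Unsupported subject alternative name: ${value}`); // assumed error case
    });

// Usage
console.log(parseAltNames("kmip.internal.example.com, 10.0.0.12"));
// -> [ { type: "ip", value: "10.0.0.12" } comes from the assumed branch; the hostname becomes a "dns" entry ]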
View File

@ -97,14 +97,12 @@ export const searchGroups = async (
res.on("searchEntry", (entry) => {
const dn = entry.dn.toString();
const cnStartIndex = dn.indexOf("cn=");
const regex = /cn=([^,]+)/;
const match = dn.match(regex);
// parse the cn from the dn
const cn = (match && match[1]) as string;
if (cnStartIndex !== -1) {
const valueStartIndex = cnStartIndex + 3;
const commaIndex = dn.indexOf(",", valueStartIndex);
const cn = dn.substring(valueStartIndex, commaIndex === -1 ? undefined : commaIndex);
groups.push({ dn, cn });
}
groups.push({ dn, cn });
});
res.on("error", (error) => {
ldapClient.unbind();

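For reference, the two cn-extraction approaches from the hunk above, side by side as standalone functions; both operate on a distinguished name such as cn=admins,ou=groups,dc=example,dc=com.

// Regex variant (removed above): capture everything between "cn=" and the next comma.
const extractCnWithRegex = (dn: string): string | null => {
  const match = dn.match(/cn=([^,]+)/);
  return match ? match[1] : null;
};

// indexOf/substring variant (added above): same result, but lets the caller skip entries
// whose DN has no cn at all instead of pushing a missing value.
const extractCnWithIndexOf = (dn: string): string | null => {
  const cnStartIndex = dn.indexOf("cn=");
  if (cnStartIndex === -1) return null;
  const valueStartIndex = cnStartIndex + 3;
  const commaIndex = dn.indexOf(",", valueStartIndex);
  return dn.substring(valueStartIndex, commaIndex === -1 ? undefined : commaIndex);
};

// Both print "admins"
console.log(extractCnWithRegex("cn=admins,ou=groups,dc=example,dc=com"));
console.log(extractCnWithIndexOf("cn=admins,ou=groups,dc=example,dc=com"));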
View File

@ -1,24 +0,0 @@
export const BillingPlanRows = {
MemberLimit: { name: "Organization member limit", field: "memberLimit" },
IdentityLimit: { name: "Organization identity limit", field: "identityLimit" },
WorkspaceLimit: { name: "Project limit", field: "workspaceLimit" },
EnvironmentLimit: { name: "Environment limit", field: "environmentLimit" },
SecretVersioning: { name: "Secret versioning", field: "secretVersioning" },
PitRecovery: { name: "Point in time recovery", field: "pitRecovery" },
Rbac: { name: "RBAC", field: "rbac" },
CustomRateLimits: { name: "Custom rate limits", field: "customRateLimits" },
CustomAlerts: { name: "Custom alerts", field: "customAlerts" },
AuditLogs: { name: "Audit logs", field: "auditLogs" },
SamlSSO: { name: "SAML SSO", field: "samlSSO" },
Hsm: { name: "Hardware Security Module (HSM)", field: "hsm" },
OidcSSO: { name: "OIDC SSO", field: "oidcSSO" },
SecretApproval: { name: "Secret approvals", field: "secretApproval" },
SecretRotation: { name: "Secret rotation", field: "secretRotation" },
InstanceUserManagement: { name: "Instance User Management", field: "instanceUserManagement" },
ExternalKms: { name: "External KMS", field: "externalKms" }
} as const;
export const BillingPlanTableHead = {
Allowed: { name: "Allowed" },
Used: { name: "Used" }
} as const;

View File

@ -12,13 +12,10 @@ import { getConfig } from "@app/lib/config/env";
import { verifyOfflineLicense } from "@app/lib/crypto";
import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TIdentityOrgDALFactory } from "@app/services/identity/identity-org-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { BillingPlanRows, BillingPlanTableHead } from "./licence-enums";
import { TLicenseDALFactory } from "./license-dal";
import { getDefaultOnPremFeatures, setupLicenseRequestWithStore } from "./license-fns";
import {
@ -31,7 +28,6 @@ import {
TFeatureSet,
TGetOrgBillInfoDTO,
TGetOrgTaxIdDTO,
TOfflineLicense,
TOfflineLicenseContents,
TOrgInvoiceDTO,
TOrgLicensesDTO,
@ -43,12 +39,10 @@ import {
} from "./license-types";
type TLicenseServiceFactoryDep = {
orgDAL: Pick<TOrgDALFactory, "findOrgById" | "countAllOrgMembers">;
orgDAL: Pick<TOrgDALFactory, "findOrgById">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseDAL: TLicenseDALFactory;
keyStore: Pick<TKeyStoreFactory, "setItemWithExpiry" | "getItem" | "deleteItem">;
identityOrgMembershipDAL: TIdentityOrgDALFactory;
projectDAL: TProjectDALFactory;
};
export type TLicenseServiceFactory = ReturnType<typeof licenseServiceFactory>;
@ -63,14 +57,11 @@ export const licenseServiceFactory = ({
orgDAL,
permissionService,
licenseDAL,
keyStore,
identityOrgMembershipDAL,
projectDAL
keyStore
}: TLicenseServiceFactoryDep) => {
let isValidLicense = false;
let instanceType = InstanceType.OnPrem;
let onPremFeatures: TFeatureSet = getDefaultOnPremFeatures();
let selfHostedLicense: TOfflineLicense | null = null;
const appCfg = getConfig();
const licenseServerCloudApi = setupLicenseRequestWithStore(
@ -134,7 +125,6 @@ export const licenseServiceFactory = ({
instanceType = InstanceType.EnterpriseOnPremOffline;
logger.info(`Instance type: ${InstanceType.EnterpriseOnPremOffline}`);
isValidLicense = true;
selfHostedLicense = contents.license;
return;
}
}
@ -358,21 +348,10 @@ export const licenseServiceFactory = ({
message: `Organization with ID '${orgId}' not found`
});
}
if (instanceType !== InstanceType.OnPrem && instanceType !== InstanceType.EnterpriseOnPremOffline) {
const { data } = await licenseServerCloudApi.request.get(
`/api/license-server/v1/customers/${organization.customerId}/cloud-plan/billing`
);
return data;
}
return {
currentPeriodStart: selfHostedLicense?.issuedAt ? Date.parse(selfHostedLicense?.issuedAt) / 1000 : undefined,
currentPeriodEnd: selfHostedLicense?.expiresAt ? Date.parse(selfHostedLicense?.expiresAt) / 1000 : undefined,
interval: "month",
intervalCount: 1,
amount: 0,
quantity: 1
};
const { data } = await licenseServerCloudApi.request.get(
`/api/license-server/v1/customers/${organization.customerId}/cloud-plan/billing`
);
return data;
};
// returns org current plan feature table
@ -386,41 +365,10 @@ export const licenseServiceFactory = ({
message: `Organization with ID '${orgId}' not found`
});
}
if (instanceType !== InstanceType.OnPrem && instanceType !== InstanceType.EnterpriseOnPremOffline) {
const { data } = await licenseServerCloudApi.request.get(
`/api/license-server/v1/customers/${organization.customerId}/cloud-plan/table`
);
return data;
}
const mappedRows = await Promise.all(
Object.values(BillingPlanRows).map(async ({ name, field }: { name: string; field: string }) => {
const allowed = onPremFeatures[field as keyof TFeatureSet];
let used = "-";
if (field === BillingPlanRows.MemberLimit.field) {
const orgMemberships = await orgDAL.countAllOrgMembers(orgId);
used = orgMemberships.toString();
} else if (field === BillingPlanRows.WorkspaceLimit.field) {
const projects = await projectDAL.find({ orgId });
used = projects.length.toString();
} else if (field === BillingPlanRows.IdentityLimit.field) {
const identities = await identityOrgMembershipDAL.countAllOrgIdentities({ orgId });
used = identities.toString();
}
return {
name,
allowed,
used
};
})
const { data } = await licenseServerCloudApi.request.get(
`/api/license-server/v1/customers/${organization.customerId}/cloud-plan/table`
);
return {
head: Object.values(BillingPlanTableHead),
rows: mappedRows
};
return data;
};
const getOrgBillingDetails = async ({ orgId, actor, actorId, actorAuthMethod, actorOrgId }: TGetOrgBillInfoDTO) => {

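The hunks above remove the self-hosted fallback that built the plan table locally from BillingPlanRows and live usage counts. A hedged sketch of what that removed mapping computed is below; the counter callbacks stand in for the real orgDAL.countAllOrgMembers, projectDAL.find, and identityOrgMembershipDAL.countAllOrgIdentities calls.

type FeatureSet = Record<string, number | boolean | null>;

const BillingPlanRowsSketch = {
  MemberLimit: { name: "Organization member limit", field: "memberLimit" },
  WorkspaceLimit: { name: "Project limit", field: "workspaceLimit" },
  IdentityLimit: { name: "Organization identity limit", field: "identityLimit" }
} as const;

const buildOnPremPlanTable = async (
  features: FeatureSet,
  counters: { countMembers(): Promise<number>; countProjects(): Promise<number>; countIdentities(): Promise<number> }
) => {
  const rows = await Promise.all(
    Object.values(BillingPlanRowsSketch).map(async ({ name, field }) => {
      const allowed = features[field] ?? null; // feature limit from the on-prem license
      let used = "-";
      if (field === "memberLimit") used = String(await counters.countMembers());
      else if (field === "workspaceLimit") used = String(await counters.countProjects());
      else if (field === "identityLimit") used = String(await counters.countIdentities());
      return { name, allowed, used };
    })
  );
  return { head: [{ name: "Allowed" }, { name: "Used" }], rows };
};

// Usage with stub counters
void buildOnPremPlanTable(
  { memberLimit: 25, workspaceLimit: 5, identityLimit: 50 },
  { countMembers: async () => 12, countProjects: async () => 3, countIdentities: async () => 7 }
).then((table) => console.log(table.rows));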
View File

@ -44,28 +44,6 @@ export enum OrgPermissionGatewayActions {
DeleteGateways = "delete-gateways"
}
export enum OrgPermissionIdentityActions {
Read = "read",
Create = "create",
Edit = "edit",
Delete = "delete",
GrantPrivileges = "grant-privileges",
RevokeAuth = "revoke-auth",
CreateToken = "create-token",
GetToken = "get-token",
DeleteToken = "delete-token"
}
export enum OrgPermissionGroupActions {
Read = "read",
Create = "create",
Edit = "edit",
Delete = "delete",
GrantPrivileges = "grant-privileges",
AddMembers = "add-members",
RemoveMembers = "remove-members"
}
export enum OrgPermissionSubjects {
Workspace = "workspace",
Role = "role",
@ -102,10 +80,10 @@ export type OrgPermissionSet =
| [OrgPermissionActions, OrgPermissionSubjects.Sso]
| [OrgPermissionActions, OrgPermissionSubjects.Scim]
| [OrgPermissionActions, OrgPermissionSubjects.Ldap]
| [OrgPermissionGroupActions, OrgPermissionSubjects.Groups]
| [OrgPermissionActions, OrgPermissionSubjects.Groups]
| [OrgPermissionActions, OrgPermissionSubjects.SecretScanning]
| [OrgPermissionActions, OrgPermissionSubjects.Billing]
| [OrgPermissionIdentityActions, OrgPermissionSubjects.Identity]
| [OrgPermissionActions, OrgPermissionSubjects.Identity]
| [OrgPermissionActions, OrgPermissionSubjects.Kms]
| [OrgPermissionActions, OrgPermissionSubjects.AuditLogs]
| [OrgPermissionActions, OrgPermissionSubjects.ProjectTemplates]
@ -278,28 +256,20 @@ const buildAdminPermission = () => {
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Ldap);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Ldap);
can(OrgPermissionGroupActions.Read, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.Create, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.Edit, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.Delete, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.GrantPrivileges, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.AddMembers, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.RemoveMembers, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Billing);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Billing);
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Billing);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Billing);
can(OrgPermissionIdentityActions.Read, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.Create, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.Delete, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.GrantPrivileges, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.RevokeAuth, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.CreateToken, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.GetToken, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.DeleteToken, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Kms);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Kms);
@ -346,7 +316,7 @@ const buildMemberPermission = () => {
can(OrgPermissionActions.Create, OrgPermissionSubjects.Workspace);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Member);
can(OrgPermissionGroupActions.Read, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Role);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Billing);
@ -357,10 +327,10 @@ const buildMemberPermission = () => {
can(OrgPermissionActions.Edit, OrgPermissionSubjects.SecretScanning);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.SecretScanning);
can(OrgPermissionIdentityActions.Read, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.Create, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.Delete, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);

View File

@ -49,7 +49,6 @@ export const permissionDALFactory = (db: TDbClient) => {
.join(TableName.Organization, `${TableName.Organization}.id`, `${TableName.OrgMembership}.orgId`)
.select(
selectAllTableCols(TableName.OrgMembership),
db.ref("shouldUseNewPrivilegeSystem").withSchema(TableName.Organization),
db.ref("slug").withSchema(TableName.OrgRoles).as("customRoleSlug"),
db.ref("permissions").withSchema(TableName.OrgRoles),
db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"),
@ -71,8 +70,7 @@ export const permissionDALFactory = (db: TDbClient) => {
OrgMembershipsSchema.extend({
permissions: z.unknown(),
orgAuthEnforced: z.boolean().optional().nullable(),
customRoleSlug: z.string().optional().nullable(),
shouldUseNewPrivilegeSystem: z.boolean()
customRoleSlug: z.string().optional().nullable()
}).parse(el),
childrenMapper: [
{
@ -120,9 +118,7 @@ export const permissionDALFactory = (db: TDbClient) => {
.select(selectAllTableCols(TableName.IdentityOrgMembership))
.select(db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"))
.select("permissions")
.select(db.ref("shouldUseNewPrivilegeSystem").withSchema(TableName.Organization))
.first();
return membership;
} catch (error) {
throw new DatabaseError({ error, name: "GetOrgIdentityPermission" });
@ -672,8 +668,7 @@ export const permissionDALFactory = (db: TDbClient) => {
db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"),
db.ref("orgId").withSchema(TableName.Project),
db.ref("type").withSchema(TableName.Project).as("projectType"),
db.ref("id").withSchema(TableName.Project).as("projectId"),
db.ref("shouldUseNewPrivilegeSystem").withSchema(TableName.Organization)
db.ref("id").withSchema(TableName.Project).as("projectId")
);
const [userPermission] = sqlNestRelationships({
@ -689,8 +684,7 @@ export const permissionDALFactory = (db: TDbClient) => {
groupMembershipCreatedAt,
groupMembershipUpdatedAt,
membershipUpdatedAt,
projectType,
shouldUseNewPrivilegeSystem
projectType
}) => ({
orgId,
orgAuthEnforced,
@ -700,8 +694,7 @@ export const permissionDALFactory = (db: TDbClient) => {
projectType,
id: membershipId || groupMembershipId,
createdAt: membershipCreatedAt || groupMembershipCreatedAt,
updatedAt: membershipUpdatedAt || groupMembershipUpdatedAt,
shouldUseNewPrivilegeSystem
updatedAt: membershipUpdatedAt || groupMembershipUpdatedAt
}),
childrenMapper: [
{
@ -1002,7 +995,6 @@ export const permissionDALFactory = (db: TDbClient) => {
`${TableName.IdentityProjectMembership}.projectId`,
`${TableName.Project}.id`
)
.join(TableName.Organization, `${TableName.Project}.orgId`, `${TableName.Organization}.id`)
.leftJoin(TableName.IdentityMetadata, (queryBuilder) => {
void queryBuilder
.on(`${TableName.Identity}.id`, `${TableName.IdentityMetadata}.identityId`)
@ -1020,7 +1012,6 @@ export const permissionDALFactory = (db: TDbClient) => {
db.ref("updatedAt").withSchema(TableName.IdentityProjectMembership).as("membershipUpdatedAt"),
db.ref("slug").withSchema(TableName.ProjectRoles).as("customRoleSlug"),
db.ref("permissions").withSchema(TableName.ProjectRoles),
db.ref("shouldUseNewPrivilegeSystem").withSchema(TableName.Organization),
db.ref("id").withSchema(TableName.IdentityProjectAdditionalPrivilege).as("identityApId"),
db.ref("permissions").withSchema(TableName.IdentityProjectAdditionalPrivilege).as("identityApPermissions"),
db
@ -1054,8 +1045,7 @@ export const permissionDALFactory = (db: TDbClient) => {
membershipUpdatedAt,
orgId,
identityName,
projectType,
shouldUseNewPrivilegeSystem
projectType
}) => ({
id: membershipId,
identityId,
@ -1065,7 +1055,6 @@ export const permissionDALFactory = (db: TDbClient) => {
updatedAt: membershipUpdatedAt,
orgId,
projectType,
shouldUseNewPrivilegeSystem,
// just a prefilled value
orgAuthEnforced: false
}),

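The permission DAL hunks above drop the shouldUseNewPrivilegeSystem column from several joined selects. For readers unfamiliar with the query style, here is a minimal knex sketch of that pattern (ref().withSchema().as() to pull table-qualified columns under stable aliases); table and column names are illustrative, not the real schema.

import knex from "knex";

// Connection string is a placeholder; the query builder below does not execute until awaited.
const db = knex({ client: "pg", connection: process.env.DATABASE_URL ?? "postgres://localhost:5432/app" });

const getOrgMembershipRow = async (userId: string) =>
  db("org_memberships")
    .join("organizations", "organizations.id", "org_memberships.orgId")
    .where("org_memberships.userId", userId)
    .select(
      db.ref("role").withSchema("org_memberships"),
      db.ref("authEnforced").withSchema("organizations").as("orgAuthEnforced")
    )
    .first();

// Usage (would hit the database): await getOrgMembershipRow("user-id");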
View File

@ -3,11 +3,9 @@ import { ForbiddenError, MongoAbility, PureAbility, subject } from "@casl/abilit
import { z } from "zod";
import { TOrganizations } from "@app/db/schemas";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, UnauthorizedError } from "@app/lib/errors";
import { ActorAuthMethod, AuthMethod } from "@app/services/auth/auth-type";
import { OrgPermissionSet } from "./org-permission";
import {
ProjectPermissionSecretActions,
ProjectPermissionSet,
@ -147,57 +145,4 @@ const escapeHandlebarsMissingDict = (obj: Record<string, string>, key: string) =
return new Proxy(obj, handler);
};
// This function serves as a transition layer between the old and new privilege management systems
// the old privilege management system is based on the actor having more privileges than the managed permission
// the new privilege management system is based on the actor having the appropriate permission to perform the privilege change,
// regardless of the actor's privilege level.
const validatePrivilegeChangeOperation = (
shouldUseNewPrivilegeSystem: boolean,
opAction: OrgPermissionSet[0] | ProjectPermissionSet[0],
opSubject: OrgPermissionSet[1] | ProjectPermissionSet[1],
actorPermission: MongoAbility,
managedPermission: MongoAbility
) => {
if (shouldUseNewPrivilegeSystem) {
if (actorPermission.can(opAction, opSubject)) {
return {
isValid: true,
missingPermissions: []
};
}
return {
isValid: false,
missingPermissions: [
{
action: opAction,
subject: opSubject
}
]
};
}
// if not, we check if the actor is indeed more privileged than the managed permission - this is the old system
return validatePermissionBoundary(actorPermission, managedPermission);
};
const constructPermissionErrorMessage = (
baseMessage: string,
shouldUseNewPrivilegeSystem: boolean,
opAction: OrgPermissionSet[0] | ProjectPermissionSet[0],
opSubject: OrgPermissionSet[1] | ProjectPermissionSet[1]
) => {
return `${baseMessage}${
shouldUseNewPrivilegeSystem
? `. Actor is missing permission ${opAction as string} on ${opSubject as string}`
: ". Actor privilege level is not high enough to perform this action"
}`;
};
export {
constructPermissionErrorMessage,
escapeHandlebarsMissingDict,
isAuthMethodSaml,
validateOrgSSO,
validatePrivilegeChangeOperation
};
export { escapeHandlebarsMissingDict, isAuthMethodSaml, validateOrgSSO };
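The removed helpers above encode the difference between the two privilege models. A toy illustration of that contrast follows; in the real code the old-system branch delegates to validatePermissionBoundary rather than the simplified every() check used here, and the abilities are invented for the example.

import { createMongoAbility } from "@casl/ability";

// The actor holds the explicit grant-privileges permission on Groups, while the managed
// role is broader (it can also delete Secret).
const actorPermission = createMongoAbility([{ action: "grant-privileges", subject: "Groups" }]);
const managedPermission = createMongoAbility([
  { action: "grant-privileges", subject: "Groups" },
  { action: "delete", subject: "Secret" }
]);

// New system: a single explicit check on the operation being performed.
const allowedUnderNewSystem = actorPermission.can("grant-privileges", "Groups");

// Old system: the actor must cover every rule of the managed permission; this fails here
// because the actor cannot delete Secret.
const allowedUnderOldSystem = managedPermission.rules.every((rule) =>
  [rule.action].flat().every((action) => actorPermission.can(action, rule.subject as string))
);

console.log({ allowedUnderNewSystem, allowedUnderOldSystem });
// -> { allowedUnderNewSystem: true, allowedUnderOldSystem: false }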

View File

@ -397,18 +397,14 @@ export const permissionServiceFactory = ({
const scopes = ServiceTokenScopes.parse(serviceToken.scopes || []);
return {
permission: buildServiceTokenProjectPermission(scopes, serviceToken.permissions),
membership: {
shouldUseNewPrivilegeSystem: true
}
membership: undefined
};
};
type TProjectPermissionRT<T extends ActorType> = T extends ActorType.SERVICE
? {
permission: MongoAbility<ProjectPermissionSet, MongoQuery>;
membership: {
shouldUseNewPrivilegeSystem: boolean;
};
membership: undefined;
hasRole: (arg: string) => boolean;
} // a service token has neither membership nor roles
: {
@ -417,7 +413,6 @@ export const permissionServiceFactory = ({
orgAuthEnforced: boolean | null | undefined;
orgId: string;
roles: Array<{ role: string }>;
shouldUseNewPrivilegeSystem: boolean;
};
hasRole: (role: string) => boolean;
};

View File

@ -43,30 +43,6 @@ export enum ProjectPermissionDynamicSecretActions {
Lease = "lease"
}
export enum ProjectPermissionIdentityActions {
Read = "read",
Create = "create",
Edit = "edit",
Delete = "delete",
GrantPrivileges = "grant-privileges"
}
export enum ProjectPermissionMemberActions {
Read = "read",
Create = "create",
Edit = "edit",
Delete = "delete",
GrantPrivileges = "grant-privileges"
}
export enum ProjectPermissionGroupActions {
Read = "read",
Create = "create",
Edit = "edit",
Delete = "delete",
GrantPrivileges = "grant-privileges"
}
export enum ProjectPermissionSecretSyncActions {
Read = "read",
Create = "create",
@ -174,8 +150,8 @@ export type ProjectPermissionSet =
]
| [ProjectPermissionActions, ProjectPermissionSub.Role]
| [ProjectPermissionActions, ProjectPermissionSub.Tags]
| [ProjectPermissionMemberActions, ProjectPermissionSub.Member]
| [ProjectPermissionGroupActions, ProjectPermissionSub.Groups]
| [ProjectPermissionActions, ProjectPermissionSub.Member]
| [ProjectPermissionActions, ProjectPermissionSub.Groups]
| [ProjectPermissionActions, ProjectPermissionSub.Integrations]
| [ProjectPermissionActions, ProjectPermissionSub.Webhooks]
| [ProjectPermissionActions, ProjectPermissionSub.AuditLogs]
@ -186,7 +162,7 @@ export type ProjectPermissionSet =
| [ProjectPermissionActions, ProjectPermissionSub.SecretApproval]
| [ProjectPermissionActions, ProjectPermissionSub.SecretRotation]
| [
ProjectPermissionIdentityActions,
ProjectPermissionActions,
ProjectPermissionSub.Identity | (ForcedSubject<ProjectPermissionSub.Identity> & IdentityManagementSubjectFields)
]
| [ProjectPermissionActions, ProjectPermissionSub.CertificateAuthorities]
@ -314,13 +290,13 @@ const GeneralPermissionSchema = [
}),
z.object({
subject: z.literal(ProjectPermissionSub.Member).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionMemberActions).describe(
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.Groups).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionGroupActions).describe(
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
"Describe what action an entity can take."
)
}),
@ -534,7 +510,7 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
z.object({
subject: z.literal(ProjectPermissionSub.Identity).describe("The entity this permission pertains to."),
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionIdentityActions).describe(
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
"Describe what action an entity can take."
),
conditions: IdentityManagementConditionSchema.describe(
@ -555,9 +531,12 @@ const buildAdminPermissionRules = () => {
ProjectPermissionSub.SecretImports,
ProjectPermissionSub.SecretApproval,
ProjectPermissionSub.SecretRotation,
ProjectPermissionSub.Member,
ProjectPermissionSub.Groups,
ProjectPermissionSub.Role,
ProjectPermissionSub.Integrations,
ProjectPermissionSub.Webhooks,
ProjectPermissionSub.Identity,
ProjectPermissionSub.ServiceTokens,
ProjectPermissionSub.Settings,
ProjectPermissionSub.Environments,
@ -584,39 +563,6 @@ const buildAdminPermissionRules = () => {
);
});
can(
[
ProjectPermissionMemberActions.Create,
ProjectPermissionMemberActions.Edit,
ProjectPermissionMemberActions.Delete,
ProjectPermissionMemberActions.Read,
ProjectPermissionMemberActions.GrantPrivileges
],
ProjectPermissionSub.Member
);
can(
[
ProjectPermissionGroupActions.Create,
ProjectPermissionGroupActions.Edit,
ProjectPermissionGroupActions.Delete,
ProjectPermissionGroupActions.Read,
ProjectPermissionGroupActions.GrantPrivileges
],
ProjectPermissionSub.Groups
);
can(
[
ProjectPermissionIdentityActions.Create,
ProjectPermissionIdentityActions.Edit,
ProjectPermissionIdentityActions.Delete,
ProjectPermissionIdentityActions.Read,
ProjectPermissionIdentityActions.GrantPrivileges
],
ProjectPermissionSub.Identity
);
can(
[
ProjectPermissionSecretActions.DescribeAndReadValue,
@ -731,9 +677,9 @@ const buildMemberPermissionRules = () => {
can([ProjectPermissionActions.Read, ProjectPermissionActions.Create], ProjectPermissionSub.SecretRollback);
can([ProjectPermissionMemberActions.Read, ProjectPermissionMemberActions.Create], ProjectPermissionSub.Member);
can([ProjectPermissionActions.Read, ProjectPermissionActions.Create], ProjectPermissionSub.Member);
can([ProjectPermissionGroupActions.Read], ProjectPermissionSub.Groups);
can([ProjectPermissionActions.Read], ProjectPermissionSub.Groups);
can(
[
@ -757,10 +703,10 @@ const buildMemberPermissionRules = () => {
can(
[
ProjectPermissionIdentityActions.Read,
ProjectPermissionIdentityActions.Edit,
ProjectPermissionIdentityActions.Create,
ProjectPermissionIdentityActions.Delete
ProjectPermissionActions.Read,
ProjectPermissionActions.Edit,
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
],
ProjectPermissionSub.Identity
);
@ -874,12 +820,12 @@ const buildViewerPermissionRules = () => {
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRotation);
can(ProjectPermissionMemberActions.Read, ProjectPermissionSub.Member);
can(ProjectPermissionGroupActions.Read, ProjectPermissionSub.Groups);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Member);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Groups);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Role);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Webhooks);
can(ProjectPermissionIdentityActions.Read, ProjectPermissionSub.Identity);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);
can(ProjectPermissionActions.Read, ProjectPermissionSub.ServiceTokens);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Settings);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Environments);
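The schema hunks above pair each subject literal with an action enum and combine the per-subject objects through z.discriminatedUnion("subject", ...). A generic zod sketch of that pattern is below; the enum, subjects, and array-of-actions shape are illustrative stand-ins, not the real exported schemas.

import { z } from "zod";

enum SketchActions {
  Read = "read",
  Create = "create",
  Edit = "edit",
  Delete = "delete"
}

const MemberPermissionSchema = z.object({
  subject: z.literal("member").describe("The entity this permission pertains to."),
  action: z.nativeEnum(SketchActions).array().describe("Actions the entity can take.")
});

const GroupPermissionSchema = z.object({
  subject: z.literal("groups").describe("The entity this permission pertains to."),
  action: z.nativeEnum(SketchActions).array().describe("Actions the entity can take.")
});

// The discriminator ties each subject to its own action enum, so invalid combinations fail at parse time.
const PermissionSchemaSketch = z.discriminatedUnion("subject", [MemberPermissionSchema, GroupPermissionSchema]);

// Usage
console.log(PermissionSchemaSketch.parse({ subject: "groups", action: ["read", "edit"] }));
// -> { subject: "groups", action: ["read", "edit"] }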

View File

@ -2,20 +2,15 @@ import { ForbiddenError, MongoAbility, RawRuleOf } from "@casl/ability";
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import { ActionProjectType, TableName } from "@app/db/schemas";
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { constructPermissionErrorMessage, validatePrivilegeChangeOperation } from "../permission/permission-fns";
import { TPermissionServiceFactory } from "../permission/permission-service";
import {
ProjectPermissionMemberActions,
ProjectPermissionSet,
ProjectPermissionSub
} from "../permission/project-permission";
import { ProjectPermissionActions, ProjectPermissionSet, ProjectPermissionSub } from "../permission/project-permission";
import { TProjectUserAdditionalPrivilegeDALFactory } from "./project-user-additional-privilege-dal";
import {
ProjectUserAdditionalPrivilegeTemporaryMode,
@ -68,8 +63,8 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Edit, ProjectPermissionSub.Member);
const { permission: targetUserPermission, membership } = await permissionService.getProjectPermission({
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Member);
const { permission: targetUserPermission } = await permissionService.getProjectPermission({
actor: ActorType.USER,
actorId: projectMembership.userId,
projectId: projectMembership.projectId,
@ -81,21 +76,11 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning user's permission
// @ts-expect-error expected: one rule definition is very precise while the other is broader; both are valid CASL rules
targetUserPermission.update(targetUserPermission.rules.concat(customPermission));
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionMemberActions.GrantPrivileges,
ProjectPermissionSub.Member,
permission,
targetUserPermission
);
const permissionBoundary = validatePermissionBoundary(permission, targetUserPermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update more privileged user",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionMemberActions.GrantPrivileges,
ProjectPermissionSub.Member
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged user",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
@ -107,10 +92,6 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
if (existingSlug)
throw new BadRequestError({ message: `Additional privilege with provided slug ${slug} already exists` });
validateHandlebarTemplate("User Additional Privilege Create", JSON.stringify(customPermission || []), {
allowedExpressions: (val) => val.includes("identity.")
});
const packedPermission = JSON.stringify(packRules(customPermission));
if (!dto.isTemporary) {
const additionalPrivilege = await projectUserAdditionalPrivilegeDAL.create({
@ -165,7 +146,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
message: `Project membership for user with ID '${userPrivilege.userId}' not found in project with ID '${userPrivilege.projectId}'`
});
const { permission, membership } = await permissionService.getProjectPermission({
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: projectMembership.projectId,
@ -173,7 +154,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Edit, ProjectPermissionSub.Member);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Member);
const { permission: targetUserPermission } = await permissionService.getProjectPermission({
actor: ActorType.USER,
actorId: projectMembership.userId,
@ -186,21 +167,11 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning user's permission
// @ts-expect-error expected: one rule definition is very precise while the other is broader; both are valid CASL rules
targetUserPermission.update(targetUserPermission.rules.concat(dto.permissions || []));
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionMemberActions.GrantPrivileges,
ProjectPermissionSub.Member,
permission,
targetUserPermission
);
const permissionBoundary = validatePermissionBoundary(permission, targetUserPermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update more privileged user",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionMemberActions.GrantPrivileges,
ProjectPermissionSub.Member
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged user",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
@ -214,10 +185,6 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
throw new BadRequestError({ message: `Additional privilege with provided slug ${dto.slug} already exists` });
}
validateHandlebarTemplate("User Additional Privilege Update", JSON.stringify(dto.permissions || []), {
allowedExpressions: (val) => val.includes("identity.")
});
const isTemporary = typeof dto?.isTemporary !== "undefined" ? dto.isTemporary : userPrivilege.isTemporary;
const packedPermission = dto.permissions && JSON.stringify(packRules(dto.permissions));
@ -277,7 +244,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Edit, ProjectPermissionSub.Member);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Member);
const deletedPrivilege = await projectUserAdditionalPrivilegeDAL.deleteById(userPrivilege.id);
return {
@ -314,7 +281,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Read, ProjectPermissionSub.Member);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Member);
return {
...userPrivilege,
@ -341,7 +308,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Read, ProjectPermissionSub.Member);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Member);
const userPrivileges = await projectUserAdditionalPrivilegeDAL.find(
{

View File

@ -29,9 +29,15 @@ export const parseScimFilter = (filterToParse: string | undefined) => {
attributeName = "name";
}
return { [attributeName]: parsedValue.replaceAll('"', "") };
return { [attributeName]: parsedValue.replace(/"/g, "") };
};
export function extractScimValueFromPath(path: string): string | null {
const regex = /members\[value eq "([^"]+)"\]/;
const match = path.match(regex);
return match ? match[1] : null;
}
export const buildScimUser = ({
orgMembershipId,
username,

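A quick usage check for the SCIM path helper added above, copied here so it runs standalone; the UUID is an example value.

// Same regex as extractScimValueFromPath above, duplicated for a self-contained demo.
const extractScimValueFromPathDemo = (path: string): string | null => {
  const match = path.match(/members\[value eq "([^"]+)"\]/);
  return match ? match[1] : null;
};

console.log(extractScimValueFromPathDemo('members[value eq "8f6d2f62-0aa3-4c61-9d2e-2f6f8a1f3b11"]'));
// -> "8f6d2f62-0aa3-4c61-9d2e-2f6f8a1f3b11"
console.log(extractScimValueFromPathDemo("displayName"));
// -> null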
View File

@ -62,8 +62,7 @@ export const secretApprovalPolicyServiceFactory = ({
projectId,
secretPath,
environment,
enforcementLevel,
allowedSelfApprovals
enforcementLevel
}: TCreateSapDTO) => {
const groupApprovers = approvers
?.filter((approver) => approver.type === ApproverType.Group)
@ -114,8 +113,7 @@ export const secretApprovalPolicyServiceFactory = ({
approvals,
secretPath,
name,
enforcementLevel,
allowedSelfApprovals
enforcementLevel
},
tx
);
@ -174,8 +172,7 @@ export const secretApprovalPolicyServiceFactory = ({
actorAuthMethod,
approvals,
secretPolicyId,
enforcementLevel,
allowedSelfApprovals
enforcementLevel
}: TUpdateSapDTO) => {
const groupApprovers = approvers
?.filter((approver) => approver.type === ApproverType.Group)
@ -221,8 +218,7 @@ export const secretApprovalPolicyServiceFactory = ({
approvals,
secretPath,
name,
enforcementLevel,
allowedSelfApprovals
enforcementLevel
},
tx
);

View File

@ -10,7 +10,6 @@ export type TCreateSapDTO = {
projectId: string;
name: string;
enforcementLevel: EnforcementLevel;
allowedSelfApprovals: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TUpdateSapDTO = {
@ -20,7 +19,6 @@ export type TUpdateSapDTO = {
approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; name?: string })[];
name?: string;
enforcementLevel?: EnforcementLevel;
allowedSelfApprovals?: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TDeleteSapDTO = {

View File

@ -112,7 +112,6 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
tx.ref("envId").withSchema(TableName.SecretApprovalPolicy).as("policyEnvId"),
tx.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
tx.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
tx.ref("deletedAt").withSchema(TableName.SecretApprovalPolicy).as("policyDeletedAt")
);
@ -151,8 +150,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel,
envId: el.policyEnvId,
deletedAt: el.policyDeletedAt,
allowedSelfApprovals: el.policyAllowedSelfApprovals
deletedAt: el.policyDeletedAt
}
}),
childrenMapper: [
@ -338,7 +336,6 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
),
db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
db.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
db.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
db.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
db.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover),
db.ref("userId").withSchema(TableName.UserGroupMembership).as("approverGroupUserId"),
@ -367,8 +364,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
name: el.policyName,
approvals: el.policyApprovals,
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel,
allowedSelfApprovals: el.policyAllowedSelfApprovals
enforcementLevel: el.policyEnforcementLevel
},
committerUser: {
userId: el.committerUserId,
@ -486,7 +482,6 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
`DENSE_RANK() OVER (partition by ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."id" DESC) as rank`
),
db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
db.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
db.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
db.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
db.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover),
@ -516,8 +511,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
name: el.policyName,
approvals: el.policyApprovals,
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel,
allowedSelfApprovals: el.policyAllowedSelfApprovals
enforcementLevel: el.policyEnforcementLevel
},
committerUser: {
userId: el.committerUserId,

View File

@ -352,11 +352,6 @@ export const secretApprovalRequestServiceFactory = ({
message: "The policy associated with this secret approval request has been deleted."
});
}
if (!policy.allowedSelfApprovals && actorId === secretApprovalRequest.committerUserId) {
throw new BadRequestError({
message: "Failed to review secret approval request. Users are not authorized to review their own request."
});
}
const { hasRole } = await permissionService.getProjectPermission({
actor: ActorType.USER,

View File

@ -8,49 +8,23 @@ import axios from "axios";
import jmespath from "jmespath";
import knex from "knex";
import { getConfig } from "@app/lib/config/env";
import { getDbConnectionHost } from "@app/lib/knex";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../../dynamic-secret/dynamic-secret-fns";
import { TAssignOp, TDbProviderClients, TDirectAssignOp, THttpProviderFunction } from "../templates/types";
import { TSecretRotationData, TSecretRotationDbFn } from "./secret-rotation-queue-types";
const REGEX = /\${([^}]+)}/g;
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
const replaceTemplateVariables = (str: string, getValue: (key: string) => unknown) => {
// Use array to collect pieces and join at the end (more efficient for large strings)
const parts: string[] = [];
let pos = 0;
while (pos < str.length) {
const start = str.indexOf("${", pos);
if (start === -1) {
parts.push(str.slice(pos));
break;
}
parts.push(str.slice(pos, start));
const end = str.indexOf("}", start + 2);
if (end === -1) {
parts.push(str.slice(start));
break;
}
const varName = str.slice(start + 2, end);
parts.push(String(getValue(varName)));
pos = end + 1;
}
return parts.join("");
};
export const interpolate = (data: any, getValue: (key: string) => unknown) => {
if (!data) return;
if (typeof data === "number") return data;
if (typeof data === "string") {
return replaceTemplateVariables(data, getValue);
return data.replace(REGEX, (_a, b) => getValue(b) as string);
}
if (typeof data === "object" && Array.isArray(data)) {
@ -114,14 +88,32 @@ export const secretRotationDbFn = async ({
variables,
options
}: TSecretRotationDbFn) => {
const appCfg = getConfig();
const ssl = ca ? { rejectUnauthorized: false, ca } : undefined;
const [hostIp] = await verifyHostInputValidity(host);
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if it's cloud or not
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
if (
isCloud &&
// internal ips
(host === "host.docker.internal" || host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
)
throw new Error("Invalid db host");
if (
host === "localhost" ||
host === "127.0.0.1" ||
// database infisical uses
dbHost === host
)
throw new Error("Invalid db host");
const db = knex({
client,
connection: {
database,
port,
host: hostIp,
host,
user: username,
password,
connectionTimeoutMillis: EXTERNAL_REQUEST_TIMEOUT,

View File

@ -8,18 +8,7 @@ type GetFullFolderPath = {
export const getFullFolderPath = async ({ folderDAL, folderId, envId }: GetFullFolderPath): Promise<string> => {
// Helper function to remove duplicate slashes
const removeDuplicateSlashes = (path: string) => {
const chars = [];
let lastWasSlash = false;
for (let i = 0; i < path.length; i += 1) {
const char = path[i];
if (char !== "/" || !lastWasSlash) chars.push(char);
lastWasSlash = char === "/";
}
return chars.join("");
};
const removeDuplicateSlashes = (path: string) => path.replace(/\/{2,}/g, "/");
// Fetch all folders at once based on environment ID to avoid multiple queries
const folders = await folderDAL.find({ envId });

View File

@ -1,34 +1,14 @@
import { isIP } from "net";
import { isFQDN } from "@app/lib/validator/validate-url";
// Validates usernames or wildcard (*)
export const isValidUserPattern = (value: string): boolean => {
// Length check before regex to prevent ReDoS
if (typeof value !== "string") return false;
if (value.length > 32) return false; // Maximum Linux username length
if (value === "*") return true; // Handle wildcard separately
// Simpler, more specific pattern for usernames
const userRegex = /^[a-z_][a-z0-9_-]*$/i;
// Matches valid Linux usernames or a wildcard (*)
const userRegex = /^(?:\*|[a-z_][a-z0-9_-]{0,31})$/;
return userRegex.test(value);
};
// Validates hostnames, wildcard domains, or IP addresses
export const isValidHostPattern = (value: string): boolean => {
// Input validation
if (typeof value !== "string") return false;
// Length check
if (value.length > 255) return false;
// Handle the wildcard case separately
if (value === "*") return true;
// Check for IP addresses using Node.js built-in functions
if (isIP(value)) return true;
return isFQDN(value, {
allow_wildcard: true
});
// Matches FQDNs, wildcard domains (*.example.com), IPv4, and IPv6 addresses
const hostRegex =
/^(?:\*|\*\.[a-z0-9-]+(?:\.[a-z0-9-]+)*|[a-z0-9-]+(?:\.[a-z0-9-]+)*|\d{1,3}(\.\d{1,3}){3}|([a-fA-F0-9:]+:+)+[a-fA-F0-9]+(?:%[a-zA-Z0-9]+)?)$/;
return hostRegex.test(value);
};
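A few illustrative checks that both variants of the validators above are meant to agree on (a sketch, not part of the diff):

isValidUserPattern("*");             // true  – wildcard
isValidUserPattern("deploy_user");   // true  – typical Linux username
isValidUserPattern("bad user");      // false – spaces are rejected

isValidHostPattern("*.example.com"); // true  – wildcard domain
isValidHostPattern("10.0.0.5");      // true  – IPv4 address
isValidHostPattern("not a host");    // false – spaces are rejected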

View File

@ -8,7 +8,6 @@ import { promisify } from "util";
import { TSshCertificateTemplates } from "@app/db/schemas";
import { BadRequestError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import {
@ -19,7 +18,6 @@ import { SshCertType, TCreateSshCertDTO } from "./ssh-certificate-authority-type
const execFileAsync = promisify(execFile);
const EXEC_TIMEOUT_MS = 10000; // 10 seconds
/* eslint-disable no-bitwise */
export const createSshCertSerialNumber = () => {
const randomBytes = crypto.randomBytes(8); // 8 bytes = 64 bits
@ -66,9 +64,7 @@ export const createSshKeyPair = async (keyAlgorithm: CertKeyAlgorithm) => {
// Generate the SSH key pair
// The "-N ''" sets an empty passphrase
// The keys are created in the temporary directory
await execFileAsync("ssh-keygen", ["-t", keyType, "-b", keyBits, "-f", privateKeyFile, "-N", ""], {
timeout: EXEC_TIMEOUT_MS
});
await execFileAsync("ssh-keygen", ["-t", keyType, "-b", keyBits, "-f", privateKeyFile, "-N", ""]);
// Read the generated keys
const publicKey = await fs.readFile(publicKeyFile, "utf8");
@ -91,10 +87,7 @@ export const getSshPublicKey = async (privateKey: string) => {
await fs.writeFile(privateKeyFile, privateKey, { mode: 0o600 });
// Run ssh-keygen to extract the public key
const { stdout } = await execFileAsync("ssh-keygen", ["-y", "-f", privateKeyFile], {
encoding: "utf8",
timeout: EXEC_TIMEOUT_MS
});
const { stdout } = await execFileAsync("ssh-keygen", ["-y", "-f", privateKeyFile], { encoding: "utf8" });
return stdout.trim();
} finally {
// Ensure that files and the temporary directory are cleaned up
@ -150,14 +143,7 @@ export const validateSshCertificatePrincipals = (
}
// restrict allowed characters to letters, digits, dot, underscore, and hyphen
if (
!characterValidator([
CharacterType.AlphaNumeric,
CharacterType.Period,
CharacterType.Underscore,
CharacterType.Hyphen
])(sanitized)
) {
if (!/^[A-Za-z0-9._-]+$/.test(sanitized)) {
throw new BadRequestError({
message: `Principal '${sanitized}' contains invalid characters. Allowed: alphanumeric, '.', '_', '-'.`
});
@ -280,8 +266,8 @@ export const validateSshCertificateTtl = (template: TSshCertificateTemplates, tt
* that it only contains alphanumeric characters with no spaces.
*/
export const validateSshCertificateKeyId = (keyId: string) => {
const regex = characterValidator([CharacterType.AlphaNumeric, CharacterType.Hyphen]);
if (!regex(keyId)) {
const regex = /^[A-Za-z0-9-]+$/;
if (!regex.test(keyId)) {
throw new BadRequestError({
message:
"Failed to validate Key ID because it can only contain alphanumeric characters and hyphens, with no spaces."
@ -312,7 +298,7 @@ const validateSshPublicKey = async (publicKey: string) => {
try {
await fs.writeFile(pubKeyFile, publicKey, { mode: 0o600 });
await execFileAsync("ssh-keygen", ["-l", "-f", pubKeyFile], { timeout: EXEC_TIMEOUT_MS });
await execFileAsync("ssh-keygen", ["-l", "-f", pubKeyFile]);
} catch (error) {
throw new BadRequestError({
message: "Failed to validate SSH public key format: could not be parsed."
@ -377,7 +363,7 @@ export const createSshCert = async ({
await fs.writeFile(privateKeyFile, caPrivateKey, { mode: 0o600 });
// Execute the signing process
await execFileAsync("ssh-keygen", sshKeygenArgs, { encoding: "utf8", timeout: EXEC_TIMEOUT_MS });
await execFileAsync("ssh-keygen", sshKeygenArgs, { encoding: "utf8" });
// Read the signed public key from the generated cert file
const signedPublicKey = await fs.readFile(signedPublicKeyFile, "utf8");
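The timeout option in the hunks above comes straight from Node's child_process; a minimal standalone sketch of a promisified execFile call with that guard (illustrative, not the project's code):

import { execFile } from "child_process";
import { promisify } from "util";

const execFileAsync = promisify(execFile);
const EXEC_TIMEOUT_MS = 10_000; // 10 seconds

// Reads an SSH public key fingerprint; ssh-keygen is killed if it hangs past the timeout.
const getFingerprint = async (pubKeyFile: string): Promise<string> => {
  const { stdout } = await execFileAsync("ssh-keygen", ["-l", "-f", pubKeyFile], {
    encoding: "utf8",
    timeout: EXEC_TIMEOUT_MS
  });
  return stdout.trim();
};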

View File

@ -244,7 +244,7 @@ export const KUBERNETES_AUTH = {
kubernetesHost: "The host string, host:port pair, or URL to the base of the Kubernetes API server.",
caCert: "The PEM-encoded CA cert for the Kubernetes API server.",
tokenReviewerJwt:
"Optional JWT token for accessing Kubernetes TokenReview API. If provided, this long-lived token will be used to validate service account tokens during authentication. If omitted, the client's own JWT will be used instead, which requires the client to have the system:auth-delegator ClusterRole binding.",
"The long-lived service account JWT token for Infisical to access the TokenReview API to validate other service account JWT tokens submitted by applications/pods.",
allowedNamespaces:
"The comma-separated list of trusted namespaces that service accounts must belong to authenticate with Infisical.",
allowedNames: "The comma-separated list of trusted service account names that can authenticate with Infisical.",
@ -260,7 +260,7 @@ export const KUBERNETES_AUTH = {
kubernetesHost: "The new host string, host:port pair, or URL to the base of the Kubernetes API server.",
caCert: "The new PEM-encoded CA cert for the Kubernetes API server.",
tokenReviewerJwt:
"Optional JWT token for accessing Kubernetes TokenReview API. If provided, this long-lived token will be used to validate service account tokens during authentication. If omitted, the client's own JWT will be used instead, which requires the client to have the system:auth-delegator ClusterRole binding.",
"The new long-lived service account JWT token for Infisical to access the TokenReview API to validate other service account JWT tokens submitted by applications/pods.",
allowedNamespaces:
"The new comma-separated list of trusted namespaces that service accounts must belong to authenticate with Infisical.",
allowedNames: "The new comma-separated list of trusted service account names that can authenticate with Infisical.",
@ -631,10 +631,7 @@ export const FOLDERS = {
workspaceId: "The ID of the project to list folders from.",
environment: "The slug of the environment to list folders from.",
path: "The path to list folders from.",
directory: "The directory to list folders from. (Deprecated in favor of path)",
recursive: "Whether or not to fetch all folders from the specified base path, and all of its subdirectories.",
lastSecretModified:
"The timestamp used to filter folders with secrets modified after the specified date. The format for this timestamp is ISO 8601 (e.g. 2025-04-01T09:41:45-04:00)"
directory: "The directory to list folders from. (Deprecated in favor of path)"
},
GET_BY_ID: {
folderId: "The ID of the folder to get details."
@ -818,8 +815,7 @@ export const DASHBOARD = {
search: "The text string to filter secret keys and folder names by.",
includeSecrets: "Whether to include project secrets in the response.",
includeFolders: "Whether to include project folders in the response.",
includeDynamicSecrets: "Whether to include dynamic project secrets in the response.",
includeImports: "Whether to include project secret imports in the response."
includeDynamicSecrets: "Whether to include dynamic project secrets in the response."
},
SECRET_DETAILS_LIST: {
projectId: "The ID of the project to list secrets/folders from.",
@ -842,13 +838,9 @@ export const AUDIT_LOGS = {
EXPORT: {
projectId:
"Optionally filter logs by project ID. If not provided, logs from the entire organization will be returned.",
environment:
"The environment to filter logs by. If not provided, logs from all environments will be returned. Note that the projectId parameter must also be provided.",
eventType: "The type of the event to export.",
secretPath:
"The path of the secret to query audit logs for. Note that the projectId parameter must also be provided.",
secretKey:
"The key of the secret to query audit logs for. Note that the projectId parameter must also be provided.",
userAgentType: "Choose which consuming application to export audit logs for.",
eventMetadata:
"Filter by event metadata key-value pairs. Formatted as `key1=value1,key2=value2`, with comma-separation.",

View File

@ -28,8 +28,8 @@ export const createDigestAuthRequestInterceptor = (
nc += 1;
const nonceCount = nc.toString(16).padStart(8, "0");
const cnonce = crypto.randomBytes(24).toString("hex");
const realm = authDetails.find((el) => el[0].toLowerCase().indexOf("realm") > -1)?.[1]?.replaceAll('"', "") || "";
const nonce = authDetails.find((el) => el[0].toLowerCase().indexOf("nonce") > -1)?.[1]?.replaceAll('"', "") || "";
const realm = authDetails.find((el) => el[0].toLowerCase().indexOf("realm") > -1)?.[1].replace(/"/g, "");
const nonce = authDetails.find((el) => el[0].toLowerCase().indexOf("nonce") > -1)?.[1].replace(/"/g, "");
const ha1 = crypto.createHash("md5").update(`${username}:${realm}:${password}`).digest("hex");
const path = opts.url;
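For context, the interceptor above is assembling a standard Digest response with qop="auth"; a minimal sketch of that computation per RFC 7616 (illustrative parameter names, not the file's exact code):

import crypto from "crypto";

const md5 = (value: string) => crypto.createHash("md5").update(value).digest("hex");

const digestResponse = (p: {
  username: string;
  realm: string;
  password: string;
  method: string;
  uri: string;
  nonce: string;
  nonceCount: string;
  cnonce: string;
}) => {
  const ha1 = md5(`${p.username}:${p.realm}:${p.password}`);
  const ha2 = md5(`${p.method}:${p.uri}`);
  return md5(`${ha1}:${p.nonce}:${p.nonceCount}:${p.cnonce}:auth:${ha2}`);
};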

View File

@ -1,35 +1,26 @@
type Base64Options = {
urlSafe?: boolean;
padding?: boolean;
};
const base64WithPadding = /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=|[A-Za-z0-9+/]{4})$/;
const base64WithoutPadding = /^[A-Za-z0-9+/]+$/;
const base64UrlWithPadding = /^(?:[A-Za-z0-9_-]{4})*(?:[A-Za-z0-9_-]{2}==|[A-Za-z0-9_-]{3}=|[A-Za-z0-9_-]{4})$/;
const base64UrlWithoutPadding = /^[A-Za-z0-9_-]+$/;
export const isBase64 = (str: string, options: Base64Options = {}): boolean => {
if (typeof str !== "string") {
throw new TypeError("Expected a string");
// Credit: https://github.com/miguelmota/is-base64
export const isBase64 = (
v: string,
opts = { allowEmpty: false, mimeRequired: false, allowMime: true, paddingRequired: false }
) => {
if (opts.allowEmpty === false && v === "") {
return false;
}
// Default padding to true unless urlSafe is true
const opts: Base64Options = {
urlSafe: false,
padding: options.urlSafe === undefined ? true : !options.urlSafe,
...options
};
let regex = "(?:[A-Za-z0-9+\\/]{4})*(?:[A-Za-z0-9+\\/]{2}==|[A-Za-z0-9+/]{3}=)?";
const mimeRegex = "(data:\\w+\\/[a-zA-Z\\+\\-\\.]+;base64,)";
if (str === "") return true;
let regex;
if (opts.urlSafe) {
regex = opts.padding ? base64UrlWithPadding : base64UrlWithoutPadding;
} else {
regex = opts.padding ? base64WithPadding : base64WithoutPadding;
if (opts.mimeRequired === true) {
regex = mimeRegex + regex;
} else if (opts.allowMime === true) {
regex = `${mimeRegex}?${regex}`;
}
return (!opts.padding || str.length % 4 === 0) && regex.test(str);
if (opts.paddingRequired === false) {
regex = "(?:[A-Za-z0-9+\\/]{4})*(?:[A-Za-z0-9+\\/]{2}(==)?|[A-Za-z0-9+\\/]{3}=?)?";
}
return new RegExp(`^${regex}$`, "gi").test(v);
};
export const getBase64SizeInBytes = (base64String: string) => {
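A few illustrative calls against the { urlSafe, padding } variant of isBase64 shown above (a sketch, assuming that variant is the one in effect):

isBase64("aGVsbG8=");                    // true  – standard alphabet, padded
isBase64("aGVsbG8", { padding: false }); // true  – padding not required
isBase64("aGVs_bG8", { urlSafe: true }); // true  – URL-safe alphabet, padding not expected
isBase64("hello world");                 // false – space is not a base64 character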

View File

@ -1,42 +0,0 @@
import { extractX509CertFromChain } from "./extract-certificate";
describe("Extract Certificate Payload", () => {
test("Single chain", () => {
const payload = `-----BEGIN CERTIFICATE-----
MIIEZzCCA0+gAwIBAgIUDk9+HZcMHppiNy0TvoBg8/aMEqIwDQYJKoZIhvcNAQEL
BQAwDTELMAkGA1UEChMCUEgwHhcNMjQxMDI1MTU0MjAzWhcNMjUxMDI1MjE0MjAz
-----END CERTIFICATE-----`;
const result = extractX509CertFromChain(payload);
expect(result).toBeDefined();
expect(result?.length).toBe(1);
expect(result?.[0]).toEqual(payload);
});
test("Multiple chain", () => {
const payload = `-----BEGIN CERTIFICATE-----
MIIEZzCCA0+gAwIBAgIUDk9+HZcMHppiNy0TvoBg8/aMEqIwDQYJKoZIhvcNAQEL
BQAwDTELMAkGA1UEChMCUEgwHhcNMjQxMDI1MTU0MjAzWhcNMjUxMDI1MjE0MjAz
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIEZzCCA0+gAwIBAgIUDk9+HZcMHppiNy0TvoBg8/aMEqIwDQYJKoZIhvcNAQEL
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIEZzCCA0+gAwIBAgIUDk9+HZcMHppiNy0TvoBg8/aMEqIwDQYJKoZIhvcNAQEL
-----END CERTIFICATE-----`;
const result = extractX509CertFromChain(payload);
expect(result).toBeDefined();
expect(result?.length).toBe(3);
expect(result).toEqual([
`-----BEGIN CERTIFICATE-----
MIIEZzCCA0+gAwIBAgIUDk9+HZcMHppiNy0TvoBg8/aMEqIwDQYJKoZIhvcNAQEL
BQAwDTELMAkGA1UEChMCUEgwHhcNMjQxMDI1MTU0MjAzWhcNMjUxMDI1MjE0MjAz
-----END CERTIFICATE-----`,
`-----BEGIN CERTIFICATE-----
MIIEZzCCA0+gAwIBAgIUDk9+HZcMHppiNy0TvoBg8/aMEqIwDQYJKoZIhvcNAQEL
-----END CERTIFICATE-----`,
`-----BEGIN CERTIFICATE-----
MIIEZzCCA0+gAwIBAgIUDk9+HZcMHppiNy0TvoBg8/aMEqIwDQYJKoZIhvcNAQEL
-----END CERTIFICATE-----`
]);
});
});

View File

@ -1,51 +0,0 @@
import { BadRequestError } from "../errors";
export const extractX509CertFromChain = (certificateChain: string): string[] => {
if (!certificateChain) {
throw new BadRequestError({
message: "Certificate chain is empty or undefined"
});
}
const certificates: string[] = [];
let currentPosition = 0;
const chainLength = certificateChain.length;
while (currentPosition < chainLength) {
// Find the start of a certificate
const beginMarker = "-----BEGIN CERTIFICATE-----";
const startIndex = certificateChain.indexOf(beginMarker, currentPosition);
if (startIndex === -1) {
break; // No more certificates found
}
// Find the end of the certificate
const endMarker = "-----END CERTIFICATE-----";
const endIndex = certificateChain.indexOf(endMarker, startIndex);
if (endIndex === -1) {
throw new BadRequestError({
message: "Malformed certificate chain: Found BEGIN marker without matching END marker"
});
}
// Extract the complete certificate including markers
const completeEndIndex = endIndex + endMarker.length;
const certificate = certificateChain.substring(startIndex, completeEndIndex);
// Add the extracted certificate to our results
certificates.push(certificate);
// Move position to after this certificate
currentPosition = completeEndIndex;
}
if (certificates.length === 0) {
throw new BadRequestError({
message: "No valid certificates found in the chain"
});
}
return certificates;
};

View File

@ -68,23 +68,6 @@ export class ForbiddenRequestError extends Error {
}
}
export class PermissionBoundaryError extends ForbiddenRequestError {
constructor({
message,
name,
error,
details
}: {
message?: string;
name?: string;
error?: unknown;
details?: unknown;
}) {
super({ message, name, error, details });
this.name = "PermissionBoundaryError";
}
}
export class BadRequestError extends Error {
name: string;

View File

@ -93,7 +93,6 @@ export const pingGatewayAndVerify = async ({
let lastError: Error | null = null;
const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
throw new BadRequestError({
message: (err as Error)?.message,
error: err as Error
});
});

View File

@ -107,6 +107,12 @@ export const isValidIp = (ip: string) => {
return net.isIPv4(ip) || net.isIPv6(ip);
};
export const isValidHostname = (name: string) => {
const hostnameRegex = /^(?!:\/\/)(\*\.)?([a-zA-Z0-9-_]{1,63}\.?)+(?!:\/\/)([a-zA-Z]{2,63})$/;
return hostnameRegex.test(name);
};
export type TIp = {
ipAddress: string;
type: IPType;

View File

@ -1,61 +0,0 @@
import { BlockList } from "node:net";
import { BadRequestError } from "../errors";
// Define BlockList instances for each range type
const ipv4RangeLists: Record<string, BlockList> = {
unspecified: new BlockList(),
broadcast: new BlockList(),
multicast: new BlockList(),
linkLocal: new BlockList(),
loopback: new BlockList(),
carrierGradeNat: new BlockList(),
private: new BlockList(),
reserved: new BlockList()
};
// Add IPv4 CIDR ranges to each BlockList
ipv4RangeLists.unspecified.addSubnet("0.0.0.0", 8);
ipv4RangeLists.broadcast.addAddress("255.255.255.255");
ipv4RangeLists.multicast.addSubnet("224.0.0.0", 4);
ipv4RangeLists.linkLocal.addSubnet("169.254.0.0", 16);
ipv4RangeLists.loopback.addSubnet("127.0.0.0", 8);
ipv4RangeLists.carrierGradeNat.addSubnet("100.64.0.0", 10);
// IPv4 Private ranges
ipv4RangeLists.private.addSubnet("10.0.0.0", 8);
ipv4RangeLists.private.addSubnet("172.16.0.0", 12);
ipv4RangeLists.private.addSubnet("192.168.0.0", 16);
// IPv4 Reserved ranges
ipv4RangeLists.reserved.addSubnet("192.0.0.0", 24);
ipv4RangeLists.reserved.addSubnet("192.0.2.0", 24);
ipv4RangeLists.reserved.addSubnet("192.88.99.0", 24);
ipv4RangeLists.reserved.addSubnet("198.18.0.0", 15);
ipv4RangeLists.reserved.addSubnet("198.51.100.0", 24);
ipv4RangeLists.reserved.addSubnet("203.0.113.0", 24);
ipv4RangeLists.reserved.addSubnet("240.0.0.0", 4);
/**
* Checks if an IP address (IPv4) is private or public
* inspired by: https://github.com/whitequark/ipaddr.js/blob/main/lib/ipaddr.js
*/
export const getIpRange = (ip: string): string => {
try {
const rangeLists = ipv4RangeLists;
// Check each range type
for (const rangeName in rangeLists) {
if (Object.hasOwn(rangeLists, rangeName)) {
if (rangeLists[rangeName].check(ip)) {
return rangeName;
}
}
}
// If no range matched, it's a public address
return "unicast";
} catch (error) {
throw new BadRequestError({ message: "Invalid IP address", error });
}
};
export const isPrivateIp = (ip: string) => getIpRange(ip) !== "unicast";
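A short sketch of how the range helpers above classify addresses (illustrative):

getIpRange("10.1.2.3");      // "private"  – 10.0.0.0/8
getIpRange("127.0.0.1");     // "loopback" – 127.0.0.0/8
getIpRange("8.8.8.8");       // "unicast"  – matches no special-purpose range
isPrivateIp("192.168.1.10"); // true
isPrivateIp("1.1.1.1");      // false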

View File

@ -1,21 +0,0 @@
import handlebars from "handlebars";
import { BadRequestError } from "../errors";
import { logger } from "../logger";
type SanitizationArg = {
allowedExpressions?: (arg: string) => boolean;
};
export const validateHandlebarTemplate = (templateName: string, template: string, dto: SanitizationArg) => {
const parsedAst = handlebars.parse(template);
parsedAst.body.forEach((el) => {
if (el.type === "ContentStatement") return;
if (el.type === "MustacheStatement" && "path" in el) {
const { path } = el as { type: "MustacheStatement"; path: { type: "PathExpression"; original: string } };
if (path.type === "PathExpression" && dto?.allowedExpressions?.(path.original)) return;
}
logger.error(el, "Template sanitization failed");
throw new BadRequestError({ message: `Template sanitization failed: ${templateName}` });
});
};

View File

@ -1,11 +1,5 @@
import { CharacterType, characterValidator } from "./validate-string";
// regex to allow only alphanumeric, dash, underscore
export const isValidFolderName = characterValidator([
CharacterType.AlphaNumeric,
CharacterType.Hyphen,
CharacterType.Underscore
]);
export const isValidFolderName = (name: string) => /^[a-zA-Z0-9-_]+$/.test(name);
export const isValidSecretPath = (path: string) =>
path

View File

@ -1,23 +0,0 @@
import { CharacterType, characterValidator } from "./validate-string";
describe("validate-string", () => {
test("Check alphabets", () => {
expect(characterValidator([CharacterType.Alphabets])("hello")).toBeTruthy();
expect(characterValidator([CharacterType.Alphabets])("hello world")).toBeFalsy();
expect(characterValidator([CharacterType.Alphabets, CharacterType.Spaces])("hello world")).toBeTruthy();
});
test("Check numbers", () => {
expect(characterValidator([CharacterType.Numbers])("1234567890")).toBeTruthy();
expect(characterValidator([CharacterType.AlphaNumeric])("helloWORLD1234567890")).toBeTruthy();
expect(characterValidator([CharacterType.AlphaNumeric])("helloWORLD1234567890-")).toBeFalsy();
});
test("Check special characters", () => {
expect(characterValidator([CharacterType.AlphaNumeric, CharacterType.Hyphen])("Hello-World")).toBeTruthy();
expect(characterValidator([CharacterType.AlphaNumeric, CharacterType.Plus])("Hello+World")).toBeTruthy();
expect(characterValidator([CharacterType.AlphaNumeric, CharacterType.Underscore])("Hello_World")).toBeTruthy();
expect(characterValidator([CharacterType.AlphaNumeric, CharacterType.Colon])("Hello:World")).toBeTruthy();
expect(characterValidator([CharacterType.AlphaNumeric, CharacterType.Underscore])("Hello World")).toBeFalsy();
});
});

View File

@ -1,103 +0,0 @@
export enum CharacterType {
Alphabets = "alphabets",
Numbers = "numbers",
AlphaNumeric = "alpha-numeric",
Spaces = "spaces",
SpecialCharacters = "specialCharacters",
Punctuation = "punctuation",
Period = "period", // .
Underscore = "underscore", // _
Colon = "colon", // :
ForwardSlash = "forwardSlash", // /
Equals = "equals", // =
Plus = "plus", // +
Hyphen = "hyphen", // -
At = "at", // @
// Additional individual characters that might be useful
Asterisk = "asterisk", // *
Ampersand = "ampersand", // &
Question = "question", // ?
Hash = "hash", // #
Percent = "percent", // %
Dollar = "dollar", // $
Caret = "caret", // ^
Backtick = "backtick", // `
Pipe = "pipe", // |
Backslash = "backslash", // \
OpenParen = "openParen", // (
CloseParen = "closeParen", // )
OpenBracket = "openBracket", // [
CloseBracket = "closeBracket", // ]
OpenBrace = "openBrace", // {
CloseBrace = "closeBrace", // }
LessThan = "lessThan", // <
GreaterThan = "greaterThan", // >
SingleQuote = "singleQuote", // '
DoubleQuote = "doubleQuote", // "
Comma = "comma", // ,
Semicolon = "semicolon", // ;
Exclamation = "exclamation", // !
Fullstop = "fullStop" // .
}
/**
* Validates if a string contains only specific types of characters
*/
export const characterValidator = (allowedCharacters: CharacterType[]) => {
// Create a regex pattern based on allowed character types
const patternMap: Record<CharacterType, string> = {
[CharacterType.Alphabets]: "a-zA-Z",
[CharacterType.Numbers]: "0-9",
[CharacterType.AlphaNumeric]: "a-zA-Z0-9",
[CharacterType.Spaces]: "\\s",
[CharacterType.SpecialCharacters]: "!@#$%^&*()_+\\-=\\[\\]{}|;:'\",.<>/?\\\\",
[CharacterType.Punctuation]: "\\.\\,\\;\\:\\!\\?",
[CharacterType.Colon]: "\\:",
[CharacterType.ForwardSlash]: "\\/",
[CharacterType.Underscore]: "_",
[CharacterType.Hyphen]: "\\-",
[CharacterType.Period]: "\\.",
[CharacterType.Equals]: "=",
[CharacterType.Plus]: "\\+",
[CharacterType.At]: "@",
[CharacterType.Asterisk]: "\\*",
[CharacterType.Ampersand]: "&",
[CharacterType.Question]: "\\?",
[CharacterType.Hash]: "#",
[CharacterType.Percent]: "%",
[CharacterType.Dollar]: "\\$",
[CharacterType.Caret]: "\\^",
[CharacterType.Backtick]: "`",
[CharacterType.Pipe]: "\\|",
[CharacterType.Backslash]: "\\\\",
[CharacterType.OpenParen]: "\\(",
[CharacterType.CloseParen]: "\\)",
[CharacterType.OpenBracket]: "\\[",
[CharacterType.CloseBracket]: "\\]",
[CharacterType.OpenBrace]: "\\{",
[CharacterType.CloseBrace]: "\\}",
[CharacterType.LessThan]: "<",
[CharacterType.GreaterThan]: ">",
[CharacterType.SingleQuote]: "'",
[CharacterType.DoubleQuote]: '\\"',
[CharacterType.Comma]: ",",
[CharacterType.Semicolon]: ";",
[CharacterType.Exclamation]: "!",
[CharacterType.Fullstop]: "."
};
// Combine patterns from allowed characters
const combinedPattern = allowedCharacters.map((char) => patternMap[char]).join("");
// Create a regex that matches only the allowed characters
const regex = new RegExp(`^[${combinedPattern}]+$`);
/**
* Validates if the input string contains only the allowed character types
* @param input String to validate
* @returns Boolean indicating if the string is valid
*/
return function validate(input: string): boolean {
return regex.test(input);
};
};

View File

@ -1,15 +0,0 @@
import { isFQDN } from "./validate-url";
describe("isFQDN", () => {
test("Non wildcard", () => {
expect(isFQDN("www.example.com")).toBeTruthy();
});
test("Wildcard", () => {
expect(isFQDN("*.example.com", { allow_wildcard: true })).toBeTruthy();
});
test("Wildcard FQDN fails on option allow_wildcard false", () => {
expect(isFQDN("*.example.com")).toBeFalsy();
});
});

View File

@ -1,117 +1,18 @@
import dns from "node:dns/promises";
import { isIPv4 } from "net";
import { getConfig } from "../config/env";
import { BadRequestError } from "../errors";
import { isPrivateIp } from "../ip/ipRange";
export const blockLocalAndPrivateIpAddresses = async (url: string) => {
export const blockLocalAndPrivateIpAddresses = (url: string) => {
const validUrl = new URL(url);
const inputHostIps: string[] = [];
if (isIPv4(validUrl.host)) {
inputHostIps.push(validUrl.host);
} else {
if (validUrl.host === "localhost" || validUrl.host === "host.docker.internal") {
throw new BadRequestError({ message: "Local IPs not allowed as URL" });
}
const resolvedIps = await dns.resolve4(validUrl.host);
inputHostIps.push(...resolvedIps);
}
const isInternalIp = inputHostIps.some((el) => isPrivateIp(el));
if (isInternalIp) throw new BadRequestError({ message: "Local IPs not allowed as URL" });
};
type FQDNOptions = {
require_tld?: boolean;
allow_underscores?: boolean;
allow_trailing_dot?: boolean;
allow_numeric_tld?: boolean;
allow_wildcard?: boolean;
ignore_max_length?: boolean;
};
const defaultFqdnOptions: FQDNOptions = {
require_tld: true,
allow_underscores: false,
allow_trailing_dot: false,
allow_numeric_tld: false,
allow_wildcard: false,
ignore_max_length: false
};
// credits: https://github.com/validatorjs/validator.js/blob/f5da7fb6ed59b94695e6fcb2e970c80029509919/src/lib/isFQDN.js#L13
export const isFQDN = (str: string, options: FQDNOptions = {}): boolean => {
if (typeof str !== "string") {
throw new TypeError("Expected a string");
}
// Apply default options
const opts: FQDNOptions = {
...defaultFqdnOptions,
...options
};
let testStr = str;
/* Remove the optional trailing dot before checking validity */
if (opts.allow_trailing_dot && str[str.length - 1] === ".") {
testStr = testStr.substring(0, str.length - 1);
}
/* Remove the optional wildcard before checking validity */
if (opts.allow_wildcard === true && str.indexOf("*.") === 0) {
testStr = testStr.substring(2);
}
const parts = testStr.split(".");
const tld = parts[parts.length - 1];
if (opts.require_tld) {
// disallow fqdns without tld
if (parts.length < 2) {
return false;
}
if (
!opts.allow_numeric_tld &&
!/^([a-z\u00A1-\u00A8\u00AA-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]{2,}|xn[a-z0-9-]{2,})$/i.test(tld)
) {
return false;
}
// disallow spaces
if (/\s/.test(tld)) {
return false;
}
}
// reject numeric TLDs
if (!opts.allow_numeric_tld && /^\d+$/.test(tld)) {
return false;
}
return parts.every((part) => {
if (part.length > 63 && !opts.ignore_max_length) {
return false;
}
if (!/^[a-z_\u00a1-\uffff0-9-]+$/i.test(part)) {
return false;
}
// disallow full-width chars
if (/[\uff01-\uff5e]/.test(part)) {
return false;
}
// disallow parts starting or ending with hyphen
if (/^-|-$/.test(part)) {
return false;
}
if (!opts.allow_underscores && /_/.test(part)) {
return false;
}
return true;
});
const appCfg = getConfig();
// on cloud local ips are not allowed
if (
appCfg.isCloud &&
(validUrl.host === "host.docker.internal" ||
validUrl.host.match(/^10\.\d+\.\d+\.\d+/) ||
validUrl.host.match(/^192\.168\.\d+\.\d+/))
)
throw new BadRequestError({ message: "Local IPs not allowed as URL" });
if (validUrl.host === "localhost" || validUrl.host === "127.0.0.1")
throw new BadRequestError({ message: "Localhost not allowed" });
};

View File

@ -1,8 +1,6 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";
interface SlugSchemaInputs {
min?: number;
max?: number;
@ -29,13 +27,4 @@ export const GenericResourceNameSchema = z
.trim()
.min(1, { message: "Name must be at least 1 character" })
.max(64, { message: "Name must be 64 or fewer characters" })
.refine(
(val) =>
characterValidator([
CharacterType.AlphaNumeric,
CharacterType.Hyphen,
CharacterType.Underscore,
CharacterType.Spaces
])(val),
"Name can only contain alphanumeric characters, dashes, underscores, and spaces"
);
.regex(/^[a-zA-Z0-9\-_\s]+$/, "Name can only contain alphanumeric characters, dashes, underscores, and spaces");

View File

@ -13,7 +13,6 @@ import {
InternalServerError,
NotFoundError,
OidcAuthError,
PermissionBoundaryError,
RateLimitError,
ScimRequestError,
UnauthorizedError
@ -118,7 +117,7 @@ export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider
conditions: el.conditions
}))
});
} else if (error instanceof ForbiddenRequestError || error instanceof PermissionBoundaryError) {
} else if (error instanceof ForbiddenRequestError) {
void res.status(HttpStatusCodes.Forbidden).send({
reqId: req.id,
statusCode: HttpStatusCodes.Forbidden,

View File

@ -413,14 +413,7 @@ export const registerRoutes = async (
serviceTokenDAL,
projectDAL
});
const licenseService = licenseServiceFactory({
permissionService,
orgDAL,
licenseDAL,
keyStore,
identityOrgMembershipDAL,
projectDAL
});
const licenseService = licenseServiceFactory({ permissionService, orgDAL, licenseDAL, keyStore });
const hsmService = hsmServiceFactory({
hsmModule,
@ -662,7 +655,6 @@ export const registerRoutes = async (
});
const orgAdminService = orgAdminServiceFactory({
smtpService,
projectDAL,
permissionService,
projectUserMembershipRoleDAL,
@ -965,8 +957,7 @@ export const registerRoutes = async (
projectSlackConfigDAL,
slackIntegrationDAL,
projectTemplateService,
groupProjectDAL,
smtpService
groupProjectDAL
});
const projectEnvService = projectEnvServiceFactory({

View File

@ -70,19 +70,6 @@ export const DefaultResponseErrorsSchema = {
})
};
export const booleanSchema = z
.union([z.boolean(), z.string().trim()])
.transform((value) => {
if (typeof value === "string") {
// i.e. return true unless the string is empty, "0", or "false" (case-insensitive)
return Boolean(value) && Number(value) !== 0 && value.toLowerCase() !== "false";
}
return value;
})
.optional()
.default(true);
export const sapPubSchema = SecretApprovalPoliciesSchema.merge(
z.object({
environment: z.object({
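The querystring boolean coercion at the top of this hunk is easiest to read from a few concrete parses; an illustrative sketch assuming the booleanSchema definition shown above:

booleanSchema.parse(undefined); // true  – falls back to the default
booleanSchema.parse("true");    // true
booleanSchema.parse("1");       // true
booleanSchema.parse("false");   // false
booleanSchema.parse("0");       // false
booleanSchema.parse("");        // false – empty string coerces to false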

View File

@ -6,7 +6,6 @@ import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertExtendedKeyUsage, CertKeyAlgorithm, CertKeyUsage } from "@app/services/certificate/certificate-types";
@ -15,7 +14,6 @@ import {
validateAltNamesField,
validateCaDateField
} from "@app/services/certificate-authority/certificate-authority-validators";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
export const registerCaRouter = async (server: FastifyZodProvider) => {
server.route({
@ -651,16 +649,6 @@ export const registerCaRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.IssueCert,
distinctId: getTelemetryDistinctId(req),
properties: {
caId: ca.id,
commonName: req.body.commonName,
...req.auditLogInfo
}
});
return {
certificate,
certificateChain,
@ -719,7 +707,7 @@ export const registerCaRouter = async (server: FastifyZodProvider) => {
}
},
handler: async (req) => {
const { certificate, certificateChain, issuingCaCertificate, serialNumber, ca, commonName } =
const { certificate, certificateChain, issuingCaCertificate, serialNumber, ca } =
await server.services.certificateAuthority.signCertFromCa({
isInternal: false,
caId: req.params.caId,
@ -743,16 +731,6 @@ export const registerCaRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SignCert,
distinctId: getTelemetryDistinctId(req),
properties: {
caId: ca.id,
commonName,
...req.auditLogInfo
}
});
return {
certificate: certificate.toString("pem"),
certificateChain,

View File

@ -5,7 +5,6 @@ import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { CERTIFICATE_AUTHORITIES, CERTIFICATES } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertExtendedKeyUsage, CertKeyUsage, CrlReason } from "@app/services/certificate/certificate-types";
@ -13,7 +12,6 @@ import {
validateAltNamesField,
validateCaDateField
} from "@app/services/certificate-authority/certificate-authority-validators";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
export const registerCertRouter = async (server: FastifyZodProvider) => {
server.route({
@ -152,17 +150,6 @@ export const registerCertRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.IssueCert,
distinctId: getTelemetryDistinctId(req),
properties: {
caId: req.body.caId,
certificateTemplateId: req.body.certificateTemplateId,
commonName: req.body.commonName,
...req.auditLogInfo
}
});
return {
certificate,
certificateChain,
@ -241,7 +228,7 @@ export const registerCertRouter = async (server: FastifyZodProvider) => {
}
},
handler: async (req) => {
const { certificate, certificateChain, issuingCaCertificate, serialNumber, ca, commonName } =
const { certificate, certificateChain, issuingCaCertificate, serialNumber, ca } =
await server.services.certificateAuthority.signCertFromCa({
isInternal: false,
actor: req.permission.type,
@ -264,17 +251,6 @@ export const registerCertRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SignCert,
distinctId: getTelemetryDistinctId(req),
properties: {
caId: req.body.caId,
certificateTemplateId: req.body.certificateTemplateId,
commonName,
...req.auditLogInfo
}
});
return {
certificate: certificate.toString("pem"),
certificateChain,

View File

@ -16,12 +16,7 @@ import { secretsLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { getUserAgentType } from "@app/server/plugins/audit-log";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import {
booleanSchema,
SanitizedDynamicSecretSchema,
SanitizedTagSchema,
secretRawSchema
} from "@app/server/routes/sanitizedSchemas";
import { SanitizedDynamicSecretSchema, SanitizedTagSchema, secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";
import { SecretsOrderBy } from "@app/services/secret/secret-types";
@ -29,6 +24,20 @@ import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
const MAX_DEEP_SEARCH_LIMIT = 500; // arbitrary limit to prevent excessive results
// handle querystring boolean values
const booleanSchema = z
.union([z.boolean(), z.string().trim()])
.transform((value) => {
if (typeof value === "string") {
// i.e. return true unless the string is empty, "0", or "false" (case-insensitive)
return Boolean(value) && Number(value) !== 0 && value.toLowerCase() !== "false";
}
return value;
})
.optional()
.default(true);
const parseSecretPathSearch = (search?: string) => {
if (!search)
return {
@ -100,7 +109,6 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
search: z.string().trim().describe(DASHBOARD.SECRET_OVERVIEW_LIST.search).optional(),
includeSecrets: booleanSchema.describe(DASHBOARD.SECRET_OVERVIEW_LIST.includeSecrets),
includeFolders: booleanSchema.describe(DASHBOARD.SECRET_OVERVIEW_LIST.includeFolders),
includeImports: booleanSchema.describe(DASHBOARD.SECRET_OVERVIEW_LIST.includeImports),
includeDynamicSecrets: booleanSchema.describe(DASHBOARD.SECRET_OVERVIEW_LIST.includeDynamicSecrets)
}),
response: {
@ -116,17 +124,9 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
})
.array()
.optional(),
imports: SecretImportsSchema.omit({ importEnv: true })
.extend({
importEnv: z.object({ name: z.string(), slug: z.string(), id: z.string() }),
environment: z.string()
})
.array()
.optional(),
totalFolderCount: z.number().optional(),
totalDynamicSecretCount: z.number().optional(),
totalSecretCount: z.number().optional(),
totalImportCount: z.number().optional(),
totalCount: z.number()
})
}
@ -143,7 +143,6 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
orderDirection,
includeFolders,
includeSecrets,
includeImports,
includeDynamicSecrets
} = req.query;
@ -160,7 +159,6 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
let remainingLimit = limit;
let adjustedOffset = offset;
let imports: Awaited<ReturnType<typeof server.services.secretImport.getImportsMultiEnv>> | undefined;
let folders: Awaited<ReturnType<typeof server.services.folder.getFoldersMultiEnv>> | undefined;
let secrets: Awaited<ReturnType<typeof server.services.secret.getSecretsRawMultiEnv>> | undefined;
let dynamicSecrets:
@ -170,53 +168,6 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
let totalFolderCount: number | undefined;
let totalDynamicSecretCount: number | undefined;
let totalSecretCount: number | undefined;
let totalImportCount: number | undefined;
if (includeImports) {
totalImportCount = await server.services.secretImport.getProjectImportMultiEnvCount({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
projectId,
environments,
path: secretPath,
search
});
if (remainingLimit > 0 && totalImportCount > adjustedOffset) {
imports = await server.services.secretImport.getImportsMultiEnv({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
projectId,
environments,
path: secretPath,
search,
limit: remainingLimit,
offset: adjustedOffset
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.GET_SECRET_IMPORTS,
metadata: {
environment: environments.join(","),
folderId: imports?.[0]?.folderId,
numberOfImports: imports.length
}
}
});
remainingLimit -= imports.length;
adjustedOffset = 0;
} else {
adjustedOffset = Math.max(0, adjustedOffset - totalImportCount);
}
}
if (includeFolders) {
// this is the unique count, ie duplicate folders across envs only count as 1
@ -394,13 +345,10 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
folders,
dynamicSecrets,
secrets,
imports,
totalFolderCount,
totalDynamicSecretCount,
totalImportCount,
totalSecretCount,
totalCount:
(totalFolderCount ?? 0) + (totalDynamicSecretCount ?? 0) + (totalSecretCount ?? 0) + (totalImportCount ?? 0)
totalCount: (totalFolderCount ?? 0) + (totalDynamicSecretCount ?? 0) + (totalSecretCount ?? 0)
};
}
});
@ -897,7 +845,6 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
projectId: z.string().trim(),
environment: z.string().trim(),
secretPath: z.string().trim().default("/").transform(removeTrailingSlash),
recursive: booleanSchema.default(false),
filterByAction: z
.enum([ProjectPermissionSecretActions.DescribeSecret, ProjectPermissionSecretActions.ReadValue])
.default(ProjectPermissionSecretActions.ReadValue)
@ -916,7 +863,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { projectId, environment, secretPath, filterByAction, recursive } = req.query;
const { projectId, environment, secretPath, filterByAction } = req.query;
const { secrets } = await server.services.secret.getAccessibleSecrets({
actorId: req.permission.id,
@ -926,8 +873,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
environment,
secretPath,
projectId,
filterByAction,
recursive
filterByAction
});
return { secrets };

View File

@ -24,7 +24,7 @@ const IdentityKubernetesAuthResponseSchema = IdentityKubernetesAuthsSchema.pick(
allowedAudience: true
}).extend({
caCert: z.string(),
tokenReviewerJwt: z.string().optional().nullable()
tokenReviewerJwt: z.string()
});
export const registerIdentityKubernetesRouter = async (server: FastifyZodProvider) => {
@ -98,7 +98,7 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
.object({
kubernetesHost: z.string().trim().min(1).describe(KUBERNETES_AUTH.ATTACH.kubernetesHost),
caCert: z.string().trim().default("").describe(KUBERNETES_AUTH.ATTACH.caCert),
tokenReviewerJwt: z.string().trim().optional().describe(KUBERNETES_AUTH.ATTACH.tokenReviewerJwt),
tokenReviewerJwt: z.string().trim().min(1).describe(KUBERNETES_AUTH.ATTACH.tokenReviewerJwt),
allowedNamespaces: z.string().describe(KUBERNETES_AUTH.ATTACH.allowedNamespaces), // TODO: validation
allowedNames: z.string().describe(KUBERNETES_AUTH.ATTACH.allowedNames),
allowedAudience: z.string().describe(KUBERNETES_AUTH.ATTACH.allowedAudience),
@ -195,7 +195,7 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
.object({
kubernetesHost: z.string().trim().min(1).optional().describe(KUBERNETES_AUTH.UPDATE.kubernetesHost),
caCert: z.string().trim().optional().describe(KUBERNETES_AUTH.UPDATE.caCert),
tokenReviewerJwt: z.string().trim().nullable().optional().describe(KUBERNETES_AUTH.UPDATE.tokenReviewerJwt),
tokenReviewerJwt: z.string().trim().min(1).optional().describe(KUBERNETES_AUTH.UPDATE.tokenReviewerJwt),
allowedNamespaces: z.string().optional().describe(KUBERNETES_AUTH.UPDATE.allowedNamespaces), // TODO: validation
allowedNames: z.string().optional().describe(KUBERNETES_AUTH.UPDATE.allowedNames),
allowedAudience: z.string().optional().describe(KUBERNETES_AUTH.UPDATE.allowedAudience),

View File

@ -32,8 +32,6 @@ const IdentityOidcAuthResponseSchema = IdentityOidcAuthsSchema.pick({
caCert: z.string()
});
const MAX_OIDC_CLAIM_SIZE = 32_768;
export const registerIdentityOidcAuthRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
@ -57,7 +55,7 @@ export const registerIdentityOidcAuthRouter = async (server: FastifyZodProvider)
}
},
handler: async (req) => {
const { identityOidcAuth, accessToken, identityAccessToken, identityMembershipOrg, oidcTokenData } =
const { identityOidcAuth, accessToken, identityAccessToken, identityMembershipOrg } =
await server.services.identityOidcAuth.login({
identityId: req.body.identityId,
jwt: req.body.jwt
@ -71,11 +69,7 @@ export const registerIdentityOidcAuthRouter = async (server: FastifyZodProvider)
metadata: {
identityId: identityOidcAuth.identityId,
identityAccessTokenId: identityAccessToken.id,
identityOidcAuthId: identityOidcAuth.id,
oidcClaimsReceived:
Buffer.from(JSON.stringify(oidcTokenData), "utf8").byteLength < MAX_OIDC_CLAIM_SIZE
? oidcTokenData
: { payload: "Error: Payload exceeds 32KB, provided oidc claim not recorded in audit log." }
identityOidcAuthId: identityOidcAuth.id
}
}
});

View File

@ -4,6 +4,7 @@ import {
AuditLogsSchema,
GroupsSchema,
IncidentContactsSchema,
OrganizationsSchema,
OrgMembershipsSchema,
OrgRolesSchema,
UsersSchema
@ -56,7 +57,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
organization: sanitizedOrganizationSchema
organization: OrganizationsSchema
})
}
},
@ -111,14 +112,12 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
description: "Get all audit logs for an organization",
querystring: z.object({
projectId: z.string().optional().describe(AUDIT_LOGS.EXPORT.projectId),
environment: z.string().optional().describe(AUDIT_LOGS.EXPORT.environment),
actorType: z.nativeEnum(ActorType).optional(),
secretPath: z
.string()
.optional()
.transform((val) => (!val ? val : removeTrailingSlash(val)))
.describe(AUDIT_LOGS.EXPORT.secretPath),
secretKey: z.string().optional().describe(AUDIT_LOGS.EXPORT.secretKey),
// eventType is split with , for multiple values, we need to transform it to array
eventType: z
@ -264,7 +263,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
response: {
200: z.object({
message: z.string(),
organization: sanitizedOrganizationSchema
organization: OrganizationsSchema
})
}
},

View File

@ -8,17 +8,15 @@ import {
ProjectSlackConfigsSchema,
ProjectType,
SecretFoldersSchema,
SortDirection,
UserEncryptionKeysSchema,
UsersSchema
} from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { PROJECTS } from "@app/lib/api-docs";
import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
import { ProjectFilterType, SearchProjectSortBy } from "@app/services/project/project-types";
import { AuthMode } from "@app/services/auth/auth-type";
import { ProjectFilterType } from "@app/services/project/project-types";
import { validateSlackChannelsField } from "@app/services/slack/slack-auth-validators";
import { integrationAuthPubSchema, SanitizedProjectSchema } from "../sanitizedSchemas";
@ -706,107 +704,4 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
return environmentsFolders;
}
});
server.route({
method: "POST",
url: "/search",
config: {
rateLimit: readLimit
},
schema: {
body: z.object({
limit: z.number().default(100),
offset: z.number().default(0),
type: z.nativeEnum(ProjectType).optional(),
orderBy: z.nativeEnum(SearchProjectSortBy).optional().default(SearchProjectSortBy.NAME),
orderDirection: z.nativeEnum(SortDirection).optional().default(SortDirection.ASC),
name: z
.string()
.trim()
.refine((val) => characterValidator([CharacterType.AlphaNumeric, CharacterType.Hyphen])(val), {
message: "Invalid pattern: only alphanumeric characters, - are allowed."
})
.optional()
}),
response: {
200: z.object({
projects: SanitizedProjectSchema.extend({ isMember: z.boolean() }).array(),
totalCount: z.number()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { docs: projects, totalCount } = await server.services.project.searchProjects({
permission: req.permission,
...req.body
});
return { projects, totalCount };
}
});
server.route({
method: "POST",
url: "/:workspaceId/project-access",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
workspaceId: z.string().trim()
}),
body: z.object({
comment: z
.string()
.trim()
.max(2500)
.refine(
(val) =>
characterValidator([
CharacterType.AlphaNumeric,
CharacterType.Hyphen,
CharacterType.Comma,
CharacterType.Fullstop,
CharacterType.Spaces,
CharacterType.Exclamation
])(val),
{
message: "Invalid pattern: only alphanumeric characters, spaces, -.!, are allowed."
}
)
.optional()
}),
response: {
200: z.object({
message: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
await server.services.project.requestProjectAccess({
permission: req.permission,
comment: req.body.comment,
projectId: req.params.workspaceId
});
if (req.auth.actor === ActorType.USER) {
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.params.workspaceId,
event: {
type: EventType.PROJECT_ACCESS_REQUEST,
metadata: {
projectId: req.params.workspaceId,
requesterEmail: req.auth.user.email || req.auth.user.username,
requesterId: req.auth.userId
}
}
});
}
return { message: "Project access request has been send to project admins" };
}
});
};

View File

@ -9,8 +9,6 @@ import { readLimit, secretsLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { booleanSchema } from "../sanitizedSchemas";
export const registerSecretFolderRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/",
@ -335,7 +333,6 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
querystring: z.object({
workspaceId: z.string().trim().describe(FOLDERS.LIST.workspaceId),
environment: z.string().trim().describe(FOLDERS.LIST.environment),
lastSecretModified: z.string().datetime().trim().optional().describe(FOLDERS.LIST.lastSecretModified),
path: z
.string()
.trim()
@ -350,14 +347,11 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
.default("/")
.transform(prefixWithSlash)
.transform(removeTrailingSlash)
.describe(FOLDERS.LIST.directory),
recursive: booleanSchema.default(false).describe(FOLDERS.LIST.recursive)
.describe(FOLDERS.LIST.directory)
}),
response: {
200: z.object({
folders: SecretFoldersSchema.extend({
relativePath: z.string().optional()
}).array()
folders: SecretFoldersSchema.array()
})
}
},

View File

@ -1,6 +1,7 @@
import { z } from "zod";
import {
OrganizationsSchema,
OrgMembershipsSchema,
ProjectMembershipsSchema,
ProjectsSchema,
@ -14,7 +15,6 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { GenericResourceNameSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
import { sanitizedOrganizationSchema } from "@app/services/org/org-schema";
export const registerOrgRouter = async (server: FastifyZodProvider) => {
server.route({
@ -335,7 +335,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
organization: sanitizedOrganizationSchema
organization: OrganizationsSchema
})
}
},
@ -365,7 +365,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
organization: sanitizedOrganizationSchema,
organization: OrganizationsSchema,
accessToken: z.string()
})
}
@ -396,30 +396,4 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
return { organization, accessToken: tokens.accessToken };
}
});
server.route({
method: "POST",
url: "/privilege-system-upgrade",
config: {
rateLimit: writeLimit
},
schema: {
response: {
200: z.object({
organization: sanitizedOrganizationSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const organization = await server.services.org.upgradePrivilegeSystem({
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
orgId: req.permission.orgId
});
return { organization };
}
});
};

View File

@ -7,11 +7,9 @@ import { z } from "zod";
import { ActionProjectType, ProjectType, TCertificateAuthorities, TCertificateTemplates } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { extractX509CertFromChain } from "@app/lib/certificates/extract-certificate";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { isFQDN } from "@app/lib/validator/validate-url";
import { TCertificateBodyDALFactory } from "@app/services/certificate/certificate-body-dal";
import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
@ -60,6 +58,7 @@ import {
TSignIntermediateDTO,
TUpdateCaDTO
} from "./certificate-authority-types";
import { hostnameRegex } from "./certificate-authority-validators";
type TCertificateAuthorityServiceFactoryDep = {
certificateAuthorityDAL: Pick<
@ -1018,7 +1017,9 @@ export const certificateAuthorityServiceFactory = ({
const maxPathLength = certObj.getExtension(x509.BasicConstraintsExtension)?.pathLength;
// validate imported certificate and certificate chain
const certificates = extractX509CertFromChain(certificateChain)?.map((cert) => new x509.X509Certificate(cert));
const certificates = certificateChain
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
?.map((cert) => new x509.X509Certificate(cert));
if (!certificates) throw new BadRequestError({ message: "Failed to parse certificate chain" });
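The hunk above trades the inline PEM-splitting regex for an extractX509CertFromChain helper imported from @app/lib/certificates/extract-certificate. A minimal sketch of such a helper, assuming it only wraps the same regex shown in the old code (the real module may also trim or validate the blocks):

// Hypothetical reconstruction of the chain-splitting helper; the regex is the
// one visible in the diff, everything else is an assumption.
export const extractX509CertFromChain = (certificateChain: string): string[] | undefined => {
  const pemBlocks = certificateChain.match(
    /-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g
  );
  return pemBlocks ?? undefined;
};

// Each returned PEM block can then be parsed individually:
// extractX509CertFromChain(chain)?.map((cert) => new x509.X509Certificate(cert));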
@ -1324,7 +1325,7 @@ export const certificateAuthorityServiceFactory = ({
}
// check if the altName is a valid hostname
if (isFQDN(altName, { allow_wildcard: true })) {
if (hostnameRegex.test(altName)) {
return {
type: "dns",
value: altName
@ -1701,7 +1702,7 @@ export const certificateAuthorityServiceFactory = ({
}
// check if the altName is a valid hostname
if (isFQDN(altName, { allow_wildcard: true })) {
if (hostnameRegex.test(altName)) {
return {
type: "dns",
value: altName
@ -1818,8 +1819,7 @@ export const certificateAuthorityServiceFactory = ({
certificateChain: `${issuingCaCertificate}\n${caCertChain}`.trim(),
issuingCaCertificate,
serialNumber,
ca,
commonName: cn
ca
};
};

View File

@ -1,7 +1,6 @@
import { z } from "zod";
import { isValidIp } from "@app/lib/ip";
import { isFQDN } from "@app/lib/validator/validate-url";
const isValidDate = (dateString: string) => {
const date = new Date(dateString);
@ -10,6 +9,7 @@ const isValidDate = (dateString: string) => {
export const validateCaDateField = z.string().trim().refine(isValidDate, { message: "Invalid date format" });
export const hostnameRegex = /^(?!:\/\/)(\*\.)?([a-zA-Z0-9-_]{1,63}\.?)+(?!:\/\/)([a-zA-Z]{2,63})$/;
export const validateAltNamesField = z
.string()
.trim()
@ -27,7 +27,7 @@ export const validateAltNamesField = z
if (data === "") return true;
// Split and validate each alt name
return data.split(", ").every((name) => {
return isFQDN(name, { allow_wildcard: true }) || z.string().email().safeParse(name).success || isValidIp(name);
return hostnameRegex.test(name) || z.string().email().safeParse(name).success || isValidIp(name);
});
},
{
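Both branches above accept the same comma-plus-space separated list of subject alternative names; only the hostname check differs (isFQDN with allow_wildcard versus hostnameRegex). A hedged usage sketch with illustrative values:

// Sample SANs chosen for illustration; each entry must be a hostname
// (wildcards allowed), an email address, or an IP address.
validateAltNamesField.parse("app.example.com, *.svc.cluster.local, admin@example.com, 10.0.0.5");
// -> passes; a value such as "not a hostname!" in the list would fail the refine step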

View File

@ -11,7 +11,6 @@ export const validateCertificateDetailsAgainstTemplate = (
},
template: TCertificateTemplates
) => {
// these are validated in router using validateTemplateRegexField
const commonNameRegex = new RegExp(template.commonName);
if (!commonNameRegex.test(cert.commonName)) {
throw new BadRequestError({
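Since template.commonName is stored as a regex pattern (validated elsewhere with validateTemplateRegexField), the check above simply compiles it and tests the certificate's CN. A hedged illustration with assumed sample values:

// Illustrative template pattern and CNs; only the RegExp mechanics come from the code above.
const template = { commonName: "^[a-z0-9-]+\\.svc\\.example\\.com$" };

new RegExp(template.commonName).test("payments.svc.example.com"); // true  - issuance allowed
new RegExp(template.commonName).test("evil.example.org");         // false - BadRequestError thrown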

View File

@ -6,7 +6,6 @@ import { ActionProjectType, TCertificateTemplateEstConfigsUpdate } from "@app/db
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { extractX509CertFromChain } from "@app/lib/certificates/extract-certificate";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
@ -282,7 +281,9 @@ export const certificateTemplateServiceFactory = ({
});
// validate CA chain
const certificates = extractX509CertFromChain(caChain)?.map((cert) => new x509.X509Certificate(cert));
const certificates = caChain
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
?.map((cert) => new x509.X509Certificate(cert));
if (!certificates) {
throw new BadRequestError({ message: "Failed to parse certificate chain" });
@ -378,7 +379,9 @@ export const certificateTemplateServiceFactory = ({
};
if (caChain) {
const certificates = extractX509CertFromChain(caChain)?.map((cert) => new x509.X509Certificate(cert));
const certificates = caChain
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
?.map((cert) => new x509.X509Certificate(cert));
if (!certificates) {
throw new BadRequestError({ message: "Failed to parse certificate chain" });

View File

@ -1,27 +1,13 @@
import safe from "safe-regex";
import z from "zod";
import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";
export const validateTemplateRegexField = z
.string()
.min(1)
.max(100)
.refine(
(val) =>
characterValidator([
CharacterType.AlphaNumeric,
CharacterType.Spaces, // (space)
CharacterType.Asterisk, // *
CharacterType.At, // @
CharacterType.Hyphen, // -
CharacterType.Period, // .
CharacterType.Backslash // \
])(val),
{
message: "Invalid pattern: only alphanumeric characters, spaces, *, ., @, -, and \\ are allowed."
}
)
.regex(/^[a-zA-Z0-9 *@\-\\.\\]+$/, {
message: "Invalid pattern: only alphanumeric characters, spaces, *, ., @, -, and \\ are allowed."
})
// we ensure that the inputted pattern is computationally safe by limiting star height to 1
.refine((v) => safe(v), {
message: "Unsafe REGEX pattern"

View File

@ -1,15 +1,12 @@
import { ForbiddenError } from "@casl/ability";
import { ActionProjectType, ProjectMembershipRole, SecretKeyEncoding, TGroups } from "@app/db/schemas";
import {
constructPermissionErrorMessage,
validatePrivilegeChangeOperation
} from "@app/ee/services/permission/permission-fns";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionGroupActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { decryptAsymmetric, encryptAsymmetric } from "@app/lib/crypto";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { isUuidV4 } from "@app/lib/validator";
@ -73,7 +70,7 @@ export const groupProjectServiceFactory = ({
if (!project) throw new NotFoundError({ message: `Failed to find project with ID ${projectId}` });
if (project.version < 2) throw new BadRequestError({ message: `Failed to add group to E2EE project` });
const { permission, membership } = await permissionService.getProjectPermission({
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId,
@ -81,7 +78,7 @@ export const groupProjectServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionGroupActions.Create, ProjectPermissionSub.Groups);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.Groups);
let group: TGroups | null = null;
if (isUuidV4(groupIdOrName)) {
@ -105,21 +102,11 @@ export const groupProjectServiceFactory = ({
project.id
);
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionGroupActions.GrantPrivileges,
ProjectPermissionSub.Groups,
permission,
rolePermission
);
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to assign group to role",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionGroupActions.GrantPrivileges,
ProjectPermissionSub.Groups
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to assign group to a more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
}
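The boundary failure above now raises PermissionBoundaryError with a message built by constructPermissionErrorMessage, so the wording can differ between the legacy and the new privilege system. A hedged sketch of such a helper (the real one lives in @app/ee/services/permission/permission-fns and its exact wording is an assumption):

// Hypothetical reconstruction; the signature mirrors the call sites above.
export const constructPermissionErrorMessage = (
  baseMessage: string,
  shouldUseNewPrivilegeSystem: boolean,
  action: string,
  subject: string
): string =>
  shouldUseNewPrivilegeSystem
    ? `${baseMessage}: you lack the "${action}" permission on "${subject}"`
    : `${baseMessage}: the target role is more privileged than your own`;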
@ -261,7 +248,7 @@ export const groupProjectServiceFactory = ({
if (!project) throw new NotFoundError({ message: `Failed to find project with ID ${projectId}` });
const { permission, membership } = await permissionService.getProjectPermission({
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId,
@ -269,7 +256,7 @@ export const groupProjectServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionGroupActions.Edit, ProjectPermissionSub.Groups);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Groups);
const group = await groupDAL.findOne({ orgId: actorOrgId, id: groupId });
if (!group) throw new NotFoundError({ message: `Failed to find group with ID ${groupId}` });
@ -282,21 +269,11 @@ export const groupProjectServiceFactory = ({
requestedRoleChange,
project.id
);
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionGroupActions.GrantPrivileges,
ProjectPermissionSub.Groups,
permission,
rolePermission
);
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to assign group to role",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionGroupActions.GrantPrivileges,
ProjectPermissionSub.Groups
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to assign group to a more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
}
@ -383,7 +360,7 @@ export const groupProjectServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionGroupActions.Delete, ProjectPermissionSub.Groups);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.Groups);
const deletedProjectGroup = await groupProjectDAL.transaction(async (tx) => {
const groupMembers = await userGroupMembershipDAL.findGroupMembersNotInProject(group.id, project.id, tx);
@ -428,7 +405,7 @@ export const groupProjectServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionGroupActions.Read, ProjectPermissionSub.Groups);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Groups);
const groupMemberships = await groupProjectDAL.findByProjectId(project.id);
return groupMemberships;
@ -456,7 +433,7 @@ export const groupProjectServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionGroupActions.Read, ProjectPermissionSub.Groups);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Groups);
const [groupMembership] = await groupProjectDAL.findByProjectId(project.id, {
groupId

View File

@ -5,14 +5,11 @@ import jwt from "jsonwebtoken";
import { IdentityAuthMethod } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionIdentityActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import {
constructPermissionErrorMessage,
validatePrivilegeChangeOperation
} from "@app/ee/services/permission/permission-fns";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, NotFoundError, PermissionBoundaryError, UnauthorizedError } from "@app/lib/errors";
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
import { ActorType, AuthTokenType } from "../auth/auth-type";
@ -42,31 +39,6 @@ type TIdentityAwsAuthServiceFactoryDep = {
export type TIdentityAwsAuthServiceFactory = ReturnType<typeof identityAwsAuthServiceFactory>;
const awsRegionFromHeader = (authorizationHeader: string): string | null => {
// https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-auth-using-authorization-header.html
// The Authorization header takes the following form.
// Authorization: AWS4-HMAC-SHA256
// Credential=AKIAIOSFODNN7EXAMPLE/20230719/us-east-1/sts/aws4_request,
// SignedHeaders=content-length;content-type;host;x-amz-date,
// Signature=fe5f80f77d5fa3beca038a248ff027d0445342fe2855ddc963176630326f1024
//
// The credential is in the form of "<your-access-key-id>/<date>/<aws-region>/<aws-service>/aws4_request"
try {
const fields = authorizationHeader.split(" ");
for (const field of fields) {
if (field.startsWith("Credential=")) {
const parts = field.split("/");
if (parts.length >= 3) {
return parts[2];
}
}
}
} catch {
return null;
}
return null;
};
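A quick illustration of the header parsing above, reusing the documented SigV4 example values already quoted in the comment (they are placeholders, not real credentials):

const sampleAuthorization =
  "AWS4-HMAC-SHA256 Credential=AKIAIOSFODNN7EXAMPLE/20230719/us-east-1/sts/aws4_request, " +
  "SignedHeaders=content-length;content-type;host;x-amz-date, " +
  "Signature=fe5f80f77d5fa3beca038a248ff027d0445342fe2855ddc963176630326f1024";

awsRegionFromHeader(sampleAuthorization); // -> "us-east-1"
// When no Credential field is present the function returns null and the
// caller falls back to identityAwsAuth.stsEndpoint.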
export const identityAwsAuthServiceFactory = ({
identityAccessTokenDAL,
identityAwsAuthDAL,
@ -85,9 +57,6 @@ export const identityAwsAuthServiceFactory = ({
const headers: TAwsGetCallerIdentityHeaders = JSON.parse(Buffer.from(iamRequestHeaders, "base64").toString());
const body: string = Buffer.from(iamRequestBody, "base64").toString();
const region = headers.Authorization ? awsRegionFromHeader(headers.Authorization) : null;
const url = region ? `https://sts.${region}.amazonaws.com` : identityAwsAuth.stsEndpoint;
const {
data: {
GetCallerIdentityResponse: {
@ -96,7 +65,7 @@ export const identityAwsAuthServiceFactory = ({
}
}: { data: TGetCallerIdentityResponse } = await axios({
method: iamHttpRequestMethod,
url,
url: headers?.Host ? `https://${headers.Host}` : identityAwsAuth.stsEndpoint,
headers,
data: body
});
@ -124,8 +93,7 @@ export const identityAwsAuthServiceFactory = ({
.some((principalArn) => {
// convert wildcard ARN to a regular expression: "arn:aws:iam::123456789012:*" -> "^arn:aws:iam::123456789012:.*$"
// considers exact matches + wildcard matches
// heavily validated in router
const regex = new RegExp(`^${principalArn.replaceAll("*", ".*")}$`);
const regex = new RegExp(`^${principalArn.replace(/\*/g, ".*")}$`);
return regex.test(extractPrincipalArn(Arn));
});
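The wildcard-ARN comparison above turns each allowed principal into an anchored regular expression. A hedged illustration with sample ARNs (the values are assumptions; only the replaceAll/RegExp mechanics come from the code):

const principalArn = "arn:aws:iam::123456789012:role/deploy-*";
const pattern = new RegExp(`^${principalArn.replaceAll("*", ".*")}$`);

pattern.test("arn:aws:iam::123456789012:role/deploy-prod"); // true  - wildcard suffix matches
pattern.test("arn:aws:iam::999999999999:role/deploy-prod"); // false - different account id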
@ -207,7 +175,7 @@ export const identityAwsAuthServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Create, OrgPermissionSubjects.Identity);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Identity);
const plan = await licenseService.getPlan(identityMembershipOrg.orgId);
const reformattedAccessTokenTrustedIps = accessTokenTrustedIps.map((accessTokenTrustedIp) => {
@ -286,7 +254,7 @@ export const identityAwsAuthServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
const plan = await licenseService.getPlan(identityMembershipOrg.orgId);
const reformattedAccessTokenTrustedIps = accessTokenTrustedIps?.map((accessTokenTrustedIp) => {
@ -340,7 +308,7 @@ export const identityAwsAuthServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Read, OrgPermissionSubjects.Identity);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Identity);
return { ...awsIdentityAuth, orgId: identityMembershipOrg.orgId };
};
@ -358,14 +326,14 @@ export const identityAwsAuthServiceFactory = ({
message: "The identity does not have aws auth"
});
}
const { permission, membership } = await permissionService.getOrgPermission(
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
identityMembershipOrg.orgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
const { permission: rolePermission } = await permissionService.getOrgPermission(
ActorType.IDENTITY,
@ -375,22 +343,11 @@ export const identityAwsAuthServiceFactory = ({
actorOrgId
);
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
OrgPermissionIdentityActions.RevokeAuth,
OrgPermissionSubjects.Identity,
permission,
rolePermission
);
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to revoke aws auth of identity with more privileged role",
membership.shouldUseNewPrivilegeSystem,
OrgPermissionIdentityActions.RevokeAuth,
OrgPermissionSubjects.Identity
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to revoke aws auth of identity with more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
});

Some files were not shown because too many files have changed in this diff.