Mirror of https://github.com/Infisical/infisical.git — synced 2025-07-02 16:55:02 +00:00

Compare commits (1 commit): misc/expor...daniel/add

Commit d38243a1c6
@@ -36,22 +36,16 @@ CLIENT_ID_HEROKU=
CLIENT_ID_VERCEL=
CLIENT_ID_NETLIFY=
CLIENT_ID_GITHUB=
CLIENT_ID_GITHUB_APP=
CLIENT_SLUG_GITHUB_APP=
CLIENT_ID_GITLAB=
CLIENT_ID_BITBUCKET=
CLIENT_SECRET_HEROKU=
CLIENT_SECRET_VERCEL=
CLIENT_SECRET_NETLIFY=
CLIENT_SECRET_GITHUB=
CLIENT_SECRET_GITHUB_APP=
CLIENT_SECRET_GITLAB=
CLIENT_SECRET_BITBUCKET=
CLIENT_SLUG_VERCEL=

CLIENT_PRIVATE_KEY_GITHUB_APP=
CLIENT_APP_ID_GITHUB_APP=

# Sentry (optional) for monitoring errors
SENTRY_DSN=

@@ -1,2 +1 @@
DB_CONNECTION_URI=
AUDIT_LOGS_DB_CONNECTION_URI=
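The variables above are example OAuth client credentials for the backend's integrations plus the database connection strings. As a minimal sketch of how a few of them could be read from process.env — assuming a zod-style schema; the loader shape is illustrative, not the project's actual config code:

import { z } from "zod";

// Illustrative only: parse a subset of the variables listed above from process.env.
const envSchema = z.object({
  CLIENT_ID_GITHUB: z.string().optional(),
  CLIENT_SECRET_GITHUB: z.string().optional(),
  SENTRY_DSN: z.string().optional(),
  DB_CONNECTION_URI: z.string().min(1),
  AUDIT_LOGS_DB_CONNECTION_URI: z.string().optional()
});

export const env = envSchema.parse(process.env);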
.github/pull_request_template.md (vendored, 1 change)

@@ -6,7 +6,6 @@
- [ ] Bug fix
- [ ] New feature
- [ ] Improvement
- [ ] Breaking change
- [ ] Documentation
.github/workflows/build-binaries.yml (vendored, 91 changes)

@@ -7,6 +7,7 @@ on:
description: "Version number"
required: true
type: string

defaults:
run:
working-directory: ./backend

@@ -48,9 +49,9 @@ jobs:
- name: Package into node binary
run: |
if [ "${{ matrix.os }}" != "linux" ]; then
pkg --no-bytecode --public-packages "*" --public --compress GZip --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }} .
pkg --no-bytecode --public-packages "*" --public --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }} .
else
pkg --no-bytecode --public-packages "*" --public --compress GZip --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core .
pkg --no-bytecode --public-packages "*" --public --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core .
fi

# Set up .deb package structure (Debian/Ubuntu only)

@@ -82,86 +83,6 @@ jobs:
dpkg-deb --build infisical-core
mv infisical-core.deb ./binary/infisical-core-${{matrix.arch}}.deb

### RPM

# Set up .rpm package structure
- name: Set up .rpm package structure
if: matrix.os == 'linux'
run: |
mkdir -p infisical-core-rpm/usr/local/bin
cp ./binary/infisical-core infisical-core-rpm/usr/local/bin/
chmod +x infisical-core-rpm/usr/local/bin/infisical-core

# Install RPM build tools
- name: Install RPM build tools
if: matrix.os == 'linux'
run: sudo apt-get update && sudo apt-get install -y rpm

# Create .spec file for RPM
- name: Create .spec file for RPM
if: matrix.os == 'linux'
run: |
cat <<EOF > infisical-core.spec

%global _enable_debug_package 0
%global debug_package %{nil}
%global __os_install_post /usr/lib/rpm/brp-compress %{nil}

Name: infisical-core
Version: ${{ github.event.inputs.version }}
Release: 1%{?dist}
Summary: Infisical Core standalone executable
License: Proprietary
URL: https://app.infisical.com

%description
Infisical Core standalone executable (app.infisical.com)

%install
mkdir -p %{buildroot}/usr/local/bin
cp %{_sourcedir}/infisical-core %{buildroot}/usr/local/bin/

%files
/usr/local/bin/infisical-core

%pre

%post

%preun

%postun
EOF

# Build .rpm file
- name: Build .rpm package
if: matrix.os == 'linux'
run: |
# Create necessary directories
mkdir -p rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS}

# Copy the binary directly to SOURCES
cp ./binary/infisical-core rpmbuild/SOURCES/

# Run rpmbuild with verbose output
rpmbuild -vv -bb \
--define "_topdir $(pwd)/rpmbuild" \
--define "_sourcedir $(pwd)/rpmbuild/SOURCES" \
--define "_rpmdir $(pwd)/rpmbuild/RPMS" \
--target ${{ matrix.arch == 'x64' && 'x86_64' || 'aarch64' }} \
infisical-core.spec

# Try to find the RPM file
find rpmbuild -name "*.rpm"

# Move the RPM file if found
if [ -n "$(find rpmbuild -name '*.rpm')" ]; then
mv $(find rpmbuild -name '*.rpm') ./binary/infisical-core-${{matrix.arch}}.rpm
else
echo "RPM file not found!"
exit 1
fi

- uses: actions/setup-python@v4
with:
python-version: "3.x" # Specify the Python version you need

@@ -176,12 +97,6 @@ jobs:
working-directory: ./backend
run: cloudsmith push deb --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.deb

# Publish .rpm file to Cloudsmith (Red Hat-based systems only)
- name: Publish .rpm to Cloudsmith
if: matrix.os == 'linux'
working-directory: ./backend
run: cloudsmith push rpm --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.rpm

# Publish .exe file to Cloudsmith (Windows only)
- name: Publish to Cloudsmith (Windows)
if: matrix.os == 'win'
@@ -6,15 +6,9 @@ permissions:
contents: read

jobs:
infisical-tests:
name: Run tests before deployment
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
uses: ./.github/workflows/run-backend-tests.yml

infisical-image:
name: Build backend image
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3

@@ -127,7 +121,6 @@ jobs:
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
AUDIT_LOGS_DB_CONNECTION_URI: ${{ secrets.AUDIT_LOGS_DB_CONNECTION_URI }}
run: |
cd backend
npm install
.gitignore (vendored, 1 change)

@@ -63,7 +63,6 @@ yarn-error.log*

# Editor specific
.vscode/*
.idea/*

frontend-build
@@ -95,10 +95,6 @@ RUN mkdir frontend-build
# Production stage
FROM base AS production
RUN apk add --upgrade --no-cache ca-certificates
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.31.1 && apk add --no-cache git

RUN addgroup --system --gid 1001 nodejs \
&& adduser --system --uid 1001 non-root-user
@@ -123,7 +123,7 @@ describe("Project Environment Router", async () => {
        id: deletedProjectEnvironment.id,
        name: mockProjectEnv.name,
        slug: mockProjectEnv.slug,
        position: 5,
        position: 4,
        createdAt: expect.any(String),
        updatedAt: expect.any(String)
      })
@@ -1,36 +0,0 @@
import { seedData1 } from "@app/db/seed-data";
import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";

const createPolicy = async (dto: { name: string; secretPath: string; approvers: { type: ApproverType.User, id: string }[]; approvals: number }) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/secret-approvals`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: {
      workspaceId: seedData1.project.id,
      environment: seedData1.environment.slug,
      name: dto.name,
      secretPath: dto.secretPath,
      approvers: dto.approvers,
      approvals: dto.approvals
    }
  });

  expect(res.statusCode).toBe(200);
  return res.json().approval;
};

describe("Secret approval policy router", async () => {
  test("Create policy", async () => {
    const policy = await createPolicy({
      secretPath: "/",
      approvals: 1,
      approvers: [{ id: seedData1.id, type: ApproverType.User }],
      name: "test-policy"
    });

    expect(policy.name).toBe("test-policy");
  });
});
@@ -1,61 +1,73 @@
|
||||
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
|
||||
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
|
||||
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
|
||||
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
const createSecretImport = async (importPath: string, importEnv: string) => {
|
||||
const res = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v1/secret-imports`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/",
|
||||
import: {
|
||||
environment: importEnv,
|
||||
path: importPath
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("secretImport");
|
||||
return payload.secretImport;
|
||||
};
|
||||
|
||||
const deleteSecretImport = async (id: string) => {
|
||||
const res = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v1/secret-imports/${id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/"
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("secretImport");
|
||||
return payload.secretImport;
|
||||
};
|
||||
|
||||
describe("Secret Import Router", async () => {
|
||||
test.each([
|
||||
{ importEnv: "prod", importPath: "/" }, // one in root
|
||||
{ importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones
|
||||
])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
|
||||
// check for default environments
|
||||
const payload = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath,
|
||||
importEnv
|
||||
});
|
||||
const payload = await createSecretImport(importPath, importEnv);
|
||||
expect(payload).toEqual(
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
importPath,
|
||||
importPath: expect.any(String),
|
||||
importEnv: expect.objectContaining({
|
||||
name: expect.any(String),
|
||||
slug: importEnv,
|
||||
slug: expect.any(String),
|
||||
id: expect.any(String)
|
||||
})
|
||||
})
|
||||
);
|
||||
|
||||
await deleteSecretImport({
|
||||
id: payload.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
await deleteSecretImport(payload.id);
|
||||
});
|
||||
|
||||
test("Get secret imports", async () => {
|
||||
const createdImport1 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: "/",
|
||||
importEnv: "prod"
|
||||
});
|
||||
const createdImport2 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: "/",
|
||||
importEnv: "staging"
|
||||
});
|
||||
const createdImport1 = await createSecretImport("/", "prod");
|
||||
const createdImport2 = await createSecretImport("/", "staging");
|
||||
const res = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/secret-imports`,
|
||||
@@ -77,60 +89,25 @@ describe("Secret Import Router", async () => {
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
importPath: "/",
|
||||
importPath: expect.any(String),
|
||||
importEnv: expect.objectContaining({
|
||||
name: expect.any(String),
|
||||
slug: "prod",
|
||||
id: expect.any(String)
|
||||
})
|
||||
}),
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
importPath: "/",
|
||||
importEnv: expect.objectContaining({
|
||||
name: expect.any(String),
|
||||
slug: "staging",
|
||||
slug: expect.any(String),
|
||||
id: expect.any(String)
|
||||
})
|
||||
})
|
||||
])
|
||||
);
|
||||
await deleteSecretImport({
|
||||
id: createdImport1.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
await deleteSecretImport({
|
||||
id: createdImport2.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
await deleteSecretImport(createdImport1.id);
|
||||
await deleteSecretImport(createdImport2.id);
|
||||
});
|
||||
|
||||
test("Update secret import position", async () => {
|
||||
const prodImportDetails = { path: "/", envSlug: "prod" };
|
||||
const stagingImportDetails = { path: "/", envSlug: "staging" };
|
||||
|
||||
const createdImport1 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: prodImportDetails.path,
|
||||
importEnv: prodImportDetails.envSlug
|
||||
});
|
||||
const createdImport2 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: stagingImportDetails.path,
|
||||
importEnv: stagingImportDetails.envSlug
|
||||
});
|
||||
const createdImport1 = await createSecretImport(prodImportDetails.path, prodImportDetails.envSlug);
|
||||
const createdImport2 = await createSecretImport(stagingImportDetails.path, stagingImportDetails.envSlug);
|
||||
|
||||
const updateImportRes = await testServer.inject({
|
||||
method: "PATCH",
|
||||
@@ -184,55 +161,22 @@ describe("Secret Import Router", async () => {
|
||||
expect(secretImportList.secretImports[1].id).toEqual(createdImport1.id);
|
||||
expect(secretImportList.secretImports[0].id).toEqual(createdImport2.id);
|
||||
|
||||
await deleteSecretImport({
|
||||
id: createdImport1.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
await deleteSecretImport({
|
||||
id: createdImport2.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
await deleteSecretImport(createdImport1.id);
|
||||
await deleteSecretImport(createdImport2.id);
|
||||
});
|
||||
|
||||
test("Delete secret import position", async () => {
|
||||
const createdImport1 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: "/",
|
||||
importEnv: "prod"
|
||||
});
|
||||
const createdImport2 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: "/",
|
||||
importEnv: "staging"
|
||||
});
|
||||
const deletedImport = await deleteSecretImport({
|
||||
id: createdImport1.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
const createdImport1 = await createSecretImport("/", "prod");
|
||||
const createdImport2 = await createSecretImport("/", "staging");
|
||||
const deletedImport = await deleteSecretImport(createdImport1.id);
|
||||
// check for default environments
|
||||
expect(deletedImport).toEqual(
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
importPath: "/",
|
||||
importPath: expect.any(String),
|
||||
importEnv: expect.objectContaining({
|
||||
name: expect.any(String),
|
||||
slug: "prod",
|
||||
slug: expect.any(String),
|
||||
id: expect.any(String)
|
||||
})
|
||||
})
|
||||
@@ -257,552 +201,6 @@ describe("Secret Import Router", async () => {
|
||||
expect(secretImportList.secretImports.length).toEqual(1);
|
||||
expect(secretImportList.secretImports[0].position).toEqual(1);
|
||||
|
||||
await deleteSecretImport({
|
||||
id: createdImport2.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
await deleteSecretImport(createdImport2.id);
|
||||
});
|
||||
});
|
||||
|
||||
// dev <- stage <- prod
|
||||
describe.each([{ path: "/" }, { path: "/deep" }])(
|
||||
"Secret import waterfall pattern testing - %path",
|
||||
({ path: testSuitePath }) => {
|
||||
beforeAll(async () => {
|
||||
let prodFolder: { id: string };
|
||||
let stagingFolder: { id: string };
|
||||
let devFolder: { id: string };
|
||||
|
||||
if (testSuitePath !== "/") {
|
||||
prodFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
stagingFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
devFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
}
|
||||
|
||||
const devImportFromStage = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "staging"
|
||||
});
|
||||
|
||||
const stageImportFromProd = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "prod"
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteSecretImport({
|
||||
id: stageImportFromProd.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging",
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
await deleteSecretImport({
|
||||
id: devImportFromStage.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
if (prodFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
}
|
||||
|
||||
if (stagingFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: stagingFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging"
|
||||
});
|
||||
}
|
||||
|
||||
if (devFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: devFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
test("Check one level imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY",
|
||||
value: "stage-value"
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("STAGING_KEY");
|
||||
expect(secret.secretValue).toBe("stage-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "STAGING_KEY",
|
||||
secretValue: "stage-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
});
|
||||
|
||||
test("Check two level imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY",
|
||||
value: "prod-value"
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("PROD_KEY");
|
||||
expect(secret.secretValue).toBe("prod-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "PROD_KEY",
|
||||
secretValue: "prod-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
});
|
||||
}
|
||||
);
|
||||
|
||||
// dev <- stage, dev <- prod
|
||||
describe.each([{ path: "/" }, { path: "/deep" }])(
|
||||
"Secret import multiple destination to one source pattern testing - %path",
|
||||
({ path: testSuitePath }) => {
|
||||
beforeAll(async () => {
|
||||
let prodFolder: { id: string };
|
||||
let stagingFolder: { id: string };
|
||||
let devFolder: { id: string };
|
||||
|
||||
if (testSuitePath !== "/") {
|
||||
prodFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
stagingFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
devFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
}
|
||||
|
||||
const devImportFromStage = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "staging"
|
||||
});
|
||||
|
||||
const devImportFromProd = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "prod"
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteSecretImport({
|
||||
id: devImportFromProd.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
await deleteSecretImport({
|
||||
id: devImportFromStage.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
if (prodFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
}
|
||||
|
||||
if (stagingFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: stagingFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging"
|
||||
});
|
||||
}
|
||||
|
||||
if (devFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: devFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
test("Check imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY",
|
||||
value: "stage-value"
|
||||
});
|
||||
|
||||
await createSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY",
|
||||
value: "prod-value"
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("STAGING_KEY");
|
||||
expect(secret.secretValue).toBe("stage-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "STAGING_KEY",
|
||||
secretValue: "stage-value"
|
||||
})
|
||||
])
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "PROD_KEY",
|
||||
secretValue: "prod-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
});
|
||||
}
|
||||
);
|
||||
|
||||
// dev -> stage, prod
|
||||
describe.each([{ path: "/" }, { path: "/deep" }])(
|
||||
"Secret import one source to multiple destination pattern testing - %path",
|
||||
({ path: testSuitePath }) => {
|
||||
beforeAll(async () => {
|
||||
let prodFolder: { id: string };
|
||||
let stagingFolder: { id: string };
|
||||
let devFolder: { id: string };
|
||||
|
||||
if (testSuitePath !== "/") {
|
||||
prodFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
stagingFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
devFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
}
|
||||
|
||||
const stageImportFromDev = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: seedData1.environment.slug
|
||||
});
|
||||
|
||||
const prodImportFromDev = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: seedData1.environment.slug
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteSecretImport({
|
||||
id: prodImportFromDev.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod",
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
await deleteSecretImport({
|
||||
id: stageImportFromDev.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging",
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
if (prodFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
}
|
||||
|
||||
if (stagingFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: stagingFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging"
|
||||
});
|
||||
}
|
||||
|
||||
if (devFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: devFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
test("Check imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY",
|
||||
value: "stage-value"
|
||||
});
|
||||
|
||||
await createSecretV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY",
|
||||
value: "prod-value"
|
||||
});
|
||||
|
||||
const stagingSecret = await getSecretByNameV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
|
||||
expect(stagingSecret.secretKey).toBe("STAGING_KEY");
|
||||
expect(stagingSecret.secretValue).toBe("stage-value");
|
||||
|
||||
const prodSecret = await getSecretByNameV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
|
||||
expect(prodSecret.secretKey).toBe("PROD_KEY");
|
||||
expect(prodSecret.secretValue).toBe("prod-value");
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
await deleteSecretV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
});
|
||||
}
|
||||
);
|
||||
|
@@ -1,406 +0,0 @@
|
||||
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
|
||||
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
|
||||
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
|
||||
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
// dev <- stage <- prod
|
||||
describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])(
|
||||
"Secret replication waterfall pattern testing - %secretPath",
|
||||
({ secretPath: testSuitePath }) => {
|
||||
beforeAll(async () => {
|
||||
let prodFolder: { id: string };
|
||||
let stagingFolder: { id: string };
|
||||
let devFolder: { id: string };
|
||||
|
||||
if (testSuitePath !== "/") {
|
||||
prodFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
stagingFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
devFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
}
|
||||
|
||||
const devImportFromStage = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "staging",
|
||||
isReplication: true
|
||||
});
|
||||
|
||||
const stageImportFromProd = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "prod",
|
||||
isReplication: true
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteSecretImport({
|
||||
id: stageImportFromProd.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging",
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
await deleteSecretImport({
|
||||
id: devImportFromStage.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
if (prodFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
}
|
||||
|
||||
if (stagingFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: stagingFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging"
|
||||
});
|
||||
}
|
||||
|
||||
if (devFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: devFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
test("Check one level imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY",
|
||||
value: "stage-value"
|
||||
});
|
||||
|
||||
// wait for 5 second for replication to finish
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 5000); // time to breathe for db
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("STAGING_KEY");
|
||||
expect(secret.secretValue).toBe("stage-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "STAGING_KEY",
|
||||
secretValue: "stage-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
});
|
||||
|
||||
test("Check two level imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY",
|
||||
value: "prod-value"
|
||||
});
|
||||
|
||||
// wait for 5 second for replication to finish
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 5000); // time to breathe for db
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("PROD_KEY");
|
||||
expect(secret.secretValue).toBe("prod-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "PROD_KEY",
|
||||
secretValue: "prod-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
});
|
||||
},
|
||||
{ timeout: 30000 }
|
||||
);
|
||||
|
||||
// dev <- stage, dev <- prod
|
||||
describe.each([{ path: "/" }, { path: "/deep" }])(
|
||||
"Secret replication 1-N pattern testing - %path",
|
||||
({ path: testSuitePath }) => {
|
||||
beforeAll(async () => {
|
||||
let prodFolder: { id: string };
|
||||
let stagingFolder: { id: string };
|
||||
let devFolder: { id: string };
|
||||
|
||||
if (testSuitePath !== "/") {
|
||||
prodFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
stagingFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
devFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
}
|
||||
|
||||
const devImportFromStage = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "staging",
|
||||
isReplication: true
|
||||
});
|
||||
|
||||
const devImportFromProd = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "prod",
|
||||
isReplication: true
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteSecretImport({
|
||||
id: devImportFromProd.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
await deleteSecretImport({
|
||||
id: devImportFromStage.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
if (prodFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
}
|
||||
|
||||
if (stagingFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: stagingFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging"
|
||||
});
|
||||
}
|
||||
|
||||
if (devFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: devFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
test("Check imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY",
|
||||
value: "stage-value"
|
||||
});
|
||||
|
||||
await createSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY",
|
||||
value: "prod-value"
|
||||
});
|
||||
|
||||
// wait for 5 second for replication to finish
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 5000); // time to breathe for db
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("STAGING_KEY");
|
||||
expect(secret.secretValue).toBe("stage-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "STAGING_KEY",
|
||||
secretValue: "stage-value"
|
||||
})
|
||||
])
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "PROD_KEY",
|
||||
secretValue: "prod-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
});
|
||||
},
|
||||
{ timeout: 30000 }
|
||||
);
|
@@ -510,7 +510,7 @@ describe("Service token fail cases", async () => {
        authorization: `Bearer ${serviceToken}`
      }
    });
    expect(fetchSecrets.statusCode).toBe(403);
    expect(fetchSecrets.statusCode).toBe(401);
    expect(fetchSecrets.json().error).toBe("PermissionDenied");
    await deleteServiceToken();
  });

@@ -532,7 +532,7 @@ describe("Service token fail cases", async () => {
        authorization: `Bearer ${serviceToken}`
      }
    });
    expect(fetchSecrets.statusCode).toBe(403);
    expect(fetchSecrets.statusCode).toBe(401);
    expect(fetchSecrets.json().error).toBe("PermissionDenied");
    await deleteServiceToken();
  });

@@ -557,7 +557,7 @@ describe("Service token fail cases", async () => {
        authorization: `Bearer ${serviceToken}`
      }
    });
    expect(writeSecrets.statusCode).toBe(403);
    expect(writeSecrets.statusCode).toBe(401);
    expect(writeSecrets.json().error).toBe("PermissionDenied");

    // but read access should still work fine
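The three hunks above change the expected failure status between 403 and 401 while keeping the "PermissionDenied" error body. A small hypothetical helper that captures the shared assertion — the helper name and shape are illustrative only, not code from this diff:

// Hypothetical helper: centralizes the status + error-body check used by the fail cases above.
const expectPermissionDenied = (res: { statusCode: number; json: () => { error: string } }, status = 401) => {
  expect(res.statusCode).toBe(status);
  expect(res.json().error).toBe("PermissionDenied");
};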
@@ -1,330 +0,0 @@
|
||||
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
|
||||
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
|
||||
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
|
||||
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
describe("Secret expansion", () => {
|
||||
const projectId = seedData1.projectV3.id;
|
||||
|
||||
beforeAll(async () => {
|
||||
const prodRootFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
name: "nested"
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodRootFolder.id,
|
||||
workspaceId: projectId,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
};
|
||||
});
|
||||
|
||||
test("Local secret reference", async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "HELLO",
|
||||
value: "world"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "TEST",
|
||||
// eslint-disable-next-line
|
||||
value: "hello ${HELLO}"
|
||||
}
|
||||
];
|
||||
|
||||
await Promise.all(secrets.map((el) => createSecretV2(el)));
|
||||
|
||||
const expandedSecret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "TEST"
|
||||
});
|
||||
expect(expandedSecret.secretValue).toBe("hello world");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "TEST",
|
||||
secretValue: "hello world"
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
});
|
||||
|
||||
test("Cross environment secret reference", async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
authToken: jwtAuthToken,
|
||||
key: "DEEP_KEY_1",
|
||||
value: "testing"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_1",
|
||||
value: "reference"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_2",
|
||||
// eslint-disable-next-line
|
||||
value: "secret ${NESTED_KEY_1}"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY",
|
||||
// eslint-disable-next-line
|
||||
value: "hello ${prod.deep.DEEP_KEY_1} ${prod.deep.nested.NESTED_KEY_2}"
|
||||
}
|
||||
];
|
||||
|
||||
await Promise.all(secrets.map((el) => createSecretV2(el)));
|
||||
|
||||
const expandedSecret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY"
|
||||
});
|
||||
expect(expandedSecret.secretValue).toBe("hello testing secret reference");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "KEY",
|
||||
secretValue: "hello testing secret reference"
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
});
|
||||
|
||||
test("Non replicated secret import secret expansion on local reference and nested reference", async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
authToken: jwtAuthToken,
|
||||
key: "DEEP_KEY_1",
|
||||
value: "testing"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_1",
|
||||
value: "reference"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_2",
|
||||
// eslint-disable-next-line
|
||||
value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY",
|
||||
// eslint-disable-next-line
|
||||
value: "hello world"
|
||||
}
|
||||
];
|
||||
|
||||
await Promise.all(secrets.map((el) => createSecretV2(el)));
|
||||
const secretImportFromProdToDev = await createSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
importEnv: "prod",
|
||||
importPath: "/deep/nested"
|
||||
});
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretPath: "/deep/nested",
|
||||
environment: "prod",
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_1",
|
||||
secretValue: "reference"
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_2",
|
||||
secretValue: "secret reference testing"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
await deleteSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
authToken: jwtAuthToken,
|
||||
id: secretImportFromProdToDev.id,
|
||||
secretPath: "/"
|
||||
});
|
||||
});
|
||||
|
||||
test(
|
||||
"Replicated secret import secret expansion on local reference and nested reference",
|
||||
async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
authToken: jwtAuthToken,
|
||||
key: "DEEP_KEY_1",
|
||||
value: "testing"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_1",
|
||||
value: "reference"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_2",
|
||||
// eslint-disable-next-line
|
||||
value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY",
|
||||
// eslint-disable-next-line
|
||||
value: "hello world"
|
||||
}
|
||||
];
|
||||
|
||||
await Promise.all(secrets.map((el) => createSecretV2(el)));
|
||||
const secretImportFromProdToDev = await createSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
importEnv: "prod",
|
||||
importPath: "/deep/nested",
|
||||
isReplication: true
|
||||
});
|
||||
|
||||
// wait for 5 second for replication to finish
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 5000); // time to breathe for db
|
||||
});
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretPath: `/__reserve_replication_${secretImportFromProdToDev.id}`,
|
||||
environment: seedData1.environment.slug,
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_1",
|
||||
secretValue: "reference"
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_2",
|
||||
secretValue: "secret reference testing"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
await deleteSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
authToken: jwtAuthToken,
|
||||
id: secretImportFromProdToDev.id,
|
||||
secretPath: "/"
|
||||
});
|
||||
},
|
||||
{ timeout: 10000 }
|
||||
);
|
||||
});
|
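The secret-expansion tests above exercise local references like ${HELLO} and cross-environment references like ${prod.deep.DEEP_KEY_1}. As a purely illustrative sketch of the local-reference case — not Infisical's actual expansion code, which also handles cross-environment paths, recursion, and cycles:

// Illustrative only: naive single-pass expander for local ${KEY} references.
const expandLocalRefs = (value: string, secrets: Record<string, string>): string =>
  value.replace(/\$\{([A-Z0-9_]+)\}/g, (match, key: string) => secrets[key] ?? match);

// e.g. expandLocalRefs("hello ${HELLO}", { HELLO: "world" }) === "hello world"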
@@ -8,7 +8,6 @@ type TRawSecret = {
  secretComment?: string;
  version: number;
};

const createSecret = async (dto: { path: string; key: string; value: string; comment: string; type?: SecretType }) => {
  const createSecretReqBody = {
    workspaceId: seedData1.projectV3.id,

@@ -1075,7 +1075,7 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => {
      },
      body: createSecretReqBody
    });
    expect(createSecRes.statusCode).toBe(404);
    expect(createSecRes.statusCode).toBe(400);
  });

  test("Update secret raw", async () => {
@@ -1093,7 +1093,7 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => {
      },
      body: updateSecretReqBody
    });
    expect(updateSecRes.statusCode).toBe(404);
    expect(updateSecRes.statusCode).toBe(400);
  });

  test("Delete secret raw", async () => {
@@ -1110,6 +1110,6 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => {
      },
      body: deletedSecretReqBody
    });
    expect(deletedSecRes.statusCode).toBe(404);
    expect(deletedSecRes.statusCode).toBe(400);
  });
});
@@ -1,73 +0,0 @@
type TFolder = {
  id: string;
  name: string;
};

export const createFolder = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  name: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/folders`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      name: dto.name,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folder as TFolder;
};

export const deleteFolder = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  id: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "DELETE",
    url: `/api/v1/folders/${dto.id}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folder as TFolder;
};

export const listFolders = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "GET",
    url: `/api/v1/folders`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folders as TFolder[];
};
@ -1,93 +0,0 @@
|
||||
type TSecretImport = {
|
||||
id: string;
|
||||
importEnv: {
|
||||
name: string;
|
||||
slug: string;
|
||||
id: string;
|
||||
};
|
||||
importPath: string;
|
||||
};
|
||||
|
||||
export const createSecretImport = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
isReplication?: boolean;
|
||||
secretPath: string;
|
||||
importPath: string;
|
||||
importEnv: string;
|
||||
authToken: string;
|
||||
}) => {
|
||||
const res = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v1/secret-imports`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
isReplication: dto.isReplication,
|
||||
path: dto.secretPath,
|
||||
import: {
|
||||
environment: dto.importEnv,
|
||||
path: dto.importPath
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("secretImport");
|
||||
return payload.secretImport as TSecretImport;
|
||||
};
|
||||
|
||||
export const deleteSecretImport = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
secretPath: string;
|
||||
authToken: string;
|
||||
id: string;
|
||||
}) => {
|
||||
const res = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v1/secret-imports/${dto.id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
path: dto.secretPath
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("secretImport");
|
||||
return payload.secretImport as TSecretImport;
|
||||
};
|
||||
|
||||
export const listSecretImport = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
secretPath: string;
|
||||
authToken: string;
|
||||
}) => {
|
||||
const res = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/secret-imports`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
query: {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
path: dto.secretPath
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("secretImports");
|
||||
return payload.secretImports as TSecretImport[];
|
||||
};
|
@ -1,128 +0,0 @@
|
||||
import { SecretType } from "@app/db/schemas";
|
||||
|
||||
type TRawSecret = {
|
||||
secretKey: string;
|
||||
secretValue: string;
|
||||
secretComment?: string;
|
||||
version: number;
|
||||
};
|
||||
|
||||
export const createSecretV2 = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
secretPath: string;
|
||||
key: string;
|
||||
value: string;
|
||||
comment?: string;
|
||||
authToken: string;
|
||||
type?: SecretType;
|
||||
}) => {
|
||||
const createSecretReqBody = {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
type: dto.type || SecretType.Shared,
|
||||
secretPath: dto.secretPath,
|
||||
secretKey: dto.key,
|
||||
secretValue: dto.value,
|
||||
secretComment: dto.comment
|
||||
};
|
||||
const createSecRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v3/secrets/raw/${dto.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
body: createSecretReqBody
|
||||
});
|
||||
expect(createSecRes.statusCode).toBe(200);
|
||||
const createdSecretPayload = JSON.parse(createSecRes.payload);
|
||||
expect(createdSecretPayload).toHaveProperty("secret");
|
||||
return createdSecretPayload.secret as TRawSecret;
|
||||
};
|
||||
|
||||
export const deleteSecretV2 = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
secretPath: string;
|
||||
key: string;
|
||||
authToken: string;
|
||||
}) => {
|
||||
const deleteSecRes = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v3/secrets/raw/${dto.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
secretPath: dto.secretPath
|
||||
}
|
||||
});
|
||||
expect(deleteSecRes.statusCode).toBe(200);
|
||||
const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
|
||||
expect(updatedSecretPayload).toHaveProperty("secret");
|
||||
return updatedSecretPayload.secret as TRawSecret;
|
||||
};
|
||||
|
||||
export const getSecretByNameV2 = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
secretPath: string;
|
||||
key: string;
|
||||
authToken: string;
|
||||
}) => {
|
||||
const response = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v3/secrets/raw/${dto.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
query: {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
secretPath: dto.secretPath,
|
||||
expandSecretReferences: "true",
|
||||
include_imports: "true"
|
||||
}
|
||||
});
|
||||
expect(response.statusCode).toBe(200);
|
||||
const payload = JSON.parse(response.payload);
|
||||
expect(payload).toHaveProperty("secret");
|
||||
return payload.secret as TRawSecret;
|
||||
};
|
||||
|
||||
export const getSecretsV2 = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
secretPath: string;
|
||||
authToken: string;
|
||||
}) => {
|
||||
const getSecretsResponse = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v3/secrets/raw`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
query: {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
secretPath: dto.secretPath,
|
||||
expandSecretReferences: "true",
|
||||
include_imports: "true"
|
||||
}
|
||||
});
|
||||
expect(getSecretsResponse.statusCode).toBe(200);
|
||||
const getSecretsPayload = JSON.parse(getSecretsResponse.payload);
|
||||
expect(getSecretsPayload).toHaveProperty("secrets");
|
||||
expect(getSecretsPayload).toHaveProperty("imports");
|
||||
return getSecretsPayload as {
|
||||
secrets: TRawSecret[];
|
||||
imports: {
|
||||
secretPath: string;
|
||||
environment: string;
|
||||
folderId: string;
|
||||
secrets: TRawSecret[];
|
||||
}[];
|
||||
};
|
||||
};
|
@@ -11,11 +11,10 @@ import { initLogger } from "@app/lib/logger";
import { main } from "@app/server/app";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";

import { mockQueue } from "./mocks/queue";
import { mockSmtpServer } from "./mocks/smtp";
import { mockKeyStore } from "./mocks/keystore";
import { initDbConnection } from "@app/db";
import { queueServiceFactory } from "@app/queue";
import { keyStoreFactory } from "@app/keystore/keystore";
import { Redis } from "ioredis";

dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
@@ -29,31 +28,19 @@ export default {
      dbRootCert: cfg.DB_ROOT_CERT
    });

    const redis = new Redis(cfg.REDIS_URL);
    await redis.flushdb("SYNC");

    try {
      await db.migrate.rollback(
        {
          directory: path.join(__dirname, "../src/db/migrations"),
          extension: "ts",
          tableName: "infisical_migrations"
        },
        true
      );
      await db.migrate.latest({
        directory: path.join(__dirname, "../src/db/migrations"),
        extension: "ts",
        tableName: "infisical_migrations"
      });

      await db.seed.run({
        directory: path.join(__dirname, "../src/db/seeds"),
        extension: "ts"
      });
      const smtp = mockSmtpServer();
      const queue = queueServiceFactory(cfg.REDIS_URL);
      const keyStore = keyStoreFactory(cfg.REDIS_URL);
      const queue = mockQueue();
      const keyStore = mockKeyStore();
      const server = await main({ db, smtp, logger, queue, keyStore });
      // @ts-expect-error type
      globalThis.testServer = server;
@@ -71,12 +58,10 @@ export default {
        { expiresIn: cfg.JWT_AUTH_LIFETIME }
      );
    } catch (error) {
      // eslint-disable-next-line
      console.log("[TEST] Error setting up environment", error);
      await db.destroy();
      throw error;
    }

    // custom setup
    return {
      async teardown() {
@@ -95,9 +80,6 @@ export default {
          },
          true
        );

        await redis.flushdb("ASYNC");
        redis.disconnect();
        await db.destroy();
      }
    };
backend/package-lock.json (generated): 1817 changes. File diff suppressed because it is too large.
@ -45,20 +45,12 @@
|
||||
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
|
||||
"generate:component": "tsx ./scripts/create-backend-file.ts",
|
||||
"generate:schema": "tsx ./scripts/generate-schema-types.ts",
|
||||
"auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest",
|
||||
"auditlog-migration:up": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:up",
|
||||
"auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
|
||||
"auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
|
||||
"auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
|
||||
"auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
|
||||
"migration:new": "tsx ./scripts/create-migration.ts",
|
||||
"migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
|
||||
"migration:down": "npm run auditlog-migration:down && knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
|
||||
"migration:list": "npm run auditlog-migration:list && knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
|
||||
"migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
|
||||
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
|
||||
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
|
||||
"migrate:org": "tsx ./scripts/migrate-organization.ts",
|
||||
"migration:up": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
|
||||
"migration:down": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
|
||||
"migration:list": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
|
||||
"migration:latest": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
|
||||
"migration:rollback": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
|
||||
"seed:new": "tsx ./scripts/create-seed-file.ts",
|
||||
"seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
|
||||
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
|
||||
@ -87,7 +79,6 @@
|
||||
"@types/prompt-sync": "^4.2.3",
|
||||
"@types/resolve": "^1.20.6",
|
||||
"@types/safe-regex": "^1.1.6",
|
||||
"@types/sjcl": "^1.0.34",
|
||||
"@types/uuid": "^9.0.7",
|
||||
"@typescript-eslint/eslint-plugin": "^6.20.0",
|
||||
"@typescript-eslint/parser": "^6.20.0",
|
||||
@ -111,6 +102,7 @@
|
||||
"tsup": "^8.0.1",
|
||||
"tsx": "^4.4.0",
|
||||
"typescript": "^5.3.2",
|
||||
"vite-tsconfig-paths": "^4.2.2",
|
||||
"vitest": "^1.2.2"
|
||||
},
|
||||
"dependencies": {
|
||||
@ -120,20 +112,17 @@
|
||||
"@aws-sdk/client-secrets-manager": "^3.504.0",
|
||||
"@aws-sdk/client-sts": "^3.600.0",
|
||||
"@casl/ability": "^6.5.0",
|
||||
"@elastic/elasticsearch": "^8.15.0",
|
||||
"@fastify/cookie": "^9.3.1",
|
||||
"@fastify/cors": "^8.5.0",
|
||||
"@fastify/etag": "^5.1.0",
|
||||
"@fastify/formbody": "^7.4.0",
|
||||
"@fastify/helmet": "^11.1.1",
|
||||
"@fastify/multipart": "8.3.0",
|
||||
"@fastify/passport": "^2.4.0",
|
||||
"@fastify/rate-limit": "^9.0.0",
|
||||
"@fastify/session": "^10.7.0",
|
||||
"@fastify/swagger": "^8.14.0",
|
||||
"@fastify/swagger-ui": "^2.1.0",
|
||||
"@node-saml/passport-saml": "^4.0.4",
|
||||
"@octokit/auth-app": "^7.1.1",
|
||||
"@octokit/plugin-retry": "^5.0.5",
|
||||
"@octokit/rest": "^20.0.2",
|
||||
"@octokit/webhooks-types": "^7.3.1",
|
||||
@ -141,8 +130,6 @@
|
||||
"@peculiar/x509": "^1.12.1",
|
||||
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
|
||||
"@sindresorhus/slugify": "1.1.0",
|
||||
"@slack/oauth": "^3.0.1",
|
||||
"@slack/web-api": "^7.3.4",
|
||||
"@team-plain/typescript-sdk": "^4.6.1",
|
||||
"@ucast/mongo2js": "^1.3.4",
|
||||
"ajv": "^8.12.0",
|
||||
@ -168,10 +155,8 @@
|
||||
"jwks-rsa": "^3.1.0",
|
||||
"knex": "^3.0.1",
|
||||
"ldapjs": "^3.0.7",
|
||||
"ldif": "0.5.1",
|
||||
"libsodium-wrappers": "^0.7.13",
|
||||
"lodash.isequal": "^4.5.0",
|
||||
"mongodb": "^6.8.1",
|
||||
"ms": "^2.1.3",
|
||||
"mysql2": "^3.9.8",
|
||||
"nanoid": "^3.3.4",
|
||||
@ -189,11 +174,8 @@
|
||||
"pino": "^8.16.2",
|
||||
"pkijs": "^3.2.4",
|
||||
"posthog-node": "^3.6.2",
|
||||
"probot": "^13.3.8",
|
||||
"probot": "^13.0.0",
|
||||
"safe-regex": "^2.1.1",
|
||||
"scim-patch": "^0.8.3",
|
||||
"scim2-parse-filter": "^0.2.10",
|
||||
"sjcl": "^1.0.8",
|
||||
"smee-client": "^2.0.0",
|
||||
"tedious": "^18.2.1",
|
||||
"tweetnacl": "^1.0.3",
|
||||
|
@@ -90,12 +90,7 @@ const main = async () => {
      .whereRaw("table_schema = current_schema()")
      .select<{ tableName: string }[]>("table_name as tableName")
      .orderBy("table_name")
  ).filter(
    (el) =>
      !el.tableName.includes("_migrations") &&
      !el.tableName.includes("audit_logs_") &&
      el.tableName !== "intermediate_audit_logs"
  );
  ).filter((el) => !el.tableName.includes("_migrations"));

  for (let i = 0; i < tables.length; i += 1) {
    const { tableName } = tables[i];
@ -1,84 +0,0 @@
|
||||
/* eslint-disable */
|
||||
import promptSync from "prompt-sync";
|
||||
import { execSync } from "child_process";
|
||||
import path from "path";
|
||||
import { existsSync } from "fs";
|
||||
|
||||
const prompt = promptSync({
|
||||
sigint: true
|
||||
});
|
||||
|
||||
const exportDb = () => {
|
||||
const exportHost = prompt("Enter your Postgres Host to migrate from: ");
|
||||
const exportPort = prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432";
|
||||
const exportUser = prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical";
|
||||
const exportPassword = prompt("Enter your Postgres Password to migrate from: ");
|
||||
const exportDatabase = prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical";
|
||||
|
||||
// we do not include the audit_log and secret_sharing entries
|
||||
execSync(
|
||||
`PGDATABASE="${exportDatabase}" PGPASSWORD="${exportPassword}" PGHOST="${exportHost}" PGPORT=${exportPort} PGUSER=${exportUser} pg_dump infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
|
||||
__dirname,
|
||||
"../src/db/dump.sql"
|
||||
)}`,
|
||||
{ stdio: "inherit" }
|
||||
);
|
||||
};
|
||||
|
||||
const importDbForOrg = () => {
|
||||
const importHost = prompt("Enter your Postgres Host to migrate to: ");
|
||||
const importPort = prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432";
|
||||
const importUser = prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical";
|
||||
const importPassword = prompt("Enter your Postgres Password to migrate to: ");
|
||||
const importDatabase = prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical";
|
||||
const orgId = prompt("Enter the organization ID to migrate: ");
|
||||
|
||||
if (!existsSync(path.join(__dirname, "../src/db/dump.sql"))) {
|
||||
console.log("File not found, please export the database first.");
|
||||
return;
|
||||
}
|
||||
|
||||
execSync(
|
||||
`PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -f ${path.join(
|
||||
__dirname,
|
||||
"../src/db/dump.sql"
|
||||
)}`
|
||||
);
|
||||
|
||||
execSync(
|
||||
`PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
|
||||
);
|
||||
|
||||
// delete global/instance-level resources not relevant to the organization to migrate
|
||||
// users
|
||||
execSync(
|
||||
`PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
|
||||
);
|
||||
|
||||
// identities
|
||||
execSync(
|
||||
`PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
|
||||
);
|
||||
|
||||
// reset slack configuration in superAdmin
|
||||
execSync(
|
||||
`PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
|
||||
);
|
||||
|
||||
console.log("Organization migrated successfully.");
|
||||
};
|
||||
|
||||
const main = () => {
|
||||
const action = prompt(
|
||||
"Enter the action to perform\n 1. Export from existing instance.\n 2. Import org to instance.\n \n Action: "
|
||||
);
|
||||
if (action === "1") {
|
||||
exportDb();
|
||||
} else if (action === "2") {
|
||||
importDbForOrg();
|
||||
} else {
|
||||
console.log("Invalid action");
|
||||
}
|
||||
};
|
||||
|
||||
main();
|
backend/src/@types/fastify.d.ts (vendored): 10 changes
@@ -38,9 +38,6 @@ import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-se
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
import { TExternalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
@@ -73,14 +70,12 @@ import { TSecretReplicationServiceFactory } from "@app/services/secret-replicati
import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
import { TSlackServiceFactory } from "@app/services/slack/slack-service";
import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TUserServiceFactory } from "@app/services/user/user-service";
import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";

declare module "fastify" {
  interface FastifyRequest {
@@ -182,11 +177,6 @@ declare module "fastify" {
    userEngagement: TUserEngagementServiceFactory;
    externalKms: TExternalKmsServiceFactory;
    orgAdmin: TOrgAdminServiceFactory;
    slack: TSlackServiceFactory;
    workflowIntegration: TWorkflowIntegrationServiceFactory;
    cmek: TCmekServiceFactory;
    migration: TExternalMigrationServiceFactory;
    externalGroupOrgRoleMapping: TExternalGroupOrgRoleMappingServiceFactory;
  };
  // this is exclusive use for middlewares in which we need to inject data
  // everywhere else access using service layer
backend/src/@types/knex.d.ts (vendored): 44 changes
@ -101,9 +101,6 @@ import {
|
||||
TIdentityKubernetesAuths,
|
||||
TIdentityKubernetesAuthsInsert,
|
||||
TIdentityKubernetesAuthsUpdate,
|
||||
TIdentityMetadata,
|
||||
TIdentityMetadataInsert,
|
||||
TIdentityMetadataUpdate,
|
||||
TIdentityOidcAuths,
|
||||
TIdentityOidcAuthsInsert,
|
||||
TIdentityOidcAuthsUpdate,
|
||||
@ -196,9 +193,6 @@ import {
|
||||
TProjectRolesUpdate,
|
||||
TProjects,
|
||||
TProjectsInsert,
|
||||
TProjectSlackConfigs,
|
||||
TProjectSlackConfigsInsert,
|
||||
TProjectSlackConfigsUpdate,
|
||||
TProjectsUpdate,
|
||||
TProjectUserAdditionalPrivilege,
|
||||
TProjectUserAdditionalPrivilegeInsert,
|
||||
@ -305,9 +299,6 @@ import {
|
||||
TServiceTokens,
|
||||
TServiceTokensInsert,
|
||||
TServiceTokensUpdate,
|
||||
TSlackIntegrations,
|
||||
TSlackIntegrationsInsert,
|
||||
TSlackIntegrationsUpdate,
|
||||
TSuperAdmin,
|
||||
TSuperAdminInsert,
|
||||
TSuperAdminUpdate,
|
||||
@ -331,16 +322,8 @@ import {
|
||||
TUsersUpdate,
|
||||
TWebhooks,
|
||||
TWebhooksInsert,
|
||||
TWebhooksUpdate,
|
||||
TWorkflowIntegrations,
|
||||
TWorkflowIntegrationsInsert,
|
||||
TWorkflowIntegrationsUpdate
|
||||
TWebhooksUpdate
|
||||
} from "@app/db/schemas";
|
||||
import {
|
||||
TExternalGroupOrgRoleMappings,
|
||||
TExternalGroupOrgRoleMappingsInsert,
|
||||
TExternalGroupOrgRoleMappingsUpdate
|
||||
} from "@app/db/schemas/external-group-org-role-mappings";
|
||||
import {
|
||||
TSecretV2TagJunction,
|
||||
TSecretV2TagJunctionInsert,
|
||||
@ -554,11 +537,6 @@ declare module "knex/types/tables" {
|
||||
TIdentityUniversalAuthsInsert,
|
||||
TIdentityUniversalAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityMetadata]: KnexOriginal.CompositeTableType<
|
||||
TIdentityMetadata,
|
||||
TIdentityMetadataInsert,
|
||||
TIdentityMetadataUpdate
|
||||
>;
|
||||
[TableName.IdentityKubernetesAuth]: KnexOriginal.CompositeTableType<
|
||||
TIdentityKubernetesAuths,
|
||||
TIdentityKubernetesAuthsInsert,
|
||||
@ -798,25 +776,5 @@ declare module "knex/types/tables" {
|
||||
TKmsKeyVersionsInsert,
|
||||
TKmsKeyVersionsUpdate
|
||||
>;
|
||||
[TableName.SlackIntegrations]: KnexOriginal.CompositeTableType<
|
||||
TSlackIntegrations,
|
||||
TSlackIntegrationsInsert,
|
||||
TSlackIntegrationsUpdate
|
||||
>;
|
||||
[TableName.ProjectSlackConfigs]: KnexOriginal.CompositeTableType<
|
||||
TProjectSlackConfigs,
|
||||
TProjectSlackConfigsInsert,
|
||||
TProjectSlackConfigsUpdate
|
||||
>;
|
||||
[TableName.WorkflowIntegrations]: KnexOriginal.CompositeTableType<
|
||||
TWorkflowIntegrations,
|
||||
TWorkflowIntegrationsInsert,
|
||||
TWorkflowIntegrationsUpdate
|
||||
>;
|
||||
[TableName.ExternalGroupOrgRoleMapping]: KnexOriginal.CompositeTableType<
|
||||
TExternalGroupOrgRoleMappings,
|
||||
TExternalGroupOrgRoleMappingsInsert,
|
||||
TExternalGroupOrgRoleMappingsUpdate
|
||||
>;
|
||||
}
|
||||
}
|
||||
|
backend/src/@types/ldif.d.ts (vendored): 4 changes
@@ -1,4 +0,0 @@
declare module "ldif" {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, the function returns `any`.
  function parse(input: string, ...args: any[]): any;
}
@ -1,75 +0,0 @@
|
||||
// eslint-disable-next-line
|
||||
import "ts-node/register";
|
||||
|
||||
import dotenv from "dotenv";
|
||||
import type { Knex } from "knex";
|
||||
import path from "path";
|
||||
|
||||
// Update with your config settings.
|
||||
dotenv.config({
|
||||
path: path.join(__dirname, "../../../.env.migration")
|
||||
});
|
||||
dotenv.config({
|
||||
path: path.join(__dirname, "../../../.env")
|
||||
});
|
||||
|
||||
if (!process.env.AUDIT_LOGS_DB_CONNECTION_URI && !process.env.AUDIT_LOGS_DB_HOST) {
|
||||
console.info("Dedicated audit log database not found. No further migrations necessary");
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
console.info("Executing migration on audit log database...");
|
||||
|
||||
export default {
|
||||
development: {
|
||||
client: "postgres",
|
||||
connection: {
|
||||
connectionString: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
|
||||
host: process.env.AUDIT_LOGS_DB_HOST,
|
||||
port: process.env.AUDIT_LOGS_DB_PORT,
|
||||
user: process.env.AUDIT_LOGS_DB_USER,
|
||||
database: process.env.AUDIT_LOGS_DB_NAME,
|
||||
password: process.env.AUDIT_LOGS_DB_PASSWORD,
|
||||
ssl: process.env.AUDIT_LOGS_DB_ROOT_CERT
|
||||
? {
|
||||
rejectUnauthorized: true,
|
||||
ca: Buffer.from(process.env.AUDIT_LOGS_DB_ROOT_CERT, "base64").toString("ascii")
|
||||
}
|
||||
: false
|
||||
},
|
||||
pool: {
|
||||
min: 2,
|
||||
max: 10
|
||||
},
|
||||
seeds: {
|
||||
directory: "./seeds"
|
||||
},
|
||||
migrations: {
|
||||
tableName: "infisical_migrations"
|
||||
}
|
||||
},
|
||||
production: {
|
||||
client: "postgres",
|
||||
connection: {
|
||||
connectionString: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
|
||||
host: process.env.AUDIT_LOGS_DB_HOST,
|
||||
port: process.env.AUDIT_LOGS_DB_PORT,
|
||||
user: process.env.AUDIT_LOGS_DB_USER,
|
||||
database: process.env.AUDIT_LOGS_DB_NAME,
|
||||
password: process.env.AUDIT_LOGS_DB_PASSWORD,
|
||||
ssl: process.env.AUDIT_LOGS_DB_ROOT_CERT
|
||||
? {
|
||||
rejectUnauthorized: true,
|
||||
ca: Buffer.from(process.env.AUDIT_LOGS_DB_ROOT_CERT, "base64").toString("ascii")
|
||||
}
|
||||
: false
|
||||
},
|
||||
pool: {
|
||||
min: 2,
|
||||
max: 10
|
||||
},
|
||||
migrations: {
|
||||
tableName: "infisical_migrations"
|
||||
}
|
||||
}
|
||||
} as Knex.Config;
|
@@ -1,2 +1,2 @@
export type { TDbClient } from "./instance";
export { initAuditLogDbConnection, initDbConnection } from "./instance";
export { initDbConnection } from "./instance";
@ -70,45 +70,3 @@ export const initDbConnection = ({
|
||||
|
||||
return db;
|
||||
};
|
||||
|
||||
export const initAuditLogDbConnection = ({
|
||||
dbConnectionUri,
|
||||
dbRootCert
|
||||
}: {
|
||||
dbConnectionUri: string;
|
||||
dbRootCert?: string;
|
||||
}) => {
|
||||
// akhilmhdh: the default Knex is knex.Knex<any, any[]>. but when assigned with knex({<config>}) the value is knex.Knex<any, unknown[]>
|
||||
// this was causing issue with files like `snapshot-dal` `findRecursivelySnapshots` this i am explicitly putting the any and unknown[]
|
||||
// eslint-disable-next-line
|
||||
const db: Knex<any, unknown[]> = knex({
|
||||
client: "pg",
|
||||
connection: {
|
||||
connectionString: dbConnectionUri,
|
||||
host: process.env.AUDIT_LOGS_DB_HOST,
|
||||
// @ts-expect-error I have no clue why only for the port there is a type error
|
||||
// eslint-disable-next-line
|
||||
port: process.env.AUDIT_LOGS_DB_PORT,
|
||||
user: process.env.AUDIT_LOGS_DB_USER,
|
||||
database: process.env.AUDIT_LOGS_DB_NAME,
|
||||
password: process.env.AUDIT_LOGS_DB_PASSWORD,
|
||||
ssl: dbRootCert
|
||||
? {
|
||||
rejectUnauthorized: true,
|
||||
ca: Buffer.from(dbRootCert, "base64").toString("ascii")
|
||||
}
|
||||
: false
|
||||
}
|
||||
});
|
||||
|
||||
// we add these overrides so that auditLogDb and the primary DB are interchangeable
|
||||
db.primaryNode = () => {
|
||||
return db;
|
||||
};
|
||||
|
||||
db.replicaNode = () => {
|
||||
return db;
|
||||
};
|
||||
|
||||
return db;
|
||||
};
|
||||
|
@ -1,161 +0,0 @@
|
||||
import kx, { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
const INTERMEDIATE_AUDIT_LOG_TABLE = "intermediate_audit_logs";
|
||||
|
||||
const formatPartitionDate = (date: Date) => {
|
||||
const year = date.getFullYear();
|
||||
const month = String(date.getMonth() + 1).padStart(2, "0");
|
||||
const day = String(date.getDate()).padStart(2, "0");
|
||||
|
||||
return `${year}-${month}-${day}`;
|
||||
};
|
||||
|
||||
const createAuditLogPartition = async (knex: Knex, startDate: Date, endDate: Date) => {
|
||||
const startDateStr = formatPartitionDate(startDate);
|
||||
const endDateStr = formatPartitionDate(endDate);
|
||||
|
||||
const partitionName = `${TableName.AuditLog}_${startDateStr.replace(/-/g, "")}_${endDateStr.replace(/-/g, "")}`;
|
||||
|
||||
await knex.schema.raw(
|
||||
`CREATE TABLE ${partitionName} PARTITION OF ${TableName.AuditLog} FOR VALUES FROM ('${startDateStr}') TO ('${endDateStr}')`
|
||||
);
|
||||
};
|
||||
|
||||
const up = async (knex: Knex): Promise<void> => {
|
||||
console.info("Dropping primary key of audit log table...");
|
||||
await knex.schema.alterTable(TableName.AuditLog, (t) => {
|
||||
// remove existing keys
|
||||
t.dropPrimary();
|
||||
});
|
||||
|
||||
// Get all indices of the audit log table and drop them
|
||||
const indexNames: { rows: { indexname: string }[] } = await knex.raw(
|
||||
`
|
||||
SELECT indexname
|
||||
FROM pg_indexes
|
||||
WHERE tablename = '${TableName.AuditLog}'
|
||||
`
|
||||
);
|
||||
|
||||
console.log(
|
||||
"Deleting existing audit log indices:",
|
||||
indexNames.rows.map((e) => e.indexname)
|
||||
);
|
||||
|
||||
for await (const row of indexNames.rows) {
|
||||
await knex.raw(`DROP INDEX IF EXISTS ${row.indexname}`);
|
||||
}
|
||||
|
||||
// renaming audit log to intermediate table
|
||||
console.log("Renaming audit log table to the intermediate name");
|
||||
await knex.schema.renameTable(TableName.AuditLog, INTERMEDIATE_AUDIT_LOG_TABLE);
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.AuditLog))) {
|
||||
const createTableSql = knex.schema
|
||||
.createTable(TableName.AuditLog, (t) => {
|
||||
t.uuid("id").defaultTo(knex.fn.uuid());
|
||||
t.string("actor").notNullable();
|
||||
t.jsonb("actorMetadata").notNullable();
|
||||
t.string("ipAddress");
|
||||
t.string("eventType").notNullable();
|
||||
t.jsonb("eventMetadata");
|
||||
t.string("userAgent");
|
||||
t.string("userAgentType");
|
||||
t.datetime("expiresAt");
|
||||
t.timestamps(true, true, true);
|
||||
t.uuid("orgId");
|
||||
t.string("projectId");
|
||||
t.string("projectName");
|
||||
t.primary(["id", "createdAt"]);
|
||||
})
|
||||
.toString();
|
||||
|
||||
console.info("Creating partition table...");
|
||||
await knex.schema.raw(`
|
||||
${createTableSql} PARTITION BY RANGE ("createdAt");
|
||||
`);
|
||||
|
||||
console.log("Adding indices...");
|
||||
await knex.schema.alterTable(TableName.AuditLog, (t) => {
|
||||
t.index(["projectId", "createdAt"]);
|
||||
t.index(["orgId", "createdAt"]);
|
||||
t.index("expiresAt");
|
||||
t.index("orgId");
|
||||
t.index("projectId");
|
||||
});
|
||||
|
||||
console.log("Adding GIN indices...");
|
||||
|
||||
await knex.raw(
|
||||
`CREATE INDEX IF NOT EXISTS "audit_logs_actorMetadata_idx" ON ${TableName.AuditLog} USING gin("actorMetadata" jsonb_path_ops)`
|
||||
);
|
||||
console.log("GIN index for actorMetadata done");
|
||||
|
||||
await knex.raw(
|
||||
`CREATE INDEX IF NOT EXISTS "audit_logs_eventMetadata_idx" ON ${TableName.AuditLog} USING gin("eventMetadata" jsonb_path_ops)`
|
||||
);
|
||||
console.log("GIN index for eventMetadata done");
|
||||
|
||||
// create default partition
|
||||
console.log("Creating default partition...");
|
||||
await knex.schema.raw(`CREATE TABLE ${TableName.AuditLog}_default PARTITION OF ${TableName.AuditLog} DEFAULT`);
|
||||
|
||||
const nextDate = new Date();
|
||||
nextDate.setDate(nextDate.getDate() + 1);
|
||||
const nextDateStr = formatPartitionDate(nextDate);
|
||||
|
||||
console.log("Attaching existing audit log table as a partition...");
|
||||
await knex.schema.raw(`
|
||||
ALTER TABLE ${INTERMEDIATE_AUDIT_LOG_TABLE} ADD CONSTRAINT audit_log_old
|
||||
CHECK ( "createdAt" < DATE '${nextDateStr}' );
|
||||
|
||||
ALTER TABLE ${TableName.AuditLog} ATTACH PARTITION ${INTERMEDIATE_AUDIT_LOG_TABLE}
|
||||
FOR VALUES FROM (MINVALUE) TO ('${nextDateStr}' );
|
||||
`);
|
||||
|
||||
// create partition from now until end of month
|
||||
console.log("Creating audit log partitions ahead of time... next date:", nextDateStr);
|
||||
await createAuditLogPartition(knex, nextDate, new Date(nextDate.getFullYear(), nextDate.getMonth() + 1));
|
||||
|
||||
// create partitions 4 years ahead
|
||||
const partitionMonths = 4 * 12;
|
||||
const partitionPromises: Promise<void>[] = [];
|
||||
for (let x = 1; x <= partitionMonths; x += 1) {
|
||||
partitionPromises.push(
|
||||
createAuditLogPartition(
|
||||
knex,
|
||||
new Date(nextDate.getFullYear(), nextDate.getMonth() + x, 1),
|
||||
new Date(nextDate.getFullYear(), nextDate.getMonth() + (x + 1), 1)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
await Promise.all(partitionPromises);
|
||||
console.log("Partition migration complete");
|
||||
}
|
||||
};
|
||||
|
||||
export const executeMigration = async (url: string) => {
|
||||
console.log("Executing migration...");
|
||||
const knex = kx({
|
||||
client: "pg",
|
||||
connection: url
|
||||
});
|
||||
|
||||
await knex.transaction(async (tx) => {
|
||||
await up(tx);
|
||||
});
|
||||
};
|
||||
|
||||
const dbUrl = process.env.AUDIT_LOGS_DB_CONNECTION_URI;
|
||||
if (!dbUrl) {
|
||||
console.error("Please provide a DB connection URL to the AUDIT_LOGS_DB_CONNECTION_URI env");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
void executeMigration(dbUrl).then(() => {
|
||||
console.log("Migration: partition-audit-logs DONE");
|
||||
process.exit(0);
|
||||
});
|
@ -115,14 +115,7 @@ export async function down(knex: Knex): Promise<void> {
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
approverId: knex(TableName.ProjectMembership)
|
||||
.join(
|
||||
TableName.SecretApprovalPolicy,
|
||||
`${TableName.SecretApprovalPolicy}.id`,
|
||||
`${TableName.SecretApprovalPolicyApprover}.policyId`
|
||||
)
|
||||
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
|
||||
.select(knex.ref("id").withSchema(TableName.ProjectMembership))
|
||||
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
|
||||
.select("id")
|
||||
.where("userId", knex.raw("??", [`${TableName.SecretApprovalPolicyApprover}.approverUserId`]))
|
||||
});
|
||||
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
|
||||
@ -154,27 +147,13 @@ export async function down(knex: Knex): Promise<void> {
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
committerId: knex(TableName.ProjectMembership)
|
||||
.join(
|
||||
TableName.SecretApprovalPolicy,
|
||||
`${TableName.SecretApprovalPolicy}.id`,
|
||||
`${TableName.SecretApprovalRequest}.policyId`
|
||||
)
|
||||
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
|
||||
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
|
||||
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`]))
|
||||
.select(knex.ref("id").withSchema(TableName.ProjectMembership)),
|
||||
.select("id")
|
||||
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`])),
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
statusChangeBy: knex(TableName.ProjectMembership)
|
||||
.join(
|
||||
TableName.SecretApprovalPolicy,
|
||||
`${TableName.SecretApprovalPolicy}.id`,
|
||||
`${TableName.SecretApprovalRequest}.policyId`
|
||||
)
|
||||
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
|
||||
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
|
||||
.select("id")
|
||||
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.statusChangedByUserId`]))
|
||||
.select(knex.ref("id").withSchema(TableName.ProjectMembership))
|
||||
});
|
||||
|
||||
await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
|
||||
@ -198,20 +177,8 @@ export async function down(knex: Knex): Promise<void> {
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
member: knex(TableName.ProjectMembership)
|
||||
.join(
|
||||
TableName.SecretApprovalRequest,
|
||||
`${TableName.SecretApprovalRequest}.id`,
|
||||
`${TableName.SecretApprovalRequestReviewer}.requestId`
|
||||
)
|
||||
.join(
|
||||
TableName.SecretApprovalPolicy,
|
||||
`${TableName.SecretApprovalPolicy}.id`,
|
||||
`${TableName.SecretApprovalRequest}.policyId`
|
||||
)
|
||||
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
|
||||
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
|
||||
.select("id")
|
||||
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`]))
|
||||
.select(knex.ref("id").withSchema(TableName.ProjectMembership))
|
||||
});
|
||||
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => {
|
||||
tb.uuid("member").notNullable().alter();
|
||||
|
@ -1,25 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.SecretSharing)) {
|
||||
const doesPasswordExist = await knex.schema.hasColumn(TableName.SecretSharing, "password");
|
||||
if (!doesPasswordExist) {
|
||||
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
|
||||
t.string("password").nullable();
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.SecretSharing)) {
|
||||
const doesPasswordExist = await knex.schema.hasColumn(TableName.SecretSharing, "password");
|
||||
if (doesPasswordExist) {
|
||||
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
|
||||
t.dropColumn("password");
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
@ -1,96 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.WorkflowIntegrations))) {
|
||||
await knex.schema.createTable(TableName.WorkflowIntegrations, (tb) => {
|
||||
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
tb.string("integration").notNullable();
|
||||
tb.string("slug").notNullable();
|
||||
tb.uuid("orgId").notNullable();
|
||||
tb.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
|
||||
tb.string("description");
|
||||
tb.unique(["orgId", "slug"]);
|
||||
tb.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.WorkflowIntegrations);
|
||||
}
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.SlackIntegrations))) {
|
||||
await knex.schema.createTable(TableName.SlackIntegrations, (tb) => {
|
||||
tb.uuid("id", { primaryKey: true }).notNullable();
|
||||
tb.foreign("id").references("id").inTable(TableName.WorkflowIntegrations).onDelete("CASCADE");
|
||||
tb.string("teamId").notNullable();
|
||||
tb.string("teamName").notNullable();
|
||||
tb.string("slackUserId").notNullable();
|
||||
tb.string("slackAppId").notNullable();
|
||||
tb.binary("encryptedBotAccessToken").notNullable();
|
||||
tb.string("slackBotId").notNullable();
|
||||
tb.string("slackBotUserId").notNullable();
|
||||
tb.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.SlackIntegrations);
|
||||
}
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.ProjectSlackConfigs))) {
|
||||
await knex.schema.createTable(TableName.ProjectSlackConfigs, (tb) => {
|
||||
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
tb.string("projectId").notNullable().unique();
|
||||
tb.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
|
||||
tb.uuid("slackIntegrationId").notNullable();
|
||||
tb.foreign("slackIntegrationId").references("id").inTable(TableName.SlackIntegrations).onDelete("CASCADE");
|
||||
tb.boolean("isAccessRequestNotificationEnabled").notNullable().defaultTo(false);
|
||||
tb.string("accessRequestChannels").notNullable().defaultTo("");
|
||||
tb.boolean("isSecretRequestNotificationEnabled").notNullable().defaultTo(false);
|
||||
tb.string("secretRequestChannels").notNullable().defaultTo("");
|
||||
tb.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.ProjectSlackConfigs);
|
||||
}
|
||||
|
||||
const doesSuperAdminHaveSlackClientId = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedSlackClientId");
|
||||
const doesSuperAdminHaveSlackClientSecret = await knex.schema.hasColumn(
|
||||
TableName.SuperAdmin,
|
||||
"encryptedSlackClientSecret"
|
||||
);
|
||||
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
|
||||
if (!doesSuperAdminHaveSlackClientId) {
|
||||
tb.binary("encryptedSlackClientId");
|
||||
}
|
||||
if (!doesSuperAdminHaveSlackClientSecret) {
|
||||
tb.binary("encryptedSlackClientSecret");
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.dropTableIfExists(TableName.ProjectSlackConfigs);
|
||||
await dropOnUpdateTrigger(knex, TableName.ProjectSlackConfigs);
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.SlackIntegrations);
|
||||
await dropOnUpdateTrigger(knex, TableName.SlackIntegrations);
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.WorkflowIntegrations);
|
||||
await dropOnUpdateTrigger(knex, TableName.WorkflowIntegrations);
|
||||
|
||||
const doesSuperAdminHaveSlackClientId = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedSlackClientId");
|
||||
const doesSuperAdminHaveSlackClientSecret = await knex.schema.hasColumn(
|
||||
TableName.SuperAdmin,
|
||||
"encryptedSlackClientSecret"
|
||||
);
|
||||
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
|
||||
if (doesSuperAdminHaveSlackClientId) {
|
||||
tb.dropColumn("encryptedSlackClientId");
|
||||
}
|
||||
if (doesSuperAdminHaveSlackClientSecret) {
|
||||
tb.dropColumn("encryptedSlackClientSecret");
|
||||
}
|
||||
});
|
||||
}
|
@ -1,25 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
|
||||
const hasRequireTemplateForIssuanceColumn = await knex.schema.hasColumn(
|
||||
TableName.CertificateAuthority,
|
||||
"requireTemplateForIssuance"
|
||||
);
|
||||
if (!hasRequireTemplateForIssuanceColumn) {
|
||||
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
|
||||
t.boolean("requireTemplateForIssuance").notNullable().defaultTo(false);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
|
||||
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
|
||||
t.dropColumn("requireTemplateForIssuance");
|
||||
});
|
||||
}
|
||||
}
|
@ -1,85 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { CertKeyUsage } from "@app/services/certificate/certificate-types";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
// Certificate template
|
||||
const hasKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "keyUsages");
|
||||
const hasExtendedKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "extendedKeyUsages");
|
||||
|
||||
await knex.schema.alterTable(TableName.CertificateTemplate, (tb) => {
|
||||
if (!hasKeyUsagesCol) {
|
||||
tb.specificType("keyUsages", "text[]");
|
||||
}
|
||||
|
||||
if (!hasExtendedKeyUsagesCol) {
|
||||
tb.specificType("extendedKeyUsages", "text[]");
|
||||
}
|
||||
});
|
||||
|
||||
if (!hasKeyUsagesCol) {
|
||||
await knex(TableName.CertificateTemplate).update({
|
||||
keyUsages: [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT]
|
||||
});
|
||||
}
|
||||
|
||||
if (!hasExtendedKeyUsagesCol) {
|
||||
await knex(TableName.CertificateTemplate).update({
|
||||
extendedKeyUsages: []
|
||||
});
|
||||
}
|
||||
|
||||
// Certificate
|
||||
const doesCertTableHaveKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "keyUsages");
|
||||
const doesCertTableHaveExtendedKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "extendedKeyUsages");
|
||||
await knex.schema.alterTable(TableName.Certificate, (tb) => {
|
||||
if (!doesCertTableHaveKeyUsages) {
|
||||
tb.specificType("keyUsages", "text[]");
|
||||
}
|
||||
|
||||
if (!doesCertTableHaveExtendedKeyUsages) {
|
||||
tb.specificType("extendedKeyUsages", "text[]");
|
||||
}
|
||||
});
|
||||
|
||||
if (!doesCertTableHaveKeyUsages) {
|
||||
await knex(TableName.Certificate).update({
|
||||
keyUsages: [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT]
|
||||
});
|
||||
}
|
||||
|
||||
if (!doesCertTableHaveExtendedKeyUsages) {
|
||||
await knex(TableName.Certificate).update({
|
||||
extendedKeyUsages: []
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
// Certificate Template
|
||||
const hasKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "keyUsages");
|
||||
const hasExtendedKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "extendedKeyUsages");
|
||||
|
||||
await knex.schema.alterTable(TableName.CertificateTemplate, (t) => {
|
||||
if (hasKeyUsagesCol) {
|
||||
t.dropColumn("keyUsages");
|
||||
}
|
||||
if (hasExtendedKeyUsagesCol) {
|
||||
t.dropColumn("extendedKeyUsages");
|
||||
}
|
||||
});
|
||||
|
||||
// Certificate
|
||||
const doesCertTableHaveKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "keyUsages");
|
||||
const doesCertTableHaveExtendedKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "extendedKeyUsages");
|
||||
await knex.schema.alterTable(TableName.Certificate, (t) => {
|
||||
if (doesCertTableHaveKeyUsages) {
|
||||
t.dropColumn("keyUsages");
|
||||
}
|
||||
if (doesCertTableHaveExtendedKeyUsages) {
|
||||
t.dropColumn("extendedKeyUsages");
|
||||
}
|
||||
});
|
||||
}
|
@ -1,76 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasAccessApproverGroupId = await knex.schema.hasColumn(
|
||||
TableName.AccessApprovalPolicyApprover,
|
||||
"approverGroupId"
|
||||
);
|
||||
const hasAccessApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
|
||||
const hasSecretApproverGroupId = await knex.schema.hasColumn(
|
||||
TableName.SecretApprovalPolicyApprover,
|
||||
"approverGroupId"
|
||||
);
|
||||
const hasSecretApproverUserId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverUserId");
|
||||
if (await knex.schema.hasTable(TableName.AccessApprovalPolicyApprover)) {
|
||||
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (table) => {
|
||||
// add column approverGroupId to AccessApprovalPolicyApprover
|
||||
if (!hasAccessApproverGroupId) {
|
||||
table.uuid("approverGroupId").nullable().references("id").inTable(TableName.Groups).onDelete("CASCADE");
|
||||
}
|
||||
|
||||
// make approverUserId nullable
|
||||
if (hasAccessApproverUserId) {
|
||||
table.uuid("approverUserId").nullable().alter();
|
||||
}
|
||||
});
|
||||
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (table) => {
|
||||
// add column approverGroupId to SecretApprovalPolicyApprover
|
||||
if (!hasSecretApproverGroupId) {
|
||||
table.uuid("approverGroupId").nullable().references("id").inTable(TableName.Groups).onDelete("CASCADE");
|
||||
}
|
||||
|
||||
// make approverUserId nullable
|
||||
if (hasSecretApproverUserId) {
|
||||
table.uuid("approverUserId").nullable().alter();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasAccessApproverGroupId = await knex.schema.hasColumn(
|
||||
TableName.AccessApprovalPolicyApprover,
|
||||
"approverGroupId"
|
||||
);
|
||||
const hasAccessApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
|
||||
const hasSecretApproverGroupId = await knex.schema.hasColumn(
|
||||
TableName.SecretApprovalPolicyApprover,
|
||||
"approverGroupId"
|
||||
);
|
||||
const hasSecretApproverUserId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverUserId");
|
||||
|
||||
if (await knex.schema.hasTable(TableName.AccessApprovalPolicyApprover)) {
|
||||
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (table) => {
|
||||
if (hasAccessApproverGroupId) {
|
||||
table.dropColumn("approverGroupId");
|
||||
}
|
||||
// make approverUserId not nullable
|
||||
if (hasAccessApproverUserId) {
|
||||
table.uuid("approverUserId").notNullable().alter();
|
||||
}
|
||||
});
|
||||
|
||||
// remove
|
||||
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (table) => {
|
||||
if (hasSecretApproverGroupId) {
|
||||
table.dropColumn("approverGroupId");
|
||||
}
|
||||
// make approverUserId not nullable
|
||||
if (hasSecretApproverUserId) {
|
||||
table.uuid("approverUserId").notNullable().alter();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
@ -1,24 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.IdentityMetadata))) {
|
||||
await knex.schema.createTable(TableName.IdentityMetadata, (tb) => {
|
||||
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
tb.string("key").notNullable();
|
||||
tb.string("value").notNullable();
|
||||
tb.uuid("orgId").notNullable();
|
||||
tb.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
|
||||
tb.uuid("userId");
|
||||
tb.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
|
||||
tb.uuid("identityId");
|
||||
tb.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
|
||||
tb.timestamps(true, true, true);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.dropTableIfExists(TableName.IdentityMetadata);
|
||||
}
|
@ -1,30 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.SecretSharing)) {
|
||||
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
|
||||
t.string("iv").nullable().alter();
|
||||
t.string("tag").nullable().alter();
|
||||
t.string("encryptedValue").nullable().alter();
|
||||
|
||||
t.binary("encryptedSecret").nullable();
|
||||
t.string("hashedHex").nullable().alter();
|
||||
|
||||
t.string("identifier", 64).nullable();
|
||||
t.unique("identifier");
|
||||
t.index("identifier");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.SecretSharing)) {
|
||||
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
|
||||
t.dropColumn("encryptedSecret");
|
||||
|
||||
t.dropColumn("identifier");
|
||||
});
|
||||
}
|
||||
}
|
@ -1,19 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasColumn(TableName.OidcConfig, "lastUsed"))) {
|
||||
await knex.schema.alterTable(TableName.OidcConfig, (tb) => {
|
||||
tb.datetime("lastUsed");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasColumn(TableName.OidcConfig, "lastUsed")) {
|
||||
await knex.schema.alterTable(TableName.OidcConfig, (tb) => {
|
||||
tb.dropColumn("lastUsed");
|
||||
});
|
||||
}
|
||||
}
|
@ -1,46 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { dropConstraintIfExists } from "@app/db/migrations/utils/dropConstraintIfExists";
|
||||
import { TableName } from "@app/db/schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.KmsKey)) {
|
||||
const hasOrgId = await knex.schema.hasColumn(TableName.KmsKey, "orgId");
|
||||
const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug");
|
||||
|
||||
// drop constraint if exists (won't exist if rolled back, see below)
|
||||
await dropConstraintIfExists(TableName.KmsKey, "kms_keys_orgid_slug_unique", knex);
|
||||
|
||||
// projectId for CMEK functionality
|
||||
await knex.schema.alterTable(TableName.KmsKey, (table) => {
|
||||
table.string("projectId").nullable().references("id").inTable(TableName.Project).onDelete("CASCADE");
|
||||
|
||||
if (hasOrgId) {
|
||||
table.unique(["orgId", "projectId", "slug"]);
|
||||
}
|
||||
|
||||
if (hasSlug) {
|
||||
table.renameColumn("slug", "name");
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.KmsKey)) {
|
||||
const hasOrgId = await knex.schema.hasColumn(TableName.KmsKey, "orgId");
|
||||
const hasName = await knex.schema.hasColumn(TableName.KmsKey, "name");
|
||||
|
||||
// remove projectId for CMEK functionality
|
||||
await knex.schema.alterTable(TableName.KmsKey, (table) => {
|
||||
if (hasName) {
|
||||
table.renameColumn("name", "slug");
|
||||
}
|
||||
|
||||
if (hasOrgId) {
|
||||
table.dropUnique(["orgId", "projectId", "slug"]);
|
||||
}
|
||||
table.dropColumn("projectId");
|
||||
});
|
||||
}
|
||||
}
|
@ -1,30 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "@app/db/schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.KmsKey)) {
|
||||
const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug");
|
||||
|
||||
if (!hasSlug) {
|
||||
// add slug back temporarily and set value equal to name
|
||||
await knex.schema
|
||||
.alterTable(TableName.KmsKey, (table) => {
|
||||
table.string("slug", 32);
|
||||
})
|
||||
.then(() => knex(TableName.KmsKey).update("slug", knex.ref("name")));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.KmsKey)) {
|
||||
const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug");
|
||||
|
||||
if (hasSlug) {
|
||||
await knex.schema.alterTable(TableName.KmsKey, (table) => {
|
||||
table.dropColumn("slug");
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
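The .then(() => knex(TableName.KmsKey).update("slug", knex.ref("name"))) step above relies on knex.ref to copy the name column into slug row by row; a minimal sketch of the distinction, assuming knex is in scope:

// Copies each row's `name` column into `slug` (roughly: UPDATE kms_keys SET slug = name)
await knex(TableName.KmsKey).update("slug", knex.ref("name"));

// Passing a plain string instead would set every row's slug to the literal value "name"
// await knex(TableName.KmsKey).update("slug", "name");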
@ -1,48 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.AuditLog)) {
|
||||
const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
|
||||
const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
|
||||
const doesProjectNameExist = await knex.schema.hasColumn(TableName.AuditLog, "projectName");
|
||||
|
||||
await knex.schema.alterTable(TableName.AuditLog, (t) => {
|
||||
if (doesOrgIdExist) {
|
||||
t.dropForeign("orgId");
|
||||
}
|
||||
|
||||
if (doesProjectIdExist) {
|
||||
t.dropForeign("projectId");
|
||||
}
|
||||
|
||||
// add normalized field
|
||||
if (!doesProjectNameExist) {
|
||||
t.string("projectName");
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
|
||||
const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
|
||||
const doesProjectNameExist = await knex.schema.hasColumn(TableName.AuditLog, "projectName");
|
||||
|
||||
if (await knex.schema.hasTable(TableName.AuditLog)) {
|
||||
await knex.schema.alterTable(TableName.AuditLog, (t) => {
|
||||
if (doesOrgIdExist) {
|
||||
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
|
||||
}
|
||||
if (doesProjectIdExist) {
|
||||
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
|
||||
}
|
||||
|
||||
// remove normalized field
|
||||
if (doesProjectNameExist) {
|
||||
t.dropColumn("projectName");
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
@ -1,29 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "@app/db/schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
// org default role
|
||||
if (await knex.schema.hasTable(TableName.Organization)) {
|
||||
const hasDefaultRoleCol = await knex.schema.hasColumn(TableName.Organization, "defaultMembershipRole");
|
||||
|
||||
if (!hasDefaultRoleCol) {
|
||||
await knex.schema.alterTable(TableName.Organization, (tb) => {
|
||||
tb.string("defaultMembershipRole").notNullable().defaultTo("member");
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
// org default role
|
||||
if (await knex.schema.hasTable(TableName.Organization)) {
|
||||
const hasDefaultRoleCol = await knex.schema.hasColumn(TableName.Organization, "defaultMembershipRole");
|
||||
|
||||
if (hasDefaultRoleCol) {
|
||||
await knex.schema.alterTable(TableName.Organization, (tb) => {
|
||||
tb.dropColumn("defaultMembershipRole");
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
@ -1,19 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
    await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
      t.string("value", 1020).alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
    await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
      t.string("value", 255).alter();
    });
  }
}
@ -1,32 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "@app/db/schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
// add external group to org role mapping table
|
||||
if (!(await knex.schema.hasTable(TableName.ExternalGroupOrgRoleMapping))) {
|
||||
await knex.schema.createTable(TableName.ExternalGroupOrgRoleMapping, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.string("groupName").notNullable();
|
||||
t.index("groupName");
|
||||
t.string("role").notNullable();
|
||||
t.uuid("roleId");
|
||||
t.foreign("roleId").references("id").inTable(TableName.OrgRoles);
|
||||
t.uuid("orgId").notNullable();
|
||||
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
|
||||
t.timestamps(true, true, true);
|
||||
t.unique(["orgId", "groupName"]);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.ExternalGroupOrgRoleMapping);
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.ExternalGroupOrgRoleMapping)) {
|
||||
await dropOnUpdateTrigger(knex, TableName.ExternalGroupOrgRoleMapping);
|
||||
|
||||
await knex.schema.dropTable(TableName.ExternalGroupOrgRoleMapping);
|
||||
}
|
||||
}
|
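For illustration only (not part of this diff): a hypothetical insert into the new external_group_org_role_mappings table; the group name and role are placeholders and orgId is assumed to be an existing organization UUID.

await knex(TableName.ExternalGroupOrgRoleMapping).insert({
  groupName: "platform-admins", // IdP group name, placeholder
  role: "admin",                // built-in role; set roleId instead when mapping to a custom org role
  orgId
});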
@ -1,21 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasColumn(TableName.SamlConfig, "orgId")) {
|
||||
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
|
||||
t.dropForeign("orgId");
|
||||
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasColumn(TableName.SamlConfig, "orgId")) {
|
||||
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
|
||||
t.dropForeign("orgId");
|
||||
t.foreign("orgId").references("id").inTable(TableName.Organization);
|
||||
});
|
||||
}
|
||||
}
|
@ -1,6 +0,0 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export const dropConstraintIfExists = (tableName: TableName, constraintName: string, knex: Knex) =>
  knex.raw(`ALTER TABLE ${tableName} DROP CONSTRAINT IF EXISTS ${constraintName};`);
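For context, the KmsKey migration earlier in this diff calls this helper before re-adding the unique constraint; a minimal standalone sketch of that usage:

// Drops the constraint only if it is still present (safe after a partial rollback)
await dropConstraintIfExists(TableName.KmsKey, "kms_keys_orgid_slug_unique", knex);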
@ -54,7 +54,7 @@ export const getSecretManagerDataKey = async (knex: Knex, projectId: string) =>
|
||||
} else {
|
||||
const [kmsDoc] = await knex(TableName.KmsKey)
|
||||
.insert({
|
||||
name: slugify(alphaNumericNanoId(8).toLowerCase()),
|
||||
slug: slugify(alphaNumericNanoId(8).toLowerCase()),
|
||||
orgId: project.orgId,
|
||||
isReserved: false
|
||||
})
|
||||
|
@ -12,8 +12,7 @@ export const AccessApprovalPoliciesApproversSchema = z.object({
|
||||
policyId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
approverUserId: z.string().uuid().nullable().optional(),
|
||||
approverGroupId: z.string().uuid().nullable().optional()
|
||||
approverUserId: z.string().uuid()
|
||||
});
|
||||
|
||||
export type TAccessApprovalPoliciesApprovers = z.infer<typeof AccessApprovalPoliciesApproversSchema>;
|
||||
|
@ -20,8 +20,7 @@ export const AuditLogsSchema = z.object({
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
orgId: z.string().uuid().nullable().optional(),
|
||||
projectId: z.string().nullable().optional(),
|
||||
projectName: z.string().nullable().optional()
|
||||
projectId: z.string().nullable().optional()
|
||||
});
|
||||
|
||||
export type TAuditLogs = z.infer<typeof AuditLogsSchema>;
|
||||
|
@ -28,8 +28,7 @@ export const CertificateAuthoritiesSchema = z.object({
|
||||
keyAlgorithm: z.string(),
|
||||
notBefore: z.date().nullable().optional(),
|
||||
notAfter: z.date().nullable().optional(),
|
||||
activeCaCertId: z.string().uuid().nullable().optional(),
|
||||
requireTemplateForIssuance: z.boolean().default(false)
|
||||
activeCaCertId: z.string().uuid().nullable().optional()
|
||||
});
|
||||
|
||||
export type TCertificateAuthorities = z.infer<typeof CertificateAuthoritiesSchema>;
|
||||
|
@ -16,9 +16,7 @@ export const CertificateTemplatesSchema = z.object({
|
||||
subjectAlternativeName: z.string(),
|
||||
ttl: z.string(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
keyUsages: z.string().array().nullable().optional(),
|
||||
extendedKeyUsages: z.string().array().nullable().optional()
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TCertificateTemplates = z.infer<typeof CertificateTemplatesSchema>;
|
||||
|
@ -22,9 +22,7 @@ export const CertificatesSchema = z.object({
|
||||
revocationReason: z.number().nullable().optional(),
|
||||
altNames: z.string().default("").nullable().optional(),
|
||||
caCertId: z.string().uuid(),
|
||||
certificateTemplateId: z.string().uuid().nullable().optional(),
|
||||
keyUsages: z.string().array().nullable().optional(),
|
||||
extendedKeyUsages: z.string().array().nullable().optional()
|
||||
certificateTemplateId: z.string().uuid().nullable().optional()
|
||||
});
|
||||
|
||||
export type TCertificates = z.infer<typeof CertificatesSchema>;
|
||||
|
@ -1,27 +0,0 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const ExternalGroupOrgRoleMappingsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
groupName: z.string(),
|
||||
role: z.string(),
|
||||
roleId: z.string().uuid().nullable().optional(),
|
||||
orgId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TExternalGroupOrgRoleMappings = z.infer<typeof ExternalGroupOrgRoleMappingsSchema>;
|
||||
export type TExternalGroupOrgRoleMappingsInsert = Omit<
|
||||
z.input<typeof ExternalGroupOrgRoleMappingsSchema>,
|
||||
TImmutableDBKeys
|
||||
>;
|
||||
export type TExternalGroupOrgRoleMappingsUpdate = Partial<
|
||||
Omit<z.input<typeof ExternalGroupOrgRoleMappingsSchema>, TImmutableDBKeys>
|
||||
>;
|
@ -1,23 +0,0 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const IdentityMetadataSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
key: z.string(),
|
||||
value: z.string(),
|
||||
orgId: z.string().uuid(),
|
||||
userId: z.string().uuid().nullable().optional(),
|
||||
identityId: z.string().uuid().nullable().optional(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TIdentityMetadata = z.infer<typeof IdentityMetadataSchema>;
|
||||
export type TIdentityMetadataInsert = Omit<z.input<typeof IdentityMetadataSchema>, TImmutableDBKeys>;
|
||||
export type TIdentityMetadataUpdate = Partial<Omit<z.input<typeof IdentityMetadataSchema>, TImmutableDBKeys>>;
|
@ -31,7 +31,6 @@ export * from "./identity-aws-auths";
|
||||
export * from "./identity-azure-auths";
|
||||
export * from "./identity-gcp-auths";
|
||||
export * from "./identity-kubernetes-auths";
|
||||
export * from "./identity-metadata";
|
||||
export * from "./identity-oidc-auths";
|
||||
export * from "./identity-org-memberships";
|
||||
export * from "./identity-project-additional-privilege";
|
||||
@ -63,7 +62,6 @@ export * from "./project-environments";
|
||||
export * from "./project-keys";
|
||||
export * from "./project-memberships";
|
||||
export * from "./project-roles";
|
||||
export * from "./project-slack-configs";
|
||||
export * from "./project-user-additional-privilege";
|
||||
export * from "./project-user-membership-roles";
|
||||
export * from "./projects";
|
||||
@ -103,7 +101,6 @@ export * from "./secret-versions-v2";
|
||||
export * from "./secrets";
|
||||
export * from "./secrets-v2";
|
||||
export * from "./service-tokens";
|
||||
export * from "./slack-integrations";
|
||||
export * from "./super-admin";
|
||||
export * from "./trusted-ips";
|
||||
export * from "./user-actions";
|
||||
@ -112,4 +109,3 @@ export * from "./user-encryption-keys";
|
||||
export * from "./user-group-membership";
|
||||
export * from "./users";
|
||||
export * from "./webhooks";
|
||||
export * from "./workflow-integrations";
|
||||
|
@ -13,11 +13,9 @@ export const KmsKeysSchema = z.object({
|
||||
isDisabled: z.boolean().default(false).nullable().optional(),
|
||||
isReserved: z.boolean().default(true).nullable().optional(),
|
||||
orgId: z.string().uuid(),
|
||||
name: z.string(),
|
||||
slug: z.string(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
projectId: z.string().nullable().optional(),
|
||||
slug: z.string().nullable().optional()
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TKmsKeys = z.infer<typeof KmsKeysSchema>;
|
||||
|
@ -17,7 +17,6 @@ export enum TableName {
|
||||
Groups = "groups",
|
||||
GroupProjectMembership = "group_project_memberships",
|
||||
GroupProjectMembershipRole = "group_project_membership_roles",
|
||||
ExternalGroupOrgRoleMapping = "external_group_org_role_mappings",
|
||||
UserGroupMembership = "user_group_membership",
|
||||
UserAliases = "user_aliases",
|
||||
UserEncryptionKey = "user_encryption_keys",
|
||||
@ -71,8 +70,6 @@ export enum TableName {
|
||||
IdentityProjectMembership = "identity_project_memberships",
|
||||
IdentityProjectMembershipRole = "identity_project_membership_role",
|
||||
IdentityProjectAdditionalPrivilege = "identity_project_additional_privilege",
|
||||
// used by both identity and users
|
||||
IdentityMetadata = "identity_metadata",
|
||||
ScimToken = "scim_tokens",
|
||||
AccessApprovalPolicy = "access_approval_policies",
|
||||
AccessApprovalPolicyApprover = "access_approval_policies_approvers",
|
||||
@ -117,10 +114,7 @@ export enum TableName {
|
||||
InternalKms = "internal_kms",
|
||||
InternalKmsKeyVersion = "internal_kms_key_version",
|
||||
// @deprecated
|
||||
KmsKeyVersion = "kms_key_versions",
|
||||
WorkflowIntegrations = "workflow_integrations",
|
||||
SlackIntegrations = "slack_integrations",
|
||||
ProjectSlackConfigs = "project_slack_configs"
|
||||
KmsKeyVersion = "kms_key_versions"
|
||||
}
|
||||
|
||||
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
|
||||
|
@ -26,8 +26,7 @@ export const OidcConfigsSchema = z.object({
|
||||
isActive: z.boolean(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
orgId: z.string().uuid(),
|
||||
lastUsed: z.date().nullable().optional()
|
||||
orgId: z.string().uuid()
|
||||
});
|
||||
|
||||
export type TOidcConfigs = z.infer<typeof OidcConfigsSchema>;
|
||||
|
@ -19,8 +19,7 @@ export const OrganizationsSchema = z.object({
|
||||
authEnforced: z.boolean().default(false).nullable().optional(),
|
||||
scimEnabled: z.boolean().default(false).nullable().optional(),
|
||||
kmsDefaultKeyId: z.string().uuid().nullable().optional(),
|
||||
kmsEncryptedDataKey: zodBuffer.nullable().optional(),
|
||||
defaultMembershipRole: z.string().default("member")
|
||||
kmsEncryptedDataKey: zodBuffer.nullable().optional()
|
||||
});
|
||||
|
||||
export type TOrganizations = z.infer<typeof OrganizationsSchema>;
|
||||
|
@ -1,24 +0,0 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const ProjectSlackConfigsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
projectId: z.string(),
|
||||
slackIntegrationId: z.string().uuid(),
|
||||
isAccessRequestNotificationEnabled: z.boolean().default(false),
|
||||
accessRequestChannels: z.string().default(""),
|
||||
isSecretRequestNotificationEnabled: z.boolean().default(false),
|
||||
secretRequestChannels: z.string().default(""),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TProjectSlackConfigs = z.infer<typeof ProjectSlackConfigsSchema>;
|
||||
export type TProjectSlackConfigsInsert = Omit<z.input<typeof ProjectSlackConfigsSchema>, TImmutableDBKeys>;
|
||||
export type TProjectSlackConfigsUpdate = Partial<Omit<z.input<typeof ProjectSlackConfigsSchema>, TImmutableDBKeys>>;
|
@ -12,8 +12,7 @@ export const SecretApprovalPoliciesApproversSchema = z.object({
|
||||
policyId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
approverUserId: z.string().uuid().nullable().optional(),
|
||||
approverGroupId: z.string().uuid().nullable().optional()
|
||||
approverUserId: z.string().uuid()
|
||||
});
|
||||
|
||||
export type TSecretApprovalPoliciesApprovers = z.infer<typeof SecretApprovalPoliciesApproversSchema>;
|
||||
|
@ -5,16 +5,14 @@
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const SecretSharingSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
encryptedValue: z.string().nullable().optional(),
|
||||
iv: z.string().nullable().optional(),
|
||||
tag: z.string().nullable().optional(),
|
||||
hashedHex: z.string().nullable().optional(),
|
||||
encryptedValue: z.string(),
|
||||
iv: z.string(),
|
||||
tag: z.string(),
|
||||
hashedHex: z.string(),
|
||||
expiresAt: z.date(),
|
||||
userId: z.string().uuid().nullable().optional(),
|
||||
orgId: z.string().uuid().nullable().optional(),
|
||||
@ -23,10 +21,7 @@ export const SecretSharingSchema = z.object({
|
||||
expiresAfterViews: z.number().nullable().optional(),
|
||||
accessType: z.string().default("anyone"),
|
||||
name: z.string().nullable().optional(),
|
||||
lastViewedAt: z.date().nullable().optional(),
|
||||
password: z.string().nullable().optional(),
|
||||
encryptedSecret: zodBuffer.nullable().optional(),
|
||||
identifier: z.string().nullable().optional()
|
||||
lastViewedAt: z.date().nullable().optional()
|
||||
});
|
||||
|
||||
export type TSecretSharing = z.infer<typeof SecretSharingSchema>;
|
||||
|
@ -1,27 +0,0 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const SlackIntegrationsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
teamId: z.string(),
|
||||
teamName: z.string(),
|
||||
slackUserId: z.string(),
|
||||
slackAppId: z.string(),
|
||||
encryptedBotAccessToken: zodBuffer,
|
||||
slackBotId: z.string(),
|
||||
slackBotUserId: z.string(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TSlackIntegrations = z.infer<typeof SlackIntegrationsSchema>;
|
||||
export type TSlackIntegrationsInsert = Omit<z.input<typeof SlackIntegrationsSchema>, TImmutableDBKeys>;
|
||||
export type TSlackIntegrationsUpdate = Partial<Omit<z.input<typeof SlackIntegrationsSchema>, TImmutableDBKeys>>;
|
@ -5,8 +5,6 @@
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const SuperAdminSchema = z.object({
|
||||
@ -21,9 +19,7 @@ export const SuperAdminSchema = z.object({
|
||||
trustLdapEmails: z.boolean().default(false).nullable().optional(),
|
||||
trustOidcEmails: z.boolean().default(false).nullable().optional(),
|
||||
defaultAuthOrgId: z.string().uuid().nullable().optional(),
|
||||
enabledLoginMethods: z.string().array().nullable().optional(),
|
||||
encryptedSlackClientId: zodBuffer.nullable().optional(),
|
||||
encryptedSlackClientSecret: zodBuffer.nullable().optional()
|
||||
enabledLoginMethods: z.string().array().nullable().optional()
|
||||
});
|
||||
|
||||
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;
|
||||
|
@ -1,22 +0,0 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const WorkflowIntegrationsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
integration: z.string(),
|
||||
slug: z.string(),
|
||||
orgId: z.string().uuid(),
|
||||
description: z.string().nullable().optional(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TWorkflowIntegrations = z.infer<typeof WorkflowIntegrationsSchema>;
|
||||
export type TWorkflowIntegrationsInsert = Omit<z.input<typeof WorkflowIntegrationsSchema>, TImmutableDBKeys>;
|
||||
export type TWorkflowIntegrationsUpdate = Partial<Omit<z.input<typeof WorkflowIntegrationsSchema>, TImmutableDBKeys>>;
|
@ -1,9 +1,7 @@
|
||||
import { nanoid } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
|
||||
import { EnforcementLevel } from "@app/lib/types";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { sapPubSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
@ -12,32 +10,28 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
server.route({
|
||||
url: "/",
|
||||
method: "POST",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
body: z.object({
|
||||
projectSlug: z.string().trim(),
|
||||
name: z.string().optional(),
|
||||
secretPath: z.string().trim().default("/"),
|
||||
environment: z.string(),
|
||||
approvers: z
|
||||
.discriminatedUnion("type", [
|
||||
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
|
||||
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() })
|
||||
])
|
||||
.array()
|
||||
.min(1, { message: "At least one approver should be provided" }),
|
||||
approvals: z.number().min(1).default(1),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
|
||||
}),
|
||||
body: z
|
||||
.object({
|
||||
projectSlug: z.string().trim(),
|
||||
name: z.string().optional(),
|
||||
secretPath: z.string().trim().default("/"),
|
||||
environment: z.string(),
|
||||
approverUserIds: z.string().array().min(1),
|
||||
approvals: z.number().min(1).default(1),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
|
||||
})
|
||||
.refine((data) => data.approvals <= data.approverUserIds.length, {
|
||||
path: ["approvals"],
|
||||
message: "The number of approvals should be lower than the number of approvers."
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
approval: sapPubSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const approval = await server.services.accessApprovalPolicy.createAccessApprovalPolicy({
|
||||
actor: req.permission.type,
|
||||
@ -56,9 +50,6 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
server.route({
|
||||
url: "/",
|
||||
method: "GET",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
querystring: z.object({
|
||||
projectSlug: z.string().trim()
|
||||
@ -67,15 +58,14 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
200: z.object({
|
||||
approvals: sapPubSchema
|
||||
.extend({
|
||||
approvers: z
|
||||
.object({ type: z.nativeEnum(ApproverType), id: z.string().nullable().optional() })
|
||||
.array()
|
||||
.nullable()
|
||||
.optional()
|
||||
userApprovers: z
|
||||
.object({
|
||||
userId: z.string()
|
||||
})
|
||||
.array(),
|
||||
secretPath: z.string().optional().nullable()
|
||||
})
|
||||
.array()
|
||||
.nullable()
|
||||
.optional()
|
||||
})
|
||||
}
|
||||
},
|
||||
@ -125,37 +115,33 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
server.route({
|
||||
url: "/:policyId",
|
||||
method: "PATCH",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
policyId: z.string()
|
||||
}),
|
||||
body: z.object({
|
||||
name: z.string().optional(),
|
||||
secretPath: z
|
||||
.string()
|
||||
.trim()
|
||||
.optional()
|
||||
.transform((val) => (val === "" ? "/" : val)),
|
||||
approvers: z
|
||||
.discriminatedUnion("type", [
|
||||
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
|
||||
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() })
|
||||
])
|
||||
.array()
|
||||
.min(1, { message: "At least one approver should be provided" }),
|
||||
approvals: z.number().min(1).optional(),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
|
||||
}),
|
||||
body: z
|
||||
.object({
|
||||
name: z.string().optional(),
|
||||
secretPath: z
|
||||
.string()
|
||||
.trim()
|
||||
.optional()
|
||||
.transform((val) => (val === "" ? "/" : val)),
|
||||
approverUserIds: z.string().array().min(1),
|
||||
approvals: z.number().min(1).default(1),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
|
||||
})
|
||||
.refine((data) => data.approvals <= data.approverUserIds.length, {
|
||||
path: ["approvals"],
|
||||
message: "The number of approvals should be lower than the number of approvers."
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
approval: sapPubSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
await server.services.accessApprovalPolicy.updateAccessApprovalPolicy({
|
||||
policyId: req.params.policyId,
|
||||
@ -171,9 +157,6 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
server.route({
|
||||
url: "/:policyId",
|
||||
method: "DELETE",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
policyId: z.string()
|
||||
@ -184,7 +167,7 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const approval = await server.services.accessApprovalPolicy.deleteAccessApprovalPolicy({
|
||||
actor: req.permission.type,
|
||||
@ -196,44 +179,4 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
return { approval };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/:policyId",
|
||||
method: "GET",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
policyId: z.string()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
approval: sapPubSchema.extend({
|
||||
approvers: z
|
||||
.object({
|
||||
type: z.nativeEnum(ApproverType),
|
||||
id: z.string().nullable().optional(),
|
||||
name: z.string().nullable().optional()
|
||||
})
|
||||
.array()
|
||||
.nullable()
|
||||
.optional()
|
||||
})
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const approval = await server.services.accessApprovalPolicy.getAccessApprovalPolicyById({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.params
|
||||
});
|
||||
|
||||
return { approval };
|
||||
}
|
||||
});
|
||||
};
|
||||
|
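To make the discriminated-union approvers variant shown above concrete, a hypothetical request body that would satisfy it; the type values assume ApproverType serializes to "group" and "user", and the IDs are placeholders.

const body = {
  projectSlug: "my-project",
  environment: "prod",
  secretPath: "/",
  approvers: [
    { type: "group", id: "<group-id>" }, // ApproverType.Group
    { type: "user", id: "<user-id>" }    // ApproverType.User
  ],
  approvals: 1
};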
@ -11,30 +11,6 @@ export const registerCaCrlRouter = async (server: FastifyZodProvider) => {
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Get CRL in DER format (deprecated)",
|
||||
params: z.object({
|
||||
crlId: z.string().trim().describe(CA_CRLS.GET.crlId)
|
||||
}),
|
||||
response: {
|
||||
200: z.instanceof(Buffer)
|
||||
}
|
||||
},
|
||||
handler: async (req, res) => {
|
||||
const { crl } = await server.services.certificateAuthorityCrl.getCrlById(req.params.crlId);
|
||||
|
||||
res.header("Content-Type", "application/pkix-crl");
|
||||
|
||||
return Buffer.from(crl);
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:crlId/der",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Get CRL in DER format",
|
||||
params: z.object({
|
||||
|
@ -2,7 +2,7 @@ import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { DynamicSecretLeasesSchema } from "@app/db/schemas";
|
||||
import { DYNAMIC_SECRET_LEASES } from "@app/lib/api-docs";
|
||||
import { DEFAULT_REQUEST_SCHEMA, DYNAMIC_SECRET_LEASES } from "@app/lib/api-docs";
|
||||
import { daysToMillisecond } from "@app/lib/dates";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
@ -18,6 +18,7 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
...DEFAULT_REQUEST_SCHEMA,
|
||||
body: z.object({
|
||||
dynamicSecretName: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.dynamicSecretName).toLowerCase(),
|
||||
projectSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.projectSlug),
|
||||
@ -65,6 +66,7 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
...DEFAULT_REQUEST_SCHEMA,
|
||||
params: z.object({
|
||||
leaseId: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.DELETE.leaseId)
|
||||
}),
|
||||
@ -107,6 +109,7 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
...DEFAULT_REQUEST_SCHEMA,
|
||||
params: z.object({
|
||||
leaseId: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.RENEW.leaseId)
|
||||
}),
|
||||
@ -160,6 +163,7 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
...DEFAULT_REQUEST_SCHEMA,
|
||||
params: z.object({
|
||||
leaseId: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.GET_BY_LEASEID.leaseId)
|
||||
}),
|
||||
|
@ -4,7 +4,7 @@ import { z } from "zod";
|
||||
|
||||
import { DynamicSecretLeasesSchema } from "@app/db/schemas";
|
||||
import { DynamicSecretProviderSchema } from "@app/ee/services/dynamic-secret/providers/models";
|
||||
import { DYNAMIC_SECRETS } from "@app/lib/api-docs";
|
||||
import { DEFAULT_REQUEST_SCHEMA, DYNAMIC_SECRETS } from "@app/lib/api-docs";
|
||||
import { daysToMillisecond } from "@app/lib/dates";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
@ -20,6 +20,7 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
...DEFAULT_REQUEST_SCHEMA,
|
||||
body: z.object({
|
||||
projectSlug: z.string().min(1).describe(DYNAMIC_SECRETS.CREATE.projectSlug),
|
||||
provider: DynamicSecretProviderSchema.describe(DYNAMIC_SECRETS.CREATE.provider),
|
||||
@ -77,39 +78,6 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/entra-id/users",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
body: z.object({
|
||||
tenantId: z.string().min(1).describe("The tenant ID of the Azure Entra ID"),
|
||||
applicationId: z.string().min(1).describe("The application ID of the Azure Entra ID App Registration"),
|
||||
clientSecret: z.string().min(1).describe("The client secret of the Azure Entra ID App Registration")
|
||||
}),
|
||||
response: {
|
||||
200: z
|
||||
.object({
|
||||
name: z.string().min(1).describe("The name of the user"),
|
||||
id: z.string().min(1).describe("The ID of the user"),
|
||||
email: z.string().min(1).describe("The email of the user")
|
||||
})
|
||||
.array()
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const data = await server.services.dynamicSecret.fetchAzureEntraIdUsers({
|
||||
tenantId: req.body.tenantId,
|
||||
applicationId: req.body.applicationId,
|
||||
clientSecret: req.body.clientSecret
|
||||
});
|
||||
return data;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "PATCH",
|
||||
url: "/:name",
|
||||
@ -117,6 +85,7 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
...DEFAULT_REQUEST_SCHEMA,
|
||||
params: z.object({
|
||||
name: z.string().toLowerCase().describe(DYNAMIC_SECRETS.UPDATE.name)
|
||||
}),
|
||||
@ -184,6 +153,7 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
...DEFAULT_REQUEST_SCHEMA,
|
||||
params: z.object({
|
||||
name: z.string().toLowerCase().describe(DYNAMIC_SECRETS.DELETE.name)
|
||||
}),
|
||||
@ -220,6 +190,7 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
...DEFAULT_REQUEST_SCHEMA,
|
||||
params: z.object({
|
||||
name: z.string().min(1).describe(DYNAMIC_SECRETS.GET_BY_NAME.name)
|
||||
}),
|
||||
@ -257,6 +228,7 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
...DEFAULT_REQUEST_SCHEMA,
|
||||
querystring: z.object({
|
||||
projectSlug: z.string().min(1).describe(DYNAMIC_SECRETS.LIST.projectSlug),
|
||||
path: z.string().trim().default("/").transform(removeTrailingSlash).describe(DYNAMIC_SECRETS.LIST.path),
|
||||
@ -270,7 +242,7 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const dynamicSecretCfgs = await server.services.dynamicSecret.listDynamicSecretsByEnv({
|
||||
const dynamicSecretCfgs = await server.services.dynamicSecret.list({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
@ -288,6 +260,7 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
...DEFAULT_REQUEST_SCHEMA,
|
||||
params: z.object({
|
||||
name: z.string().min(1).describe(DYNAMIC_SECRETS.LIST_LEAES_BY_NAME.name)
|
||||
}),
|
||||
|
@ -26,7 +26,7 @@ const sanitizedExternalSchemaForGetAll = KmsKeysSchema.pick({
|
||||
isDisabled: true,
|
||||
createdAt: true,
|
||||
updatedAt: true,
|
||||
name: true
|
||||
slug: true
|
||||
})
|
||||
.extend({
|
||||
externalKms: ExternalKmsSchema.pick({
|
||||
@ -57,7 +57,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
schema: {
|
||||
body: z.object({
|
||||
name: z.string().min(1).trim().toLowerCase(),
|
||||
slug: z.string().min(1).trim().toLowerCase(),
|
||||
description: z.string().trim().optional(),
|
||||
provider: ExternalKmsInputSchema
|
||||
}),
|
||||
@ -74,7 +74,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
name: req.body.name,
|
||||
slug: req.body.slug,
|
||||
provider: req.body.provider,
|
||||
description: req.body.description
|
||||
});
|
||||
@ -87,7 +87,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
|
||||
metadata: {
|
||||
kmsId: externalKms.id,
|
||||
provider: req.body.provider.type,
|
||||
name: req.body.name,
|
||||
slug: req.body.slug,
|
||||
description: req.body.description
|
||||
}
|
||||
}
|
||||
@ -108,7 +108,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
|
||||
id: z.string().trim().min(1)
|
||||
}),
|
||||
body: z.object({
|
||||
name: z.string().min(1).trim().toLowerCase().optional(),
|
||||
slug: z.string().min(1).trim().toLowerCase().optional(),
|
||||
description: z.string().trim().optional(),
|
||||
provider: ExternalKmsInputUpdateSchema
|
||||
}),
|
||||
@ -125,7 +125,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
name: req.body.name,
|
||||
slug: req.body.slug,
|
||||
provider: req.body.provider,
|
||||
description: req.body.description,
|
||||
id: req.params.id
|
||||
@ -139,7 +139,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
|
||||
metadata: {
|
||||
kmsId: externalKms.id,
|
||||
provider: req.body.provider.type,
|
||||
name: req.body.name,
|
||||
slug: req.body.slug,
|
||||
description: req.body.description
|
||||
}
|
||||
}
|
||||
@ -182,7 +182,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
|
||||
type: EventType.DELETE_KMS,
|
||||
metadata: {
|
||||
kmsId: externalKms.id,
|
||||
name: externalKms.name
|
||||
slug: externalKms.slug
|
||||
}
|
||||
}
|
||||
});
|
||||
@ -224,7 +224,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
|
||||
type: EventType.GET_KMS,
|
||||
metadata: {
|
||||
kmsId: externalKms.id,
|
||||
name: externalKms.name
|
||||
slug: externalKms.slug
|
||||
}
|
||||
}
|
||||
});
|
||||
@ -260,13 +260,13 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/name/:name",
|
||||
url: "/slug/:slug",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
name: z.string().trim().min(1)
|
||||
slug: z.string().trim().min(1)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@ -276,12 +276,12 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const externalKms = await server.services.externalKms.findByName({
|
||||
const externalKms = await server.services.externalKms.findBySlug({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
name: req.params.name
|
||||
slug: req.params.slug
|
||||
});
|
||||
return { externalKms };
|
||||
}
|
||||
|
@ -10,7 +10,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
url: "/",
|
||||
method: "POST",
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
body: z.object({
|
||||
name: z.string().trim().min(1).max(50).describe(GROUPS.CREATE.name),
|
||||
@ -43,59 +43,12 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/:id",
|
||||
method: "GET",
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
params: z.object({
|
||||
id: z.string().trim().describe(GROUPS.GET_BY_ID.id)
|
||||
}),
|
||||
response: {
|
||||
200: GroupsSchema
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const group = await server.services.group.getGroupById({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
id: req.params.id
|
||||
});
|
||||
|
||||
return group;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/",
|
||||
method: "GET",
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
response: {
|
||||
200: GroupsSchema.array()
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const groups = await server.services.org.getOrgGroups({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
orgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
return groups;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/:id",
|
||||
url: "/:currentSlug",
|
||||
method: "PATCH",
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
params: z.object({
|
||||
id: z.string().trim().describe(GROUPS.UPDATE.id)
|
||||
currentSlug: z.string().trim().describe(GROUPS.UPDATE.currentSlug)
|
||||
}),
|
||||
body: z
|
||||
.object({
|
||||
@ -117,7 +70,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
handler: async (req) => {
|
||||
const group = await server.services.group.updateGroup({
|
||||
id: req.params.id,
|
||||
currentSlug: req.params.currentSlug,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
@ -130,12 +83,12 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/:id",
|
||||
url: "/:slug",
|
||||
method: "DELETE",
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
params: z.object({
|
||||
id: z.string().trim().describe(GROUPS.DELETE.id)
|
||||
slug: z.string().trim().describe(GROUPS.DELETE.slug)
|
||||
}),
|
||||
response: {
|
||||
200: GroupsSchema
|
||||
@ -143,7 +96,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
handler: async (req) => {
|
||||
const group = await server.services.group.deleteGroup({
|
||||
id: req.params.id,
|
||||
groupSlug: req.params.slug,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
@ -156,11 +109,11 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:id/users",
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
url: "/:slug/users",
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
params: z.object({
|
||||
id: z.string().trim().describe(GROUPS.LIST_USERS.id)
|
||||
slug: z.string().trim().describe(GROUPS.LIST_USERS.slug)
|
||||
}),
|
||||
querystring: z.object({
|
||||
offset: z.coerce.number().min(0).max(100).default(0).describe(GROUPS.LIST_USERS.offset),
|
||||
@ -188,25 +141,24 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { users, totalCount } = await server.services.group.listGroupUsers({
|
||||
id: req.params.id,
|
||||
groupSlug: req.params.slug,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.query
|
||||
});
|
||||
|
||||
return { users, totalCount };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/:id/users/:username",
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
url: "/:slug/users/:username",
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
params: z.object({
|
||||
id: z.string().trim().describe(GROUPS.ADD_USER.id),
|
||||
slug: z.string().trim().describe(GROUPS.ADD_USER.slug),
|
||||
username: z.string().trim().describe(GROUPS.ADD_USER.username)
|
||||
}),
|
||||
response: {
|
||||
@ -221,7 +173,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
handler: async (req) => {
|
||||
const user = await server.services.group.addUserToGroup({
|
||||
id: req.params.id,
|
||||
groupSlug: req.params.slug,
|
||||
username: req.params.username,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
@ -235,11 +187,11 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/:id/users/:username",
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
url: "/:slug/users/:username",
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
params: z.object({
|
||||
id: z.string().trim().describe(GROUPS.DELETE_USER.id),
|
||||
slug: z.string().trim().describe(GROUPS.DELETE_USER.slug),
|
||||
username: z.string().trim().describe(GROUPS.DELETE_USER.username)
|
||||
}),
|
||||
response: {
|
||||
@ -254,7 +206,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
handler: async (req) => {
|
||||
const user = await server.services.group.removeUserFromGroup({
|
||||
id: req.params.id,
|
||||
groupSlug: req.params.slug,
|
||||
username: req.params.username,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
|
@ -5,7 +5,7 @@ import { z } from "zod";
|
||||
|
||||
import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-types";
|
||||
import { IDENTITY_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
|
||||
import { UnauthorizedError } from "@app/lib/errors";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
@ -61,7 +61,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
|
||||
handler: async (req) => {
|
||||
const { permissions, privilegePermission } = req.body;
|
||||
if (!permissions && !privilegePermission) {
|
||||
throw new UnauthorizedError({ message: "Permission or privilegePermission must be provided" });
|
||||
throw new BadRequestError({ message: "Permission or privilegePermission must be provided" });
|
||||
}
|
||||
|
||||
const permission = privilegePermission
|
||||
@ -140,7 +140,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
|
||||
handler: async (req) => {
|
||||
const { permissions, privilegePermission } = req.body;
|
||||
if (!permissions && !privilegePermission) {
|
||||
throw new UnauthorizedError({ message: "Permission or privilegePermission must be provided" });
|
||||
throw new BadRequestError({ message: "Permission or privilegePermission must be provided" });
|
||||
}
|
||||
|
||||
const permission = privilegePermission
|
||||
@ -224,7 +224,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
|
||||
handler: async (req) => {
|
||||
const { permissions, privilegePermission, ...updatedInfo } = req.body.privilegeDetails;
|
||||
if (!permissions && !privilegePermission) {
|
||||
throw new UnauthorizedError({ message: "Permission or privilegePermission must be provided" });
|
||||
throw new BadRequestError({ message: "Permission or privilegePermission must be provided" });
|
||||
}
|
||||
|
||||
const permission = privilegePermission
|
||||
|
@ -1,6 +1,6 @@
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-return */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
|
||||
// TODO(akhilmhdh): Fix this when license service gets its type
// TODO(akhilmhdh): Fix this when licence service gets its type
|
||||
import { z } from "zod";
|
||||
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
|
@ -3,11 +3,10 @@ import slugify from "@sindresorhus/slugify";
|
||||
import { z } from "zod";
|
||||
|
||||
import { ProjectMembershipRole, ProjectMembershipsSchema, ProjectRolesSchema } from "@app/db/schemas";
|
||||
import { ProjectPermissionSchema } from "@app/ee/services/permission/project-permission";
|
||||
import { PROJECT_ROLE } from "@app/lib/api-docs";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { SanitizedRoleSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
import { ProjectPermissionSchema, SanitizedRoleSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
@ -102,7 +101,6 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
message: "Slug must be a valid slug"
|
||||
}),
|
||||
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
|
||||
description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description),
|
||||
permissions: ProjectPermissionSchema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
|
||||
}),
|
||||
response: {
|
||||
|
@ -87,12 +87,6 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
/*
|
||||
* Daniel: This endpoint is no longer in use.
* We are keeping it for now because it has been exposed in our public api docs for a while, so by removing it we are likely to break users' workflows.
|
||||
*
|
||||
* Please refer to the new endpoint, GET /api/v1/organization/audit-logs, for the same (and more) functionality.
|
||||
*/
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:workspaceId/audit-logs",
|
||||
@ -107,7 +101,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
workspaceId: z.string().trim().describe(AUDIT_LOGS.EXPORT.projectId)
|
||||
workspaceId: z.string().trim().describe(AUDIT_LOGS.EXPORT.workspaceId)
|
||||
}),
|
||||
querystring: z.object({
|
||||
eventType: z.nativeEnum(EventType).optional().describe(AUDIT_LOGS.EXPORT.eventType),
|
||||
@ -128,12 +122,6 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
})
|
||||
.merge(
|
||||
z.object({
|
||||
project: z
|
||||
.object({
|
||||
name: z.string(),
|
||||
slug: z.string()
|
||||
})
|
||||
.optional(),
|
||||
event: z.object({
|
||||
type: z.string(),
|
||||
metadata: z.any()
|
||||
@ -150,20 +138,16 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const auditLogs = await server.services.auditLog.listAuditLogs({
|
||||
const auditLogs = await server.services.auditLog.listProjectAuditLogs({
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actor: req.permission.type,
|
||||
|
||||
filter: {
|
||||
...req.query,
|
||||
projectId: req.params.workspaceId,
|
||||
endDate: req.query.endDate,
|
||||
startDate: req.query.startDate || getLastMidnightDateISO(),
|
||||
auditLogActorId: req.query.actor,
|
||||
eventType: req.query.eventType ? [req.query.eventType] : undefined
|
||||
}
|
||||
projectId: req.params.workspaceId,
|
||||
...req.query,
|
||||
endDate: req.query.endDate,
|
||||
startDate: req.query.startDate || getLastMidnightDateISO(),
|
||||
auditLogActor: req.query.actor,
|
||||
actor: req.permission.type
|
||||
});
|
||||
return { auditLogs };
|
||||
}
|
||||
@ -203,7 +187,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
200: z.object({
|
||||
secretManagerKmsKey: z.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
slug: z.string(),
|
||||
isExternal: z.boolean()
|
||||
})
|
||||
})
|
||||
@ -243,7 +227,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
200: z.object({
|
||||
secretManagerKmsKey: z.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
slug: z.string(),
|
||||
isExternal: z.boolean()
|
||||
})
|
||||
})
|
||||
@ -268,7 +252,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
metadata: {
|
||||
secretManagerKmsKey: {
|
||||
id: secretManagerKmsKey.id,
|
||||
name: secretManagerKmsKey.name
|
||||
slug: secretManagerKmsKey.slug
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -336,7 +320,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
200: z.object({
|
||||
secretManagerKmsKey: z.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
slug: z.string(),
|
||||
isExternal: z.boolean()
|
||||
})
|
||||
})
|
||||
|
@ -1,7 +1,7 @@
import { z } from "zod";

import { RateLimitSchema } from "@app/db/schemas";
import { NotFoundError } from "@app/lib/errors";
import { BadRequestError } from "@app/lib/errors";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifySuperAdmin } from "@app/server/plugins/auth/superAdmin";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@ -29,7 +29,7 @@ export const registerRateLimitRouter = async (server: FastifyZodProvider) => {
handler: async () => {
const rateLimit = await server.services.rateLimit.getRateLimits();
if (!rateLimit) {
throw new NotFoundError({
throw new BadRequestError({
name: "Get Rate Limit Error",
message: "Rate limit configuration does not exist."
});
@ -61,7 +61,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
id: samlConfigId
};
} else {
throw new BadRequestError({ message: "Missing sso identifier or org slug" });
throw new BadRequestError({ message: "Missing sso identitier or org slug" });
}

const ssoConfig = await server.services.saml.getSaml(ssoLookupDetails);
@ -100,55 +100,25 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
async (req, profile, cb) => {
try {
if (!profile) throw new BadRequestError({ message: "Missing profile" });
const email = profile?.email ?? (profile?.emailAddress as string); // emailRippling is added because in Rippling the field `email` reserved

const email =
profile?.email ??
// entra sends data in this format
(profile["http://schemas.xmlsoap.org/ws/2005/05/identity/claims/email"] as string) ??
(profile?.emailAddress as string); // emailRippling is added because in Rippling the field `email` reserved\

const firstName = (profile.firstName ??
// entra sends data in this format
profile["http://schemas.xmlsoap.org/ws/2005/05/identity/claims/firstName"]) as string;

const lastName =
profile.lastName ?? profile["http://schemas.xmlsoap.org/ws/2005/05/identity/claims/lastName"];

if (!email || !firstName) {
logger.info(
{
err: new Error("Invalid saml request. Missing email or first name"),
profile
},
`email: ${email} firstName: ${profile.firstName as string}`
);
if (!email || !profile.firstName) {
throw new BadRequestError({ message: "Invalid request. Missing email or first name" });
}

const userMetadata = Object.keys(profile.attributes || {})
.map((key) => {
// for the ones like in format: http://schemas.xmlsoap.org/ws/2005/05/identity/claims/email
const formatedKey = key.startsWith("http") ? key.split("/").at(-1) || "" : key;
return {
key: formatedKey,
value: String((profile.attributes as Record<string, string>)[key]).substring(0, 1020)
};
})
.filter((el) => el.key && !["email", "firstName", "lastName"].includes(el.key));

const { isUserCompleted, providerAuthToken } = await server.services.saml.samlLogin({
externalId: profile.nameID,
email,
firstName,
lastName: lastName as string,
firstName: profile.firstName as string,
lastName: profile.lastName as string,
relayState: (req.body as { RelayState?: string }).RelayState,
authProvider: (req as unknown as FastifyRequest).ssoConfig?.authProvider as string,
orgId: (req as unknown as FastifyRequest).ssoConfig?.orgId as string,
metadata: userMetadata
orgId: (req as unknown as FastifyRequest).ssoConfig?.orgId as string
});
cb(null, { isUserCompleted, providerAuthToken });
} catch (error) {
logger.error(error);
cb(error as Error);
cb(null, {});
}
},
() => {}
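// Illustrative sketch (not part of the diff): the claim-resolution order used in the
// handler above, pulled out into a standalone helper. The Entra-style URI claims and the
// emailAddress fallback mirror the changed lines; the SamlProfile shape and helper name
// are assumptions made only for this example.
type SamlProfile = Record<string, unknown> & {
  email?: string;
  emailAddress?: string;
  firstName?: string;
  lastName?: string;
};

const CLAIMS = "http://schemas.xmlsoap.org/ws/2005/05/identity/claims";

const resolveSamlIdentity = (profile: SamlProfile) => ({
  email: profile.email ?? (profile[`${CLAIMS}/email`] as string) ?? profile.emailAddress,
  firstName: profile.firstName ?? (profile[`${CLAIMS}/firstName`] as string),
  lastName: profile.lastName ?? (profile[`${CLAIMS}/lastName`] as string)
});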
@ -5,47 +5,22 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

const ScimUserSchema = z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
userName: z.string().trim(),
name: z
.object({
familyName: z.string().trim().optional(),
givenName: z.string().trim().optional()
})
.optional(),
emails: z
.array(
z.object({
primary: z.boolean(),
value: z.string().email(),
type: z.string().trim().default("work")
})
)
.optional(),
displayName: z.string().trim(),
active: z.boolean()
});

const ScimGroupSchema = z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
displayName: z.string().trim(),
members: z
.array(
z.object({
value: z.string(),
display: z.string().optional()
})
)
.optional(),
meta: z.object({
resourceType: z.string().trim()
})
});
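// Illustrative sketch (assumption): validating an inbound SCIM payload against the shared
// ScimUserSchema defined above. safeParse is standard zod; the sample identifiers and
// email are made up for the example.
const parsedScimUser = ScimUserSchema.safeParse({
  schemas: ["urn:ietf:params:scim:schemas:core:2.0:User"],
  id: "org-membership-id",
  userName: "jane.doe",
  name: { givenName: "Jane", familyName: "Doe" },
  emails: [{ primary: true, value: "jane@example.com", type: "work" }],
  displayName: "Jane Doe",
  active: true
});
if (!parsedScimUser.success) {
  // parsedScimUser.error.issues lists which SCIM fields failed validation
}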
export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
server.addContentTypeParser("application/scim+json", { parseAs: "string" }, (_, body, done) => {
|
||||
try {
|
||||
const strBody = body instanceof Buffer ? body.toString() : body;
|
||||
if (!strBody) {
|
||||
done(null, undefined);
|
||||
return;
|
||||
}
|
||||
const json: unknown = JSON.parse(strBody);
|
||||
done(null, json);
|
||||
} catch (err) {
|
||||
const error = err as Error;
|
||||
done(error, undefined);
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/scim-tokens",
|
||||
method: "POST",
|
||||
@ -152,7 +127,25 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
Resources: z.array(ScimUserSchema),
|
||||
Resources: z.array(
|
||||
z.object({
|
||||
id: z.string().trim(),
|
||||
userName: z.string().trim(),
|
||||
name: z.object({
|
||||
familyName: z.string().trim(),
|
||||
givenName: z.string().trim()
|
||||
}),
|
||||
emails: z.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string(),
|
||||
type: z.string().trim()
|
||||
})
|
||||
),
|
||||
displayName: z.string().trim(),
|
||||
active: z.boolean()
|
||||
})
|
||||
),
|
||||
itemsPerPage: z.number(),
|
||||
schemas: z.array(z.string()),
|
||||
startIndex: z.number(),
|
||||
@ -180,7 +173,30 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
orgMembershipId: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: ScimUserSchema
|
||||
201: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
userName: z.string().trim(),
|
||||
name: z.object({
|
||||
familyName: z.string().trim(),
|
||||
givenName: z.string().trim()
|
||||
}),
|
||||
emails: z.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string(),
|
||||
type: z.string().trim()
|
||||
})
|
||||
),
|
||||
displayName: z.string().trim(),
|
||||
active: z.boolean(),
|
||||
groups: z.array(
|
||||
z.object({
|
||||
value: z.string().trim(),
|
||||
display: z.string().trim()
|
||||
})
|
||||
)
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
@ -200,24 +216,41 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
body: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
userName: z.string().trim(),
|
||||
name: z
|
||||
.object({
|
||||
familyName: z.string().trim().optional(),
|
||||
givenName: z.string().trim().optional()
|
||||
})
|
||||
.optional(),
|
||||
name: z.object({
|
||||
familyName: z.string().trim(),
|
||||
givenName: z.string().trim()
|
||||
}),
|
||||
emails: z
|
||||
.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string().email()
|
||||
value: z.string().email(),
|
||||
type: z.string().trim()
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
active: z.boolean().default(true)
|
||||
// displayName: z.string().trim(),
|
||||
active: z.boolean()
|
||||
}),
|
||||
response: {
|
||||
200: ScimUserSchema
|
||||
200: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
userName: z.string().trim(),
|
||||
name: z.object({
|
||||
familyName: z.string().trim(),
|
||||
givenName: z.string().trim()
|
||||
}),
|
||||
emails: z.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string().email(),
|
||||
type: z.string().trim()
|
||||
})
|
||||
),
|
||||
displayName: z.string().trim(),
|
||||
active: z.boolean()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
@ -227,8 +260,8 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
const user = await req.server.services.scim.createScimUser({
|
||||
externalId: req.body.userName,
|
||||
email: primaryEmail,
|
||||
firstName: req.body?.name?.givenName,
|
||||
lastName: req.body?.name?.familyName,
|
||||
firstName: req.body.name.givenName,
|
||||
lastName: req.body.name.familyName,
|
||||
orgId: req.permission.orgId
|
||||
});
|
||||
|
||||
@ -258,115 +291,6 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/Users/:orgMembershipId",
|
||||
method: "PUT",
|
||||
schema: {
|
||||
params: z.object({
|
||||
orgMembershipId: z.string().trim()
|
||||
}),
|
||||
body: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
userName: z.string().trim(),
|
||||
name: z
|
||||
.object({
|
||||
familyName: z.string().trim().optional(),
|
||||
givenName: z.string().trim().optional()
|
||||
})
|
||||
.optional(),
|
||||
displayName: z.string().trim(),
|
||||
emails: z
|
||||
.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string().email()
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
active: z.boolean()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
userName: z.string().trim(),
|
||||
name: z.object({
|
||||
familyName: z.string().trim(),
|
||||
givenName: z.string().trim()
|
||||
}),
|
||||
emails: z.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string().email(),
|
||||
type: z.string().trim().default("work")
|
||||
})
|
||||
),
|
||||
displayName: z.string().trim(),
|
||||
active: z.boolean()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const primaryEmail = req.body.emails?.find((email) => email.primary)?.value;
|
||||
const user = await req.server.services.scim.replaceScimUser({
|
||||
orgMembershipId: req.params.orgMembershipId,
|
||||
orgId: req.permission.orgId,
|
||||
firstName: req.body?.name?.givenName,
|
||||
lastName: req.body?.name?.familyName,
|
||||
active: req.body?.active,
|
||||
email: primaryEmail,
|
||||
externalId: req.body.userName
|
||||
});
|
||||
return user;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/Users/:orgMembershipId",
|
||||
method: "PATCH",
|
||||
schema: {
|
||||
params: z.object({
|
||||
orgMembershipId: z.string().trim()
|
||||
}),
|
||||
body: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
Operations: z.array(
|
||||
z.union([
|
||||
z.object({
|
||||
op: z.union([z.literal("remove"), z.literal("Remove")]),
|
||||
path: z.string().trim(),
|
||||
value: z
|
||||
.object({
|
||||
value: z.string()
|
||||
})
|
||||
.array()
|
||||
.optional()
|
||||
}),
|
||||
z.object({
|
||||
op: z.union([z.literal("add"), z.literal("Add"), z.literal("replace"), z.literal("Replace")]),
|
||||
path: z.string().trim().optional(),
|
||||
value: z.any().optional()
|
||||
})
|
||||
])
|
||||
)
|
||||
}),
|
||||
response: {
|
||||
200: ScimUserSchema
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const user = await req.server.services.scim.updateScimUser({
|
||||
orgMembershipId: req.params.orgMembershipId,
|
||||
orgId: req.permission.orgId,
|
||||
operations: req.body.Operations
|
||||
});
|
||||
|
||||
return user;
|
||||
}
|
||||
});
|
||||
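// Illustrative sketch (assumption): a request body that satisfies the PATCH /Users
// Operations union above - a "replace" without a path (Okta-style deactivation) and a
// "remove" with an explicit path. The values are made up; the actual semantics are
// implemented by services.scim.updateScimUser.
const scimUserPatchBody = {
  schemas: ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
  Operations: [
    { op: "replace" as const, value: { active: false } },
    { op: "remove" as const, path: "name.familyName" }
  ]
};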
server.route({
|
||||
url: "/Groups",
|
||||
method: "POST",
|
||||
@ -381,10 +305,25 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
display: z.string()
|
||||
})
|
||||
)
|
||||
.optional()
|
||||
.optional() // okta-specific
|
||||
}),
|
||||
response: {
|
||||
200: ScimGroupSchema
|
||||
200: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
displayName: z.string().trim(),
|
||||
members: z
|
||||
.array(
|
||||
z.object({
|
||||
value: z.string(),
|
||||
display: z.string()
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
meta: z.object({
|
||||
resourceType: z.string().trim()
|
||||
})
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
@ -405,12 +344,26 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
querystring: z.object({
|
||||
startIndex: z.coerce.number().default(1),
|
||||
count: z.coerce.number().default(20),
|
||||
filter: z.string().trim().optional(),
|
||||
excludedAttributes: z.string().trim().optional()
|
||||
filter: z.string().trim().optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
Resources: z.array(ScimGroupSchema),
|
||||
Resources: z.array(
|
||||
z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
displayName: z.string().trim(),
|
||||
members: z.array(
|
||||
z.object({
|
||||
value: z.string(),
|
||||
display: z.string()
|
||||
})
|
||||
),
|
||||
meta: z.object({
|
||||
resourceType: z.string().trim()
|
||||
})
|
||||
})
|
||||
),
|
||||
itemsPerPage: z.number(),
|
||||
schemas: z.array(z.string()),
|
||||
startIndex: z.number(),
|
||||
@ -424,8 +377,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
orgId: req.permission.orgId,
|
||||
startIndex: req.query.startIndex,
|
||||
filter: req.query.filter,
|
||||
limit: req.query.count,
|
||||
isMembersExcluded: req.query.excludedAttributes === "members"
|
||||
limit: req.query.count
|
||||
});
|
||||
|
||||
return groups;
|
||||
@ -440,7 +392,20 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
groupId: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: ScimGroupSchema
|
||||
200: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
displayName: z.string().trim(),
|
||||
members: z.array(
|
||||
z.object({
|
||||
value: z.string(),
|
||||
display: z.string()
|
||||
})
|
||||
),
|
||||
meta: z.object({
|
||||
resourceType: z.string().trim()
|
||||
})
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
@ -449,7 +414,6 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
groupId: req.params.groupId,
|
||||
orgId: req.permission.orgId
|
||||
});
|
||||
|
||||
return group;
|
||||
}
|
||||
});
|
||||
@ -473,12 +437,25 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
)
|
||||
}),
|
||||
response: {
|
||||
200: ScimGroupSchema
|
||||
200: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
displayName: z.string().trim(),
|
||||
members: z.array(
|
||||
z.object({
|
||||
value: z.string(),
|
||||
display: z.string()
|
||||
})
|
||||
),
|
||||
meta: z.object({
|
||||
resourceType: z.string().trim()
|
||||
})
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const group = await req.server.services.scim.replaceScimGroup({
|
||||
const group = await req.server.services.scim.updateScimGroupNamePut({
|
||||
groupId: req.params.groupId,
|
||||
orgId: req.permission.orgId,
|
||||
...req.body
|
||||
@ -500,34 +477,54 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
Operations: z.array(
|
||||
z.union([
|
||||
z.object({
|
||||
op: z.union([z.literal("remove"), z.literal("Remove")]),
|
||||
path: z.string().trim(),
|
||||
value: z
|
||||
.object({
|
||||
value: z.string()
|
||||
})
|
||||
.array()
|
||||
.optional()
|
||||
op: z.union([z.literal("replace"), z.literal("Replace")]),
|
||||
value: z.object({
|
||||
id: z.string().trim(),
|
||||
displayName: z.string().trim()
|
||||
})
|
||||
}),
|
||||
z.object({
|
||||
op: z.union([z.literal("add"), z.literal("Add"), z.literal("replace"), z.literal("Replace")]),
|
||||
path: z.string().trim().optional(),
|
||||
value: z.any()
|
||||
op: z.union([z.literal("remove"), z.literal("Remove")]),
|
||||
path: z.string().trim()
|
||||
}),
|
||||
z.object({
|
||||
op: z.union([z.literal("add"), z.literal("Add")]),
|
||||
path: z.string().trim(),
|
||||
value: z.array(
|
||||
z.object({
|
||||
value: z.string().trim(),
|
||||
display: z.string().trim().optional()
|
||||
})
|
||||
)
|
||||
})
|
||||
])
|
||||
)
|
||||
}),
|
||||
response: {
|
||||
200: ScimGroupSchema
|
||||
200: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
displayName: z.string().trim(),
|
||||
members: z.array(
|
||||
z.object({
|
||||
value: z.string(),
|
||||
display: z.string()
|
||||
})
|
||||
),
|
||||
meta: z.object({
|
||||
resourceType: z.string().trim()
|
||||
})
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const group = await req.server.services.scim.updateScimGroup({
|
||||
const group = await req.server.services.scim.updateScimGroupNamePatch({
|
||||
groupId: req.params.groupId,
|
||||
orgId: req.permission.orgId,
|
||||
operations: req.body.Operations
|
||||
});
|
||||
|
||||
return group;
|
||||
}
|
||||
});
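// Illustrative sketch (assumption): a Groups PATCH body matching the Operations union
// above - rename via "replace", drop a member via "remove", add members via "add".
// Ids and display names are made up; behavior lives in the SCIM group update service.
const scimGroupPatchBody = {
  schemas: ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
  Operations: [
    { op: "replace" as const, value: { id: "group-id", displayName: "Platform Team" } },
    { op: "remove" as const, path: 'members[value eq "user-id"]' },
    { op: "add" as const, path: "members", value: [{ value: "another-user-id", display: "Jane Doe" }] }
  ]
};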
|
||||
@ -553,4 +550,60 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
return group;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/Users/:orgMembershipId",
|
||||
method: "PUT",
|
||||
schema: {
|
||||
params: z.object({
|
||||
orgMembershipId: z.string().trim()
|
||||
}),
|
||||
body: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
userName: z.string().trim(),
|
||||
name: z.object({
|
||||
familyName: z.string().trim(),
|
||||
givenName: z.string().trim()
|
||||
}),
|
||||
displayName: z.string().trim(),
|
||||
active: z.boolean()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
userName: z.string().trim(),
|
||||
name: z.object({
|
||||
familyName: z.string().trim(),
|
||||
givenName: z.string().trim()
|
||||
}),
|
||||
emails: z.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string().email(),
|
||||
type: z.string().trim()
|
||||
})
|
||||
),
|
||||
displayName: z.string().trim(),
|
||||
active: z.boolean(),
|
||||
groups: z.array(
|
||||
z.object({
|
||||
value: z.string().trim(),
|
||||
display: z.string().trim()
|
||||
})
|
||||
)
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const user = await req.server.services.scim.replaceScimUser({
|
||||
orgMembershipId: req.params.orgMembershipId,
|
||||
orgId: req.permission.orgId,
|
||||
active: req.body.active
|
||||
});
|
||||
return user;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@ -1,7 +1,6 @@
|
||||
import { nanoid } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { EnforcementLevel } from "@app/lib/types";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
@ -17,33 +16,32 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
body: z.object({
|
||||
workspaceId: z.string(),
|
||||
name: z.string().optional(),
|
||||
environment: z.string(),
|
||||
secretPath: z
|
||||
.string()
|
||||
.optional()
|
||||
.nullable()
|
||||
.default("/")
|
||||
.transform((val) => (val ? removeTrailingSlash(val) : val)),
|
||||
approvers: z
|
||||
.discriminatedUnion("type", [
|
||||
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
|
||||
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() })
|
||||
])
|
||||
.array()
|
||||
.min(1, { message: "At least one approver should be provided" }),
|
||||
approvals: z.number().min(1).default(1),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
|
||||
}),
|
||||
body: z
|
||||
.object({
|
||||
workspaceId: z.string(),
|
||||
name: z.string().optional(),
|
||||
environment: z.string(),
|
||||
secretPath: z
|
||||
.string()
|
||||
.optional()
|
||||
.nullable()
|
||||
.default("/")
|
||||
.transform((val) => (val ? removeTrailingSlash(val) : val)),
|
||||
approvers: z.string().array().min(1),
|
||||
approvals: z.number().min(1).default(1),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
|
||||
})
|
||||
.refine((data) => data.approvals <= data.approvers.length, {
|
||||
path: ["approvals"],
|
||||
message: "The number of approvals should be lower than the number of approvers."
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
approval: sapPubSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const approval = await server.services.secretApprovalPolicy.createSecretApprovalPolicy({
|
||||
actor: req.permission.type,
|
||||
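// Illustrative sketch (assumption): a create-policy body using the new discriminated
// "approvers" union above - one group approver referenced by id and one user approver
// referenced by name. The literal strings "group"/"user" follow ApproverType as shown
// later in the diff; the ids and names are made up.
const createSecretApprovalPolicyBody = {
  workspaceId: "project-id",
  environment: "prod",
  secretPath: "/",
  approvals: 1,
  approvers: [
    { type: "group" as const, id: "group-id" },
    { type: "user" as const, name: "jane@example.com" }
  ]
};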
@ -69,31 +67,30 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
params: z.object({
|
||||
sapId: z.string()
|
||||
}),
|
||||
body: z.object({
|
||||
name: z.string().optional(),
|
||||
approvers: z
|
||||
.discriminatedUnion("type", [
|
||||
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
|
||||
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() })
|
||||
])
|
||||
.array()
|
||||
.min(1, { message: "At least one approver should be provided" }),
|
||||
approvals: z.number().min(1).default(1),
|
||||
secretPath: z
|
||||
.string()
|
||||
.optional()
|
||||
.nullable()
|
||||
.transform((val) => (val ? removeTrailingSlash(val) : val))
|
||||
.transform((val) => (val === "" ? "/" : val)),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).optional()
|
||||
}),
|
||||
body: z
|
||||
.object({
|
||||
name: z.string().optional(),
|
||||
approvers: z.string().array().min(1),
|
||||
approvals: z.number().min(1).default(1),
|
||||
secretPath: z
|
||||
.string()
|
||||
.optional()
|
||||
.nullable()
|
||||
.transform((val) => (val ? removeTrailingSlash(val) : val))
|
||||
.transform((val) => (val === "" ? "/" : val)),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).optional()
|
||||
})
|
||||
.refine((data) => data.approvals <= data.approvers.length, {
|
||||
path: ["approvals"],
|
||||
message: "The number of approvals should be lower than the number of approvers."
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
approval: sapPubSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const approval = await server.services.secretApprovalPolicy.updateSecretApprovalPolicy({
|
||||
actor: req.permission.type,
|
||||
@ -123,7 +120,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const approval = await server.services.secretApprovalPolicy.deleteSecretApprovalPolicy({
|
||||
actor: req.permission.type,
|
||||
@ -150,10 +147,9 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
200: z.object({
|
||||
approvals: sapPubSchema
|
||||
.extend({
|
||||
approvers: z
|
||||
userApprovers: z
|
||||
.object({
|
||||
id: z.string().nullable().optional(),
|
||||
type: z.nativeEnum(ApproverType)
|
||||
userId: z.string()
|
||||
})
|
||||
.array()
|
||||
})
|
||||
@ -174,44 +170,6 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/:sapId",
|
||||
method: "GET",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
sapId: z.string()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
approval: sapPubSchema.extend({
|
||||
approvers: z
|
||||
.object({
|
||||
id: z.string().nullable().optional(),
|
||||
type: z.nativeEnum(ApproverType),
|
||||
name: z.string().nullable().optional()
|
||||
})
|
||||
.array()
|
||||
})
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const approval = await server.services.secretApprovalPolicy.getSecretApprovalPolicyById({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.params
|
||||
});
|
||||
|
||||
return { approval };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/board",
|
||||
method: "GET",
|
||||
@ -228,7 +186,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
200: z.object({
|
||||
policy: sapPubSchema
|
||||
.extend({
|
||||
userApprovers: z.object({ userId: z.string().nullable().optional() }).array()
|
||||
userApprovers: z.object({ userId: z.string() }).array()
|
||||
})
|
||||
.optional()
|
||||
})
|
||||
|
@ -13,7 +13,7 @@ import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { secretRawSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
const approvalRequestUser = z.object({ userId: z.string().nullable().optional() }).merge(
|
||||
const approvalRequestUser = z.object({ userId: z.string() }).merge(
|
||||
UsersSchema.pick({
|
||||
email: true,
|
||||
firstName: true,
|
||||
@ -46,11 +46,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
approvals: z.number(),
|
||||
approvers: z
|
||||
.object({
|
||||
userId: z.string().nullable().optional()
|
||||
})
|
||||
.array(),
|
||||
approvers: z.string().array(),
|
||||
secretPath: z.string().optional().nullable(),
|
||||
enforcementLevel: z.string()
|
||||
}),
|
||||
@ -58,11 +54,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
||||
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
|
||||
environment: z.string(),
|
||||
reviewers: z.object({ userId: z.string(), status: z.string() }).array(),
|
||||
approvers: z
|
||||
.object({
|
||||
userId: z.string().nullable().optional()
|
||||
})
|
||||
.array()
|
||||
approvers: z.string().array()
|
||||
}).array()
|
||||
})
|
||||
}
|
||||
|
@ -2,8 +2,6 @@ import { z } from "zod";

import { GitAppOrgSchema, SecretScanningGitRisksSchema } from "@app/db/schemas";
import { SecretScanningRiskStatus } from "@app/ee/services/secret-scanning/secret-scanning-types";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@ -25,13 +23,6 @@ export const registerSecretScanningRouter = async (server: FastifyZodProvider) =
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const appCfg = getConfig();
if (!appCfg.SECRET_SCANNING_ORG_WHITELIST?.includes(req.auth.orgId)) {
throw new BadRequestError({
message: "Secret scanning is temporarily unavailable."
});
}

const session = await server.services.secretScanning.createInstallationSession({
actor: req.permission.type,
actorId: req.permission.id,
@ -39,7 +30,6 @@ export const registerSecretScanningRouter = async (server: FastifyZodProvider) =
actorOrgId: req.permission.orgId,
orgId: req.body.organizationId
});

return session;
}
});
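// Illustrative sketch (not part of the diff): the org-whitelist gate touched above, as a
// standalone guard. SECRET_SCANNING_ORG_WHITELIST is assumed to be an optional list of
// org ids on the app config; the handler itself throws BadRequestError rather than Error.
const assertSecretScanningEnabled = (whitelist: string[] | undefined, orgId: string) => {
  if (!whitelist?.includes(orgId)) {
    throw new Error("Secret scanning is temporarily unavailable.");
  }
};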
@ -5,38 +5,22 @@ import { AccessApprovalPoliciesSchema, TableName, TAccessApprovalPolicies } from
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import { buildFindFilter, ormify, selectAllTableCols, sqlNestRelationships, TFindFilter } from "@app/lib/knex";
|
||||
|
||||
import { ApproverType } from "./access-approval-policy-types";
|
||||
|
||||
export type TAccessApprovalPolicyDALFactory = ReturnType<typeof accessApprovalPolicyDALFactory>;
|
||||
|
||||
export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
|
||||
const accessApprovalPolicyOrm = ormify(db, TableName.AccessApprovalPolicy);
|
||||
|
||||
const accessApprovalPolicyFindQuery = async (
|
||||
tx: Knex,
|
||||
filter: TFindFilter<TAccessApprovalPolicies>,
|
||||
customFilter?: {
|
||||
policyId?: string;
|
||||
}
|
||||
) => {
|
||||
const accessApprovalPolicyFindQuery = async (tx: Knex, filter: TFindFilter<TAccessApprovalPolicies>) => {
|
||||
const result = await tx(TableName.AccessApprovalPolicy)
|
||||
// eslint-disable-next-line
|
||||
.where(buildFindFilter(filter))
|
||||
.where((qb) => {
|
||||
if (customFilter?.policyId) {
|
||||
void qb.where(`${TableName.AccessApprovalPolicy}.id`, "=", customFilter.policyId);
|
||||
}
|
||||
})
|
||||
.join(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`)
|
||||
.leftJoin(
|
||||
TableName.AccessApprovalPolicyApprover,
|
||||
`${TableName.AccessApprovalPolicy}.id`,
|
||||
`${TableName.AccessApprovalPolicyApprover}.policyId`
|
||||
)
|
||||
.leftJoin(TableName.Users, `${TableName.AccessApprovalPolicyApprover}.approverUserId`, `${TableName.Users}.id`)
|
||||
.select(tx.ref("username").withSchema(TableName.Users).as("approverUsername"))
|
||||
.select(tx.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
|
||||
.select(tx.ref("approverGroupId").withSchema(TableName.AccessApprovalPolicyApprover))
|
||||
.select(tx.ref("name").withSchema(TableName.Environment).as("envName"))
|
||||
.select(tx.ref("slug").withSchema(TableName.Environment).as("envSlug"))
|
||||
.select(tx.ref("id").withSchema(TableName.Environment).as("envId"))
|
||||
@ -46,10 +30,10 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
|
||||
return result;
|
||||
};
|
||||
|
||||
const findById = async (policyId: string, tx?: Knex) => {
|
||||
const findById = async (id: string, tx?: Knex) => {
|
||||
try {
|
||||
const doc = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), {
|
||||
[`${TableName.AccessApprovalPolicy}.id` as "id"]: policyId
|
||||
[`${TableName.AccessApprovalPolicy}.id` as "id"]: id
|
||||
});
|
||||
const formattedDoc = sqlNestRelationships({
|
||||
data: doc,
|
||||
@ -66,18 +50,9 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
|
||||
childrenMapper: [
|
||||
{
|
||||
key: "approverUserId",
|
||||
label: "approvers" as const,
|
||||
mapper: ({ approverUserId: id }) => ({
|
||||
id,
|
||||
type: "user"
|
||||
})
|
||||
},
|
||||
{
|
||||
key: "approverGroupId",
|
||||
label: "approvers" as const,
|
||||
mapper: ({ approverGroupId: id }) => ({
|
||||
id,
|
||||
type: "group"
|
||||
label: "userApprovers" as const,
|
||||
mapper: ({ approverUserId }) => ({
|
||||
userId: approverUserId
|
||||
})
|
||||
}
|
||||
]
|
||||
@ -89,15 +64,9 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
};
|
||||
|
||||
const find = async (
|
||||
filter: TFindFilter<TAccessApprovalPolicies & { projectId: string }>,
|
||||
customFilter?: {
|
||||
policyId?: string;
|
||||
},
|
||||
tx?: Knex
|
||||
) => {
|
||||
const find = async (filter: TFindFilter<TAccessApprovalPolicies & { projectId: string }>, tx?: Knex) => {
|
||||
try {
|
||||
const docs = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), filter, customFilter);
|
||||
const docs = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), filter);
|
||||
|
||||
const formattedDocs = sqlNestRelationships({
|
||||
data: docs,
|
||||
@ -115,19 +84,9 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
|
||||
childrenMapper: [
|
||||
{
|
||||
key: "approverUserId",
|
||||
label: "approvers" as const,
|
||||
mapper: ({ approverUserId: id, approverUsername }) => ({
|
||||
id,
|
||||
type: ApproverType.User,
|
||||
name: approverUsername
|
||||
})
|
||||
},
|
||||
{
|
||||
key: "approverGroupId",
|
||||
label: "approvers" as const,
|
||||
mapper: ({ approverGroupId: id }) => ({
|
||||
id,
|
||||
type: ApproverType.Group
|
||||
label: "userApprovers" as const,
|
||||
mapper: ({ approverUserId }) => ({
|
||||
userId: approverUserId
|
||||
})
|
||||
}
|
||||
]
|
||||
|
@ -1,11 +1,12 @@
|
||||
import { ForbiddenError, subject } from "@casl/ability";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
|
||||
import { TIsApproversValid } from "./access-approval-policy-types";
|
||||
import { TVerifyApprovers } from "./access-approval-policy-types";
|
||||
|
||||
export const isApproversValid = async ({
|
||||
export const verifyApprovers = async ({
|
||||
userIds,
|
||||
projectId,
|
||||
orgId,
|
||||
@ -13,9 +14,9 @@ export const isApproversValid = async ({
|
||||
actorAuthMethod,
|
||||
secretPath,
|
||||
permissionService
|
||||
}: TIsApproversValid) => {
|
||||
try {
|
||||
for await (const userId of userIds) {
|
||||
}: TVerifyApprovers) => {
|
||||
for await (const userId of userIds) {
|
||||
try {
|
||||
const { permission: approverPermission } = await permissionService.getProjectPermission(
|
||||
ActorType.USER,
|
||||
userId,
|
||||
@ -28,9 +29,8 @@ export const isApproversValid = async ({
|
||||
ProjectPermissionActions.Create,
|
||||
subject(ProjectPermissionSub.Secrets, { environment: envSlug, secretPath })
|
||||
);
|
||||
} catch (err) {
|
||||
throw new BadRequestError({ message: "One or more approvers doesn't have access to be specified secret path" });
|
||||
}
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
};
|
||||
|
@ -2,21 +2,17 @@ import { ForbiddenError } from "@casl/ability";
|
||||
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal";
|
||||
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
|
||||
import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||
|
||||
import { TGroupDALFactory } from "../group/group-dal";
|
||||
import { TAccessApprovalPolicyApproverDALFactory } from "./access-approval-policy-approver-dal";
|
||||
import { TAccessApprovalPolicyDALFactory } from "./access-approval-policy-dal";
|
||||
import { isApproversValid } from "./access-approval-policy-fns";
|
||||
import { verifyApprovers } from "./access-approval-policy-fns";
|
||||
import {
|
||||
ApproverType,
|
||||
TCreateAccessApprovalPolicy,
|
||||
TDeleteAccessApprovalPolicy,
|
||||
TGetAccessApprovalPolicyByIdDTO,
|
||||
TGetAccessPolicyCountByEnvironmentDTO,
|
||||
TListAccessApprovalPoliciesDTO,
|
||||
TUpdateAccessApprovalPolicy
|
||||
@ -29,8 +25,6 @@ type TSecretApprovalPolicyServiceFactoryDep = {
|
||||
projectEnvDAL: Pick<TProjectEnvDALFactory, "find" | "findOne">;
|
||||
accessApprovalPolicyApproverDAL: TAccessApprovalPolicyApproverDALFactory;
|
||||
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find">;
|
||||
groupDAL: TGroupDALFactory;
|
||||
userDAL: Pick<TUserDALFactory, "find">;
|
||||
};
|
||||
|
||||
export type TAccessApprovalPolicyServiceFactory = ReturnType<typeof accessApprovalPolicyServiceFactory>;
|
||||
@ -38,11 +32,9 @@ export type TAccessApprovalPolicyServiceFactory = ReturnType<typeof accessApprov
|
||||
export const accessApprovalPolicyServiceFactory = ({
|
||||
accessApprovalPolicyDAL,
|
||||
accessApprovalPolicyApproverDAL,
|
||||
groupDAL,
|
||||
permissionService,
|
||||
projectEnvDAL,
|
||||
projectDAL,
|
||||
userDAL
|
||||
projectDAL
|
||||
}: TSecretApprovalPolicyServiceFactoryDep) => {
|
||||
const createAccessApprovalPolicy = async ({
|
||||
name,
|
||||
@ -52,29 +44,15 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
secretPath,
|
||||
actorAuthMethod,
|
||||
approvals,
|
||||
approvers,
|
||||
approverUserIds,
|
||||
projectSlug,
|
||||
environment,
|
||||
enforcementLevel
|
||||
}: TCreateAccessApprovalPolicy) => {
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) throw new NotFoundError({ message: "Project not found" });
|
||||
if (!project) throw new BadRequestError({ message: "Project not found" });
|
||||
|
||||
// If there is a group approver people might be added to the group later to meet the approvers quota
|
||||
const groupApprovers = approvers
|
||||
.filter((approver) => approver.type === ApproverType.Group)
|
||||
.map((approver) => approver.id) as string[];
|
||||
|
||||
const userApprovers = approvers
|
||||
.filter((approver) => approver.type === ApproverType.User)
|
||||
.map((approver) => approver.id)
|
||||
.filter(Boolean) as string[];
|
||||
|
||||
const userApproverNames = approvers
|
||||
.map((approver) => (approver.type === ApproverType.User ? approver.name : undefined))
|
||||
.filter(Boolean) as string[];
|
||||
|
||||
if (!groupApprovers && approvals > userApprovers.length + userApproverNames.length)
|
||||
if (approvals > approverUserIds.length)
|
||||
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
@ -89,65 +67,18 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
ProjectPermissionSub.SecretApproval
|
||||
);
|
||||
const env = await projectEnvDAL.findOne({ slug: environment, projectId: project.id });
|
||||
if (!env) throw new NotFoundError({ message: "Environment not found" });
|
||||
if (!env) throw new BadRequestError({ message: "Environment not found" });
|
||||
|
||||
let approverUserIds = userApprovers;
|
||||
if (userApproverNames.length) {
|
||||
const approverUsers = await userDAL.find({
|
||||
$in: {
|
||||
username: userApproverNames
|
||||
}
|
||||
});
|
||||
|
||||
const approverNamesFromDb = approverUsers.map((user) => user.username);
|
||||
const invalidUsernames = userApproverNames.filter((username) => !approverNamesFromDb.includes(username));
|
||||
|
||||
if (invalidUsernames.length) {
|
||||
throw new BadRequestError({
|
||||
message: `Invalid approver user: ${invalidUsernames.join(", ")}`
|
||||
});
|
||||
}
|
||||
|
||||
approverUserIds = approverUserIds.concat(approverUsers.map((user) => user.id));
|
||||
}
|
||||
|
||||
const usersPromises: Promise<
|
||||
{
|
||||
id: string;
|
||||
email: string | null | undefined;
|
||||
username: string;
|
||||
firstName: string | null | undefined;
|
||||
lastName: string | null | undefined;
|
||||
isPartOfGroup: boolean;
|
||||
}[]
|
||||
>[] = [];
|
||||
const verifyAllApprovers = [...approverUserIds];
|
||||
|
||||
for (const groupId of groupApprovers) {
|
||||
usersPromises.push(groupDAL.findAllGroupPossibleMembers({ orgId: actorOrgId, groupId, offset: 0 }));
|
||||
}
|
||||
const verifyGroupApprovers = (await Promise.all(usersPromises))
|
||||
.flat()
|
||||
.filter((user) => user.isPartOfGroup)
|
||||
.map((user) => user.id);
|
||||
verifyAllApprovers.push(...verifyGroupApprovers);
|
||||
|
||||
const approversValid = await isApproversValid({
|
||||
await verifyApprovers({
|
||||
projectId: project.id,
|
||||
orgId: actorOrgId,
|
||||
envSlug: environment,
|
||||
secretPath,
|
||||
actorAuthMethod,
|
||||
permissionService,
|
||||
userIds: verifyAllApprovers
|
||||
userIds: approverUserIds
|
||||
});
|
||||
|
||||
if (!approversValid) {
|
||||
throw new BadRequestError({
|
||||
message: "One or more approvers doesn't have access to be specified secret path"
|
||||
});
|
||||
}
|
||||
|
||||
const accessApproval = await accessApprovalPolicyDAL.transaction(async (tx) => {
|
||||
const doc = await accessApprovalPolicyDAL.create(
|
||||
{
|
||||
@ -159,26 +90,13 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
},
|
||||
tx
|
||||
);
|
||||
if (approverUserIds.length) {
|
||||
await accessApprovalPolicyApproverDAL.insertMany(
|
||||
approverUserIds.map((userId) => ({
|
||||
approverUserId: userId,
|
||||
policyId: doc.id
|
||||
})),
|
||||
tx
|
||||
);
|
||||
}
|
||||
|
||||
if (groupApprovers) {
|
||||
await accessApprovalPolicyApproverDAL.insertMany(
|
||||
groupApprovers.map((groupId) => ({
|
||||
approverGroupId: groupId,
|
||||
policyId: doc.id
|
||||
})),
|
||||
tx
|
||||
);
|
||||
}
|
||||
|
||||
await accessApprovalPolicyApproverDAL.insertMany(
|
||||
approverUserIds.map((userId) => ({
|
||||
approverUserId: userId,
|
||||
policyId: doc.id
|
||||
})),
|
||||
tx
|
||||
);
|
||||
return doc;
|
||||
});
|
||||
return { ...accessApproval, environment: env, projectId: project.id };
|
||||
@ -192,7 +110,7 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
projectSlug
|
||||
}: TListAccessApprovalPoliciesDTO) => {
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) throw new NotFoundError({ message: "Project not found" });
|
||||
if (!project) throw new BadRequestError({ message: "Project not found" });
|
||||
|
||||
// Anyone in the project should be able to get the policies.
|
||||
/* const { permission } = */ await permissionService.getProjectPermission(
|
||||
@ -210,7 +128,7 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
|
||||
const updateAccessApprovalPolicy = async ({
|
||||
policyId,
|
||||
approvers,
|
||||
approverUserIds,
|
||||
secretPath,
|
||||
name,
|
||||
actorId,
|
||||
@ -220,30 +138,8 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
approvals,
|
||||
enforcementLevel
|
||||
}: TUpdateAccessApprovalPolicy) => {
|
||||
const groupApprovers = approvers
|
||||
.filter((approver) => approver.type === ApproverType.Group)
|
||||
.map((approver) => approver.id) as string[];
|
||||
|
||||
const userApprovers = approvers
|
||||
.filter((approver) => approver.type === ApproverType.User)
|
||||
.map((approver) => approver.id)
|
||||
.filter(Boolean) as string[];
|
||||
|
||||
const userApproverNames = approvers
|
||||
.map((approver) => (approver.type === ApproverType.User ? approver.name : undefined))
|
||||
.filter(Boolean) as string[];
|
||||
|
||||
const accessApprovalPolicy = await accessApprovalPolicyDAL.findById(policyId);
|
||||
const currentAppovals = approvals || accessApprovalPolicy.approvals;
|
||||
if (
|
||||
groupApprovers?.length === 0 &&
|
||||
userApprovers &&
|
||||
currentAppovals > userApprovers.length + userApproverNames.length
|
||||
) {
|
||||
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
|
||||
}
|
||||
|
||||
if (!accessApprovalPolicy) throw new NotFoundError({ message: "Secret approval policy not found" });
|
||||
if (!accessApprovalPolicy) throw new BadRequestError({ message: "Secret approval policy not found" });
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
@ -265,100 +161,26 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
await accessApprovalPolicyApproverDAL.delete({ policyId: doc.id }, tx);
|
||||
|
||||
if (userApprovers.length || userApproverNames.length) {
|
||||
let userApproverIds = userApprovers;
|
||||
if (userApproverNames.length) {
|
||||
const approverUsers = await userDAL.find({
|
||||
$in: {
|
||||
username: userApproverNames
|
||||
}
|
||||
});
|
||||
|
||||
const approverNamesFromDb = approverUsers.map((user) => user.username);
|
||||
const invalidUsernames = userApproverNames.filter((username) => !approverNamesFromDb.includes(username));
|
||||
|
||||
if (invalidUsernames.length) {
|
||||
throw new BadRequestError({
|
||||
message: `Invalid approver user: ${invalidUsernames.join(", ")}`
|
||||
});
|
||||
}
|
||||
|
||||
userApproverIds = userApproverIds.concat(approverUsers.map((user) => user.id));
|
||||
}
|
||||
|
||||
const approversValid = await isApproversValid({
|
||||
if (approverUserIds) {
|
||||
await verifyApprovers({
|
||||
projectId: accessApprovalPolicy.projectId,
|
||||
orgId: actorOrgId,
|
||||
envSlug: accessApprovalPolicy.environment.slug,
|
||||
secretPath: doc.secretPath!,
|
||||
actorAuthMethod,
|
||||
permissionService,
|
||||
userIds: userApproverIds
|
||||
userIds: approverUserIds
|
||||
});
|
||||
|
||||
if (!approversValid) {
|
||||
throw new BadRequestError({
|
||||
message: "One or more approvers doesn't have access to be specified secret path"
|
||||
});
|
||||
}
|
||||
|
||||
await accessApprovalPolicyApproverDAL.delete({ policyId: doc.id }, tx);
|
||||
await accessApprovalPolicyApproverDAL.insertMany(
|
||||
userApproverIds.map((userId) => ({
|
||||
approverUserIds.map((userId) => ({
|
||||
approverUserId: userId,
|
||||
policyId: doc.id
|
||||
})),
|
||||
tx
|
||||
);
|
||||
}
|
||||
|
||||
if (groupApprovers) {
|
||||
const usersPromises: Promise<
|
||||
{
|
||||
id: string;
|
||||
email: string | null | undefined;
|
||||
username: string;
|
||||
firstName: string | null | undefined;
|
||||
lastName: string | null | undefined;
|
||||
isPartOfGroup: boolean;
|
||||
}[]
|
||||
>[] = [];
|
||||
|
||||
for (const groupId of groupApprovers) {
|
||||
usersPromises.push(groupDAL.findAllGroupPossibleMembers({ orgId: actorOrgId, groupId, offset: 0 }));
|
||||
}
|
||||
const verifyGroupApprovers = (await Promise.all(usersPromises))
|
||||
.flat()
|
||||
.filter((user) => user.isPartOfGroup)
|
||||
.map((user) => user.id);
|
||||
|
||||
const approversValid = await isApproversValid({
|
||||
projectId: accessApprovalPolicy.projectId,
|
||||
orgId: actorOrgId,
|
||||
envSlug: accessApprovalPolicy.environment.slug,
|
||||
secretPath: doc.secretPath!,
|
||||
actorAuthMethod,
|
||||
permissionService,
|
||||
userIds: verifyGroupApprovers
|
||||
});
|
||||
|
||||
if (!approversValid) {
|
||||
throw new BadRequestError({
|
||||
message: "One or more approvers doesn't have access to be specified secret path"
|
||||
});
|
||||
}
|
||||
|
||||
await accessApprovalPolicyApproverDAL.insertMany(
|
||||
groupApprovers.map((groupId) => ({
|
||||
approverGroupId: groupId,
|
||||
policyId: doc.id
|
||||
})),
|
||||
tx
|
||||
);
|
||||
}
|
||||
|
||||
return doc;
|
||||
});
|
||||
return {
|
||||
@ -376,7 +198,7 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
actorOrgId
|
||||
}: TDeleteAccessApprovalPolicy) => {
|
||||
const policy = await accessApprovalPolicyDAL.findById(policyId);
|
||||
if (!policy) throw new NotFoundError({ message: "Secret approval policy not found" });
|
||||
if (!policy) throw new BadRequestError({ message: "Secret approval policy not found" });
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
@ -404,7 +226,7 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
}: TGetAccessPolicyCountByEnvironmentDTO) => {
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
|
||||
if (!project) throw new NotFoundError({ message: "Project not found" });
|
||||
if (!project) throw new BadRequestError({ message: "Project not found" });
|
||||
|
||||
const { membership } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
@ -413,53 +235,22 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
if (!membership) {
|
||||
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
|
||||
}
|
||||
if (!membership) throw new BadRequestError({ message: "User not found in project" });
|
||||
|
||||
const environment = await projectEnvDAL.findOne({ projectId: project.id, slug: envSlug });
|
||||
if (!environment) throw new NotFoundError({ message: "Environment not found" });
|
||||
if (!environment) throw new BadRequestError({ message: "Environment not found" });
|
||||
|
||||
const policies = await accessApprovalPolicyDAL.find({ envId: environment.id, projectId: project.id });
|
||||
if (!policies) throw new NotFoundError({ message: "No policies found" });
|
||||
if (!policies) throw new BadRequestError({ message: "No policies found" });
|
||||
|
||||
return { count: policies.length };
|
||||
};
|
||||
|
||||
const getAccessApprovalPolicyById = async ({
|
||||
actorId,
|
||||
actor,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
policyId
|
||||
}: TGetAccessApprovalPolicyByIdDTO) => {
|
||||
const [policy] = await accessApprovalPolicyDAL.find({}, { policyId });
|
||||
|
||||
if (!policy) {
|
||||
throw new NotFoundError({
|
||||
message: "Cannot find access approval policy"
|
||||
});
|
||||
}
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
policy.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);
|
||||
|
||||
return policy;
|
||||
};
|
||||
|
||||
return {
|
||||
getAccessPolicyCountByEnvSlug,
|
||||
createAccessApprovalPolicy,
|
||||
deleteAccessApprovalPolicy,
|
||||
updateAccessApprovalPolicy,
|
||||
getAccessApprovalPolicyByProjectSlug,
|
||||
getAccessApprovalPolicyById
|
||||
getAccessApprovalPolicyByProjectSlug
|
||||
};
|
||||
};
|
||||
|
@ -3,7 +3,7 @@ import { ActorAuthMethod } from "@app/services/auth/auth-type";
|
||||
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
|
||||
export type TIsApproversValid = {
|
||||
export type TVerifyApprovers = {
|
||||
userIds: string[];
|
||||
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
|
||||
envSlug: string;
|
||||
@ -13,16 +13,11 @@ export type TIsApproversValid = {
|
||||
orgId: string;
|
||||
};
|
||||
|
||||
export enum ApproverType {
|
||||
Group = "group",
|
||||
User = "user"
|
||||
}
|
||||
|
||||
export type TCreateAccessApprovalPolicy = {
|
||||
approvals: number;
|
||||
secretPath: string;
|
||||
environment: string;
|
||||
approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; name?: string })[];
|
||||
approverUserIds: string[];
|
||||
projectSlug: string;
|
||||
name: string;
|
||||
enforcementLevel: EnforcementLevel;
|
||||
@ -31,7 +26,7 @@ export type TCreateAccessApprovalPolicy = {
|
||||
export type TUpdateAccessApprovalPolicy = {
|
||||
policyId: string;
|
||||
approvals?: number;
|
||||
approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; name?: string })[];
|
||||
approverUserIds?: string[];
|
||||
secretPath?: string;
|
||||
name?: string;
|
||||
enforcementLevel?: EnforcementLevel;
|
||||
@ -46,10 +41,6 @@ export type TGetAccessPolicyCountByEnvironmentDTO = {
|
||||
projectSlug: string;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TGetAccessApprovalPolicyByIdDTO = {
|
||||
policyId: string;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TListAccessApprovalPoliciesDTO = {
|
||||
projectSlug: string;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
@ -39,12 +39,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
`${TableName.AccessApprovalPolicy}.id`,
|
||||
`${TableName.AccessApprovalPolicyApprover}.policyId`
|
||||
)
|
||||
.leftJoin(
|
||||
TableName.UserGroupMembership,
|
||||
`${TableName.AccessApprovalPolicyApprover}.approverGroupId`,
|
||||
`${TableName.UserGroupMembership}.groupId`
|
||||
)
|
||||
.leftJoin(TableName.Users, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)
|
||||
|
||||
.join<TUsers>(
|
||||
db(TableName.Users).as("requestedByUser"),
|
||||
@ -65,7 +59,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
)
|
||||
|
||||
.select(db.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
|
||||
.select(db.ref("userId").withSchema(TableName.UserGroupMembership).as("approverGroupUserId"))
|
||||
|
||||
.select(
|
||||
db.ref("projectId").withSchema(TableName.Environment),
|
||||
@ -149,12 +142,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
label: "reviewers" as const,
|
||||
mapper: ({ reviewerUserId: userId, reviewerStatus: status }) => (userId ? { userId, status } : undefined)
|
||||
},
|
||||
{ key: "approverUserId", label: "approvers" as const, mapper: ({ approverUserId }) => approverUserId },
|
||||
{
|
||||
key: "approverGroupUserId",
|
||||
label: "approvers" as const,
|
||||
mapper: ({ approverGroupUserId }) => approverGroupUserId
|
||||
}
|
||||
{ key: "approverUserId", label: "approvers" as const, mapper: ({ approverUserId }) => approverUserId }
|
||||
]
|
||||
});
|
||||
|
||||
@ -184,28 +172,17 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
`requestedByUser.id`
|
||||
)
|
||||
|
||||
.leftJoin(
|
||||
.join(
|
||||
TableName.AccessApprovalPolicyApprover,
|
||||
`${TableName.AccessApprovalPolicy}.id`,
|
||||
`${TableName.AccessApprovalPolicyApprover}.policyId`
|
||||
)
|
||||
|
||||
.leftJoin<TUsers>(
|
||||
.join<TUsers>(
|
||||
db(TableName.Users).as("accessApprovalPolicyApproverUser"),
|
||||
`${TableName.AccessApprovalPolicyApprover}.approverUserId`,
|
||||
"accessApprovalPolicyApproverUser.id"
|
||||
)
|
||||
.leftJoin(
|
||||
TableName.UserGroupMembership,
|
||||
`${TableName.AccessApprovalPolicyApprover}.approverGroupId`,
|
||||
`${TableName.UserGroupMembership}.groupId`
|
||||
)
|
||||
|
||||
.leftJoin<TUsers>(
|
||||
db(TableName.Users).as("accessApprovalPolicyGroupApproverUser"),
|
||||
`${TableName.UserGroupMembership}.userId`,
|
||||
"accessApprovalPolicyGroupApproverUser.id"
|
||||
)
|
||||
|
||||
.leftJoin(
|
||||
TableName.AccessApprovalRequestReviewer,
|
||||
@ -223,15 +200,10 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
.select(selectAllTableCols(TableName.AccessApprovalRequest))
|
||||
.select(
|
||||
tx.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover),
|
||||
tx.ref("userId").withSchema(TableName.UserGroupMembership),
|
||||
tx.ref("email").withSchema("accessApprovalPolicyApproverUser").as("approverEmail"),
|
||||
tx.ref("email").withSchema("accessApprovalPolicyGroupApproverUser").as("approverGroupEmail"),
|
||||
tx.ref("username").withSchema("accessApprovalPolicyApproverUser").as("approverUsername"),
|
||||
tx.ref("username").withSchema("accessApprovalPolicyGroupApproverUser").as("approverGroupUsername"),
|
||||
tx.ref("firstName").withSchema("accessApprovalPolicyApproverUser").as("approverFirstName"),
|
||||
tx.ref("firstName").withSchema("accessApprovalPolicyGroupApproverUser").as("approverGroupFirstName"),
|
||||
tx.ref("lastName").withSchema("accessApprovalPolicyApproverUser").as("approverLastName"),
|
||||
tx.ref("lastName").withSchema("accessApprovalPolicyGroupApproverUser").as("approverGroupLastName"),
|
||||
tx.ref("email").withSchema("requestedByUser").as("requestedByUserEmail"),
|
||||
tx.ref("username").withSchema("requestedByUser").as("requestedByUserUsername"),
|
||||
tx.ref("firstName").withSchema("requestedByUser").as("requestedByUserFirstName"),
|
||||
@ -310,23 +282,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
lastName,
|
||||
username
|
||||
})
|
||||
},
|
||||
{
|
||||
key: "userId",
|
||||
label: "approvers" as const,
|
||||
mapper: ({
|
||||
userId,
|
||||
approverGroupEmail: email,
|
||||
approverGroupUsername: username,
|
||||
approverGroupLastName: lastName,
|
||||
approverFirstName: firstName
|
||||
}) => ({
|
||||
userId,
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
username
|
||||
})
|
||||
}
|
||||
]
|
||||
});
|
||||
|
@ -1,6 +1,6 @@
import { PackRule, unpackRules } from "@casl/ability/extra";

import { BadRequestError } from "@app/lib/errors";
import { UnauthorizedError } from "@app/lib/errors";

import { TVerifyPermission } from "./access-approval-request-types";

@ -19,7 +19,7 @@ export const verifyRequestedPermissions = ({ permissions }: TVerifyPermission) =
);

if (!permission || !permission.length) {
throw new BadRequestError({ message: "No permission provided" });
throw new UnauthorizedError({ message: "No permission provided" });
}

const requestedPermissions: string[] = [];
@ -39,10 +39,10 @@ export const verifyRequestedPermissions = ({ permissions }: TVerifyPermission) =
const permissionEnv = firstPermission.conditions?.environment;

if (!permissionEnv || typeof permissionEnv !== "string") {
throw new BadRequestError({ message: "Permission environment is not a string" });
throw new UnauthorizedError({ message: "Permission environment is not a string" });
}
if (!permissionSecretPath || typeof permissionSecretPath !== "string") {
throw new BadRequestError({ message: "Permission path is not a string" });
throw new UnauthorizedError({ message: "Permission path is not a string" });
}

return {
@ -3,22 +3,17 @@ import ms from "ms";

import { ProjectMembershipRole } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { TProjectSlackConfigDALFactory } from "@app/services/slack/project-slack-config-dal";
import { triggerSlackNotification } from "@app/services/slack/slack-fns";
import { SlackTriggerFeature } from "@app/services/slack/slack-types";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { TUserDALFactory } from "@app/services/user/user-dal";

import { TAccessApprovalPolicyApproverDALFactory } from "../access-approval-policy/access-approval-policy-approver-dal";
import { TAccessApprovalPolicyDALFactory } from "../access-approval-policy/access-approval-policy-dal";
import { isApproversValid } from "../access-approval-policy/access-approval-policy-fns";
import { TGroupDALFactory } from "../group/group-dal";
import { verifyApprovers } from "../access-approval-policy/access-approval-policy-fns";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { TProjectUserAdditionalPrivilegeDALFactory } from "../project-user-additional-privilege/project-user-additional-privilege-dal";
import { ProjectUserAdditionalPrivilegeTemporaryMode } from "../project-user-additional-privilege/project-user-additional-privilege-types";
@ -38,10 +33,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
accessApprovalPolicyApproverDAL: Pick<TAccessApprovalPolicyApproverDALFactory, "find">;
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
projectDAL: Pick<
TProjectDALFactory,
"checkProjectUpgradeStatus" | "findProjectBySlug" | "findProjectWithOrg" | "findById"
>;
projectDAL: Pick<TProjectDALFactory, "checkProjectUpgradeStatus" | "findProjectBySlug">;
accessApprovalRequestDAL: Pick<
TAccessApprovalRequestDALFactory,
| "create"
@ -58,21 +50,17 @@ type TSecretApprovalRequestServiceFactoryDep = {
TAccessApprovalRequestReviewerDALFactory,
"create" | "find" | "findOne" | "transaction"
>;
groupDAL: Pick<TGroupDALFactory, "findAllGroupPossibleMembers">;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findById">;
smtpService: Pick<TSmtpService, "sendMail">;
userDAL: Pick<
TUserDALFactory,
"findUserByProjectMembershipId" | "findUsersByProjectMembershipIds" | "find" | "findById"
>;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
projectSlackConfigDAL: Pick<TProjectSlackConfigDALFactory, "getIntegrationDetailsByProject">;
};

export type TAccessApprovalRequestServiceFactory = ReturnType<typeof accessApprovalRequestServiceFactory>;

export const accessApprovalRequestServiceFactory = ({
groupDAL,
projectDAL,
projectEnvDAL,
permissionService,
@ -83,9 +71,7 @@ export const accessApprovalRequestServiceFactory = ({
accessApprovalPolicyApproverDAL,
additionalPrivilegeDAL,
smtpService,
userDAL,
kmsService,
projectSlackConfigDAL
userDAL
}: TSecretApprovalRequestServiceFactoryDep) => {
const createAccessApprovalRequest = async ({
isTemporary,
@ -99,7 +85,7 @@ export const accessApprovalRequestServiceFactory = ({
}: TCreateAccessApprovalRequestDTO) => {
const cfg = getConfig();
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new UnauthorizedError({ message: "Project not found" });

// Anyone can create an access approval request.
const { membership } = await permissionService.getProjectPermission(
@ -109,56 +95,31 @@ export const accessApprovalRequestServiceFactory = ({
actorAuthMethod,
actorOrgId
);
if (!membership) {
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
}
if (!membership) throw new UnauthorizedError({ message: "You are not a member of this project" });

const requestedByUser = await userDAL.findById(actorId);
if (!requestedByUser) throw new ForbiddenRequestError({ message: "User not found" });
if (!requestedByUser) throw new UnauthorizedError({ message: "User not found" });

await projectDAL.checkProjectUpgradeStatus(project.id);

const { envSlug, secretPath, accessTypes } = verifyRequestedPermissions({ permissions: requestedPermissions });
const environment = await projectEnvDAL.findOne({ projectId: project.id, slug: envSlug });

if (!environment) throw new NotFoundError({ message: "Environment not found" });
if (!environment) throw new UnauthorizedError({ message: "Environment not found" });

const policy = await accessApprovalPolicyDAL.findOne({
envId: environment.id,
secretPath
});
if (!policy) throw new NotFoundError({ message: "No policy matching criteria was found." });

const approverIds: string[] = [];
const approverGroupIds: string[] = [];
if (!policy) throw new UnauthorizedError({ message: "No policy matching criteria was found." });

const approvers = await accessApprovalPolicyApproverDAL.find({
policyId: policy.id
});

approvers.forEach((approver) => {
if (approver.approverUserId) {
approverIds.push(approver.approverUserId);
} else if (approver.approverGroupId) {
approverGroupIds.push(approver.approverGroupId);
}
});

const groupUsers = (
await Promise.all(
approverGroupIds.map((groupApproverId) =>
groupDAL.findAllGroupPossibleMembers({
orgId: actorOrgId,
groupId: groupApproverId
})
)
)
).flat();
approverIds.push(...groupUsers.filter((user) => user.isPartOfGroup).map((user) => user.id));

const approverUsers = await userDAL.find({
$in: {
id: [...new Set(approverIds)]
id: approvers.map((approver) => approver.approverUserId)
}
});

@ -205,36 +166,13 @@ export const accessApprovalRequestServiceFactory = ({
tx
);

const requesterFullName = `${requestedByUser.firstName} ${requestedByUser.lastName}`;
const approvalUrl = `${cfg.SITE_URL}/project/${project.id}/approval`;

await triggerSlackNotification({
projectId: project.id,
projectSlackConfigDAL,
projectDAL,
kmsService,
notification: {
type: SlackTriggerFeature.ACCESS_REQUEST,
payload: {
projectName: project.name,
requesterFullName,
isTemporary,
requesterEmail: requestedByUser.email as string,
secretPath,
environment: envSlug,
permissions: accessTypes,
approvalUrl
}
}
});

await smtpService.sendMail({
recipients: approverUsers.filter((approver) => approver.email).map((approver) => approver.email!),
subjectLine: "Access Approval Request",

substitutions: {
projectName: project.name,
requesterFullName,
requesterFullName: `${requestedByUser.firstName} ${requestedByUser.lastName}`,
requesterEmail: requestedByUser.email,
isTemporary,
...(isTemporary && {
@ -243,7 +181,7 @@ export const accessApprovalRequestServiceFactory = ({
secretPath,
environment: envSlug,
permissions: accessTypes,
approvalUrl
approvalUrl: `${cfg.SITE_URL}/project/${project.id}/approval`
},
template: SmtpTemplates.AccessApprovalRequest
});
@ -264,7 +202,7 @@ export const accessApprovalRequestServiceFactory = ({
actorAuthMethod
}: TListApprovalRequestsDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new UnauthorizedError({ message: "Project not found" });

const { membership } = await permissionService.getProjectPermission(
actor,
@ -273,9 +211,7 @@ export const accessApprovalRequestServiceFactory = ({
actorAuthMethod,
actorOrgId
);
if (!membership) {
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
}
if (!membership) throw new UnauthorizedError({ message: "You are not a member of this project" });

const policies = await accessApprovalPolicyDAL.find({ projectId: project.id });
let requests = await accessApprovalRequestDAL.findRequestsWithPrivilegeByPolicyIds(policies.map((p) => p.id));
@ -300,7 +236,7 @@ export const accessApprovalRequestServiceFactory = ({
actorOrgId
}: TReviewAccessRequestDTO) => {
const accessApprovalRequest = await accessApprovalRequestDAL.findById(requestId);
if (!accessApprovalRequest) throw new NotFoundError({ message: "Secret approval request not found" });
if (!accessApprovalRequest) throw new BadRequestError({ message: "Secret approval request not found" });

const { policy } = accessApprovalRequest;
const { membership, hasRole } = await permissionService.getProjectPermission(
@ -311,21 +247,19 @@ export const accessApprovalRequestServiceFactory = ({
actorOrgId
);

if (!membership) {
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
}
if (!membership) throw new UnauthorizedError({ message: "You are not a member of this project" });

if (
!hasRole(ProjectMembershipRole.Admin) &&
accessApprovalRequest.requestedByUserId !== actorId && // The request wasn't made by the current user
!policy.approvers.find((approver) => approver.userId === actorId) // The request isn't performed by an assigned approver
) {
throw new ForbiddenRequestError({ message: "You are not authorized to approve this request" });
throw new UnauthorizedError({ message: "You are not authorized to approve this request" });
}

const reviewerProjectMembership = await projectMembershipDAL.findById(membership.id);

const approversValid = await isApproversValid({
await verifyApprovers({
projectId: accessApprovalRequest.projectId,
orgId: actorOrgId,
envSlug: accessApprovalRequest.environment,
@ -335,10 +269,6 @@ export const accessApprovalRequestServiceFactory = ({
userIds: [reviewerProjectMembership.userId]
});

if (!approversValid) {
throw new ForbiddenRequestError({ message: "You don't have access to approve this request" });
}

const existingReviews = await accessApprovalRequestReviewerDAL.find({ requestId: accessApprovalRequest.id });
if (existingReviews.some((review) => review.status === ApprovalStatus.REJECTED)) {
throw new BadRequestError({ message: "The request has already been rejected by another reviewer" });
@ -421,7 +351,7 @@ export const accessApprovalRequestServiceFactory = ({

const getCount = async ({ projectSlug, actor, actorAuthMethod, actorId, actorOrgId }: TGetAccessRequestCountDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new UnauthorizedError({ message: "Project not found" });

const { membership } = await permissionService.getProjectPermission(
actor,
@ -430,9 +360,7 @@ export const accessApprovalRequestServiceFactory = ({
actorAuthMethod,
actorOrgId
);
if (!membership) {
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
}
if (!membership) throw new BadRequestError({ message: "User not found in project" });

const count = await accessApprovalRequestDAL.getCount({ projectId: project.id });

@ -2,11 +2,10 @@ import { ForbiddenError } from "@casl/ability";
import { RawAxiosRequestHeaders } from "axios";

import { SecretKeyEncoding } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { BadRequestError } from "@app/lib/errors";
import { validateLocalIps } from "@app/lib/validator";

import { AUDIT_LOG_STREAM_TIMEOUT } from "../audit-log/audit-log-queue";
import { TLicenseServiceFactory } from "../license/license-service";
@ -43,15 +42,13 @@ export const auditLogStreamServiceFactory = ({
actorOrgId,
actorAuthMethod
}: TCreateAuditLogStreamDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" });
if (!actorOrgId) throw new BadRequestError({ message: "Missing org id from token" });

const appCfg = getConfig();
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.auditLogStreams) {
if (!plan.auditLogStreams)
throw new BadRequestError({
message: "Failed to create audit log streams due to plan restriction. Upgrade plan to create group."
});
}

const { permission } = await permissionService.getOrgPermission(
actor,
@ -62,9 +59,7 @@ export const auditLogStreamServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Settings);

if (appCfg.isCloud) {
blockLocalAndPrivateIpAddresses(url);
}
validateLocalIps(url);

const totalStreams = await auditLogStreamDAL.find({ orgId: actorOrgId });
if (totalStreams.length >= plan.auditLogStreamLimit) {
@ -121,7 +116,7 @@ export const auditLogStreamServiceFactory = ({
actorOrgId,
actorAuthMethod
}: TUpdateAuditLogStreamDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" });
if (!actorOrgId) throw new BadRequestError({ message: "Missing org id from token" });

const plan = await licenseService.getPlan(actorOrgId);
if (!plan.auditLogStreams)
@ -130,14 +125,13 @@ export const auditLogStreamServiceFactory = ({
});

const logStream = await auditLogStreamDAL.findById(id);
if (!logStream) throw new NotFoundError({ message: "Audit log stream not found" });
if (!logStream) throw new BadRequestError({ message: "Audit log stream not found" });

const { orgId } = logStream;
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings);

const appCfg = getConfig();
if (url && appCfg.isCloud) blockLocalAndPrivateIpAddresses(url);
if (url) validateLocalIps(url);

// testing connection first
const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
@ -179,10 +173,10 @@ export const auditLogStreamServiceFactory = ({
};

const deleteById = async ({ id, actor, actorId, actorOrgId, actorAuthMethod }: TDeleteAuditLogStreamDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" });
if (!actorOrgId) throw new BadRequestError({ message: "Missing org id from token" });

const logStream = await auditLogStreamDAL.findById(id);
if (!logStream) throw new NotFoundError({ message: "Audit log stream not found" });
if (!logStream) throw new BadRequestError({ message: "Audit log stream not found" });

const { orgId } = logStream;
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
@ -194,7 +188,7 @@ export const auditLogStreamServiceFactory = ({

const getById = async ({ id, actor, actorId, actorOrgId, actorAuthMethod }: TGetDetailsAuditLogStreamDTO) => {
const logStream = await auditLogStreamDAL.findById(id);
if (!logStream) throw new NotFoundError({ message: "Audit log stream not found" });
if (!logStream) throw new BadRequestError({ message: "Audit log stream not found" });

const { orgId } = logStream;
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
@ -1,15 +1,10 @@
// weird commonjs-related error in the CI requires us to do the import like this
import knex from "knex";
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError, GatewayTimeoutError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { DatabaseError } from "@app/lib/errors";
import { ormify, stripUndefinedInWhere } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";

import { EventType } from "./audit-log-types";

export type TAuditLogDALFactory = ReturnType<typeof auditLogDALFactory>;

@ -29,117 +24,51 @@ export const auditLogDALFactory = (db: TDbClient) => {
const auditLogOrm = ormify(db, TableName.AuditLog);

const find = async (
{
orgId,
projectId,
userAgentType,
startDate,
endDate,
limit = 20,
offset = 0,
actorId,
actorType,
eventType,
eventMetadata
}: Omit<TFindQuery, "actor" | "eventType"> & {
actorId?: string;
actorType?: ActorType;
eventType?: EventType[];
eventMetadata?: Record<string, string>;
},
tx?: knex.Knex
{ orgId, projectId, userAgentType, startDate, endDate, limit = 20, offset = 0, actor, eventType }: TFindQuery,
tx?: Knex
) => {
if (!orgId && !projectId) {
throw new Error("Either orgId or projectId must be provided");
}

try {
// Find statements
const sqlQuery = (tx || db.replicaNode())(TableName.AuditLog)
// eslint-disable-next-line func-names
.where(function () {
if (orgId) {
void this.where(`${TableName.AuditLog}.orgId`, orgId);
} else if (projectId) {
void this.where(`${TableName.AuditLog}.projectId`, projectId);
}
});

if (userAgentType) {
void sqlQuery.where("userAgentType", userAgentType);
}

// Select statements
void sqlQuery
.select(selectAllTableCols(TableName.AuditLog))
.where(
stripUndefinedInWhere({
projectId,
orgId,
eventType,
actor,
userAgentType
})
)
.limit(limit)
.offset(offset)
.orderBy(`${TableName.AuditLog}.createdAt`, "desc");

// Special case: Filter by actor ID
if (actorId) {
void sqlQuery.whereRaw(`"actorMetadata" @> jsonb_build_object('userId', ?::text)`, [actorId]);
}

// Special case: Filter by key/value pairs in eventMetadata field
if (eventMetadata && Object.keys(eventMetadata).length) {
Object.entries(eventMetadata).forEach(([key, value]) => {
void sqlQuery.whereRaw(`"eventMetadata" @> jsonb_build_object(?::text, ?::text)`, [key, value]);
});
}

// Filter by actor type
if (actorType) {
void sqlQuery.where("actor", actorType);
}

// Filter by event types
if (eventType?.length) {
void sqlQuery.whereIn("eventType", eventType);
}

// Filter by date range
.orderBy("createdAt", "desc");
if (startDate) {
void sqlQuery.where(`${TableName.AuditLog}.createdAt`, ">=", startDate);
void sqlQuery.where("createdAt", ">=", startDate);
}
if (endDate) {
void sqlQuery.where(`${TableName.AuditLog}.createdAt`, "<=", endDate);
void sqlQuery.where("createdAt", "<=", endDate);
}

// we timeout long running queries to prevent DB resource issues (2 minutes)
const docs = await sqlQuery.timeout(1000 * 120);

const docs = await sqlQuery;
return docs;
} catch (error) {
if (error instanceof knex.KnexTimeoutError) {
throw new GatewayTimeoutError({
error,
message: "Failed to fetch audit logs due to timeout. Add more search filters."
});
}

throw new DatabaseError({ error });
}
};

// delete all audit log that have expired
const pruneAuditLog = async (tx?: knex.Knex) => {
const pruneAuditLog = async (tx?: Knex) => {
const AUDIT_LOG_PRUNE_BATCH_SIZE = 10000;
const MAX_RETRY_ON_FAILURE = 3;

const today = new Date();
let deletedAuditLogIds: { id: string }[] = [];
let numberOfRetryOnFailure = 0;
let isRetrying = false;

logger.info(`${QueueName.DailyResourceCleanUp}: audit log started`);
do {
try {
const findExpiredLogSubQuery = (tx || db)(TableName.AuditLog)
.where("expiresAt", "<", today)
.select("id")
.limit(AUDIT_LOG_PRUNE_BATCH_SIZE);

// eslint-disable-next-line no-await-in-loop
deletedAuditLogIds = await (tx || db)(TableName.AuditLog)
.whereIn("id", findExpiredLogSubQuery)
@ -155,9 +84,7 @@ export const auditLogDALFactory = (db: TDbClient) => {
setTimeout(resolve, 10); // time to breathe for db
});
}
isRetrying = numberOfRetryOnFailure > 0;
} while (deletedAuditLogIds.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
logger.info(`${QueueName.DailyResourceCleanUp}: audit log completed`);
} while (deletedAuditLogIds.length > 0 || numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
};

return { ...auditLogOrm, pruneAuditLog, find };
@ -74,7 +74,6 @@ export const auditLogQueueServiceFactory = ({
actorMetadata: actor.metadata,
userAgent,
projectId,
projectName: project?.name,
ipAddress,
orgId,
eventType: event.type,
@ -3,7 +3,6 @@ import { ForbiddenError } from "@casl/ability";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";

import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
import { TAuditLogDALFactory } from "./audit-log-dal";
@ -12,7 +11,7 @@ import { EventType, TCreateAuditLogDTO, TListProjectAuditLogDTO } from "./audit-

type TAuditLogServiceFactoryDep = {
auditLogDAL: TAuditLogDALFactory;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getOrgPermission">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
auditLogQueue: TAuditLogQueueServiceFactory;
};

@ -23,48 +22,38 @@ export const auditLogServiceFactory = ({
auditLogQueue,
permissionService
}: TAuditLogServiceFactoryDep) => {
const listAuditLogs = async ({ actorAuthMethod, actorId, actorOrgId, actor, filter }: TListProjectAuditLogDTO) => {
// Filter logs for specific project
if (filter.projectId) {
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
filter.projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
} else {
// Organization-wide logs
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);

/**
* NOTE (dangtony98): Update this to organization-level audit log permission check once audit logs are moved
* to the organization level ✅
*/
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
}

// If project ID is not provided, then we need to return all the audit logs for the organization itself.
const listProjectAuditLogs = async ({
userAgentType,
eventType,
offset,
limit,
endDate,
startDate,
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
auditLogActor
}: TListProjectAuditLogDTO) => {
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
const auditLogs = await auditLogDAL.find({
startDate: filter.startDate,
endDate: filter.endDate,
limit: filter.limit,
offset: filter.offset,
eventType: filter.eventType,
userAgentType: filter.userAgentType,
actorId: filter.auditLogActorId,
actorType: filter.actorType,
eventMetadata: filter.eventMetadata,
...(filter.projectId ? { projectId: filter.projectId } : { orgId: actorOrgId })
startDate,
endDate,
limit,
offset,
eventType,
userAgentType,
actor: auditLogActor,
projectId
});

return auditLogs.map(({ eventType: logEventType, actor: eActor, actorMetadata, eventMetadata, ...el }) => ({
...el,
event: { type: logEventType, metadata: eventMetadata },
@ -87,6 +76,6 @@ export const auditLogServiceFactory = ({

return {
createAuditLog,
listAuditLogs
listProjectAuditLogs
};
};
@ -1,4 +1,3 @@
import { SymmetricEncryption } from "@app/lib/crypto/cipher";
import { TProjectPermission } from "@app/lib/types";
import { ActorType } from "@app/services/auth/auth-type";
import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types";
@ -6,23 +5,19 @@ import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
import { PkiItemType } from "@app/services/pki-collection/pki-collection-types";

export type TListProjectAuditLogDTO = {
filter: {
userAgentType?: UserAgentType;
eventType?: EventType[];
offset?: number;
limit: number;
endDate?: string;
startDate?: string;
projectId?: string;
auditLogActorId?: string;
actorType?: ActorType;
eventMetadata?: Record<string, string>;
};
} & Omit<TProjectPermission, "projectId">;
auditLogActor?: string;
projectId: string;
eventType?: string;
startDate?: string;
endDate?: string;
userAgentType?: string;
limit?: number;
offset?: number;
} & TProjectPermission;

export type TCreateAuditLogDTO = {
event: Event;
actor: UserActor | IdentityActor | ServiceActor | ScimClientActor | PlatformActor;
actor: UserActor | IdentityActor | ServiceActor | ScimClientActor;
orgId?: string;
projectId?: string;
} & BaseAuthData;
@ -123,7 +118,6 @@ export enum EventType {
UPDATE_WEBHOOK_STATUS = "update-webhook-status",
DELETE_WEBHOOK = "delete-webhook",
GET_SECRET_IMPORTS = "get-secret-imports",
GET_SECRET_IMPORT = "get-secret-import",
CREATE_SECRET_IMPORT = "create-secret-import",
UPDATE_SECRET_IMPORT = "update-secret-import",
DELETE_SECRET_IMPORT = "delete-secret-import",
@ -146,7 +140,6 @@ export enum EventType {
GET_CA_CRLS = "get-certificate-authority-crls",
ISSUE_CERT = "issue-cert",
SIGN_CERT = "sign-cert",
GET_CA_CERTIFICATE_TEMPLATES = "get-ca-certificate-templates",
GET_CERT = "get-cert",
DELETE_CERT = "delete-cert",
REVOKE_CERT = "revoke-cert",
@ -176,23 +169,7 @@ export enum EventType {
GET_CERTIFICATE_TEMPLATE = "get-certificate-template",
CREATE_CERTIFICATE_TEMPLATE_EST_CONFIG = "create-certificate-template-est-config",
UPDATE_CERTIFICATE_TEMPLATE_EST_CONFIG = "update-certificate-template-est-config",
GET_CERTIFICATE_TEMPLATE_EST_CONFIG = "get-certificate-template-est-config",
ATTEMPT_CREATE_SLACK_INTEGRATION = "attempt-create-slack-integration",
ATTEMPT_REINSTALL_SLACK_INTEGRATION = "attempt-reinstall-slack-integration",
GET_SLACK_INTEGRATION = "get-slack-integration",
UPDATE_SLACK_INTEGRATION = "update-slack-integration",
DELETE_SLACK_INTEGRATION = "delete-slack-integration",
GET_PROJECT_SLACK_CONFIG = "get-project-slack-config",
UPDATE_PROJECT_SLACK_CONFIG = "update-project-slack-config",
INTEGRATION_SYNCED = "integration-synced",
CREATE_CMEK = "create-cmek",
UPDATE_CMEK = "update-cmek",
DELETE_CMEK = "delete-cmek",
GET_CMEKS = "get-cmeks",
CMEK_ENCRYPT = "cmek-encrypt",
CMEK_DECRYPT = "cmek-decrypt",
UPDATE_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS = "update-external-group-org-role-mapping",
GET_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS = "get-external-group-org-role-mapping"
GET_CERTIFICATE_TEMPLATE_EST_CONFIG = "get-certificate-template-est-config"
}

interface UserActorMetadata {
@ -213,8 +190,6 @@ interface IdentityActorMetadata {

interface ScimClientActorMetadata {}

interface PlatformActorMetadata {}

export interface UserActor {
type: ActorType.USER;
metadata: UserActorMetadata;
@ -225,11 +200,6 @@ export interface ServiceActor {
metadata: ServiceActorMetadata;
}

export interface PlatformActor {
type: ActorType.PLATFORM;
metadata: PlatformActorMetadata;
}

export interface IdentityActor {
type: ActorType.IDENTITY;
metadata: IdentityActorMetadata;
@ -240,7 +210,7 @@ export interface ScimClientActor {
metadata: ScimClientActorMetadata;
}

export type Actor = UserActor | ServiceActor | IdentityActor | ScimClientActor | PlatformActor;
export type Actor = UserActor | ServiceActor | IdentityActor | ScimClientActor;

interface GetSecretsEvent {
type: EventType.GET_SECRETS;
@ -1014,14 +984,6 @@ interface GetSecretImportsEvent {
};
}

interface GetSecretImportEvent {
type: EventType.GET_SECRET_IMPORT;
metadata: {
secretImportId: string;
folderId: string;
};
}

interface CreateSecretImportEvent {
type: EventType.CREATE_SECRET_IMPORT;
metadata: {
@ -1230,14 +1192,6 @@ interface SignCert {
};
}

interface GetCaCertificateTemplates {
type: EventType.GET_CA_CERTIFICATE_TEMPLATES;
metadata: {
caId: string;
dn: string;
};
}

interface GetCert {
type: EventType.GET_CERT;
metadata: {
@ -1368,7 +1322,7 @@ interface CreateKmsEvent {
metadata: {
kmsId: string;
provider: string;
name: string;
slug: string;
description?: string;
};
}
@ -1377,7 +1331,7 @@ interface DeleteKmsEvent {
type: EventType.DELETE_KMS;
metadata: {
kmsId: string;
name: string;
slug: string;
};
}

@ -1386,7 +1340,7 @@ interface UpdateKmsEvent {
metadata: {
kmsId: string;
provider: string;
name?: string;
slug?: string;
description?: string;
};
}
@ -1395,7 +1349,7 @@ interface GetKmsEvent {
type: EventType.GET_KMS;
metadata: {
kmsId: string;
name: string;
slug: string;
};
}

@ -1404,7 +1358,7 @@ interface UpdateProjectKmsEvent {
metadata: {
secretManagerKmsKey: {
id: string;
name: string;
slug: string;
};
};
}
@ -1492,132 +1446,6 @@ interface GetCertificateTemplateEstConfig {
};
}

interface AttemptCreateSlackIntegration {
type: EventType.ATTEMPT_CREATE_SLACK_INTEGRATION;
metadata: {
slug: string;
description?: string;
};
}

interface AttemptReinstallSlackIntegration {
type: EventType.ATTEMPT_REINSTALL_SLACK_INTEGRATION;
metadata: {
id: string;
};
}

interface UpdateSlackIntegration {
type: EventType.UPDATE_SLACK_INTEGRATION;
metadata: {
id: string;
slug: string;
description?: string;
};
}

interface DeleteSlackIntegration {
type: EventType.DELETE_SLACK_INTEGRATION;
metadata: {
id: string;
};
}

interface GetSlackIntegration {
type: EventType.GET_SLACK_INTEGRATION;
metadata: {
id: string;
};
}

interface UpdateProjectSlackConfig {
type: EventType.UPDATE_PROJECT_SLACK_CONFIG;
metadata: {
id: string;
slackIntegrationId: string;
isAccessRequestNotificationEnabled: boolean;
accessRequestChannels: string;
isSecretRequestNotificationEnabled: boolean;
secretRequestChannels: string;
};
}

interface GetProjectSlackConfig {
type: EventType.GET_PROJECT_SLACK_CONFIG;
metadata: {
id: string;
};
}
interface IntegrationSyncedEvent {
type: EventType.INTEGRATION_SYNCED;
metadata: {
integrationId: string;
lastSyncJobId: string;
lastUsed: Date;
syncMessage: string;
isSynced: boolean;
};
}

interface CreateCmekEvent {
type: EventType.CREATE_CMEK;
metadata: {
keyId: string;
name: string;
description?: string;
encryptionAlgorithm: SymmetricEncryption;
};
}

interface DeleteCmekEvent {
type: EventType.DELETE_CMEK;
metadata: {
keyId: string;
};
}

interface UpdateCmekEvent {
type: EventType.UPDATE_CMEK;
metadata: {
keyId: string;
name?: string;
description?: string;
};
}

interface GetCmeksEvent {
type: EventType.GET_CMEKS;
metadata: {
keyIds: string[];
};
}

interface CmekEncryptEvent {
type: EventType.CMEK_ENCRYPT;
metadata: {
keyId: string;
};
}

interface CmekDecryptEvent {
type: EventType.CMEK_DECRYPT;
metadata: {
keyId: string;
};
}

interface GetExternalGroupOrgRoleMappingsEvent {
type: EventType.GET_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS;
metadata?: Record<string, never>; // not needed, based off orgId
}

interface UpdateExternalGroupOrgRoleMappingsEvent {
type: EventType.UPDATE_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS;
metadata: {
mappings: { groupName: string; roleSlug: string }[];
};
}

export type Event =
| GetSecretsEvent
| GetSecretEvent
@ -1697,7 +1525,6 @@ export type Event =
| UpdateWebhookStatusEvent
| DeleteWebhookEvent
| GetSecretImportsEvent
| GetSecretImportEvent
| CreateSecretImportEvent
| UpdateSecretImportEvent
| DeleteSecretImportEvent
@ -1720,7 +1547,6 @@ export type Event =
| GetCaCrls
| IssueCert
| SignCert
| GetCaCertificateTemplates
| GetCert
| DeleteCert
| RevokeCert
@ -1750,20 +1576,4 @@ export type Event =
| DeleteCertificateTemplate
| CreateCertificateTemplateEstConfig
| UpdateCertificateTemplateEstConfig
| GetCertificateTemplateEstConfig
| AttemptCreateSlackIntegration
| AttemptReinstallSlackIntegration
| UpdateSlackIntegration
| DeleteSlackIntegration
| GetSlackIntegration
| UpdateProjectSlackConfig
| GetProjectSlackConfig
| IntegrationSyncedEvent
| CreateCmekEvent
| UpdateCmekEvent
| DeleteCmekEvent
| GetCmeksEvent
| CmekEncryptEvent
| CmekDecryptEvent
| GetExternalGroupOrgRoleMappingsEvent
| UpdateExternalGroupOrgRoleMappingsEvent;
| GetCertificateTemplateEstConfig;
@ -2,9 +2,10 @@ import { ForbiddenError } from "@casl/ability";
import * as x509 from "@peculiar/x509";

import { TCertificateAuthorityCrlDALFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-dal";
// import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { NotFoundError } from "@app/lib/errors";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TProjectDALFactory } from "@app/services/project/project-dal";
@ -18,6 +19,7 @@ type TCertificateAuthorityCrlServiceFactoryDep = {
projectDAL: Pick<TProjectDALFactory, "findOne" | "updateById" | "transaction">;
kmsService: Pick<TKmsServiceFactory, "decryptWithKmsKey" | "generateKmsKey">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
// licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};

export type TCertificateAuthorityCrlServiceFactory = ReturnType<typeof certificateAuthorityCrlServiceFactory>;
@ -64,7 +66,7 @@ export const certificateAuthorityCrlServiceFactory = ({
*/
const getCaCrls = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TGetCaCrlsDTO) => {
const ca = await certificateAuthorityDAL.findById(caId);
if (!ca) throw new NotFoundError({ message: "CA not found" });
if (!ca) throw new BadRequestError({ message: "CA not found" });

const { permission } = await permissionService.getProjectPermission(
actor,
@ -79,6 +81,13 @@ export const certificateAuthorityCrlServiceFactory = ({
ProjectPermissionSub.CertificateAuthorities
);

// const plan = await licenseService.getPlan(actorOrgId);
// if (!plan.caCrl)
// throw new BadRequestError({
// message:
// "Failed to get CA certificate revocation lists (CRLs) due to plan restriction. Upgrade plan to get the CA CRL."
// });

const caCrls = await certificateAuthorityCrlDAL.find({ caId: ca.id }, { sort: [["createdAt", "desc"]] });

const keyId = await getProjectKmsCertificateKeyId({
@ -7,7 +7,7 @@ import { TPermissionServiceFactory } from "@app/ee/services/permission/permissio
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { getConfig } from "@app/lib/config/env";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
@ -61,7 +61,7 @@ export const dynamicSecretLeaseServiceFactory = ({
}: TCreateDynamicSecretLeaseDTO) => {
const appCfg = getConfig();
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new BadRequestError({ message: "Project not found" });

const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
@ -84,10 +84,10 @@ export const dynamicSecretLeaseServiceFactory = ({
}

const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new NotFoundError({ message: "Folder not found" });
if (!folder) throw new BadRequestError({ message: "Folder not found" });

const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
if (!dynamicSecretCfg) throw new NotFoundError({ message: "Dynamic secret not found" });
if (!dynamicSecretCfg) throw new BadRequestError({ message: "Dynamic secret not found" });

const totalLeasesTaken = await dynamicSecretLeaseDAL.countLeasesForDynamicSecret(dynamicSecretCfg.id);
if (totalLeasesTaken >= appCfg.MAX_LEASE_LIMIT)
@ -134,7 +134,7 @@ export const dynamicSecretLeaseServiceFactory = ({
leaseId
}: TRenewDynamicSecretLeaseDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new BadRequestError({ message: "Project not found" });

const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
@ -157,10 +157,10 @@ export const dynamicSecretLeaseServiceFactory = ({
}

const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new NotFoundError({ message: "Folder not found" });
if (!folder) throw new BadRequestError({ message: "Folder not found" });

const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
if (!dynamicSecretLease) throw new NotFoundError({ message: "Dynamic secret lease not found" });
if (!dynamicSecretLease) throw new BadRequestError({ message: "Dynamic secret lease not found" });

const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
@ -208,7 +208,7 @@ export const dynamicSecretLeaseServiceFactory = ({
isForced
}: TDeleteDynamicSecretLeaseDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new BadRequestError({ message: "Project not found" });

const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
@ -224,10 +224,10 @@ export const dynamicSecretLeaseServiceFactory = ({
);

const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new NotFoundError({ message: "Folder not found" });
if (!folder) throw new BadRequestError({ message: "Folder not found" });

const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
if (!dynamicSecretLease) throw new NotFoundError({ message: "Dynamic secret lease not found" });
if (!dynamicSecretLease) throw new BadRequestError({ message: "Dynamic secret lease not found" });

const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
@ -273,7 +273,7 @@ export const dynamicSecretLeaseServiceFactory = ({
actorAuthMethod
}: TListDynamicSecretLeasesDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new BadRequestError({ message: "Project not found" });

const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
@ -289,10 +289,10 @@ export const dynamicSecretLeaseServiceFactory = ({
);

const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new NotFoundError({ message: "Folder not found" });
if (!folder) throw new BadRequestError({ message: "Folder not found" });

const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
if (!dynamicSecretCfg) throw new NotFoundError({ message: "Dynamic secret not found" });
if (!dynamicSecretCfg) throw new BadRequestError({ message: "Dynamic secret not found" });

const dynamicSecretLeases = await dynamicSecretLeaseDAL.find({ dynamicSecretId: dynamicSecretCfg.id });
return dynamicSecretLeases;
@ -309,7 +309,7 @@ export const dynamicSecretLeaseServiceFactory = ({
actorAuthMethod
}: TDetailsDynamicSecretLeaseDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new BadRequestError({ message: "Project not found" });

const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
@ -325,10 +325,10 @@ export const dynamicSecretLeaseServiceFactory = ({
);

const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new NotFoundError({ message: "Folder not found" });
if (!folder) throw new BadRequestError({ message: "Folder not found" });

const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
if (!dynamicSecretLease) throw new NotFoundError({ message: "Dynamic secret lease not found" });
if (!dynamicSecretLease) throw new BadRequestError({ message: "Dynamic secret lease not found" });

return dynamicSecretLease;
};
@ -1,70 +1,10 @@
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { OrderByDirection } from "@app/lib/types";
import { SecretsOrderBy } from "@app/services/secret/secret-types";
import { ormify } from "@app/lib/knex";

export type TDynamicSecretDALFactory = ReturnType<typeof dynamicSecretDALFactory>;

export const dynamicSecretDALFactory = (db: TDbClient) => {
const orm = ormify(db, TableName.DynamicSecret);

// find dynamic secrets for multiple environments (folder IDs are cross env, thus need to rank for pagination)
const listDynamicSecretsByFolderIds = async (
{
folderIds,
search,
limit,
offset = 0,
orderBy = SecretsOrderBy.Name,
orderDirection = OrderByDirection.ASC
}: {
folderIds: string[];
search?: string;
limit?: number;
offset?: number;
orderBy?: SecretsOrderBy;
orderDirection?: OrderByDirection;
},
tx?: Knex
) => {
try {
const query = (tx || db.replicaNode())(TableName.DynamicSecret)
.whereIn("folderId", folderIds)
.where((bd) => {
if (search) {
void bd.whereILike(`${TableName.DynamicSecret}.name`, `%${search}%`);
}
})
.leftJoin(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.DynamicSecret}.folderId`)
.leftJoin(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.select(
selectAllTableCols(TableName.DynamicSecret),
db.ref("slug").withSchema(TableName.Environment).as("environment"),
db.raw(`DENSE_RANK() OVER (ORDER BY ${TableName.DynamicSecret}."name" ${orderDirection}) as rank`)
)
.orderBy(`${TableName.DynamicSecret}.${orderBy}`, orderDirection);

if (limit) {
const rankOffset = offset + 1;
return await (tx || db)
.with("w", query)
.select("*")
.from<Awaited<typeof query>[number]>("w")
.where("w.rank", ">=", rankOffset)
.andWhere("w.rank", "<", rankOffset + limit);
}

const dynamicSecrets = await query;

return dynamicSecrets;
} catch (error) {
throw new DatabaseError({ error, name: "List dynamic secret multi env" });
}
};

return { ...orm, listDynamicSecretsByFolderIds };
return orm;
};
@ -5,8 +5,7 @@ import { TLicenseServiceFactory } from "@app/ee/services/license/license-service
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { OrderByDirection } from "@app/lib/types";
import { BadRequestError } from "@app/lib/errors";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";

@ -18,12 +17,9 @@ import {
TCreateDynamicSecretDTO,
TDeleteDynamicSecretDTO,
TDetailsDynamicSecretDTO,
TGetDynamicSecretsCountDTO,
TListDynamicSecretsDTO,
TListDynamicSecretsMultiEnvDTO,
TUpdateDynamicSecretDTO
} from "./dynamic-secret-types";
import { AzureEntraIDProvider } from "./providers/azure-entra-id";
import { DynamicSecretProviders, TDynamicProviderFns } from "./providers/models";

type TDynamicSecretServiceFactoryDep = {
@ -35,7 +31,7 @@ type TDynamicSecretServiceFactoryDep = {
"pruneDynamicSecret" | "unsetLeaseRevocation"
>;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findBySecretPathMultiEnv">;
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath">;
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
};
@ -66,7 +62,7 @@ export const dynamicSecretServiceFactory = ({
actorAuthMethod
}: TCreateDynamicSecretDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new BadRequestError({ message: "Project not found" });

const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
@ -89,7 +85,7 @@ export const dynamicSecretServiceFactory = ({
}

const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new NotFoundError({ message: "Folder not found" });
if (!folder) throw new BadRequestError({ message: "Folder not found" });

const existingDynamicSecret = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
if (existingDynamicSecret)
@ -134,7 +130,7 @@ export const dynamicSecretServiceFactory = ({
actorAuthMethod
}: TUpdateDynamicSecretDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new BadRequestError({ message: "Project not found" });

const projectId = project.id;

@ -158,10 +154,10 @@ export const dynamicSecretServiceFactory = ({
}

const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new NotFoundError({ message: "Folder not found" });
if (!folder) throw new BadRequestError({ message: "Folder not found" });

const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
if (!dynamicSecretCfg) throw new NotFoundError({ message: "Dynamic secret not found" });
if (!dynamicSecretCfg) throw new BadRequestError({ message: "Dynamic secret not found" });

if (newName) {
const existingDynamicSecret = await dynamicSecretDAL.findOne({ name: newName, folderId: folder.id });
@ -213,7 +209,7 @@ export const dynamicSecretServiceFactory = ({
isForced
}: TDeleteDynamicSecretDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new BadRequestError({ message: "Project not found" });

const projectId = project.id;

@ -230,7 +226,7 @@ export const dynamicSecretServiceFactory = ({
);

const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new NotFoundError({ message: "Folder not found" });
if (!folder) throw new BadRequestError({ message: "Folder not found" });

const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
if (!dynamicSecretCfg) throw new BadRequestError({ message: "Dynamic secret not found" });
@ -271,7 +267,7 @@ export const dynamicSecretServiceFactory = ({
actor
}: TDetailsDynamicSecretDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new BadRequestError({ message: "Project not found" });

const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
@ -287,10 +283,10 @@ export const dynamicSecretServiceFactory = ({
);

const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new NotFoundError({ message: "Folder not found" });
if (!folder) throw new BadRequestError({ message: "Folder not found" });

const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
if (!dynamicSecretCfg) throw new NotFoundError({ message: "Dynamic secret not found" });
if (!dynamicSecretCfg) throw new BadRequestError({ message: "Dynamic secret not found" });
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
@ -304,104 +300,19 @@ export const dynamicSecretServiceFactory = ({
return { ...dynamicSecretCfg, inputs: providerInputs };
};

// get unique dynamic secret count across multiple envs
const getCountMultiEnv = async ({
actorAuthMethod,
actorOrgId,
actorId,
actor,
projectId,
path,
environmentSlugs,
search,
isInternal
}: TListDynamicSecretsMultiEnvDTO) => {
if (!isInternal) {
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);

// verify user has access to each env in request
environmentSlugs.forEach((environmentSlug) =>
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
)
);
}

const folders = await folderDAL.findBySecretPathMultiEnv(projectId, environmentSlugs, path);
if (!folders.length) throw new NotFoundError({ message: "Folders not found" });

const dynamicSecretCfg = await dynamicSecretDAL.find(
{ $in: { folderId: folders.map((folder) => folder.id) }, $search: search ? { name: `%${search}%` } : undefined },
{ countDistinct: "name" }
);

return Number(dynamicSecretCfg[0]?.count ?? 0);
};

// get dynamic secret count for a single env
|
||||
const getDynamicSecretCount = async ({
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
actorId,
|
||||
actor,
|
||||
path,
|
||||
environmentSlug,
|
||||
search,
|
||||
projectId
|
||||
}: TGetDynamicSecretsCountDTO) => {
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
|
||||
);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
|
||||
if (!folder) throw new NotFoundError({ message: "Folder not found" });
|
||||
|
||||
const dynamicSecretCfg = await dynamicSecretDAL.find(
|
||||
{ folderId: folder.id, $search: search ? { name: `%${search}%` } : undefined },
|
||||
{ count: true }
|
||||
);
|
||||
return Number(dynamicSecretCfg[0]?.count ?? 0);
|
||||
};
|
||||
|
||||
const listDynamicSecretsByEnv = async ({
|
||||
const list = async ({
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
actorId,
|
||||
actor,
|
||||
projectSlug,
|
||||
path,
|
||||
environmentSlug,
|
||||
limit,
|
||||
offset,
|
||||
orderBy,
|
||||
orderDirection = OrderByDirection.ASC,
|
||||
search,
|
||||
...params
|
||||
environmentSlug
|
||||
}: TListDynamicSecretsDTO) => {
|
||||
let { projectId } = params;
|
||||
|
||||
if (!projectId) {
|
||||
if (!projectSlug) throw new BadRequestError({ message: "Project ID or slug required" });
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) throw new NotFoundError({ message: "Project not found" });
|
||||
projectId = project.id;
|
||||
}
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) throw new BadRequestError({ message: "Project not found" });
|
||||
|
||||
const projectId = project.id;
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
@ -415,86 +326,17 @@ export const dynamicSecretServiceFactory = ({
|
||||
);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
|
||||
if (!folder) throw new NotFoundError({ message: "Folder not found" });
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found" });
|
||||
|
||||
const dynamicSecretCfg = await dynamicSecretDAL.find(
|
||||
{ folderId: folder.id, $search: search ? { name: `%${search}%` } : undefined },
|
||||
{
|
||||
limit,
|
||||
offset,
|
||||
sort: orderBy ? [[orderBy, orderDirection]] : undefined
|
||||
}
|
||||
);
|
||||
const dynamicSecretCfg = await dynamicSecretDAL.find({ folderId: folder.id });
|
||||
return dynamicSecretCfg;
|
||||
};
|
||||
|
||||
// get dynamic secrets for multiple envs
|
||||
const listDynamicSecretsByFolderIds = async ({
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
actorId,
|
||||
actor,
|
||||
path,
|
||||
environmentSlugs,
|
||||
projectId,
|
||||
isInternal,
|
||||
...params
|
||||
}: TListDynamicSecretsMultiEnvDTO) => {
|
||||
if (!isInternal) {
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
// verify user has access to each env in request
|
||||
environmentSlugs.forEach((environmentSlug) =>
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
const folders = await folderDAL.findBySecretPathMultiEnv(projectId, environmentSlugs, path);
|
||||
if (!folders.length) throw new NotFoundError({ message: "Folders not found" });
|
||||
|
||||
const dynamicSecretCfg = await dynamicSecretDAL.listDynamicSecretsByFolderIds({
|
||||
folderIds: folders.map((folder) => folder.id),
|
||||
...params
|
||||
});
|
||||
|
||||
return dynamicSecretCfg;
|
||||
};
|
||||
|
||||
const fetchAzureEntraIdUsers = async ({
|
||||
tenantId,
|
||||
applicationId,
|
||||
clientSecret
|
||||
}: {
|
||||
tenantId: string;
|
||||
applicationId: string;
|
||||
clientSecret: string;
|
||||
}) => {
|
||||
const azureEntraIdUsers = await AzureEntraIDProvider().fetchAzureEntraIdUsers(
|
||||
tenantId,
|
||||
applicationId,
|
||||
clientSecret
|
||||
);
|
||||
return azureEntraIdUsers;
|
||||
};
|
||||
|
||||
return {
|
||||
create,
|
||||
updateByName,
|
||||
deleteByName,
|
||||
getDetails,
|
||||
listDynamicSecretsByEnv,
|
||||
listDynamicSecretsByFolderIds,
|
||||
getDynamicSecretCount,
|
||||
getCountMultiEnv,
|
||||
fetchAzureEntraIdUsers
|
||||
list
|
||||
};
|
||||
};
|
||||
|
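For orientation: the hunks above collapse the paginated, multi-environment helpers (listDynamicSecretsByEnv, listDynamicSecretsByFolderIds, getDynamicSecretCount, getCountMultiEnv, fetchAzureEntraIdUsers) back into a single list function that resolves the project by slug, looks up the folder, and returns every dynamic secret in it, while NotFoundError guards become BadRequestError. The sketch below restates that slug -> project -> folder -> secrets flow in isolation; the ProjectStore/FolderStore/SecretStore interfaces and listDynamicSecretsSketch name are hypothetical stand-ins for the real DAL dependencies (permission checks omitted for brevity), not part of the Infisical codebase.

// Minimal sketch of the lookup-and-guard flow used by the simplified `list`.
// The *Store interfaces below are illustrative placeholders, not Infisical's DAL types.
interface ProjectStore {
  findProjectBySlug(slug: string, orgId: string): Promise<{ id: string } | undefined>;
}
interface FolderStore {
  findBySecretPath(projectId: string, env: string, path: string): Promise<{ id: string } | undefined>;
}
interface SecretStore {
  find(filter: { folderId: string }): Promise<{ name: string }[]>;
}

export const listDynamicSecretsSketch = async (
  deps: { projectDAL: ProjectStore; folderDAL: FolderStore; dynamicSecretDAL: SecretStore },
  args: { projectSlug: string; orgId: string; environmentSlug: string; path: string }
) => {
  // Resolve the project from its slug (the simplified `list` no longer accepts a raw projectId).
  const project = await deps.projectDAL.findProjectBySlug(args.projectSlug, args.orgId);
  if (!project) throw new Error("Project not found");

  // Resolve the folder for the requested environment and secret path before reading secrets.
  const folder = await deps.folderDAL.findBySecretPath(project.id, args.environmentSlug, args.path);
  if (!folder) throw new Error("Folder not found");

  // No pagination or search filters: every dynamic secret in the folder is returned.
  return deps.dynamicSecretDAL.find({ folderId: folder.id });
};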
@@ -1,7 +1,6 @@
 import { z } from "zod";
 
-import { OrderByDirection, TProjectPermission } from "@app/lib/types";
-import { SecretsOrderBy } from "@app/services/secret/secret-types";
+import { TProjectPermission } from "@app/lib/types";
 
 import { DynamicSecretProviderSchema } from "./providers/models";
 
@@ -51,20 +50,5 @@ export type TDetailsDynamicSecretDTO = {
 export type TListDynamicSecretsDTO = {
   path: string;
   environmentSlug: string;
-  projectSlug?: string;
-  projectId?: string;
-  offset?: number;
-  limit?: number;
-  orderBy?: SecretsOrderBy;
-  orderDirection?: OrderByDirection;
-  search?: string;
+  projectSlug: string;
 } & Omit<TProjectPermission, "projectId">;
-
-export type TListDynamicSecretsMultiEnvDTO = Omit<
-  TListDynamicSecretsDTO,
-  "projectId" | "environmentSlug" | "projectSlug"
-> & { projectId: string; environmentSlugs: string[]; isInternal?: boolean };
-
-export type TGetDynamicSecretsCountDTO = Omit<TListDynamicSecretsDTO, "projectSlug" | "projectId"> & {
-  projectId: string;
-};
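After this change, TListDynamicSecretsDTO carries only the path, the environment slug, and a now-required project slug, plus whatever Omit<TProjectPermission, "projectId"> contributes. The snippet below is illustrative only: it mirrors the trimmed-down shape with a local type, and the actor fields are assumed stand-ins for the permission part rather than the actual TProjectPermission definition.

// Illustrative local mirror of the trimmed-down DTO from the hunk above.
// The actor fields approximate what `Omit<TProjectPermission, "projectId">` would add.
type ListDynamicSecretsInput = {
  path: string;
  environmentSlug: string;
  projectSlug: string; // required again; projectId, pagination, and search fields are gone
  actorId: string;
  actorOrgId: string;
};

const exampleInput: ListDynamicSecretsInput = {
  path: "/",
  environmentSlug: "dev",
  projectSlug: "my-project",
  actorId: "user_123",
  actorOrgId: "org_456"
};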