Compare commits: 169 commits
feature/es ... infisical/
SHA1
---
ebc584d36f
656d979d7d
5382f3de2d
b2b858f7e8
302e068c74
95b92caff3
5d894b6d43
dab3e2efad
04cbbccd25
7f48e9d62e
8a0018eff2
e6a920caa3
11411ca4eb
b7c79fa45b
18951b99de
bd05c440c3
9ca5013a59
b65b8bc362
f494c182ff
2fae822e1f
5df140cbd5
d93cbb023d
9056d1be0c
5f503949eb
9cf917de07
ce7bb82f02
7cd092c0cf
cbfb9af0b9
ef236106b4
773a338397
afb5820113
5acc0fc243
c56469ecdb
c59a53180c
f56d265e62
cc0ff98d4f
4a14c3efd2
b2d2297914
836bb6d835
177eb2afee
594df18611
3bcb8bf6fc
a74c37c18b
3ece81d663
f6d87ebf32
23483ab7e1
fe31d44d22
58bab4d163
8f48a64fd6
929dc059c3
45e471b16a
7c540b6be8
7dbe8dd3c9
0dec602729
66ded779fc
01d24291f2
55b36b033e
8f461bf50c
1847491cb3
541c7b63cd
7e5e177680
40f552e4f1
ecb54ee3b3
b975996158
122f789cdf
c9911aa841
32cd0d8af8
585f0d9f1b
d0292aa139
4e9be8ca3c
ad49e9eaf1
fed60f7c03
1bc0e3087a
80a4f838a1
d31ec44f50
d0caef37ce
2d26febe58
c23ad8ebf2
bad068ef19
53430608a8
b9071ab2b3
a556c02df6
bfab270d68
8ea6a1f3d5
3c39bf6a0f
828644799f
411e67ae41
4914bc4b5a
d7050a1947
3c59422511
c81204e6d5
880f39519f
8646f6c50b
437a9e6ccb
b54139bd37
8a6a36ac54
c6eb973da0
21750a8c20
a598665b2f
56bbf502a2
9975f7d83f
7ad366b363
cca4d68d94
b82b94db54
de9cb265e0
5611b9aba1
53075d503a
e47cfa262a
0ab7a4e713
5138d588db
7e2d093e29
2d780e0566
7ac4ad3194
3ab6eb62c8
8eb234a12f
85590af99e
5c7cec0c81
68f768749b
2c7e342b18
632900e516
5fd975b1d7
d45ac66064
47cba8ec3c
d4aab66da2
0dc4c92c89
f49c963367
fe11b8e57e
79680b6a73
58838c541f
03cc71cfed
02529106c9
0401f55bc3
403e0d2d9d
d939ff289d
d1816c3051
cb350788c0
cd58768d6f
dcd6f4d55d
3c828614b8
09e7988596
e73d3f87f3
b53607f8e4
8f79d3210a
3ddb4cd27a
a5555c3816
8479c406a5
8e0b4254b1
069651bdb4
9061ec2dff
b0a5023723
69fe5bf71d
f12d4d80c6
56f2a3afa4
406da1b5f0
da45e132a3
fb719a9383
3c64359597
e420973dd2
15cc157c5f
ad89ffe94d
4de1713a18
1917e0fdb7
4b07234997
6a402950c3
63333159ca
ce4ba24ef2
f606e31b98
ecdbb3eb53
0321ec32fb
@@ -6,9 +6,15 @@ permissions:
  contents: read

jobs:
  infisical-tests:
    name: Run tests before deployment
    # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
    uses: ./.github/workflows/run-backend-tests.yml

  infisical-image:
    name: Build backend image
    runs-on: ubuntu-latest
    needs: [infisical-tests]
    steps:
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
backend/e2e-test/routes/v1/secret-approval-policy.spec.ts (new file, 35 lines)
@@ -0,0 +1,35 @@

import { seedData1 } from "@app/db/seed-data";

const createPolicy = async (dto: { name: string; secretPath: string; approvers: string[]; approvals: number }) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/secret-approvals`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: {
      workspaceId: seedData1.project.id,
      environment: seedData1.environment.slug,
      name: dto.name,
      secretPath: dto.secretPath,
      approvers: dto.approvers,
      approvals: dto.approvals
    }
  });

  expect(res.statusCode).toBe(200);
  return res.json().approval;
};

describe("Secret approval policy router", async () => {
  test("Create policy", async () => {
    const policy = await createPolicy({
      secretPath: "/",
      approvals: 1,
      approvers: [seedData1.id],
      name: "test-policy"
    });

    expect(policy.name).toBe("test-policy");
  });
});
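These specs lean on two globals that are never imported: `testServer` (the Fastify instance whose `inject` method performs in-process HTTP calls) and `jwtAuthToken` (a bearer token for the seeded user), presumably registered by the e2e setup file whose diff appears at the end of this compare. A minimal ambient declaration that would make such specs typecheck could look like this (a sketch; the repo's actual declaration file, if any, may differ):

```ts
// e2e-test/vitest-globals.d.ts (hypothetical path): ambient globals the specs assume
import type { FastifyInstance } from "fastify";

declare global {
  // Fastify server used for in-process HTTP injection in every spec
  const testServer: FastifyInstance;
  // JWT for the seeded test user, sent as `authorization: Bearer ...`
  const jwtAuthToken: string;
}

export {};
```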
@@ -1,73 +1,61 @@ (backend/e2e-test/routes/v1/secret-import.spec.ts; removed/added markers reconstructed from the old positional-argument helpers vs. the new dto-object test utils)
+import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
+import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
+import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
+
import { seedData1 } from "@app/db/seed-data";

-const createSecretImport = async (importPath: string, importEnv: string) => {
-  const res = await testServer.inject({
-    method: "POST",
-    url: `/api/v1/secret-imports`,
-    headers: {
-      authorization: `Bearer ${jwtAuthToken}`
-    },
-    body: {
-      workspaceId: seedData1.project.id,
-      environment: seedData1.environment.slug,
-      path: "/",
-      import: {
-        environment: importEnv,
-        path: importPath
-      }
-    }
-  });
-
-  expect(res.statusCode).toBe(200);
-  const payload = JSON.parse(res.payload);
-  expect(payload).toHaveProperty("secretImport");
-  return payload.secretImport;
-};
-
-const deleteSecretImport = async (id: string) => {
-  const res = await testServer.inject({
-    method: "DELETE",
-    url: `/api/v1/secret-imports/${id}`,
-    headers: {
-      authorization: `Bearer ${jwtAuthToken}`
-    },
-    body: {
-      workspaceId: seedData1.project.id,
-      environment: seedData1.environment.slug,
-      path: "/"
-    }
-  });
-
-  expect(res.statusCode).toBe(200);
-  const payload = JSON.parse(res.payload);
-  expect(payload).toHaveProperty("secretImport");
-  return payload.secretImport;
-};
-
describe("Secret Import Router", async () => {
  test.each([
    { importEnv: "prod", importPath: "/" }, // one in root
    { importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones
  ])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
    // check for default environments
-    const payload = await createSecretImport(importPath, importEnv);
+    const payload = await createSecretImport({
+      authToken: jwtAuthToken,
+      secretPath: "/",
+      environmentSlug: seedData1.environment.slug,
+      workspaceId: seedData1.project.id,
+      importPath,
+      importEnv
+    });
    expect(payload).toEqual(
      expect.objectContaining({
        id: expect.any(String),
-        importPath: expect.any(String),
+        importPath,
        importEnv: expect.objectContaining({
          name: expect.any(String),
-          slug: expect.any(String),
+          slug: importEnv,
          id: expect.any(String)
        })
      })
    );
-    await deleteSecretImport(payload.id);
+
+    await deleteSecretImport({
+      id: payload.id,
+      workspaceId: seedData1.project.id,
+      environmentSlug: seedData1.environment.slug,
+      secretPath: "/",
+      authToken: jwtAuthToken
+    });
  });

  test("Get secret imports", async () => {
-    const createdImport1 = await createSecretImport("/", "prod");
-    const createdImport2 = await createSecretImport("/", "staging");
+    const createdImport1 = await createSecretImport({
+      authToken: jwtAuthToken,
+      secretPath: "/",
+      environmentSlug: seedData1.environment.slug,
+      workspaceId: seedData1.project.id,
+      importPath: "/",
+      importEnv: "prod"
+    });
+    const createdImport2 = await createSecretImport({
+      authToken: jwtAuthToken,
+      secretPath: "/",
+      environmentSlug: seedData1.environment.slug,
+      workspaceId: seedData1.project.id,
+      importPath: "/",
+      importEnv: "staging"
+    });
    const res = await testServer.inject({
      method: "GET",
      url: `/api/v1/secret-imports`,
@@ -89,25 +77,60 @@ describe("Secret Import Router", async () => {
      expect.arrayContaining([
        expect.objectContaining({
          id: expect.any(String),
-          importPath: expect.any(String),
+          importPath: "/",
          importEnv: expect.objectContaining({
            name: expect.any(String),
-            slug: expect.any(String),
+            slug: "prod",
            id: expect.any(String)
          })
        }),
        expect.objectContaining({
          id: expect.any(String),
          importPath: "/",
          importEnv: expect.objectContaining({
            name: expect.any(String),
            slug: "staging",
            id: expect.any(String)
          })
        })
      ])
    );
-    await deleteSecretImport(createdImport1.id);
-    await deleteSecretImport(createdImport2.id);
+    await deleteSecretImport({
+      id: createdImport1.id,
+      workspaceId: seedData1.project.id,
+      environmentSlug: seedData1.environment.slug,
+      secretPath: "/",
+      authToken: jwtAuthToken
+    });
+    await deleteSecretImport({
+      id: createdImport2.id,
+      workspaceId: seedData1.project.id,
+      environmentSlug: seedData1.environment.slug,
+      secretPath: "/",
+      authToken: jwtAuthToken
+    });
  });

  test("Update secret import position", async () => {
    const prodImportDetails = { path: "/", envSlug: "prod" };
    const stagingImportDetails = { path: "/", envSlug: "staging" };

-    const createdImport1 = await createSecretImport(prodImportDetails.path, prodImportDetails.envSlug);
-    const createdImport2 = await createSecretImport(stagingImportDetails.path, stagingImportDetails.envSlug);
+    const createdImport1 = await createSecretImport({
+      authToken: jwtAuthToken,
+      secretPath: "/",
+      environmentSlug: seedData1.environment.slug,
+      workspaceId: seedData1.project.id,
+      importPath: prodImportDetails.path,
+      importEnv: prodImportDetails.envSlug
+    });
+    const createdImport2 = await createSecretImport({
+      authToken: jwtAuthToken,
+      secretPath: "/",
+      environmentSlug: seedData1.environment.slug,
+      workspaceId: seedData1.project.id,
+      importPath: stagingImportDetails.path,
+      importEnv: stagingImportDetails.envSlug
+    });

    const updateImportRes = await testServer.inject({
      method: "PATCH",
@@ -161,22 +184,55 @@ describe("Secret Import Router", async () => {
    expect(secretImportList.secretImports[1].id).toEqual(createdImport1.id);
    expect(secretImportList.secretImports[0].id).toEqual(createdImport2.id);

-    await deleteSecretImport(createdImport1.id);
-    await deleteSecretImport(createdImport2.id);
+    await deleteSecretImport({
+      id: createdImport1.id,
+      workspaceId: seedData1.project.id,
+      environmentSlug: seedData1.environment.slug,
+      secretPath: "/",
+      authToken: jwtAuthToken
+    });
+    await deleteSecretImport({
+      id: createdImport2.id,
+      workspaceId: seedData1.project.id,
+      environmentSlug: seedData1.environment.slug,
+      secretPath: "/",
+      authToken: jwtAuthToken
+    });
  });

  test("Delete secret import position", async () => {
-    const createdImport1 = await createSecretImport("/", "prod");
-    const createdImport2 = await createSecretImport("/", "staging");
-    const deletedImport = await deleteSecretImport(createdImport1.id);
+    const createdImport1 = await createSecretImport({
+      authToken: jwtAuthToken,
+      secretPath: "/",
+      environmentSlug: seedData1.environment.slug,
+      workspaceId: seedData1.project.id,
+      importPath: "/",
+      importEnv: "prod"
+    });
+    const createdImport2 = await createSecretImport({
+      authToken: jwtAuthToken,
+      secretPath: "/",
+      environmentSlug: seedData1.environment.slug,
+      workspaceId: seedData1.project.id,
+      importPath: "/",
+      importEnv: "staging"
+    });
+    const deletedImport = await deleteSecretImport({
+      id: createdImport1.id,
+      workspaceId: seedData1.project.id,
+      environmentSlug: seedData1.environment.slug,
+      secretPath: "/",
+      authToken: jwtAuthToken
+    });

    // check for default environments
    expect(deletedImport).toEqual(
      expect.objectContaining({
        id: expect.any(String),
-        importPath: expect.any(String),
+        importPath: "/",
        importEnv: expect.objectContaining({
          name: expect.any(String),
-          slug: expect.any(String),
+          slug: "prod",
          id: expect.any(String)
        })
      })
@@ -201,6 +257,552 @@ describe("Secret Import Router", async () => {
    expect(secretImportList.secretImports.length).toEqual(1);
    expect(secretImportList.secretImports[0].position).toEqual(1);

-    await deleteSecretImport(createdImport2.id);
+    await deleteSecretImport({
+      id: createdImport2.id,
+      workspaceId: seedData1.project.id,
+      environmentSlug: seedData1.environment.slug,
+      secretPath: "/",
+      authToken: jwtAuthToken
+    });
  });
});

// dev <- stage <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
  "Secret import waterfall pattern testing - %path",
  ({ path: testSuitePath }) => {
    beforeAll(async () => {
      let prodFolder: { id: string };
      let stagingFolder: { id: string };
      let devFolder: { id: string };

      if (testSuitePath !== "/") {
        prodFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "prod",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        stagingFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "staging",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        devFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: seedData1.environment.slug,
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });
      }

      const devImportFromStage = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "staging"
      });

      const stageImportFromProd = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "prod"
      });

      return async () => {
        await deleteSecretImport({
          id: stageImportFromProd.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: "staging",
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        await deleteSecretImport({
          id: devImportFromStage.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        if (prodFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: prodFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "prod"
          });
        }

        if (stagingFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: stagingFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "staging"
          });
        }

        if (devFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: devFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: seedData1.environment.slug
          });
        }
      };
    });

    test("Check one level imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY",
        value: "stage-value"
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });

      expect(secret.secretKey).toBe("STAGING_KEY");
      expect(secret.secretValue).toBe("stage-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "STAGING_KEY",
                secretValue: "stage-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });
    });

    test("Check two level imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY",
        value: "prod-value"
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });

      expect(secret.secretKey).toBe("PROD_KEY");
      expect(secret.secretValue).toBe("prod-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "PROD_KEY",
                secretValue: "prod-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });
    });
  }
);

// dev <- stage, dev <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
  "Secret import multiple destination to one source pattern testing - %path",
  ({ path: testSuitePath }) => {
    beforeAll(async () => {
      let prodFolder: { id: string };
      let stagingFolder: { id: string };
      let devFolder: { id: string };

      if (testSuitePath !== "/") {
        prodFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "prod",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        stagingFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "staging",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        devFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: seedData1.environment.slug,
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });
      }

      const devImportFromStage = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "staging"
      });

      const devImportFromProd = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "prod"
      });

      return async () => {
        await deleteSecretImport({
          id: devImportFromProd.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        await deleteSecretImport({
          id: devImportFromStage.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        if (prodFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: prodFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "prod"
          });
        }

        if (stagingFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: stagingFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "staging"
          });
        }

        if (devFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: devFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: seedData1.environment.slug
          });
        }
      };
    });

    test("Check imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY",
        value: "stage-value"
      });

      await createSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY",
        value: "prod-value"
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });

      expect(secret.secretKey).toBe("STAGING_KEY");
      expect(secret.secretValue).toBe("stage-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "STAGING_KEY",
                secretValue: "stage-value"
              })
            ])
          }),
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "PROD_KEY",
                secretValue: "prod-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });
      await deleteSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });
    });
  }
);

// dev -> stage, prod
describe.each([{ path: "/" }, { path: "/deep" }])(
  "Secret import one source to multiple destination pattern testing - %path",
  ({ path: testSuitePath }) => {
    beforeAll(async () => {
      let prodFolder: { id: string };
      let stagingFolder: { id: string };
      let devFolder: { id: string };

      if (testSuitePath !== "/") {
        prodFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "prod",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        stagingFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "staging",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        devFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: seedData1.environment.slug,
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });
      }

      const stageImportFromDev = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: seedData1.environment.slug
      });

      const prodImportFromDev = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: seedData1.environment.slug
      });

      return async () => {
        await deleteSecretImport({
          id: prodImportFromDev.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: "prod",
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        await deleteSecretImport({
          id: stageImportFromDev.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: "staging",
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        if (prodFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: prodFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "prod"
          });
        }

        if (stagingFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: stagingFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "staging"
          });
        }

        if (devFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: devFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: seedData1.environment.slug
          });
        }
      };
    });

    test("Check imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY",
        value: "stage-value"
      });

      await createSecretV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY",
        value: "prod-value"
      });

      const stagingSecret = await getSecretByNameV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });

      expect(stagingSecret.secretKey).toBe("STAGING_KEY");
      expect(stagingSecret.secretValue).toBe("stage-value");

      const prodSecret = await getSecretByNameV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });

      expect(prodSecret.secretKey).toBe("PROD_KEY");
      expect(prodSecret.secretValue).toBe("prod-value");

      await deleteSecretV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });
      await deleteSecretV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });
    });
  }
);
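The three `describe` blocks above repeat the same create-imports/teardown choreography for each topology (waterfall, N-to-1, 1-to-N). A helper that wires an arbitrary chain and returns its teardown could cut that duplication; the sketch below is hypothetical (not part of this PR) and assumes the `createSecretImport`/`deleteSecretImport` signatures from `testUtils/secret-imports.ts` further down:

```ts
// Hypothetical helper: linkEnvs(["dev", "staging", "prod"], "/", token)
// wires dev <- staging <- prod and returns an async teardown.
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { seedData1 } from "@app/db/seed-data";

export const linkEnvs = async (envSlugs: string[], secretPath: string, authToken: string) => {
  const imports: { id: string }[] = [];
  for (let i = 0; i < envSlugs.length - 1; i += 1) {
    // each environment imports from the next one in the list
    // eslint-disable-next-line no-await-in-loop
    imports.push(
      await createSecretImport({
        authToken,
        secretPath,
        environmentSlug: envSlugs[i],
        workspaceId: seedData1.projectV3.id,
        importPath: secretPath,
        importEnv: envSlugs[i + 1]
      })
    );
  }
  return async () => {
    // tear down in reverse creation order, mirroring the beforeAll cleanups above
    for (let i = imports.length - 1; i >= 0; i -= 1) {
      // eslint-disable-next-line no-await-in-loop
      await deleteSecretImport({
        id: imports[i].id,
        workspaceId: seedData1.projectV3.id,
        environmentSlug: envSlugs[i],
        secretPath,
        authToken
      });
    }
  };
};
```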
backend/e2e-test/routes/v1/secret-replication.spec.ts (new file, 406 lines)
@@ -0,0 +1,406 @@

import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";

import { seedData1 } from "@app/db/seed-data";

// dev <- stage <- prod
describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])(
  "Secret replication waterfall pattern testing - %secretPath",
  ({ secretPath: testSuitePath }) => {
    beforeAll(async () => {
      let prodFolder: { id: string };
      let stagingFolder: { id: string };
      let devFolder: { id: string };

      if (testSuitePath !== "/") {
        prodFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "prod",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        stagingFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "staging",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        devFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: seedData1.environment.slug,
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });
      }

      const devImportFromStage = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "staging",
        isReplication: true
      });

      const stageImportFromProd = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "prod",
        isReplication: true
      });

      return async () => {
        await deleteSecretImport({
          id: stageImportFromProd.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: "staging",
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        await deleteSecretImport({
          id: devImportFromStage.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        if (prodFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: prodFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "prod"
          });
        }

        if (stagingFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: stagingFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "staging"
          });
        }

        if (devFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: devFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: seedData1.environment.slug
          });
        }
      };
    });

    test("Check one level imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY",
        value: "stage-value"
      });

      // wait for 5 second for replication to finish
      await new Promise((resolve) => {
        setTimeout(resolve, 5000); // time to breathe for db
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });

      expect(secret.secretKey).toBe("STAGING_KEY");
      expect(secret.secretValue).toBe("stage-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });

      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "STAGING_KEY",
                secretValue: "stage-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });
    });

    test("Check two level imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY",
        value: "prod-value"
      });

      // wait for 5 second for replication to finish
      await new Promise((resolve) => {
        setTimeout(resolve, 5000); // time to breathe for db
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });

      expect(secret.secretKey).toBe("PROD_KEY");
      expect(secret.secretValue).toBe("prod-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "PROD_KEY",
                secretValue: "prod-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });
    });
  },
  { timeout: 30000 }
);

// dev <- stage, dev <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
  "Secret replication 1-N pattern testing - %path",
  ({ path: testSuitePath }) => {
    beforeAll(async () => {
      let prodFolder: { id: string };
      let stagingFolder: { id: string };
      let devFolder: { id: string };

      if (testSuitePath !== "/") {
        prodFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "prod",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        stagingFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "staging",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        devFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: seedData1.environment.slug,
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });
      }

      const devImportFromStage = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "staging",
        isReplication: true
      });

      const devImportFromProd = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "prod",
        isReplication: true
      });

      return async () => {
        await deleteSecretImport({
          id: devImportFromProd.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        await deleteSecretImport({
          id: devImportFromStage.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        if (prodFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: prodFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "prod"
          });
        }

        if (stagingFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: stagingFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "staging"
          });
        }

        if (devFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: devFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: seedData1.environment.slug
          });
        }
      };
    });

    test("Check imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY",
        value: "stage-value"
      });

      await createSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY",
        value: "prod-value"
      });

      // wait for 5 second for replication to finish
      await new Promise((resolve) => {
        setTimeout(resolve, 5000); // time to breathe for db
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });

      expect(secret.secretKey).toBe("STAGING_KEY");
      expect(secret.secretValue).toBe("stage-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "STAGING_KEY",
                secretValue: "stage-value"
              })
            ])
          }),
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "PROD_KEY",
                secretValue: "prod-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });
      await deleteSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });
    });
  },
  { timeout: 30000 }
);
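Each replication test above sleeps a fixed five seconds to let the replication queue drain, which is the main reason these suites need `{ timeout: 30000 }`. A polling variant would be faster and less flaky; this sketch is hypothetical (not in the PR) and relies on the fact that `getSecretByNameV2` throws, via its failed status assertion, while the replicated secret is still absent:

```ts
// Hypothetical polling helper: retry a lookup until it succeeds or a deadline passes.
const waitFor = async <T>(lookup: () => Promise<T>, timeoutMs = 10000, intervalMs = 500): Promise<T> => {
  const deadline = Date.now() + timeoutMs;
  for (;;) {
    try {
      return await lookup(); // getSecretByNameV2 rejects while the secret is missing
    } catch (err) {
      if (Date.now() > deadline) throw err;
      // eslint-disable-next-line no-await-in-loop
      await new Promise((resolve) => {
        setTimeout(resolve, intervalMs);
      });
    }
  }
};
```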
backend/e2e-test/routes/v3/secret-reference.spec.ts (new file, 330 lines)
@@ -0,0 +1,330 @@

import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";

import { seedData1 } from "@app/db/seed-data";

describe("Secret expansion", () => {
  const projectId = seedData1.projectV3.id;

  beforeAll(async () => {
    const prodRootFolder = await createFolder({
      authToken: jwtAuthToken,
      environmentSlug: "prod",
      workspaceId: projectId,
      secretPath: "/",
      name: "deep"
    });

    await createFolder({
      authToken: jwtAuthToken,
      environmentSlug: "prod",
      workspaceId: projectId,
      secretPath: "/deep",
      name: "nested"
    });

    return async () => {
      await deleteFolder({
        authToken: jwtAuthToken,
        secretPath: "/",
        id: prodRootFolder.id,
        workspaceId: projectId,
        environmentSlug: "prod"
      });
    };
  });

  test("Local secret reference", async () => {
    const secrets = [
      {
        environmentSlug: seedData1.environment.slug,
        workspaceId: projectId,
        secretPath: "/",
        authToken: jwtAuthToken,
        key: "HELLO",
        value: "world"
      },
      {
        environmentSlug: seedData1.environment.slug,
        workspaceId: projectId,
        secretPath: "/",
        authToken: jwtAuthToken,
        key: "TEST",
        // eslint-disable-next-line
        value: "hello ${HELLO}"
      }
    ];

    await Promise.all(secrets.map((el) => createSecretV2(el)));

    const expandedSecret = await getSecretByNameV2({
      environmentSlug: seedData1.environment.slug,
      workspaceId: projectId,
      secretPath: "/",
      authToken: jwtAuthToken,
      key: "TEST"
    });
    expect(expandedSecret.secretValue).toBe("hello world");

    const listSecrets = await getSecretsV2({
      environmentSlug: seedData1.environment.slug,
      workspaceId: projectId,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    expect(listSecrets.secrets).toEqual(
      expect.arrayContaining([
        expect.objectContaining({
          secretKey: "TEST",
          secretValue: "hello world"
        })
      ])
    );

    await Promise.all(secrets.map((el) => deleteSecretV2(el)));
  });

  test("Cross environment secret reference", async () => {
    const secrets = [
      {
        environmentSlug: "prod",
        workspaceId: projectId,
        secretPath: "/deep",
        authToken: jwtAuthToken,
        key: "DEEP_KEY_1",
        value: "testing"
      },
      {
        environmentSlug: "prod",
        workspaceId: projectId,
        secretPath: "/deep/nested",
        authToken: jwtAuthToken,
        key: "NESTED_KEY_1",
        value: "reference"
      },
      {
        environmentSlug: "prod",
        workspaceId: projectId,
        secretPath: "/deep/nested",
        authToken: jwtAuthToken,
        key: "NESTED_KEY_2",
        // eslint-disable-next-line
        value: "secret ${NESTED_KEY_1}"
      },
      {
        environmentSlug: seedData1.environment.slug,
        workspaceId: projectId,
        secretPath: "/",
        authToken: jwtAuthToken,
        key: "KEY",
        // eslint-disable-next-line
        value: "hello ${prod.deep.DEEP_KEY_1} ${prod.deep.nested.NESTED_KEY_2}"
      }
    ];

    await Promise.all(secrets.map((el) => createSecretV2(el)));

    const expandedSecret = await getSecretByNameV2({
      environmentSlug: seedData1.environment.slug,
      workspaceId: projectId,
      secretPath: "/",
      authToken: jwtAuthToken,
      key: "KEY"
    });
    expect(expandedSecret.secretValue).toBe("hello testing secret reference");

    const listSecrets = await getSecretsV2({
      environmentSlug: seedData1.environment.slug,
      workspaceId: projectId,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    expect(listSecrets.secrets).toEqual(
      expect.arrayContaining([
        expect.objectContaining({
          secretKey: "KEY",
          secretValue: "hello testing secret reference"
        })
      ])
    );

    await Promise.all(secrets.map((el) => deleteSecretV2(el)));
  });

  test("Non replicated secret import secret expansion on local reference and nested reference", async () => {
    const secrets = [
      {
        environmentSlug: "prod",
        workspaceId: projectId,
        secretPath: "/deep",
        authToken: jwtAuthToken,
        key: "DEEP_KEY_1",
        value: "testing"
      },
      {
        environmentSlug: "prod",
        workspaceId: projectId,
        secretPath: "/deep/nested",
        authToken: jwtAuthToken,
        key: "NESTED_KEY_1",
        value: "reference"
      },
      {
        environmentSlug: "prod",
        workspaceId: projectId,
        secretPath: "/deep/nested",
        authToken: jwtAuthToken,
        key: "NESTED_KEY_2",
        // eslint-disable-next-line
        value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
      },
      {
        environmentSlug: seedData1.environment.slug,
        workspaceId: projectId,
        secretPath: "/",
        authToken: jwtAuthToken,
        key: "KEY",
        // eslint-disable-next-line
        value: "hello world"
      }
    ];

    await Promise.all(secrets.map((el) => createSecretV2(el)));
    const secretImportFromProdToDev = await createSecretImport({
      environmentSlug: seedData1.environment.slug,
      workspaceId: projectId,
      secretPath: "/",
      authToken: jwtAuthToken,
      importEnv: "prod",
      importPath: "/deep/nested"
    });

    const listSecrets = await getSecretsV2({
      environmentSlug: seedData1.environment.slug,
      workspaceId: projectId,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    expect(listSecrets.imports).toEqual(
      expect.arrayContaining([
        expect.objectContaining({
          secretPath: "/deep/nested",
          environment: "prod",
          secrets: expect.arrayContaining([
            expect.objectContaining({
              secretKey: "NESTED_KEY_1",
              secretValue: "reference"
            }),
            expect.objectContaining({
              secretKey: "NESTED_KEY_2",
              secretValue: "secret reference testing"
            })
          ])
        })
      ])
    );

    await Promise.all(secrets.map((el) => deleteSecretV2(el)));
    await deleteSecretImport({
      environmentSlug: seedData1.environment.slug,
      workspaceId: projectId,
      authToken: jwtAuthToken,
      id: secretImportFromProdToDev.id,
      secretPath: "/"
    });
  });

  test(
    "Replicated secret import secret expansion on local reference and nested reference",
    async () => {
      const secrets = [
        {
          environmentSlug: "prod",
          workspaceId: projectId,
          secretPath: "/deep",
          authToken: jwtAuthToken,
          key: "DEEP_KEY_1",
          value: "testing"
        },
        {
          environmentSlug: "prod",
          workspaceId: projectId,
          secretPath: "/deep/nested",
          authToken: jwtAuthToken,
          key: "NESTED_KEY_1",
          value: "reference"
        },
        {
          environmentSlug: "prod",
          workspaceId: projectId,
          secretPath: "/deep/nested",
          authToken: jwtAuthToken,
          key: "NESTED_KEY_2",
          // eslint-disable-next-line
          value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
        },
        {
          environmentSlug: seedData1.environment.slug,
          workspaceId: projectId,
          secretPath: "/",
          authToken: jwtAuthToken,
          key: "KEY",
          // eslint-disable-next-line
          value: "hello world"
        }
      ];

      await Promise.all(secrets.map((el) => createSecretV2(el)));
      const secretImportFromProdToDev = await createSecretImport({
        environmentSlug: seedData1.environment.slug,
        workspaceId: projectId,
        secretPath: "/",
        authToken: jwtAuthToken,
        importEnv: "prod",
        importPath: "/deep/nested",
        isReplication: true
      });

      // wait for 5 second for replication to finish
      await new Promise((resolve) => {
        setTimeout(resolve, 5000); // time to breathe for db
      });

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: projectId,
        secretPath: "/",
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secretPath: `/__reserve_replication_${secretImportFromProdToDev.id}`,
            environment: seedData1.environment.slug,
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "NESTED_KEY_1",
                secretValue: "reference"
              }),
              expect.objectContaining({
                secretKey: "NESTED_KEY_2",
                secretValue: "secret reference testing"
              })
            ])
          })
        ])
      );

      await Promise.all(secrets.map((el) => deleteSecretV2(el)));
      await deleteSecretImport({
        environmentSlug: seedData1.environment.slug,
        workspaceId: projectId,
        authToken: jwtAuthToken,
        id: secretImportFromProdToDev.id,
        secretPath: "/"
      });
    },
    { timeout: 10000 }
  );
});
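From the fixtures in this spec, a local reference is written `${KEY}` and a cross-environment reference as `${<env>.<folder path>.<KEY>}`, e.g. `${prod.deep.nested.NESTED_KEY_2}`, with expansion applied recursively. The toy expander below only illustrates that resolution behavior against an in-memory map; it is not Infisical's implementation, which resolves references server-side:

```ts
// Toy illustration of ${...} expansion; recursion handles nested references.
const expand = (value: string, resolve: (ref: string) => string | undefined): string =>
  value.replace(/\$\{([^}]+)\}/g, (match, ref: string) => {
    const resolved = resolve(ref);
    return resolved !== undefined ? expand(resolved, resolve) : match;
  });

// Against the fixture values used above:
const store: Record<string, string> = {
  HELLO: "world",
  "prod.deep.DEEP_KEY_1": "testing"
};
console.log(expand("hello ${HELLO}", (ref) => store[ref])); // "hello world"
```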
@@ -8,6 +8,7 @@ type TRawSecret = {
  secretComment?: string;
  version: number;
};

const createSecret = async (dto: { path: string; key: string; value: string; comment: string; type?: SecretType }) => {
  const createSecretReqBody = {
    workspaceId: seedData1.projectV3.id,
backend/e2e-test/testUtils/folders.ts (new file, 73 lines)
@@ -0,0 +1,73 @@

type TFolder = {
  id: string;
  name: string;
};

export const createFolder = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  name: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/folders`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      name: dto.name,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folder as TFolder;
};

export const deleteFolder = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  id: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "DELETE",
    url: `/api/v1/folders/${dto.id}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folder as TFolder;
};

export const listFolders = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "GET",
    url: `/api/v1/folders`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folders as TFolder[];
};
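A round-trip usage sketch for the folder helpers above, assuming the same `testServer`/`jwtAuthToken` globals the specs use. One observation: `listFolders` sends its filters in the request body of a GET, which Fastify's `inject` permits, while the sibling `listSecretImport` helper below uses `query` instead:

```ts
import { createFolder, deleteFolder, listFolders } from "e2e-test/testUtils/folders";
import { seedData1 } from "@app/db/seed-data";

test("folder round trip", async () => {
  const folder = await createFolder({
    workspaceId: seedData1.projectV3.id,
    environmentSlug: seedData1.environment.slug,
    secretPath: "/",
    name: "example",
    authToken: jwtAuthToken
  });

  const folders = await listFolders({
    workspaceId: seedData1.projectV3.id,
    environmentSlug: seedData1.environment.slug,
    secretPath: "/",
    authToken: jwtAuthToken
  });
  expect(folders.map((el) => el.name)).toContain("example");

  await deleteFolder({
    workspaceId: seedData1.projectV3.id,
    environmentSlug: seedData1.environment.slug,
    secretPath: "/",
    id: folder.id,
    authToken: jwtAuthToken
  });
});
```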
backend/e2e-test/testUtils/secret-imports.ts (new file, 93 lines)
@@ -0,0 +1,93 @@

type TSecretImport = {
  id: string;
  importEnv: {
    name: string;
    slug: string;
    id: string;
  };
  importPath: string;
};

export const createSecretImport = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  isReplication?: boolean;
  secretPath: string;
  importPath: string;
  importEnv: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/secret-imports`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      isReplication: dto.isReplication,
      path: dto.secretPath,
      import: {
        environment: dto.importEnv,
        path: dto.importPath
      }
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImport");
  return payload.secretImport as TSecretImport;
};

export const deleteSecretImport = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
  id: string;
}) => {
  const res = await testServer.inject({
    method: "DELETE",
    url: `/api/v1/secret-imports/${dto.id}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImport");
  return payload.secretImport as TSecretImport;
};

export const listSecretImport = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "GET",
    url: `/api/v1/secret-imports`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    query: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImports");
  return payload.secretImports as TSecretImport[];
};
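One detail worth noting from the specs above: when an import is created with `isReplication: true`, its replicated secrets surface under a reserved folder path derived from the import id. The template below is taken verbatim from the assertion in `secret-reference.spec.ts`:

```ts
// Reserved path template asserted in secret-reference.spec.ts above.
const reservedReplicationPath = (importId: string) => `/__reserve_replication_${importId}`;
```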
128
backend/e2e-test/testUtils/secrets.ts
Normal file
@ -0,0 +1,128 @@
import { SecretType } from "@app/db/schemas";

type TRawSecret = {
  secretKey: string;
  secretValue: string;
  secretComment?: string;
  version: number;
};

export const createSecretV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  key: string;
  value: string;
  comment?: string;
  authToken: string;
  type?: SecretType;
}) => {
  const createSecretReqBody = {
    workspaceId: dto.workspaceId,
    environment: dto.environmentSlug,
    type: dto.type || SecretType.Shared,
    secretPath: dto.secretPath,
    secretKey: dto.key,
    secretValue: dto.value,
    secretComment: dto.comment
  };
  const createSecRes = await testServer.inject({
    method: "POST",
    url: `/api/v3/secrets/raw/${dto.key}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: createSecretReqBody
  });
  expect(createSecRes.statusCode).toBe(200);
  const createdSecretPayload = JSON.parse(createSecRes.payload);
  expect(createdSecretPayload).toHaveProperty("secret");
  return createdSecretPayload.secret as TRawSecret;
};

export const deleteSecretV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  key: string;
  authToken: string;
}) => {
  const deleteSecRes = await testServer.inject({
    method: "DELETE",
    url: `/api/v3/secrets/raw/${dto.key}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      secretPath: dto.secretPath
    }
  });
  expect(deleteSecRes.statusCode).toBe(200);
  const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
  expect(updatedSecretPayload).toHaveProperty("secret");
  return updatedSecretPayload.secret as TRawSecret;
};

export const getSecretByNameV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  key: string;
  authToken: string;
}) => {
  const response = await testServer.inject({
    method: "GET",
    url: `/api/v3/secrets/raw/${dto.key}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    query: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      secretPath: dto.secretPath,
      expandSecretReferences: "true",
      include_imports: "true"
    }
  });
  expect(response.statusCode).toBe(200);
  const payload = JSON.parse(response.payload);
  expect(payload).toHaveProperty("secret");
  return payload.secret as TRawSecret;
};

export const getSecretsV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
}) => {
  const getSecretsResponse = await testServer.inject({
    method: "GET",
    url: `/api/v3/secrets/raw`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    query: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      secretPath: dto.secretPath,
      expandSecretReferences: "true",
      include_imports: "true"
    }
  });
  expect(getSecretsResponse.statusCode).toBe(200);
  const getSecretsPayload = JSON.parse(getSecretsResponse.payload);
  expect(getSecretsPayload).toHaveProperty("secrets");
  expect(getSecretsPayload).toHaveProperty("imports");
  return getSecretsPayload as {
    secrets: TRawSecret[];
    imports: {
      secretPath: string;
      environment: string;
      folderId: string;
      secrets: TRawSecret[];
    }[];
  };
};
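Taken together, these helpers keep the specs terse. A minimal usage sketch, assuming the suite's global `testServer`/`expect` setup from the environment file below; the fixture names `seedData1` and `jwtAuthToken` are assumptions for illustration, not part of the file above:

import { createSecretV2, deleteSecretV2, getSecretByNameV2 } from "../testUtils/secrets";

test("create, read and delete a raw secret", async () => {
  const ctx = {
    workspaceId: seedData1.project.id, // assumed seed fixture field
    environmentSlug: "dev",
    secretPath: "/",
    authToken: jwtAuthToken // assumed global from the test setup
  };
  await createSecretV2({ ...ctx, key: "DB_HOST", value: "localhost" });
  const secret = await getSecretByNameV2({ ...ctx, key: "DB_HOST" });
  expect(secret.secretValue).toBe("localhost");
  await deleteSecretV2({ ...ctx, key: "DB_HOST" });
});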
@ -11,10 +11,11 @@ import { initLogger } from "@app/lib/logger";
import { main } from "@app/server/app";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";

import { mockQueue } from "./mocks/queue";
import { mockSmtpServer } from "./mocks/smtp";
import { mockKeyStore } from "./mocks/keystore";
import { initDbConnection } from "@app/db";
import { queueServiceFactory } from "@app/queue";
import { keyStoreFactory } from "@app/keystore/keystore";
import { Redis } from "ioredis";

dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
@ -28,19 +29,31 @@ export default {
      dbRootCert: cfg.DB_ROOT_CERT
    });

    const redis = new Redis(cfg.REDIS_URL);
    await redis.flushdb("SYNC");

    try {
      await db.migrate.rollback(
        {
          directory: path.join(__dirname, "../src/db/migrations"),
          extension: "ts",
          tableName: "infisical_migrations"
        },
        true
      );
      await db.migrate.latest({
        directory: path.join(__dirname, "../src/db/migrations"),
        extension: "ts",
        tableName: "infisical_migrations"
      });

      await db.seed.run({
        directory: path.join(__dirname, "../src/db/seeds"),
        extension: "ts"
      });
      const smtp = mockSmtpServer();
      const queue = mockQueue();
      const keyStore = mockKeyStore();
      const queue = queueServiceFactory(cfg.REDIS_URL);
      const keyStore = keyStoreFactory(cfg.REDIS_URL);
      const server = await main({ db, smtp, logger, queue, keyStore });
      // @ts-expect-error type
      globalThis.testServer = server;
@ -58,10 +71,12 @@ export default {
        { expiresIn: cfg.JWT_AUTH_LIFETIME }
      );
    } catch (error) {
      // eslint-disable-next-line
      console.log("[TEST] Error setting up environment", error);
      await db.destroy();
      throw error;
    }

    // custom setup
    return {
      async teardown() {
@ -80,6 +95,9 @@ export default {
          },
          true
        );

        await redis.flushdb("ASYNC");
        redis.disconnect();
        await db.destroy();
      }
    };
backend/package-lock.json (generated, 866 lines changed; diff collapsed)
backend/package.json
@ -50,6 +50,7 @@
    "migration:down": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
    "migration:list": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
    "migration:latest": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
    "migration:status": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
    "migration:rollback": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
    "seed:new": "tsx ./scripts/create-seed-file.ts",
    "seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
@ -102,15 +103,16 @@
    "tsup": "^8.0.1",
    "tsx": "^4.4.0",
    "typescript": "^5.3.2",
    "vite-tsconfig-paths": "^4.2.2",
    "vitest": "^1.2.2"
  },
  "dependencies": {
    "@aws-sdk/client-elasticache": "^3.637.0",
    "@aws-sdk/client-iam": "^3.525.0",
    "@aws-sdk/client-kms": "^3.609.0",
    "@aws-sdk/client-secrets-manager": "^3.504.0",
    "@aws-sdk/client-sts": "^3.600.0",
    "@casl/ability": "^6.5.0",
    "@elastic/elasticsearch": "^8.15.0",
    "@fastify/cookie": "^9.3.1",
    "@fastify/cors": "^8.5.0",
    "@fastify/etag": "^5.1.0",
@ -175,6 +177,8 @@
    "posthog-node": "^3.6.2",
    "probot": "^13.0.0",
    "safe-regex": "^2.1.1",
    "scim-patch": "^0.8.3",
    "scim2-parse-filter": "^0.2.10",
    "smee-client": "^2.0.0",
    "tedious": "^18.2.1",
    "tweetnacl": "^1.0.3",
backend/src/@types/fastify.d.ts (vendored, 2 lines changed)
@ -7,6 +7,7 @@ import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-se
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
@ -36,7 +37,6 @@ import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
import { TCertificateEstServiceFactory } from "@app/services/certificate-est/certificate-est-service";
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
@ -115,7 +115,14 @@ export async function down(knex: Knex): Promise<void> {
    // eslint-disable-next-line
    // @ts-ignore because generate schema happens after this
    approverId: knex(TableName.ProjectMembership)
      .select("id")
      .join(
        TableName.SecretApprovalPolicy,
        `${TableName.SecretApprovalPolicy}.id`,
        `${TableName.SecretApprovalPolicyApprover}.policyId`
      )
      .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
      .select(knex.ref("id").withSchema(TableName.ProjectMembership))
      .where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
      .where("userId", knex.raw("??", [`${TableName.SecretApprovalPolicyApprover}.approverUserId`]))
  });
  await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
@ -147,13 +154,27 @@ export async function down(knex: Knex): Promise<void> {
    // eslint-disable-next-line
    // @ts-ignore because generate schema happens after this
    committerId: knex(TableName.ProjectMembership)
      .select("id")
      .where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`])),
      .join(
        TableName.SecretApprovalPolicy,
        `${TableName.SecretApprovalPolicy}.id`,
        `${TableName.SecretApprovalRequest}.policyId`
      )
      .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
      .where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
      .where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`]))
      .select(knex.ref("id").withSchema(TableName.ProjectMembership)),
    // eslint-disable-next-line
    // @ts-ignore because generate schema happens after this
    statusChangeBy: knex(TableName.ProjectMembership)
      .select("id")
      .join(
        TableName.SecretApprovalPolicy,
        `${TableName.SecretApprovalPolicy}.id`,
        `${TableName.SecretApprovalRequest}.policyId`
      )
      .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
      .where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
      .where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.statusChangedByUserId`]))
      .select(knex.ref("id").withSchema(TableName.ProjectMembership))
  });

  await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
@ -177,8 +198,20 @@ export async function down(knex: Knex): Promise<void> {
    // eslint-disable-next-line
    // @ts-ignore because generate schema happens after this
    member: knex(TableName.ProjectMembership)
      .select("id")
      .join(
        TableName.SecretApprovalRequest,
        `${TableName.SecretApprovalRequest}.id`,
        `${TableName.SecretApprovalRequestReviewer}.requestId`
      )
      .join(
        TableName.SecretApprovalPolicy,
        `${TableName.SecretApprovalPolicy}.id`,
        `${TableName.SecretApprovalRequest}.policyId`
      )
      .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
      .where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
      .where("userId", knex.raw("??", [`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`]))
      .select(knex.ref("id").withSchema(TableName.ProjectMembership))
  });
  await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => {
    tb.uuid("member").notNullable().alter();
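For readers untangling the rewritten down() subqueries: each one now resolves the project membership through the policy's environment rather than by userId alone, which disambiguates users who belong to several projects. Roughly the correlated SQL the approverId subquery compiles to, sketched as a comment; the quoted identifiers are illustrative, the real names come from the TableName enum:

// SELECT "project_memberships"."id"
// FROM "project_memberships"
// JOIN "secret_approval_policies"
//   ON "secret_approval_policies"."id" = "secret_approval_policy_approvers"."policyId"
// JOIN "environments"
//   ON "environments"."id" = "secret_approval_policies"."envId"
// WHERE "project_memberships"."projectId" = "environments"."projectId"
//   AND "userId" = "secret_approval_policy_approvers"."approverUserId"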
@ -0,0 +1,25 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    const doesPasswordExist = await knex.schema.hasColumn(TableName.SecretSharing, "password");
    if (!doesPasswordExist) {
      await knex.schema.alterTable(TableName.SecretSharing, (t) => {
        t.string("password").nullable();
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    const doesPasswordExist = await knex.schema.hasColumn(TableName.SecretSharing, "password");
    if (doesPasswordExist) {
      await knex.schema.alterTable(TableName.SecretSharing, (t) => {
        t.dropColumn("password");
      });
    }
  }
}
@ -21,6 +21,7 @@ export const SecretSharingSchema = z.object({
  expiresAfterViews: z.number().nullable().optional(),
  accessType: z.string().default("anyone"),
  name: z.string().nullable().optional(),
  password: z.string().nullable().optional(),
  lastViewedAt: z.date().nullable().optional()
});
@ -42,6 +42,11 @@ export const registerCertificateEstRouter = async (server: FastifyZodProvider) =

      await res.hijack();

      // definitive connection timeout to clean up open connections and prevent a memory leak
      res.raw.setTimeout(10 * 1000, () => {
        res.raw.end();
      });

      res.raw.setHeader(wwwAuthenticateHeader, `Basic realm="infisical"`);
      res.raw.setHeader("Content-Length", 0);
      res.raw.statusCode = 401;
@ -17,11 +17,11 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
          name: z.string().optional(),
          secretPath: z.string().trim().default("/"),
          environment: z.string(),
          approverUserIds: z.string().array().min(1),
          approvers: z.string().array().min(1),
          approvals: z.number().min(1).default(1),
          enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
        })
        .refine((data) => data.approvals <= data.approverUserIds.length, {
        .refine((data) => data.approvals <= data.approvers.length, {
          path: ["approvals"],
          message: "The number of approvals should be lower than the number of approvers."
        }),
@ -127,11 +127,11 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
            .trim()
            .optional()
            .transform((val) => (val === "" ? "/" : val)),
          approverUserIds: z.string().array().min(1),
          approvers: z.string().array().min(1),
          approvals: z.number().min(1).default(1),
          enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
        })
        .refine((data) => data.approvals <= data.approverUserIds.length, {
        .refine((data) => data.approvals <= data.approvers.length, {
          path: ["approvals"],
          message: "The number of approvals should be lower than the number of approvers."
        }),
@ -1,6 +1,6 @@
/* eslint-disable @typescript-eslint/no-unsafe-return */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
// TODO(akhilmhdh): Fix this when licence service gets it type
// TODO(akhilmhdh): Fix this when license service gets its type
import { z } from "zod";

import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
@ -5,19 +5,47 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

const ScimUserSchema = z.object({
  schemas: z.array(z.string()),
  id: z.string().trim(),
  userName: z.string().trim(),
  name: z
    .object({
      familyName: z.string().trim().optional(),
      givenName: z.string().trim().optional()
    })
    .optional(),
  emails: z
    .array(
      z.object({
        primary: z.boolean(),
        value: z.string().email(),
        type: z.string().trim()
      })
    )
    .optional(),
  displayName: z.string().trim(),
  active: z.boolean()
});

const ScimGroupSchema = z.object({
  schemas: z.array(z.string()),
  id: z.string().trim(),
  displayName: z.string().trim(),
  members: z
    .array(
      z.object({
        value: z.string(),
        display: z.string().optional()
      })
    )
    .optional(),
  meta: z.object({
    resourceType: z.string().trim()
  })
});

export const registerScimRouter = async (server: FastifyZodProvider) => {
  server.addContentTypeParser("application/scim+json", { parseAs: "string" }, (_, body, done) => {
    try {
      const strBody = body instanceof Buffer ? body.toString() : body;

      const json: unknown = JSON.parse(strBody);
      done(null, json);
    } catch (err) {
      const error = err as Error;
      done(error, undefined);
    }
  });

  server.route({
    url: "/scim-tokens",
    method: "POST",
@ -124,25 +152,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
      }),
      response: {
        200: z.object({
          Resources: z.array(
            z.object({
              id: z.string().trim(),
              userName: z.string().trim(),
              name: z.object({
                familyName: z.string().trim(),
                givenName: z.string().trim()
              }),
              emails: z.array(
                z.object({
                  primary: z.boolean(),
                  value: z.string(),
                  type: z.string().trim()
                })
              ),
              displayName: z.string().trim(),
              active: z.boolean()
            })
          ),
          Resources: z.array(ScimUserSchema),
          itemsPerPage: z.number(),
          schemas: z.array(z.string()),
          startIndex: z.number(),
@ -170,30 +180,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
        orgMembershipId: z.string().trim()
      }),
      response: {
        201: z.object({
          schemas: z.array(z.string()),
          id: z.string().trim(),
          userName: z.string().trim(),
          name: z.object({
            familyName: z.string().trim(),
            givenName: z.string().trim()
          }),
          emails: z.array(
            z.object({
              primary: z.boolean(),
              value: z.string(),
              type: z.string().trim()
            })
          ),
          displayName: z.string().trim(),
          active: z.boolean(),
          groups: z.array(
            z.object({
              value: z.string().trim(),
              display: z.string().trim()
            })
          )
        })
        200: ScimUserSchema
      }
    },
    onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
@ -213,10 +200,12 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
      body: z.object({
        schemas: z.array(z.string()),
        userName: z.string().trim(),
        name: z.object({
          familyName: z.string().trim(),
          givenName: z.string().trim()
        }),
        name: z
          .object({
            familyName: z.string().trim().optional(),
            givenName: z.string().trim().optional()
          })
          .optional(),
        emails: z
          .array(
            z.object({
@ -226,28 +215,10 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
            })
          )
          .optional(),
        // displayName: z.string().trim(),
        active: z.boolean()
        active: z.boolean().default(true)
      }),
      response: {
        200: z.object({
          schemas: z.array(z.string()),
          id: z.string().trim(),
          userName: z.string().trim(),
          name: z.object({
            familyName: z.string().trim(),
            givenName: z.string().trim()
          }),
          emails: z.array(
            z.object({
              primary: z.boolean(),
              value: z.string().email(),
              type: z.string().trim()
            })
          ),
          displayName: z.string().trim(),
          active: z.boolean()
        })
        200: ScimUserSchema
      }
    },
    onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
@ -257,8 +228,8 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
      const user = await req.server.services.scim.createScimUser({
        externalId: req.body.userName,
        email: primaryEmail,
        firstName: req.body.name.givenName,
        lastName: req.body.name.familyName,
        firstName: req.body?.name?.givenName,
        lastName: req.body?.name?.familyName,
        orgId: req.permission.orgId
      });

@ -288,6 +259,116 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
    }
  });

  server.route({
    url: "/Users/:orgMembershipId",
    method: "PUT",
    schema: {
      params: z.object({
        orgMembershipId: z.string().trim()
      }),
      body: z.object({
        schemas: z.array(z.string()),
        id: z.string().trim(),
        userName: z.string().trim(),
        name: z
          .object({
            familyName: z.string().trim().optional(),
            givenName: z.string().trim().optional()
          })
          .optional(),
        displayName: z.string().trim(),
        emails: z
          .array(
            z.object({
              primary: z.boolean(),
              value: z.string().email(),
              type: z.string().trim()
            })
          )
          .optional(),
        active: z.boolean()
      }),
      response: {
        200: z.object({
          schemas: z.array(z.string()),
          id: z.string().trim(),
          userName: z.string().trim(),
          name: z.object({
            familyName: z.string().trim(),
            givenName: z.string().trim()
          }),
          emails: z.array(
            z.object({
              primary: z.boolean(),
              value: z.string().email(),
              type: z.string().trim()
            })
          ),
          displayName: z.string().trim(),
          active: z.boolean()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
    handler: async (req) => {
      const primaryEmail = req.body.emails?.find((email) => email.primary)?.value;
      const user = await req.server.services.scim.replaceScimUser({
        orgMembershipId: req.params.orgMembershipId,
        orgId: req.permission.orgId,
        firstName: req.body?.name?.givenName,
        lastName: req.body?.name?.familyName,
        active: req.body?.active,
        email: primaryEmail,
        externalId: req.body.userName
      });
      return user;
    }
  });

  server.route({
    url: "/Users/:orgMembershipId",
    method: "PATCH",
    schema: {
      params: z.object({
        orgMembershipId: z.string().trim()
      }),
      body: z.object({
        schemas: z.array(z.string()),
        Operations: z.array(
          z.union([
            z.object({
              op: z.union([z.literal("remove"), z.literal("Remove")]),
              path: z.string().trim(),
              value: z
                .object({
                  value: z.string()
                })
                .array()
                .optional()
            }),
            z.object({
              op: z.union([z.literal("add"), z.literal("Add"), z.literal("replace"), z.literal("Replace")]),
              path: z.string().trim().optional(),
              value: z.any().optional()
            })
          ])
        )
      }),
      response: {
        200: ScimUserSchema
      }
    },
    onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
    handler: async (req) => {
      const user = await req.server.services.scim.updateScimUser({
        orgMembershipId: req.params.orgMembershipId,
        orgId: req.permission.orgId,
        operations: req.body.Operations
      });

      return user;
    }
  });
  server.route({
    url: "/Groups",
    method: "POST",
@ -302,25 +383,10 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
            display: z.string()
          })
        )
        .optional() // okta-specific
        .optional()
      }),
      response: {
        200: z.object({
          schemas: z.array(z.string()),
          id: z.string().trim(),
          displayName: z.string().trim(),
          members: z
            .array(
              z.object({
                value: z.string(),
                display: z.string()
              })
            )
            .optional(),
          meta: z.object({
            resourceType: z.string().trim()
          })
        })
        200: ScimGroupSchema
      }
    },
    onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
@ -341,26 +407,12 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
      querystring: z.object({
        startIndex: z.coerce.number().default(1),
        count: z.coerce.number().default(20),
        filter: z.string().trim().optional()
        filter: z.string().trim().optional(),
        excludedAttributes: z.string().trim().optional()
      }),
      response: {
        200: z.object({
          Resources: z.array(
            z.object({
              schemas: z.array(z.string()),
              id: z.string().trim(),
              displayName: z.string().trim(),
              members: z.array(
                z.object({
                  value: z.string(),
                  display: z.string()
                })
              ),
              meta: z.object({
                resourceType: z.string().trim()
              })
            })
          ),
          Resources: z.array(ScimGroupSchema),
          itemsPerPage: z.number(),
          schemas: z.array(z.string()),
          startIndex: z.number(),
@ -374,7 +426,8 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
        orgId: req.permission.orgId,
        startIndex: req.query.startIndex,
        filter: req.query.filter,
        limit: req.query.count
        limit: req.query.count,
        isMembersExcluded: req.query.excludedAttributes === "members"
      });

      return groups;
@ -389,20 +442,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
        groupId: z.string().trim()
      }),
      response: {
        200: z.object({
          schemas: z.array(z.string()),
          id: z.string().trim(),
          displayName: z.string().trim(),
          members: z.array(
            z.object({
              value: z.string(),
              display: z.string()
            })
          ),
          meta: z.object({
            resourceType: z.string().trim()
          })
        })
        200: ScimGroupSchema
      }
    },
    onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
@ -411,6 +451,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
        groupId: req.params.groupId,
        orgId: req.permission.orgId
      });

      return group;
    }
  });
@ -434,25 +475,12 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
        )
      }),
      response: {
        200: z.object({
          schemas: z.array(z.string()),
          id: z.string().trim(),
          displayName: z.string().trim(),
          members: z.array(
            z.object({
              value: z.string(),
              display: z.string()
            })
          ),
          meta: z.object({
            resourceType: z.string().trim()
          })
        })
        200: ScimGroupSchema
      }
    },
    onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
    handler: async (req) => {
      const group = await req.server.services.scim.updateScimGroupNamePut({
      const group = await req.server.services.scim.replaceScimGroup({
        groupId: req.params.groupId,
        orgId: req.permission.orgId,
        ...req.body
@ -474,54 +502,34 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
        Operations: z.array(
          z.union([
            z.object({
              op: z.literal("replace"),
              value: z.object({
                id: z.string().trim(),
                displayName: z.string().trim()
              })
            }),
            z.object({
              op: z.literal("remove"),
              path: z.string().trim()
            }),
            z.object({
              op: z.literal("add"),
              op: z.union([z.literal("remove"), z.literal("Remove")]),
              path: z.string().trim(),
              value: z.array(
                z.object({
                  value: z.string().trim(),
                  display: z.string().trim().optional()
              value: z
                .object({
                  value: z.string()
                })
              )
                .array()
                .optional()
            }),
            z.object({
              op: z.union([z.literal("add"), z.literal("Add"), z.literal("replace"), z.literal("Replace")]),
              path: z.string().trim().optional(),
              value: z.any()
            })
          ])
        )
      }),
      response: {
        200: z.object({
          schemas: z.array(z.string()),
          id: z.string().trim(),
          displayName: z.string().trim(),
          members: z.array(
            z.object({
              value: z.string(),
              display: z.string()
            })
          ),
          meta: z.object({
            resourceType: z.string().trim()
          })
        })
        200: ScimGroupSchema
      }
    },
    onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
    handler: async (req) => {
      const group = await req.server.services.scim.updateScimGroupNamePatch({
      const group = await req.server.services.scim.updateScimGroup({
        groupId: req.params.groupId,
        orgId: req.permission.orgId,
        operations: req.body.Operations
      });

      return group;
    }
  });
@ -547,60 +555,4 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
      return group;
    }
  });

  server.route({
    url: "/Users/:orgMembershipId",
    method: "PUT",
    schema: {
      params: z.object({
        orgMembershipId: z.string().trim()
      }),
      body: z.object({
        schemas: z.array(z.string()),
        id: z.string().trim(),
        userName: z.string().trim(),
        name: z.object({
          familyName: z.string().trim(),
          givenName: z.string().trim()
        }),
        displayName: z.string().trim(),
        active: z.boolean()
      }),
      response: {
        200: z.object({
          schemas: z.array(z.string()),
          id: z.string().trim(),
          userName: z.string().trim(),
          name: z.object({
            familyName: z.string().trim(),
            givenName: z.string().trim()
          }),
          emails: z.array(
            z.object({
              primary: z.boolean(),
              value: z.string().email(),
              type: z.string().trim()
            })
          ),
          displayName: z.string().trim(),
          active: z.boolean(),
          groups: z.array(
            z.object({
              value: z.string().trim(),
              display: z.string().trim()
            })
          )
        })
      }
    },
    onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
    handler: async (req) => {
      const user = await req.server.services.scim.replaceScimUser({
        orgMembershipId: req.params.orgMembershipId,
        orgId: req.permission.orgId,
        active: req.body.active
      });
      return user;
    }
  });
};
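The loosened Operations schema above accepts both the lowercase ops required by SCIM (RFC 7644) and the capitalized variants some identity providers, notably Azure AD, emit. A sketch of a PATCH body the /Users/:orgMembershipId route would now accept; the field values are illustrative:

const patchBody = {
  schemas: ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
  Operations: [
    // capitalized op, as Azure AD sends it
    { op: "Replace", path: "active", value: false },
    // path-less replace is also allowed by the second union branch
    { op: "replace", value: { displayName: "Jane Doe" } }
  ]
};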
@ -44,7 +44,7 @@ export const accessApprovalPolicyServiceFactory = ({
    secretPath,
    actorAuthMethod,
    approvals,
    approverUserIds,
    approvers,
    projectSlug,
    environment,
    enforcementLevel
@ -52,7 +52,7 @@ export const accessApprovalPolicyServiceFactory = ({
    const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
    if (!project) throw new BadRequestError({ message: "Project not found" });

    if (approvals > approverUserIds.length)
    if (approvals > approvers.length)
      throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });

    const { permission } = await permissionService.getProjectPermission(
@ -76,7 +76,7 @@ export const accessApprovalPolicyServiceFactory = ({
      secretPath,
      actorAuthMethod,
      permissionService,
      userIds: approverUserIds
      userIds: approvers
    });

    const accessApproval = await accessApprovalPolicyDAL.transaction(async (tx) => {
@ -91,7 +91,7 @@ export const accessApprovalPolicyServiceFactory = ({
        tx
      );
      await accessApprovalPolicyApproverDAL.insertMany(
        approverUserIds.map((userId) => ({
        approvers.map((userId) => ({
          approverUserId: userId,
          policyId: doc.id
        })),
@ -128,7 +128,7 @@ export const accessApprovalPolicyServiceFactory = ({

  const updateAccessApprovalPolicy = async ({
    policyId,
    approverUserIds,
    approvers,
    secretPath,
    name,
    actorId,
@ -161,7 +161,7 @@ export const accessApprovalPolicyServiceFactory = ({
      },
      tx
    );
    if (approverUserIds) {
    if (approvers) {
      await verifyApprovers({
        projectId: accessApprovalPolicy.projectId,
        orgId: actorOrgId,
@ -169,12 +169,12 @@ export const accessApprovalPolicyServiceFactory = ({
        secretPath: doc.secretPath!,
        actorAuthMethod,
        permissionService,
        userIds: approverUserIds
        userIds: approvers
      });

      await accessApprovalPolicyApproverDAL.delete({ policyId: doc.id }, tx);
      await accessApprovalPolicyApproverDAL.insertMany(
        approverUserIds.map((userId) => ({
        approvers.map((userId) => ({
          approverUserId: userId,
          policyId: doc.id
        })),
@ -17,7 +17,7 @@ export type TCreateAccessApprovalPolicy = {
  approvals: number;
  secretPath: string;
  environment: string;
  approverUserIds: string[];
  approvers: string[];
  projectSlug: string;
  name: string;
  enforcementLevel: EnforcementLevel;
@ -26,7 +26,7 @@ export type TCreateAccessApprovalPolicy = {
export type TUpdateAccessApprovalPolicy = {
  policyId: string;
  approvals?: number;
  approverUserIds?: string[];
  approvers?: string[];
  secretPath?: string;
  name?: string;
  enforcementLevel?: EnforcementLevel;
@ -5,6 +5,7 @@ import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, stripUndefinedInWhere } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";

export type TAuditLogDALFactory = ReturnType<typeof auditLogDALFactory>;

@ -62,7 +63,9 @@ export const auditLogDALFactory = (db: TDbClient) => {
    const today = new Date();
    let deletedAuditLogIds: { id: string }[] = [];
    let numberOfRetryOnFailure = 0;
    let isRetrying = false;

    logger.info(`${QueueName.DailyResourceCleanUp}: audit log started`);
    do {
      try {
        const findExpiredLogSubQuery = (tx || db)(TableName.AuditLog)
@ -84,7 +87,9 @@ export const auditLogDALFactory = (db: TDbClient) => {
          setTimeout(resolve, 10); // time to breathe for db
        });
      }
    } while (deletedAuditLogIds.length > 0 || numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
      isRetrying = numberOfRetryOnFailure > 0;
    } while (deletedAuditLogIds.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
    logger.info(`${QueueName.DailyResourceCleanUp}: audit log completed`);
  };

  return { ...auditLogOrm, pruneAuditLog, find };
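A note on the loop-condition change in pruneAuditLog: under the old condition the do-loop kept spinning whenever numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE, which is always true after a clean pass that deleted zero rows and never failed. Gating on isRetrying makes the retry budget apply only once a failure has actually occurred. A sketch of the two conditions, using the variables declared in the function above:

// old: 0 < MAX_RETRY_ON_FAILURE holds forever when nothing ever failed
const oldCondition = deletedAuditLogIds.length > 0 || numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE;

// new: the retry budget only matters after at least one failure
const newCondition =
  deletedAuditLogIds.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);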
@ -1,16 +1,17 @@
import * as x509 from "@peculiar/x509";

import { BadRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { isCertChainValid } from "@app/services/certificate/certificate-fns";
import { TCertificateAuthorityCertDALFactory } from "@app/services/certificate-authority/certificate-authority-cert-dal";
import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal";
import { getCaCertChain, getCaCertChains } from "@app/services/certificate-authority/certificate-authority-fns";
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
import { TCertificateTemplateDALFactory } from "@app/services/certificate-template/certificate-template-dal";
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TProjectDALFactory } from "@app/services/project/project-dal";

import { isCertChainValid } from "../certificate/certificate-fns";
import { TCertificateAuthorityCertDALFactory } from "../certificate-authority/certificate-authority-cert-dal";
import { TCertificateAuthorityDALFactory } from "../certificate-authority/certificate-authority-dal";
import { getCaCertChain, getCaCertChains } from "../certificate-authority/certificate-authority-fns";
import { TCertificateAuthorityServiceFactory } from "../certificate-authority/certificate-authority-service";
import { TCertificateTemplateDALFactory } from "../certificate-template/certificate-template-dal";
import { TCertificateTemplateServiceFactory } from "../certificate-template/certificate-template-service";
import { TKmsServiceFactory } from "../kms/kms-service";
import { TProjectDALFactory } from "../project/project-dal";
import { TLicenseServiceFactory } from "../license/license-service";
import { convertRawCertsToPkcs7 } from "./certificate-est-fns";

type TCertificateEstServiceFactoryDep = {
@ -21,6 +22,7 @@ type TCertificateEstServiceFactoryDep = {
  certificateAuthorityCertDAL: Pick<TCertificateAuthorityCertDALFactory, "find" | "findById">;
  projectDAL: Pick<TProjectDALFactory, "findOne" | "updateById" | "transaction">;
  kmsService: Pick<TKmsServiceFactory, "decryptWithKmsKey" | "generateKmsKey">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};

export type TCertificateEstServiceFactory = ReturnType<typeof certificateEstServiceFactory>;
@ -32,7 +34,8 @@ export const certificateEstServiceFactory = ({
  certificateAuthorityCertDAL,
  certificateAuthorityDAL,
  projectDAL,
  kmsService
  kmsService,
  licenseService
}: TCertificateEstServiceFactoryDep) => {
  const simpleReenroll = async ({
    csr,
@ -48,6 +51,14 @@ export const certificateEstServiceFactory = ({
      certificateTemplateId
    });

    const plan = await licenseService.getPlan(estConfig.orgId);
    if (!plan.pkiEst) {
      throw new BadRequestError({
        message:
          "Failed to perform EST operation - simpleReenroll due to plan restriction. Upgrade to the Enterprise plan."
      });
    }

    if (!estConfig.isEnabled) {
      throw new BadRequestError({
        message: "EST is disabled"
@ -146,6 +157,14 @@ export const certificateEstServiceFactory = ({
      certificateTemplateId
    });

    const plan = await licenseService.getPlan(estConfig.orgId);
    if (!plan.pkiEst) {
      throw new BadRequestError({
        message:
          "Failed to perform EST operation - simpleEnroll due to plan restriction. Upgrade to the Enterprise plan."
      });
    }

    if (!estConfig.isEnabled) {
      throw new BadRequestError({
        message: "EST is disabled"
@ -196,6 +215,24 @@ export const certificateEstServiceFactory = ({
      });
    }

    const estConfig = await certificateTemplateService.getEstConfiguration({
      isInternal: true,
      certificateTemplateId
    });

    const plan = await licenseService.getPlan(estConfig.orgId);
    if (!plan.pkiEst) {
      throw new BadRequestError({
        message: "Failed to perform EST operation - caCerts due to plan restriction. Upgrade to the Enterprise plan."
      });
    }

    if (!estConfig.isEnabled) {
      throw new BadRequestError({
        message: "EST is disabled"
      });
    }

    const ca = await certificateAuthorityDAL.findById(certTemplate.caId);
    if (!ca) {
      throw new NotFoundError({
backend/src/ee/services/dynamic-secret/providers/aws-elasticache.ts (new file, 226 lines)
@ -0,0 +1,226 @@
import {
  CreateUserCommand,
  CreateUserGroupCommand,
  DeleteUserCommand,
  DescribeReplicationGroupsCommand,
  DescribeUserGroupsCommand,
  ElastiCache,
  ModifyReplicationGroupCommand,
  ModifyUserGroupCommand
} from "@aws-sdk/client-elasticache";
import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";

import { DynamicSecretAwsElastiCacheSchema, TDynamicProviderFns } from "./models";

const CreateElastiCacheUserSchema = z.object({
  UserId: z.string().trim().min(1),
  UserName: z.string().trim().min(1),
  Engine: z.string().default("redis"),
  Passwords: z.array(z.string().trim().min(1)).min(1).max(1), // Minimum password length is 16 characters, required by AWS.
  AccessString: z.string().trim().min(1) // Example: "on ~* +@all"
});

const DeleteElasticCacheUserSchema = z.object({
  UserId: z.string().trim().min(1)
});

type TElastiCacheRedisUser = { userId: string; password: string };
type TBasicAWSCredentials = { accessKeyId: string; secretAccessKey: string };

type TCreateElastiCacheUserInput = z.infer<typeof CreateElastiCacheUserSchema>;
type TDeleteElastiCacheUserInput = z.infer<typeof DeleteElasticCacheUserSchema>;

const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: string) => {
  const elastiCache = new ElastiCache({
    region,
    credentials
  });
  const infisicalGroup = "infisical-managed-group-elasticache";

  const ensureInfisicalGroupExists = async (clusterName: string) => {
    const replicationGroups = await elastiCache.send(new DescribeUserGroupsCommand());

    const existingGroup = replicationGroups.UserGroups?.find((group) => group.UserGroupId === infisicalGroup);

    let newlyCreatedGroup = false;
    if (!existingGroup) {
      const createGroupCommand = new CreateUserGroupCommand({
        UserGroupId: infisicalGroup,
        UserIds: ["default"],
        Engine: "redis"
      });

      await elastiCache.send(createGroupCommand);
      newlyCreatedGroup = true;
    }

    if (existingGroup || newlyCreatedGroup) {
      const replicationGroup = (
        await elastiCache.send(
          new DescribeReplicationGroupsCommand({
            ReplicationGroupId: clusterName
          })
        )
      ).ReplicationGroups?.[0];

      if (!replicationGroup?.UserGroupIds?.includes(infisicalGroup)) {
        // If the replication group doesn't have the infisical user group, we need to associate it
        const modifyGroupCommand = new ModifyReplicationGroupCommand({
          UserGroupIdsToAdd: [infisicalGroup],
          UserGroupIdsToRemove: [],
          ApplyImmediately: true,
          ReplicationGroupId: clusterName
        });
        await elastiCache.send(modifyGroupCommand);
      }
    }
  };

  const addUserToInfisicalGroup = async (userId: string) => {
    // figure out if the default user is already in the group, if it is, then we shouldn't add it again

    const addUserToGroupCommand = new ModifyUserGroupCommand({
      UserGroupId: infisicalGroup,
      UserIdsToAdd: [userId],
      UserIdsToRemove: []
    });

    await elastiCache.send(addUserToGroupCommand);
  };

  const createUser = async (creationInput: TCreateElastiCacheUserInput, clusterName: string) => {
    await ensureInfisicalGroupExists(clusterName);

    await elastiCache.send(new CreateUserCommand(creationInput)); // First create the user
    await addUserToInfisicalGroup(creationInput.UserId); // Then add the user to the group. We know the group is already a part of the cluster because of ensureInfisicalGroupExists()

    return {
      userId: creationInput.UserId,
      password: creationInput.Passwords[0]
    };
  };

  const deleteUser = async (
    deletionInput: TDeleteElastiCacheUserInput
  ): Promise<Pick<TElastiCacheRedisUser, "userId">> => {
    await elastiCache.send(new DeleteUserCommand(deletionInput));
    return { userId: deletionInput.UserId };
  };

  const verifyCredentials = async (clusterName: string) => {
    await elastiCache.send(
      new DescribeReplicationGroupsCommand({
        ReplicationGroupId: clusterName
      })
    );
  };

  return {
    createUser,
    deleteUser,
    verifyCredentials
  };
};

const generatePassword = () => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
  return customAlphabet(charset, 64)();
};

const generateUsername = () => {
  const charset = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-";
  return `inf-${customAlphabet(charset, 32)()}`; // Username must start with an ascii letter, so we prepend the username with "inf-"
};

export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = DynamicSecretAwsElastiCacheSchema.parse(inputs);

    // We need to ensure that the creation & revocation statements are valid and can be used to create and revoke users.
    // We can't return the parsed statements here because we need to use the handlebars template to generate the username and password, before we can use the parsed statements.
    CreateElastiCacheUserSchema.parse(JSON.parse(providerInputs.creationStatement));
    DeleteElasticCacheUserSchema.parse(JSON.parse(providerInputs.revocationStatement));

    return providerInputs;
  };
  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    await ElastiCacheUserManager(
      {
        accessKeyId: providerInputs.accessKeyId,
        secretAccessKey: providerInputs.secretAccessKey
      },
      providerInputs.region
    ).verifyCredentials(providerInputs.clusterName);
    return true;
  };

  const create = async (inputs: unknown, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);
    if (!(await validateConnection(providerInputs))) {
      throw new BadRequestError({ message: "Failed to establish connection" });
    }

    const leaseUsername = generateUsername();
    const leasePassword = generatePassword();
    const leaseExpiration = new Date(expireAt).toISOString();

    const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
      username: leaseUsername,
      password: leasePassword,
      expiration: leaseExpiration
    });

    const parsedStatement = CreateElastiCacheUserSchema.parse(JSON.parse(creationStatement));

    await ElastiCacheUserManager(
      {
        accessKeyId: providerInputs.accessKeyId,
        secretAccessKey: providerInputs.secretAccessKey
      },
      providerInputs.region
    ).createUser(parsedStatement, providerInputs.clusterName);

    return {
      entityId: leaseUsername,
      data: {
        DB_USERNAME: leaseUsername,
        DB_PASSWORD: leasePassword
      }
    };
  };

  const revoke = async (inputs: unknown, entityId: string) => {
    const providerInputs = await validateProviderInputs(inputs);

    const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username: entityId });
    const parsedStatement = DeleteElasticCacheUserSchema.parse(JSON.parse(revokeStatement));

    await ElastiCacheUserManager(
      {
        accessKeyId: providerInputs.accessKeyId,
        secretAccessKey: providerInputs.secretAccessKey
      },
      providerInputs.region
    ).deleteUser(parsedStatement);

    return { entityId };
  };

  const renew = async (inputs: unknown, entityId: string) => {
    // Do nothing
    return { entityId };
  };

  return {
    validateProviderInputs,
    validateConnection,
    create,
    revoke,
    renew
  };
};
backend/src/ee/services/dynamic-secret/providers/elastic-search.ts (new file, 126 lines)
@ -0,0 +1,126 @@
import { Client as ElasticSearchClient } from "@elastic/elasticsearch";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models";

const generatePassword = () => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
  return customAlphabet(charset, 64)();
};

const generateUsername = () => {
  return alphaNumericNanoId(32);
};

export const ElasticSearchDatabaseProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const appCfg = getConfig();
    const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if it's cloud or not

    const providerInputs = await DynamicSecretElasticSearchSchema.parseAsync(inputs);
    if (
      isCloud &&
      // localhost
      // internal ips
      (providerInputs.host === "host.docker.internal" ||
        providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
        providerInputs.host.match(/^192\.168\.\d+\.\d+/))
    ) {
      throw new BadRequestError({ message: "Invalid db host" });
    }
    if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
      throw new BadRequestError({ message: "Invalid db host" });
    }

    return providerInputs;
  };

  const getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
    const connection = new ElasticSearchClient({
      node: {
        url: new URL(`${providerInputs.host}:${providerInputs.port}`),
        ...(providerInputs.ca && {
          ssl: {
            rejectUnauthorized: false,
            ca: providerInputs.ca
          }
        })
      },
      auth: {
        ...(providerInputs.auth.type === ElasticSearchAuthTypes.ApiKey
          ? {
              apiKey: {
                api_key: providerInputs.auth.apiKey,
                id: providerInputs.auth.apiKeyId
              }
            }
          : {
              username: providerInputs.auth.username,
              password: providerInputs.auth.password
            })
      }
    });

    return connection;
  };

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const connection = await getClient(providerInputs);

    const infoResponse = await connection
      .info()
      .then(() => true)
      .catch(() => false);

    return infoResponse;
  };

  const create = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const connection = await getClient(providerInputs);

    const username = generateUsername();
    const password = generatePassword();

    await connection.security.putUser({
      username,
      password,
      full_name: "Managed by Infisical.com",
      roles: providerInputs.roles
    });

    await connection.close();
    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
  };

  const revoke = async (inputs: unknown, entityId: string) => {
    const providerInputs = await validateProviderInputs(inputs);
    const connection = await getClient(providerInputs);

    await connection.security.deleteUser({
      username: entityId
    });

    await connection.close();
    return { entityId };
  };

  const renew = async (inputs: unknown, entityId: string) => {
    // Do nothing
    return { entityId };
  };

  return {
    validateProviderInputs,
    validateConnection,
    create,
    revoke,
    renew
  };
};
@ -1,10 +1,18 @@
import { AwsElastiCacheDatabaseProvider } from "./aws-elasticache";
import { AwsIamProvider } from "./aws-iam";
import { CassandraProvider } from "./cassandra";
import { ElasticSearchDatabaseProvider } from "./elastic-search";
import { DynamicSecretProviders } from "./models";
import { MongoAtlasProvider } from "./mongo-atlas";
import { RedisDatabaseProvider } from "./redis";
import { SqlDatabaseProvider } from "./sql-database";

export const buildDynamicSecretProviders = () => ({
  [DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
  [DynamicSecretProviders.Cassandra]: CassandraProvider(),
  [DynamicSecretProviders.AwsIam]: AwsIamProvider()
  [DynamicSecretProviders.AwsIam]: AwsIamProvider(),
  [DynamicSecretProviders.Redis]: RedisDatabaseProvider(),
  [DynamicSecretProviders.AwsElastiCache]: AwsElastiCacheDatabaseProvider(),
  [DynamicSecretProviders.MongoAtlas]: MongoAtlasProvider(),
  [DynamicSecretProviders.ElasticSearch]: ElasticSearchDatabaseProvider()
});
@ -7,6 +7,56 @@ export enum SqlProviders {
|
||||
MsSQL = "mssql"
|
||||
}
|
||||
|
||||
export enum ElasticSearchAuthTypes {
|
||||
User = "user",
|
||||
ApiKey = "api-key"
|
||||
}
|
||||
|
||||
export const DynamicSecretRedisDBSchema = z.object({
|
||||
host: z.string().trim().toLowerCase(),
|
||||
port: z.number(),
|
||||
username: z.string().trim(), // this is often "default".
|
||||
password: z.string().trim().optional(),
|
||||
|
||||
creationStatement: z.string().trim(),
|
||||
revocationStatement: z.string().trim(),
|
||||
renewStatement: z.string().trim().optional(),
|
||||
ca: z.string().optional()
|
||||
});
|
||||
|
||||
export const DynamicSecretAwsElastiCacheSchema = z.object({
|
||||
clusterName: z.string().trim().min(1),
|
||||
accessKeyId: z.string().trim().min(1),
|
||||
secretAccessKey: z.string().trim().min(1),
|
||||
|
||||
region: z.string().trim(),
|
||||
creationStatement: z.string().trim(),
|
||||
revocationStatement: z.string().trim(),
|
||||
ca: z.string().optional()
|
||||
});
|
||||
|
||||
export const DynamicSecretElasticSearchSchema = z.object({
|
||||
host: z.string().trim().min(1),
|
||||
port: z.number(),
|
||||
roles: z.array(z.string().trim().min(1)).min(1),
|
||||
|
||||
// two auth types "user, apikey"
|
||||
auth: z.discriminatedUnion("type", [
|
||||
z.object({
|
||||
type: z.literal(ElasticSearchAuthTypes.User),
|
||||
username: z.string().trim(),
|
||||
password: z.string().trim()
|
||||
}),
|
||||
z.object({
|
||||
type: z.literal(ElasticSearchAuthTypes.ApiKey),
|
||||
apiKey: z.string().trim(),
|
||||
apiKeyId: z.string().trim()
|
||||
})
|
||||
]),
|
||||
|
||||
ca: z.string().optional()
|
||||
});
|
||||
|
||||
export const DynamicSecretSqlDBSchema = z.object({
|
||||
client: z.nativeEnum(SqlProviders),
|
||||
host: z.string().trim().toLowerCase(),
|
||||
@@ -44,16 +94,61 @@ export const DynamicSecretAwsIamSchema = z.object({
  policyArns: z.string().trim().optional()
});

export const DynamicSecretMongoAtlasSchema = z.object({
  adminPublicKey: z.string().trim().min(1).describe("Admin user public api key"),
  adminPrivateKey: z.string().trim().min(1).describe("Admin user private api key"),
  groupId: z
    .string()
    .trim()
    .min(1)
    .describe("Unique 24-hexadecimal digit string that identifies your project. This is the same as the project id"),
  roles: z
    .object({
      collectionName: z.string().optional().describe("Collection on which this role applies."),
      databaseName: z.string().min(1).describe("Database to which the user is granted access privileges."),
      roleName: z
        .string()
        .min(1)
        .describe(
          'Enum: "atlasAdmin" "backup" "clusterMonitor" "dbAdmin" "dbAdminAnyDatabase" "enableSharding" "read" "readAnyDatabase" "readWrite" "readWriteAnyDatabase" "<a custom role name>". Human-readable label that identifies a group of privileges assigned to a database user. This value can either be a built-in role or a custom role.'
        )
    })
    .array()
    .min(1),
  scopes: z
    .object({
      name: z
        .string()
        .min(1)
        .describe(
          "Human-readable label that identifies the cluster or MongoDB Atlas Data Lake that this database user can access."
        ),
      type: z
        .string()
        .min(1)
        .describe("Category of resource that this database user can access. Enum: CLUSTER, DATA_LAKE, STREAM")
    })
    .array()
});

export enum DynamicSecretProviders {
  SqlDatabase = "sql-database",
  Cassandra = "cassandra",
  AwsIam = "aws-iam"
  AwsIam = "aws-iam",
  Redis = "redis",
  AwsElastiCache = "aws-elasticache",
  MongoAtlas = "mongo-db-atlas",
  ElasticSearch = "elastic-search"
}

export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
  z.object({ type: z.literal(DynamicSecretProviders.SqlDatabase), inputs: DynamicSecretSqlDBSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema })
  z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.Redis), inputs: DynamicSecretRedisDBSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.AwsElastiCache), inputs: DynamicSecretAwsElastiCacheSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.MongoAtlas), inputs: DynamicSecretMongoAtlasSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.ElasticSearch), inputs: DynamicSecretElasticSearchSchema })
]);

export type TDynamicProviderFns = {
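For reference, a minimal sketch of an input the extended discriminated union accepts for the new Redis provider. The host, port, and ACL statements are placeholder values, and {{username}}/{{password}} are the handlebars variables the provider substitutes at lease time:

import { DynamicSecretProviders, DynamicSecretProviderSchema } from "./models";

// Placeholder values; password is optional in DynamicSecretRedisDBSchema.
const redisInput = DynamicSecretProviderSchema.parse({
  type: DynamicSecretProviders.Redis,
  inputs: {
    host: "redis.internal.example.com",
    port: 6379,
    username: "default",
    creationStatement: "ACL SETUSER {{username}} on >{{password}} ~* &* +@all",
    revocationStatement: "ACL DELUSER {{username}}"
  }
});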
backend/src/ee/services/dynamic-secret/providers/mongo-atlas.ts (new file, 146 lines)
@@ -0,0 +1,146 @@
import axios, { AxiosError } from "axios";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { createDigestAuthRequestInterceptor } from "@app/lib/axios/digest-auth";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { DynamicSecretMongoAtlasSchema, TDynamicProviderFns } from "./models";

const generatePassword = (size = 48) => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
  return customAlphabet(charset, 48)(size);
};

const generateUsername = () => {
  return alphaNumericNanoId(32);
};

export const MongoAtlasProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretMongoAtlasSchema.parseAsync(inputs);
    return providerInputs;
  };

  const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
    const client = axios.create({
      baseURL: "https://cloud.mongodb.com/api/atlas",
      headers: {
        Accept: "application/vnd.atlas.2023-02-01+json",
        "Content-Type": "application/json"
      }
    });
    const digestAuth = createDigestAuthRequestInterceptor(
      client,
      providerInputs.adminPublicKey,
      providerInputs.adminPrivateKey
    );
    return digestAuth;
  };

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);

    const isConnected = await client({
      method: "GET",
      url: `v2/groups/${providerInputs.groupId}/databaseUsers`,
      params: { itemsPerPage: 1 }
    })
      .then(() => true)
      .catch((error) => {
        if ((error as AxiosError).response) {
          throw new Error(JSON.stringify((error as AxiosError).response?.data));
        }
        throw error;
      });
    return isConnected;
  };

  const create = async (inputs: unknown, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);

    const username = generateUsername();
    const password = generatePassword();
    const expiration = new Date(expireAt).toISOString();
    await client({
      method: "POST",
      url: `/v2/groups/${providerInputs.groupId}/databaseUsers`,
      data: {
        roles: providerInputs.roles,
        scopes: providerInputs.scopes,
        deleteAfterDate: expiration,
        username,
        password,
        databaseName: "admin",
        groupId: providerInputs.groupId
      }
    }).catch((error) => {
      if ((error as AxiosError).response) {
        throw new Error(JSON.stringify((error as AxiosError).response?.data));
      }
      throw error;
    });
    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
  };

  const revoke = async (inputs: unknown, entityId: string) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);

    const username = entityId;
    const isExisting = await client({
      method: "GET",
      url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`
    }).catch((err) => {
      if ((err as AxiosError).response?.status === 404) return false;
      throw err;
    });
    if (isExisting) {
      await client({
        method: "DELETE",
        url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`
      }).catch((error) => {
        if ((error as AxiosError).response) {
          throw new Error(JSON.stringify((error as AxiosError).response?.data));
        }
        throw error;
      });
    }

    return { entityId: username };
  };

  const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);

    const username = entityId;
    const expiration = new Date(expireAt).toISOString();

    await client({
      method: "PATCH",
      url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`,
      data: {
        deleteAfterDate: expiration,
        databaseName: "admin",
        groupId: providerInputs.groupId
      }
    }).catch((error) => {
      if ((error as AxiosError).response) {
        throw new Error(JSON.stringify((error as AxiosError).response?.data));
      }
      throw error;
    });
    return { entityId: username };
  };

  return {
    validateProviderInputs,
    validateConnection,
    create,
    revoke,
    renew
  };
};
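Taken together, the provider methods form a create, renew, revoke lease lifecycle. A minimal sketch of a caller, assuming `inputs` already satisfies DynamicSecretMongoAtlasSchema:

import { MongoAtlasProvider } from "./mongo-atlas";

const demoLease = async (inputs: unknown) => {
  const provider = MongoAtlasProvider();
  const inOneHour = Date.now() + 60 * 60 * 1000;
  const inTwoHours = Date.now() + 2 * 60 * 60 * 1000;

  // create issues a scoped Atlas user with deleteAfterDate set to the expiry
  const lease = await provider.create(inputs, inOneHour);
  // renew pushes deleteAfterDate further out via PATCH
  await provider.renew(inputs, lease.entityId, inTwoHours);
  // revoke deletes the user, tolerating a 404 if Atlas already expired it
  await provider.revoke(inputs, lease.entityId);
};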
backend/src/ee/services/dynamic-secret/providers/redis.ts (new file, 182 lines)
@@ -0,0 +1,182 @@
import handlebars from "handlebars";
import { Redis } from "ioredis";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { getDbConnectionHost } from "@app/lib/knex";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";

const generatePassword = () => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
  return customAlphabet(charset, 64)();
};

const generateUsername = () => {
  return alphaNumericNanoId(32);
};

const executeTransactions = async (connection: Redis, commands: string[]): Promise<(string | null)[] | null> => {
  // Initiate a transaction
  const pipeline = connection.multi();

  // Add all commands to the pipeline
  for (const command of commands) {
    const args = command
      .split(" ")
      .map((arg) => arg.trim())
      .filter((arg) => arg.length > 0);
    pipeline.call(args[0], ...args.slice(1));
  }

  // Execute the transaction
  const results = await pipeline.exec();

  if (!results) {
    throw new BadRequestError({ message: "Redis transaction failed: No results returned" });
  }

  // Check for errors in the results
  const errors = results.filter(([err]) => err !== null);
  if (errors.length > 0) {
    throw new BadRequestError({ message: "Redis transaction failed with errors" });
  }

  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  return results.map(([_, result]) => result as string | null);
};

export const RedisDatabaseProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const appCfg = getConfig();
    const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if it's cloud or not
    const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);

    const providerInputs = await DynamicSecretRedisDBSchema.parseAsync(inputs);
    if (
      isCloud &&
      // localhost
      // internal ips
      (providerInputs.host === "host.docker.internal" ||
        providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
        providerInputs.host.match(/^192\.168\.\d+\.\d+/))
    )
      throw new BadRequestError({ message: "Invalid db host" });
    if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1" || dbHost === providerInputs.host)
      throw new BadRequestError({ message: "Invalid db host" });
    return providerInputs;
  };

  const getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
    let connection: Redis | null = null;
    try {
      connection = new Redis({
        username: providerInputs.username,
        host: providerInputs.host,
        port: providerInputs.port,
        password: providerInputs.password,
        ...(providerInputs.ca && {
          tls: {
            rejectUnauthorized: false,
            ca: providerInputs.ca
          }
        })
      });

      let result: string;
      if (providerInputs.password) {
        result = await connection.auth(providerInputs.username, providerInputs.password, () => {});
      } else {
        result = await connection.auth(providerInputs.username, () => {});
      }

      if (result !== "OK") {
        throw new BadRequestError({ message: `Invalid credentials, Redis returned ${result} status` });
      }

      return connection;
    } catch (err) {
      if (connection) await connection.quit();

      throw err;
    }
  };

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const connection = await getClient(providerInputs);

    const pingResponse = await connection
      .ping()
      .then(() => true)
      .catch(() => false);

    return pingResponse;
  };

  const create = async (inputs: unknown, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);
    const connection = await getClient(providerInputs);

    const username = generateUsername();
    const password = generatePassword();
    const expiration = new Date(expireAt).toISOString();

    const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
      username,
      password,
      expiration
    });

    const queries = creationStatement.toString().split(";").filter(Boolean);

    await executeTransactions(connection, queries);

    await connection.quit();
    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
  };

  const revoke = async (inputs: unknown, entityId: string) => {
    const providerInputs = await validateProviderInputs(inputs);
    const connection = await getClient(providerInputs);

    const username = entityId;

    const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
    const queries = revokeStatement.toString().split(";").filter(Boolean);

    await executeTransactions(connection, queries);

    await connection.quit();
    return { entityId: username };
  };

  const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);
    const connection = await getClient(providerInputs);

    const username = entityId;
    const expiration = new Date(expireAt).toISOString();

    const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration });

    if (renewStatement) {
      const queries = renewStatement.toString().split(";").filter(Boolean);
      await executeTransactions(connection, queries);
    }

    await connection.quit();
    return { entityId: username };
  };

  return {
    validateProviderInputs,
    validateConnection,
    create,
    revoke,
    renew
  };
};
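The creation and revocation statements are handlebars templates, so a single configured string expands into the concrete commands that executeTransactions then splits on ";" and runs in one MULTI/EXEC pipeline. A small sketch of the expansion (the template text is illustrative, not a required format):

import handlebars from "handlebars";

const template = "ACL SETUSER {{username}} on >{{password}} ~* &* +@all";
const rendered = handlebars.compile(template, { noEscape: true })({
  username: "inf-w3xmpl3",
  password: "s3cr3t",
  expiration: new Date().toISOString()
});
// rendered === "ACL SETUSER inf-w3xmpl3 on >s3cr3t ~* &* +@all"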
@@ -26,8 +26,10 @@ export const getDefaultOnPremFeatures = () => {
    status: null,
    trial_end: null,
    has_used_trial: true,
    secretApproval: false,
    secretApproval: true,
    secretRotation: true,
    caCrl: false
  };
};

export const setupLicenseRequestWithStore = () => {};
@@ -45,18 +45,19 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
    readLimit: 60,
    writeLimit: 200,
    secretsLimit: 40
  }
  },
  pkiEst: false
});

export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {
export const setupLicenseRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {
  let token: string;
  const licenceReq = axios.create({
  const licenseReq = axios.create({
    baseURL,
    timeout: 35 * 1000
    // signal: AbortSignal.timeout(60 * 1000)
  });

  const refreshLicence = async () => {
  const refreshLicense = async () => {
    const appCfg = getConfig();
    const {
      data: { token: authToken }
@@ -74,7 +75,7 @@ export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string
    return token;
  };

  licenceReq.interceptors.request.use(
  licenseReq.interceptors.request.use(
    (config) => {
      if (token && config.headers) {
        // eslint-disable-next-line no-param-reassign
@@ -85,7 +86,7 @@ export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string
    (err) => Promise.reject(err)
  );

  licenceReq.interceptors.response.use(
  licenseReq.interceptors.response.use(
    (response) => response,
    async (err) => {
      const originalRequest = (err as AxiosError).config;
@@ -96,15 +97,15 @@ export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string
        (originalRequest as any)._retry = true; // injected

        // refresh
        await refreshLicence();
        await refreshLicense();

        licenceReq.defaults.headers.common.Authorization = `Bearer ${token}`;
        return licenceReq(originalRequest!);
        licenseReq.defaults.headers.common.Authorization = `Bearer ${token}`;
        return licenseReq(originalRequest!);
      }

      return Promise.reject(err);
    }
  );

  return { request: licenceReq, refreshLicence };
  return { request: licenseReq, refreshLicense };
};
@@ -16,8 +16,8 @@ import { TOrgDALFactory } from "@app/services/org/org-dal";

import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { getDefaultOnPremFeatures, setupLicenceRequestWithStore } from "./licence-fns";
import { TLicenseDALFactory } from "./license-dal";
import { getDefaultOnPremFeatures, setupLicenseRequestWithStore } from "./license-fns";
import {
  InstanceType,
  TAddOrgPmtMethodDTO,
@@ -64,13 +64,13 @@ export const licenseServiceFactory = ({
  let onPremFeatures: TFeatureSet = getDefaultOnPremFeatures();

  const appCfg = getConfig();
  const licenseServerCloudApi = setupLicenceRequestWithStore(
  const licenseServerCloudApi = setupLicenseRequestWithStore(
    appCfg.LICENSE_SERVER_URL || "",
    LICENSE_SERVER_CLOUD_LOGIN,
    appCfg.LICENSE_SERVER_KEY || ""
  );

  const licenseServerOnPremApi = setupLicenceRequestWithStore(
  const licenseServerOnPremApi = setupLicenseRequestWithStore(
    appCfg.LICENSE_SERVER_URL || "",
    LICENSE_SERVER_ON_PREM_LOGIN,
    appCfg.LICENSE_KEY || ""
@@ -79,7 +79,7 @@ export const licenseServiceFactory = ({
  const init = async () => {
    try {
      if (appCfg.LICENSE_SERVER_KEY) {
        const token = await licenseServerCloudApi.refreshLicence();
        const token = await licenseServerCloudApi.refreshLicense();
        if (token) instanceType = InstanceType.Cloud;
        logger.info(`Instance type: ${InstanceType.Cloud}`);
        isValidLicense = true;
@@ -87,7 +87,7 @@ export const licenseServiceFactory = ({
      }

      if (appCfg.LICENSE_KEY) {
        const token = await licenseServerOnPremApi.refreshLicence();
        const token = await licenseServerOnPremApi.refreshLicense();
        if (token) {
          const {
            data: { currentPlan }
@@ -63,6 +63,7 @@ export type TFeatureSet = {
    writeLimit: number;
    secretsLimit: number;
  };
  pkiEst: boolean;
};

export type TOrgPlansTableDTO = {
@@ -44,19 +44,18 @@ export const buildScimUser = ({
  email,
  firstName,
  lastName,
  groups = [],
  active
  active,
  createdAt,
  updatedAt
}: {
  orgMembershipId: string;
  username: string;
  email?: string | null;
  firstName: string;
  lastName: string;
  groups?: {
    value: string;
    display: string;
  }[];
  firstName: string | null | undefined;
  lastName: string | null | undefined;
  active: boolean;
  createdAt: Date;
  updatedAt: Date;
}): TScimUser => {
  const scimUser = {
    schemas: ["urn:ietf:params:scim:schemas:core:2.0:User"],
@@ -64,9 +63,9 @@ export const buildScimUser = ({
    userName: username,
    displayName: `${firstName} ${lastName}`,
    name: {
      givenName: firstName,
      givenName: firstName || "",
      middleName: null,
      familyName: lastName
      familyName: lastName || ""
    },
    emails: email
      ? [
@@ -78,10 +77,10 @@ export const buildScimUser = ({
        ]
      : [],
    active,
    groups,
    meta: {
      resourceType: "User",
      location: null
      created: createdAt,
      lastModified: updatedAt
    }
  };

@@ -109,14 +108,18 @@ export const buildScimGroupList = ({
export const buildScimGroup = ({
  groupId,
  name,
  members
  members,
  updatedAt,
  createdAt
}: {
  groupId: string;
  name: string;
  members: {
    value: string;
    display: string;
    display?: string;
  }[];
  createdAt: Date;
  updatedAt: Date;
}): TScimGroup => {
  const scimGroup = {
    schemas: ["urn:ietf:params:scim:schemas:core:2.0:Group"],
@@ -125,7 +128,8 @@ export const buildScimGroup = ({
    members,
    meta: {
      resourceType: "Group",
      location: null
      created: createdAt,
      lastModified: updatedAt
    }
  };
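With createdAt/updatedAt threaded through, buildScimGroup now emits the full RFC 7643 meta block. A sketch of the resulting resource with placeholder values (imported from the module above):

import { buildScimGroup } from "./scim-fns";

const scimGroup = buildScimGroup({
  groupId: "group-123",
  name: "engineering",
  members: [{ value: "org-membership-456", display: "Jane Doe" }],
  createdAt: new Date("2024-01-01T00:00:00Z"),
  updatedAt: new Date("2024-06-01T00:00:00Z")
});
// scimGroup.schemas === ["urn:ietf:params:scim:schemas:core:2.0:Group"]
// scimGroup.meta.created / scimGroup.meta.lastModified mirror the Date arguments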
@@ -1,6 +1,7 @@
import { ForbiddenError } from "@casl/ability";
import slugify from "@sindresorhus/slugify";
import jwt from "jsonwebtoken";
import { scimPatch } from "scim-patch";

import { OrgMembershipRole, OrgMembershipStatus, TableName, TOrgMemberships, TUsers } from "@app/db/schemas";
import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
@@ -9,7 +10,6 @@ import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-grou
import { TScimDALFactory } from "@app/ee/services/scim/scim-dal";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ScimRequestError, UnauthorizedError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TOrgPermission } from "@app/lib/types";
import { AuthTokenType } from "@app/services/auth/auth-type";
@@ -32,14 +32,7 @@ import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { TProjectUserAdditionalPrivilegeDALFactory } from "../project-user-additional-privilege/project-user-additional-privilege-dal";
import {
  buildScimGroup,
  buildScimGroupList,
  buildScimUser,
  buildScimUserList,
  extractScimValueFromPath,
  parseScimFilter
} from "./scim-fns";
import { buildScimGroup, buildScimGroupList, buildScimUser, buildScimUserList, parseScimFilter } from "./scim-fns";
import {
  TCreateScimGroupDTO,
  TCreateScimTokenDTO,
@@ -64,12 +57,18 @@ type TScimServiceFactoryDep = {
  scimDAL: Pick<TScimDALFactory, "create" | "find" | "findById" | "deleteById">;
  userDAL: Pick<
    TUserDALFactory,
    "find" | "findOne" | "create" | "transaction" | "findUserEncKeyByUserIdsBatch" | "findById"
    "find" | "findOne" | "create" | "transaction" | "findUserEncKeyByUserIdsBatch" | "findById" | "updateById"
  >;
  userAliasDAL: Pick<TUserAliasDALFactory, "findOne" | "create" | "delete">;
  userAliasDAL: Pick<TUserAliasDALFactory, "findOne" | "create" | "delete" | "update">;
  orgDAL: Pick<
    TOrgDALFactory,
    "createMembership" | "findById" | "findMembership" | "deleteMembershipById" | "transaction" | "updateMembershipById"
    | "createMembership"
    | "findById"
    | "findMembership"
    | "findMembershipWithScimFilter"
    | "deleteMembershipById"
    | "transaction"
    | "updateMembershipById"
  >;
  orgMembershipDAL: Pick<TOrgMembershipDALFactory, "find" | "findOne" | "create" | "updateById" | "findById">;
  projectDAL: Pick<TProjectDALFactory, "find" | "findProjectGhostUser">;
@@ -193,7 +192,12 @@ export const scimServiceFactory = ({
  };

  // SCIM server endpoints
  const listScimUsers = async ({ startIndex, limit, filter, orgId }: TListScimUsersDTO): Promise<TListScimUsers> => {
  const listScimUsers = async ({
    startIndex = 0,
    limit = 100,
    filter,
    orgId
  }: TListScimUsersDTO): Promise<TListScimUsers> => {
    const org = await orgDAL.findById(orgId);

    if (!org.scimEnabled)
@@ -207,23 +211,20 @@ export const scimServiceFactory = ({
      ...(limit && { limit })
    };

    const users = await orgDAL.findMembership(
      {
        [`${TableName.OrgMembership}.orgId` as "id"]: orgId,
        ...parseScimFilter(filter)
      },
      findOpts
    );
    const users = await orgDAL.findMembershipWithScimFilter(orgId, filter, findOpts);

    const scimUsers = users.map(({ id, externalId, username, firstName, lastName, email, isActive }) =>
      buildScimUser({
        orgMembershipId: id ?? "",
        username: externalId ?? username,
        firstName: firstName ?? "",
        lastName: lastName ?? "",
        email,
        active: isActive
      })
    const scimUsers = users.map(
      ({ id, externalId, username, firstName, lastName, email, isActive, createdAt, updatedAt }) =>
        buildScimUser({
          orgMembershipId: id ?? "",
          username: externalId ?? username,
          firstName: firstName ?? "",
          lastName: lastName ?? "",
          email,
          active: isActive,
          createdAt,
          updatedAt
        })
    );

    return buildScimUserList({
@@ -258,22 +259,15 @@ export const scimServiceFactory = ({
        status: 403
      });

    const groupMembershipsInOrg = await userGroupMembershipDAL.findGroupMembershipsByUserIdInOrg(
      membership.userId,
      orgId
    );

    return buildScimUser({
      orgMembershipId: membership.id,
      username: membership.externalId ?? membership.username,
      email: membership.email ?? "",
      firstName: membership.firstName as string,
      lastName: membership.lastName as string,
      firstName: membership.firstName,
      lastName: membership.lastName,
      active: membership.isActive,
      groups: groupMembershipsInOrg.map((group) => ({
        value: group.groupId,
        display: group.groupName
      }))
      createdAt: membership.createdAt,
      updatedAt: membership.updatedAt
    });
  };

@@ -322,7 +316,7 @@ export const scimServiceFactory = ({
            userId: userAlias.userId,
            inviteEmail: email,
            orgId,
            role: OrgMembershipRole.Member,
            role: OrgMembershipRole.NoAccess,
            status: user.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited, // if the user is fully onboarded, set status to accepted; otherwise set it to invited so we can update it later
            isActive: true
          },
@@ -349,7 +343,11 @@ export const scimServiceFactory = ({
      }

      if (!user) {
        const uniqueUsername = await normalizeUsername(`${firstName}-${lastName}`, userDAL);
        const uniqueUsername = await normalizeUsername(
          // external id is username
          `${firstName}-${lastName}`,
          userDAL
        );
        user = await userDAL.create(
          {
            username: serverCfg.trustSamlEmails ? email : uniqueUsername,
@@ -427,13 +425,16 @@ export const scimServiceFactory = ({
    return buildScimUser({
      orgMembershipId: createdOrgMembership.id,
      username: externalId,
      firstName: createdUser.firstName as string,
      lastName: createdUser.lastName as string,
      firstName: createdUser.firstName,
      lastName: createdUser.lastName,
      email: createdUser.email ?? "",
      active: createdOrgMembership.isActive
      active: createdOrgMembership.isActive,
      createdAt: createdOrgMembership.createdAt,
      updatedAt: createdOrgMembership.updatedAt
    });
  };

  // partial
  const updateScimUser = async ({ orgMembershipId, orgId, operations }: TUpdateScimUserDTO) => {
    const [membership] = await orgDAL
      .findMembership({
@@ -459,37 +460,52 @@ export const scimServiceFactory = ({
        status: 403
      });

    let active = true;

    operations.forEach((operation) => {
      if (operation.op.toLowerCase() === "replace") {
        if (operation.path === "active" && operation.value === "False") {
          // azure scim op format
          active = false;
        } else if (typeof operation.value === "object" && operation.value.active === false) {
          // okta scim op format
          active = false;
        }
      }
    });

    if (!active) {
      await orgMembershipDAL.updateById(membership.id, {
        isActive: false
      });
    }

    return buildScimUser({
    const scimUser = buildScimUser({
      orgMembershipId: membership.id,
      username: membership.externalId ?? membership.username,
      email: membership.email,
      firstName: membership.firstName as string,
      lastName: membership.lastName as string,
      active
      lastName: membership.lastName,
      firstName: membership.firstName,
      active: membership.isActive,
      username: membership.externalId ?? membership.username,
      createdAt: membership.createdAt,
      updatedAt: membership.updatedAt
    });
    scimPatch(scimUser, operations);

    const serverCfg = await getServerCfg();
    await userDAL.transaction(async (tx) => {
      await orgMembershipDAL.updateById(
        membership.id,
        {
          isActive: scimUser.active
        },
        tx
      );
      const hasEmailChanged = scimUser.emails[0].value !== membership.email;
      await userDAL.updateById(
        membership.userId,
        {
          firstName: scimUser.name.givenName,
          email: scimUser.emails[0].value,
          lastName: scimUser.name.familyName,
          isEmailVerified: hasEmailChanged ? serverCfg.trustSamlEmails : true
        },
        tx
      );
    });

    return scimUser;
  };
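scim-patch applies RFC 7644 PATCH operations generically, which is what lets the hand-rolled Azure/Okta special-casing above be deleted. A minimal sketch of the library's behavior, with placeholder values:

import { scimPatch, ScimPatchOperation } from "scim-patch";

const scimUser = {
  schemas: ["urn:ietf:params:scim:schemas:core:2.0:User"],
  id: "member-123",
  userName: "jane.doe",
  active: true,
  meta: { resourceType: "User", created: new Date(), lastModified: new Date() }
};

const operations: ScimPatchOperation[] = [{ op: "replace", path: "active", value: false }];
scimPatch(scimUser, operations); // mutates in place: scimUser.active === false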
  const replaceScimUser = async ({ orgMembershipId, active, orgId }: TReplaceScimUserDTO) => {
  const replaceScimUser = async ({
    orgMembershipId,
    active,
    orgId,
    lastName,
    firstName,
    email,
    externalId
  }: TReplaceScimUserDTO) => {
    const [membership] = await orgDAL
      .findMembership({
        [`${TableName.OrgMembership}.id` as "id"]: orgMembershipId,
@@ -514,26 +530,47 @@ export const scimServiceFactory = ({
        status: 403
      });

    await orgMembershipDAL.updateById(membership.id, {
      isActive: active
    const serverCfg = await getServerCfg();
    await userDAL.transaction(async (tx) => {
      await userAliasDAL.update(
        {
          orgId,
          aliasType: UserAliasType.SAML,
          userId: membership.userId
        },
        {
          externalId
        },
        tx
      );
      await orgMembershipDAL.updateById(
        membership.id,
        {
          isActive: active
        },
        tx
      );
      await userDAL.updateById(
        membership.userId,
        {
          firstName,
          email,
          lastName,
          isEmailVerified: serverCfg.trustSamlEmails
        },
        tx
      );
    });

    const groupMembershipsInOrg = await userGroupMembershipDAL.findGroupMembershipsByUserIdInOrg(
      membership.userId,
      orgId
    );

    return buildScimUser({
      orgMembershipId: membership.id,
      username: membership.externalId ?? membership.username,
      username: externalId,
      email: membership.email,
      firstName: membership.firstName as string,
      lastName: membership.lastName as string,
      firstName: membership.firstName,
      lastName: membership.lastName,
      active,
      groups: groupMembershipsInOrg.map((group) => ({
        value: group.groupId,
        display: group.groupName
      }))
      createdAt: membership.createdAt,
      updatedAt: membership.updatedAt
    });
  };

@@ -570,7 +607,7 @@ export const scimServiceFactory = ({
    return {}; // intentionally return empty object upon success
  };

  const listScimGroups = async ({ orgId, startIndex, limit, filter }: TListScimGroupsDTO) => {
  const listScimGroups = async ({ orgId, startIndex, limit, filter, isMembersExcluded }: TListScimGroupsDTO) => {
    const plan = await licenseService.getPlan(orgId);
    if (!plan.groups)
      throw new BadRequestError({
@@ -603,6 +640,21 @@ export const scimServiceFactory = ({
    );

    const scimGroups: TScimGroup[] = [];
    if (isMembersExcluded) {
      return buildScimGroupList({
        scimGroups: groups.map((group) =>
          buildScimGroup({
            groupId: group.id,
            name: group.name,
            members: [],
            createdAt: group.createdAt,
            updatedAt: group.updatedAt
          })
        ),
        startIndex,
        limit
      });
    }

    for await (const group of groups) {
      const members = await userGroupMembershipDAL.findGroupMembershipsByGroupIdInOrg(group.id, orgId);
@@ -612,7 +664,9 @@ export const scimServiceFactory = ({
        members: members.map((member) => ({
          value: member.orgMembershipId,
          display: `${member.firstName ?? ""} ${member.lastName ?? ""}`
        }))
        })),
        createdAt: group.createdAt,
        updatedAt: group.updatedAt
      });
      scimGroups.push(scimGroup);
    }
@@ -696,7 +750,9 @@ export const scimServiceFactory = ({
      members: orgMemberships.map(({ id, firstName, lastName }) => ({
        value: id,
        display: `${firstName} ${lastName}`
      }))
      })),
      createdAt: newGroup.group.createdAt,
      updatedAt: newGroup.group.updatedAt
    });
  };

@@ -739,31 +795,17 @@ export const scimServiceFactory = ({
      members: orgMemberships.map(({ id, firstName, lastName }) => ({
        value: id,
        display: `${firstName} ${lastName}`
      }))
      })),
      createdAt: group.createdAt,
      updatedAt: group.updatedAt
    });
  };

  const updateScimGroupNamePut = async ({ groupId, orgId, displayName, members }: TUpdateScimGroupNamePutDTO) => {
    const plan = await licenseService.getPlan(orgId);
    if (!plan.groups)
      throw new BadRequestError({
        message: "Failed to update SCIM group due to plan restriction. Upgrade plan to update SCIM group."
      });

    const org = await orgDAL.findById(orgId);
    if (!org) {
      throw new ScimRequestError({
        detail: "Organization Not Found",
        status: 404
      });
    }

    if (!org.scimEnabled)
      throw new ScimRequestError({
        detail: "SCIM is disabled for the organization",
        status: 403
      });

  const $replaceGroupDAL = async (
    groupId: string,
    orgId: string,
    { displayName, members = [] }: { displayName: string; members: { value: string }[] }
  ) => {
    const updatedGroup = await groupDAL.transaction(async (tx) => {
      const [group] = await groupDAL.update(
        {
@@ -782,74 +824,96 @@ export const scimServiceFactory = ({
        });
      }

      if (members) {
        const orgMemberships = await orgMembershipDAL.find({
          $in: {
            id: members.map((member) => member.value)
          }
      const orgMemberships = members.length
        ? await orgMembershipDAL.find({
            $in: {
              id: members.map((member) => member.value)
            }
          })
        : [];

        const membersIdsSet = new Set(orgMemberships.map((orgMembership) => orgMembership.userId));
      const userGroupMembers = await userGroupMembershipDAL.find({
        groupId: group.id
      });
      const directMemberUserIds = userGroupMembers.filter((el) => !el.isPending).map((membership) => membership.userId);

      const pendingGroupAdditionsUserIds = userGroupMembers
        .filter((el) => el.isPending)
        .map((pendingGroupAddition) => pendingGroupAddition.userId);

      const allMembersUserIds = directMemberUserIds.concat(pendingGroupAdditionsUserIds);
      const allMembersUserIdsSet = new Set(allMembersUserIds);

      const toAddUserIds = orgMemberships.filter((member) => !allMembersUserIdsSet.has(member.userId as string));
      const toRemoveUserIds = allMembersUserIds.filter((userId) => !membersIdsSet.has(userId));

      if (toAddUserIds.length) {
        await addUsersToGroupByUserIds({
          group,
          userIds: toAddUserIds.map((member) => member.userId as string),
          userDAL,
          userGroupMembershipDAL,
          orgDAL,
          groupProjectDAL,
          projectKeyDAL,
          projectDAL,
          projectBotDAL,
          tx
        });
      }

        const membersIdsSet = new Set(orgMemberships.map((orgMembership) => orgMembership.userId));

        const directMemberUserIds = (
          await userGroupMembershipDAL.find({
            groupId: group.id,
            isPending: false
          })
        ).map((membership) => membership.userId);

        const pendingGroupAdditionsUserIds = (
          await userGroupMembershipDAL.find({
            groupId: group.id,
            isPending: true
          })
        ).map((pendingGroupAddition) => pendingGroupAddition.userId);

        const allMembersUserIds = directMemberUserIds.concat(pendingGroupAdditionsUserIds);
        const allMembersUserIdsSet = new Set(allMembersUserIds);

        const toAddUserIds = orgMemberships.filter((member) => !allMembersUserIdsSet.has(member.userId as string));
        const toRemoveUserIds = allMembersUserIds.filter((userId) => !membersIdsSet.has(userId));

        if (toAddUserIds.length) {
          await addUsersToGroupByUserIds({
            group,
            userIds: toAddUserIds.map((member) => member.userId as string),
            userDAL,
            userGroupMembershipDAL,
            orgDAL,
            groupProjectDAL,
            projectKeyDAL,
            projectDAL,
            projectBotDAL,
            tx
          });
        }

        if (toRemoveUserIds.length) {
          await removeUsersFromGroupByUserIds({
            group,
            userIds: toRemoveUserIds,
            userDAL,
            userGroupMembershipDAL,
            groupProjectDAL,
            projectKeyDAL,
            tx
          });
        }
      if (toRemoveUserIds.length) {
        await removeUsersFromGroupByUserIds({
          group,
          userIds: toRemoveUserIds,
          userDAL,
          userGroupMembershipDAL,
          groupProjectDAL,
          projectKeyDAL,
          tx
        });
      }

      return group;
    });

    return updatedGroup;
  };
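The reconciliation inside $replaceGroupDAL is a plain set difference between the members the SCIM payload declares and the members (direct plus pending) the group already has. A distilled sketch with hypothetical ids:

const desiredUserIds = new Set(["user-a", "user-b"]); // from the SCIM members payload
const currentUserIds = ["user-b", "user-c"];          // direct + pending group members

const toAdd = [...desiredUserIds].filter((id) => !currentUserIds.includes(id)); // ["user-a"]
const toRemove = currentUserIds.filter((id) => !desiredUserIds.has(id));        // ["user-c"]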
  const replaceScimGroup = async ({ groupId, orgId, displayName, members }: TUpdateScimGroupNamePutDTO) => {
    const plan = await licenseService.getPlan(orgId);
    if (!plan.groups)
      throw new BadRequestError({
        message: "Failed to update SCIM group due to plan restriction. Upgrade plan to update SCIM group."
      });

    const org = await orgDAL.findById(orgId);
    if (!org) {
      throw new ScimRequestError({
        detail: "Organization Not Found",
        status: 404
      });
    }

    if (!org.scimEnabled)
      throw new ScimRequestError({
        detail: "SCIM is disabled for the organization",
        status: 403
      });

    const updatedGroup = await $replaceGroupDAL(groupId, orgId, { displayName, members });

    return buildScimGroup({
      groupId: updatedGroup.id,
      name: updatedGroup.name,
      members
      members,
      updatedAt: updatedGroup.updatedAt,
      createdAt: updatedGroup.createdAt
    });
  };

  const updateScimGroupNamePatch = async ({ groupId, orgId, operations }: TUpdateScimGroupNamePatchDTO) => {
  const updateScimGroup = async ({ groupId, orgId, operations }: TUpdateScimGroupNamePatchDTO) => {
    const plan = await licenseService.getPlan(orgId);
    if (!plan.groups)
      throw new BadRequestError({
@@ -871,7 +935,7 @@ export const scimServiceFactory = ({
        status: 403
      });

    let group = await groupDAL.findOne({
    const group = await groupDAL.findOne({
      id: groupId,
      orgId
    });
@@ -883,73 +947,28 @@ export const scimServiceFactory = ({
      });
    }

    for await (const operation of operations) {
      switch (operation.op) {
        case "replace": {
          group = await groupDAL.updateById(group.id, {
            name: operation.value.displayName
          });
          break;
        }
        case "add": {
          try {
            const orgMemberships = await orgMembershipDAL.find({
              $in: {
                id: operation.value.map((member) => member.value)
              }
            });

            await addUsersToGroupByUserIds({
              group,
              userIds: orgMemberships.map((membership) => membership.userId as string),
              userDAL,
              userGroupMembershipDAL,
              orgDAL,
              groupProjectDAL,
              projectKeyDAL,
              projectDAL,
              projectBotDAL
            });
          } catch {
            logger.info("Repeat SCIM user-group add operation");
          }

          break;
        }
        case "remove": {
          const orgMembershipId = extractScimValueFromPath(operation.path);
          if (!orgMembershipId) throw new ScimRequestError({ detail: "Invalid path value", status: 400 });
          const orgMembership = await orgMembershipDAL.findById(orgMembershipId);
          if (!orgMembership) throw new ScimRequestError({ detail: "Org Membership Not Found", status: 400 });
          await removeUsersFromGroupByUserIds({
            group,
            userIds: [orgMembership.userId as string],
            userDAL,
            userGroupMembershipDAL,
            groupProjectDAL,
            projectKeyDAL
          });
          break;
        }
        default: {
          throw new ScimRequestError({
            detail: "Invalid Operation",
            status: 400
          });
        }
      }
    }

    const members = await userGroupMembershipDAL.findGroupMembershipsByGroupIdInOrg(group.id, orgId);

    return buildScimGroup({
    const scimGroup = buildScimGroup({
      groupId: group.id,
      name: group.name,
      members: members.map((member) => ({
        value: member.orgMembershipId
      })),
      createdAt: group.createdAt,
      updatedAt: group.updatedAt
    });
    scimPatch(scimGroup, operations);
    // removing members is an odd case that does not follow the SCIM convention
    await $replaceGroupDAL(groupId, orgId, { displayName: scimGroup.displayName, members: scimGroup.members });

    const updatedScimMembers = await userGroupMembershipDAL.findGroupMembershipsByGroupIdInOrg(group.id, orgId);
    return {
      ...scimGroup,
      members: updatedScimMembers.map((member) => ({
        value: member.orgMembershipId,
        display: `${member.firstName ?? ""} ${member.lastName ?? ""}`
      }))
    });
  };
  };

  const deleteScimGroup = async ({ groupId, orgId }: TDeleteScimGroupDTO) => {
@@ -1025,8 +1044,8 @@ export const scimServiceFactory = ({
    createScimGroup,
    getScimGroup,
    deleteScimGroup,
    updateScimGroupNamePut,
    updateScimGroupNamePatch,
    replaceScimGroup,
    updateScimGroup,
    fnValidateScimToken
  };
};
@@ -1,3 +1,5 @@
import { ScimPatchOperation } from "scim-patch";

import { TOrgPermission } from "@app/lib/types";

export type TCreateScimTokenDTO = {
@@ -34,29 +36,25 @@ export type TGetScimUserDTO = {
export type TCreateScimUserDTO = {
  externalId: string;
  email?: string;
  firstName: string;
  lastName: string;
  firstName?: string;
  lastName?: string;
  orgId: string;
};

export type TUpdateScimUserDTO = {
  orgMembershipId: string;
  orgId: string;
  operations: {
    op: string;
    path?: string;
    value?:
      | string
      | {
          active: boolean;
        };
  }[];
  operations: ScimPatchOperation[];
};

export type TReplaceScimUserDTO = {
  orgMembershipId: string;
  active: boolean;
  orgId: string;
  email?: string;
  firstName?: string;
  lastName?: string;
  externalId: string;
};

export type TDeleteScimUserDTO = {
@@ -69,6 +67,7 @@ export type TListScimGroupsDTO = {
  filter?: string;
  limit: number;
  orgId: string;
  isMembersExcluded?: boolean;
};

export type TListScimGroups = {
@@ -107,29 +106,7 @@ export type TUpdateScimGroupNamePutDTO = {
export type TUpdateScimGroupNamePatchDTO = {
  groupId: string;
  orgId: string;
  operations: (TRemoveOp | TReplaceOp | TAddOp)[];
};

type TReplaceOp = {
  op: "replace";
  value: {
    id: string;
    displayName: string;
  };
};

type TRemoveOp = {
  op: "remove";
  path: string;
};

type TAddOp = {
  op: "add";
  path: string;
  value: {
    value: string;
    display?: string;
  }[];
  operations: ScimPatchOperation[];
};

export type TDeleteScimGroupDTO = {
@@ -158,13 +135,10 @@ export type TScimUser = {
    type: string;
  }[];
  active: boolean;
  groups: {
    value: string;
    display: string;
  }[];
  meta: {
    resourceType: string;
    location: null;
    created: Date;
    lastModified: Date;
  };
};

@@ -174,10 +148,11 @@ export type TScimGroup = {
  displayName: string;
  members: {
    value: string;
    display: string;
    display?: string;
  }[];
  meta: {
    resourceType: string;
    location: null;
    created: Date;
    lastModified: Date;
  };
};
@@ -36,9 +36,7 @@ export const sendApprovalEmailsFn = async ({
        firstName: reviewerUser.firstName,
        projectName: project.name,
        organizationName: project.organization.name,
        approvalUrl: `${cfg.isDevelopmentMode ? "https" : "http"}://${cfg.SITE_URL}/project/${
          project.id
        }/approval?requestId=${secretApprovalRequest.id}`
        approvalUrl: `${cfg.SITE_URL}/project/${project.id}/approval?requestId=${secretApprovalRequest.id}`
      },
      template: SmtpTemplates.SecretApprovalRequestNeedsReview
    });
@@ -16,6 +16,7 @@ import {
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";

export type TSnapshotDALFactory = ReturnType<typeof snapshotDALFactory>;

@@ -599,6 +600,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
  const pruneExcessSnapshots = async () => {
    const PRUNE_FOLDER_BATCH_SIZE = 10000;

    logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret snapshots started`);
    try {
      let uuidOffset = "00000000-0000-0000-0000-000000000000";
      // cleanup snapshots from current folders
@@ -714,6 +716,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
    } catch (error) {
      throw new DatabaseError({ error, name: "SnapshotPrune" });
    }
    logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret snapshots completed`);
  };

  // special query for migration for secret v2
@@ -964,6 +964,10 @@ export const INTEGRATION = {
  shouldAutoRedeploy: "Used by Render to trigger auto deploy.",
  secretGCPLabel: "The label for GCP secrets.",
  secretAWSTag: "The tags for AWS secrets.",
  githubVisibility:
    "Define where the secrets from the Github Integration should be visible. Option 'selected' lets you directly define which repositories to sync secrets to.",
  githubVisibilityRepoIds:
    "The repository IDs to sync secrets to when using the Github Integration. Only applicable when using Organization scope, and visibility is set to 'selected'",
  kmsKeyId: "The ID of the encryption key from AWS KMS.",
  shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store.",
  shouldMaskSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Masked'.",
backend/src/lib/axios/digest-auth.ts (new file, 57 lines)
@@ -0,0 +1,57 @@
import crypto from "node:crypto";

import { AxiosError, AxiosInstance, AxiosRequestConfig } from "axios";

export const createDigestAuthRequestInterceptor = (
  axiosInstance: AxiosInstance,
  username: string,
  password: string
) => {
  let nc = 0;

  return async (opts: AxiosRequestConfig) => {
    try {
      return await axiosInstance.request(opts);
    } catch (err) {
      const error = err as AxiosError;
      const authHeader = (error?.response?.headers?.["www-authenticate"] as string) || "";

      if (error?.response?.status !== 401 || !authHeader?.includes("nonce")) {
        return Promise.reject(error.message);
      }

      if (!error.config) {
        return Promise.reject(error);
      }

      const authDetails = authHeader.split(",").map((el) => el.split("="));
      nc += 1;
      const nonceCount = nc.toString(16).padStart(8, "0");
      const cnonce = crypto.randomBytes(24).toString("hex");
      const realm = authDetails.find((el) => el[0].toLowerCase().indexOf("realm") > -1)?.[1].replace(/"/g, "");
      const nonce = authDetails.find((el) => el[0].toLowerCase().indexOf("nonce") > -1)?.[1].replace(/"/g, "");
      const ha1 = crypto.createHash("md5").update(`${username}:${realm}:${password}`).digest("hex");
      const path = opts.url;

      const ha2 = crypto
        .createHash("md5")
        .update(`${opts.method ?? "GET"}:${path}`)
        .digest("hex");

      const response = crypto
        .createHash("md5")
        .update(`${ha1}:${nonce}:${nonceCount}:${cnonce}:auth:${ha2}`)
        .digest("hex");
      const authorization = `Digest username="${username}",realm="${realm}",nonce="${nonce}",uri="${path}",qop="auth",algorithm="MD5",response="${response}",nc="${nonceCount}",cnonce="${cnonce}"`;

      if (opts.headers) {
        // eslint-disable-next-line
        opts.headers.authorization = authorization;
      } else {
        // eslint-disable-next-line
        opts.headers = { authorization };
      }
      return axiosInstance.request(opts);
    }
  };
};
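A sketch of how the interceptor is meant to be consumed, mirroring the Mongo Atlas provider above (the keys are placeholders): the returned function issues the request once, and on a 401 carrying a nonce it computes the MD5 digest response and replays the request.

import axios from "axios";

import { createDigestAuthRequestInterceptor } from "@app/lib/axios/digest-auth";

const client = axios.create({ baseURL: "https://cloud.mongodb.com/api/atlas" });
const request = createDigestAuthRequestInterceptor(client, "public-key", "private-key");

const listUsers = async (groupId: string) =>
  request({ method: "GET", url: `/v2/groups/${groupId}/databaseUsers` });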
@@ -1,6 +1,7 @@
import { Logger } from "pino";
import { z } from "zod";

import { removeTrailingSlash } from "../fn";
import { zpStr } from "../zod";

export const GITLAB_URL = "https://gitlab.com";
@@ -63,7 +64,9 @@ const envSchema = z
      .string()
      .min(32)
      .default("#5VihU%rbXHcHwWwCot5L3vyPsx$7dWYw^iGk!EJg2bC*f$PD$%KCqx^R@#^LSEf"),
    SITE_URL: zpStr(z.string().optional()),

    // Ensure that the SITE_URL never ends with a trailing slash
    SITE_URL: zpStr(z.string().transform((val) => (val ? removeTrailingSlash(val) : val))).optional(),
    // Telemetry
    TELEMETRY_ENABLED: zodStrBool.default("true"),
    POSTHOG_HOST: zpStr(z.string().optional().default("https://app.posthog.com")),
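A small sketch of what the transform buys: URL interpolation downstream (for example the approval email link earlier in this diff) can no longer produce a double slash.

import { removeTrailingSlash } from "../fn";

removeTrailingSlash("https://app.example.com/"); // "https://app.example.com"
// so `${SITE_URL}/project/${projectId}/approval` is always well-formed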
backend/src/lib/knex/scim.ts (new file, 121 lines)
@@ -0,0 +1,121 @@
import { Knex } from "knex";
import { Compare, Filter, parse } from "scim2-parse-filter";

const appendParentToGroupingOperator = (parentPath: string, filter: Filter) => {
  if (filter.op !== "[]" && filter.op !== "and" && filter.op !== "or" && filter.op !== "not") {
    return { ...filter, attrPath: `${parentPath}.${(filter as Compare).attrPath}` };
  }
  return filter;
};

export const generateKnexQueryFromScim = (
  rootQuery: Knex.QueryBuilder,
  rootScimFilter: string,
  getAttributeField: (attr: string) => string | null
) => {
  const scimRootFilterAst = parse(rootScimFilter);
  const stack = [
    {
      scimFilterAst: scimRootFilterAst,
      query: rootQuery
    }
  ];

  while (stack.length) {
    const { scimFilterAst, query } = stack.pop()!;
    switch (scimFilterAst.op) {
      case "eq": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.where(attrPath, scimFilterAst.compValue);
        break;
      }
      case "pr": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.whereNotNull(attrPath);
        break;
      }
      case "gt": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.where(attrPath, ">", scimFilterAst.compValue);
        break;
      }
      case "ge": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.where(attrPath, ">=", scimFilterAst.compValue);
        break;
      }
      case "lt": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.where(attrPath, "<", scimFilterAst.compValue);
        break;
      }
      case "le": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.where(attrPath, "<=", scimFilterAst.compValue);
        break;
      }
      case "sw": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.whereILike(attrPath, `${scimFilterAst.compValue}%`);
        break;
      }
      case "ew": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.whereILike(attrPath, `%${scimFilterAst.compValue}`);
        break;
      }
      case "co": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.whereILike(attrPath, `%${scimFilterAst.compValue}%`);
        break;
      }
      case "ne": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.whereNot(attrPath, "=", scimFilterAst.compValue);
        break;
      }
      case "and": {
        void query.andWhere((subQueryBuilder) => {
          scimFilterAst.filters.forEach((el) => {
            stack.push({
              query: subQueryBuilder,
              scimFilterAst: el
            });
          });
        });
        break;
      }
      case "or": {
        void query.orWhere((subQueryBuilder) => {
          scimFilterAst.filters.forEach((el) => {
            stack.push({
              query: subQueryBuilder,
              scimFilterAst: el
            });
          });
        });
        break;
      }
      case "not": {
        void query.whereNot((subQueryBuilder) => {
          stack.push({
            query: subQueryBuilder,
            scimFilterAst: scimFilterAst.filter
          });
        });
        break;
      }
      case "[]": {
        void query.whereNot((subQueryBuilder) => {
          stack.push({
            query: subQueryBuilder,
            scimFilterAst: appendParentToGroupingOperator(scimFilterAst.attrPath, scimFilterAst.valFilter)
          });
        });
        break;
      }
      default:
        break;
    }
  }
};
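A sketch of how generateKnexQueryFromScim might be driven; the table and column names and the knex instance are assumptions for illustration. getAttributeField acts as an allow-list that maps SCIM attribute names to columns, and returning null makes the clause a no-op rather than interpolating untrusted input:

import knex from "knex";

import { generateKnexQueryFromScim } from "@app/lib/knex/scim";

const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });

const query = db("org_memberships").where("org_memberships.orgId", "org-123");
generateKnexQueryFromScim(query, 'userName eq "jane.doe" and active eq true', (attr) => {
  if (attr === "userName") return "users.username";
  if (attr === "active") return "org_memberships.isActive";
  return null; // unknown attributes are ignored
});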
@@ -49,6 +49,21 @@ export const main = async ({ db, smtp, logger, queue, keyStore }: TMain) => {
  server.setValidatorCompiler(validatorCompiler);
  server.setSerializerCompiler(serializerCompiler);

  server.addContentTypeParser("application/scim+json", { parseAs: "string" }, (_, body, done) => {
    try {
      const strBody = body instanceof Buffer ? body.toString() : body;
      if (!strBody) {
        done(null, undefined);
        return;
      }
      const json: unknown = JSON.parse(strBody);
      done(null, json);
    } catch (err) {
      const error = err as Error;
      done(error, undefined);
    }
  });

  try {
    await server.register<FastifyCookieOptions>(cookie, {
      secret: appCfg.COOKIE_SECRET_SIGN_KEY
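Identity providers send SCIM payloads with Content-Type: application/scim+json, which Fastify's built-in JSON parser does not handle; the parser above simply treats the body as JSON. A sketch of exercising it with Fastify's inject (the route path is hypothetical):

const checkScimParser = async () => {
  const res = await server.inject({
    method: "POST",
    url: "/api/v1/scim/Users",
    headers: { "content-type": "application/scim+json" },
    payload: JSON.stringify({ userName: "jane.doe", active: true })
  });
  // without the custom parser, Fastify rejects the media type outright
  return res.statusCode;
};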
@ -1,7 +1,9 @@
import { CronJob } from "cron";
import { Redis } from "ioredis";
import { Knex } from "knex";
import { z } from "zod";

import { registerCertificateEstRouter } from "@app/ee/routes/est/certificate-est-router";
import { registerV1EERoutes } from "@app/ee/routes/v1";
import { accessApprovalPolicyApproverDALFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-approver-dal";
import { accessApprovalPolicyDALFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-dal";
@ -16,6 +18,7 @@ import { auditLogStreamDALFactory } from "@app/ee/services/audit-log-stream/audi
import { auditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
import { certificateAuthorityCrlDALFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-dal";
import { certificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
import { certificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
import { dynamicSecretDALFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-dal";
import { dynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { buildDynamicSecretProviders } from "@app/ee/services/dynamic-secret/providers";
@ -71,6 +74,7 @@ import { trustedIpDALFactory } from "@app/ee/services/trusted-ip/trusted-ip-dal"
import { trustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { TQueueServiceFactory } from "@app/queue";
import { readLimit } from "@app/server/config/rateLimiter";
import { accessTokenQueueServiceFactory } from "@app/services/access-token-queue/access-token-queue";
@ -90,7 +94,6 @@ import { certificateAuthorityDALFactory } from "@app/services/certificate-author
import { certificateAuthorityQueueFactory } from "@app/services/certificate-authority/certificate-authority-queue";
import { certificateAuthoritySecretDALFactory } from "@app/services/certificate-authority/certificate-authority-secret-dal";
import { certificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
import { certificateEstServiceFactory } from "@app/services/certificate-est/certificate-est-service";
import { certificateTemplateDALFactory } from "@app/services/certificate-template/certificate-template-dal";
import { certificateTemplateEstConfigDALFactory } from "@app/services/certificate-template/certificate-template-est-config-dal";
import { certificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
@ -197,7 +200,6 @@ import { injectIdentity } from "../plugins/auth/inject-identity";
import { injectPermission } from "../plugins/auth/inject-permission";
import { injectRateLimits } from "../plugins/inject-rate-limits";
import { registerSecretScannerGhApp } from "../plugins/secret-scanner";
import { registerCertificateEstRouter } from "./est/certificate-est-router";
import { registerV1Routes } from "./v1";
import { registerV2Routes } from "./v2";
import { registerV3Routes } from "./v3";
@ -665,7 +667,8 @@ export const registerRoutes = async (
    certificateAuthorityDAL,
    permissionService,
    kmsService,
    projectDAL
    projectDAL,
    licenseService
  });

  const certificateEstService = certificateEstServiceFactory({
@ -675,7 +678,8 @@ export const registerRoutes = async (
    certificateAuthorityCertDAL,
    certificateAuthorityDAL,
    projectDAL,
    kmsService
    kmsService,
    licenseService
  });

  const pkiAlertService = pkiAlertServiceFactory({
@ -1257,7 +1261,7 @@ export const registerRoutes = async (
      response: {
        200: z.object({
          date: z.date(),
          message: z.literal("Ok"),
          message: z.string().optional(),
          emailConfigured: z.boolean().optional(),
          inviteOnlySignup: z.boolean().optional(),
          redisConfigured: z.boolean().optional(),
@ -1266,12 +1270,37 @@ export const registerRoutes = async (
        })
      }
    },
    handler: async () => {
    handler: async (request, reply) => {
      const cfg = getConfig();
      const serverCfg = await getServerCfg();

      try {
        await db.raw("SELECT NOW()");
      } catch (err) {
        logger.error("Health check: database connection failed", err);
        return reply.code(503).send({
          date: new Date(),
          message: "Service unavailable"
        });
      }

      if (cfg.isRedisConfigured) {
        const redis = new Redis(cfg.REDIS_URL);
        try {
          await redis.ping();
          redis.disconnect();
        } catch (err) {
          logger.error("Health check: redis connection failed", err);
          return reply.code(503).send({
            date: new Date(),
            message: "Service unavailable"
          });
        }
      }

      return {
        date: new Date(),
        message: "Ok" as const,
        message: "Ok",
        emailConfigured: cfg.isSmtpConfigured,
        inviteOnlySignup: Boolean(serverCfg.allowSignUp),
        redisConfigured: cfg.isRedisConfigured,
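
A quick probe of the hardened health check; the `/api/status` path is an assumption, not confirmed by this hunk. The point of the change is visible either way: a 503 now distinguishes a reachable node with a broken dependency from a healthy one.

const res = await fetch("https://app.example.com/api/status");
if (res.status === 503) {
  // the Postgres SELECT NOW() or the Redis PING above failed
  console.error("dependency down:", await res.json());
} else {
  console.log(await res.json()); // { date, message: "Ok", emailConfigured, redisConfigured, ... }
}
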
@ -48,7 +48,7 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
  });

  server.route({
    method: "GET",
    method: "POST",
    url: "/public/:id",
    config: {
      rateLimit: publicEndpointLimit
@ -57,38 +57,37 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
      params: z.object({
        id: z.string().uuid()
      }),
      querystring: z.object({
        hashedHex: z.string().min(1)
      body: z.object({
        hashedHex: z.string().min(1),
        password: z.string().optional()
      }),
      response: {
        200: SecretSharingSchema.pick({
          encryptedValue: true,
          iv: true,
          tag: true,
          expiresAt: true,
          expiresAfterViews: true,
          accessType: true
        }).extend({
          orgName: z.string().optional()
        200: z.object({
          isPasswordProtected: z.boolean(),
          secret: SecretSharingSchema.pick({
            encryptedValue: true,
            iv: true,
            tag: true,
            expiresAt: true,
            expiresAfterViews: true,
            accessType: true
          })
            .extend({
              orgName: z.string().optional()
            })
            .optional()
        })
      }
    },
    handler: async (req) => {
      const sharedSecret = await req.server.services.secretSharing.getActiveSharedSecretById({
      const sharedSecret = await req.server.services.secretSharing.getSharedSecretById({
        sharedSecretId: req.params.id,
        hashedHex: req.query.hashedHex,
        hashedHex: req.body.hashedHex,
        password: req.body.password,
        orgId: req.permission?.orgId
      });
      if (!sharedSecret) return undefined;
      return {
        encryptedValue: sharedSecret.encryptedValue,
        iv: sharedSecret.iv,
        tag: sharedSecret.tag,
        expiresAt: sharedSecret.expiresAt,
        expiresAfterViews: sharedSecret.expiresAfterViews,
        accessType: sharedSecret.accessType,
        orgName: sharedSecret.orgName
      };

      return sharedSecret;
    }
  });

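The reworked endpoint sketched from the client side (the base URL is a placeholder; the two-step flow follows from the response schema above): a first POST without a password returns only { isPasswordProtected: true }, and a retry carrying the password returns the encrypted payload.

declare const sharedSecretId: string, hashedHex: string, userSuppliedPassword: string;

const url = `https://app.example.com/api/v1/secret-sharing/public/${sharedSecretId}`;

const probe = await fetch(url, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ hashedHex })
}).then((r) => r.json());

if (probe.isPasswordProtected && !probe.secret) {
  // prompt the user, then retry with the password included
  const unlocked = await fetch(url, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ hashedHex, password: userSuppliedPassword })
  }).then((r) => r.json());
  console.log(unlocked.secret?.encryptedValue);
}
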
@ -101,6 +100,7 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
    schema: {
      body: z.object({
        encryptedValue: z.string(),
        password: z.string().optional(),
        hashedHex: z.string(),
        iv: z.string(),
        tag: z.string(),
@ -131,6 +131,7 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
    schema: {
      body: z.object({
        name: z.string().max(50).optional(),
        password: z.string().optional(),
        encryptedValue: z.string(),
        hashedHex: z.string(),
        iv: z.string(),
@ -1,3 +1,5 @@
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
@ -30,20 +32,21 @@ export const certificateTemplateDALFactory = (db: TDbClient) => {
    }
  };

  const getById = async (id: string) => {
  const getById = async (id: string, tx?: Knex) => {
    try {
      const certTemplate = await db
        .replicaNode()(TableName.CertificateTemplate)
      const certTemplate = await (tx || db.replicaNode())(TableName.CertificateTemplate)
        .join(
          TableName.CertificateAuthority,
          `${TableName.CertificateAuthority}.id`,
          `${TableName.CertificateTemplate}.caId`
        )
        .join(TableName.Project, `${TableName.Project}.id`, `${TableName.CertificateAuthority}.projectId`)
        .where(`${TableName.CertificateTemplate}.id`, "=", id)
        .select(selectAllTableCols(TableName.CertificateTemplate))
        .select(
          db.ref("projectId").withSchema(TableName.CertificateAuthority),
          db.ref("friendlyName").as("caName").withSchema(TableName.CertificateAuthority)
          db.ref("friendlyName").as("caName").withSchema(TableName.CertificateAuthority),
          db.ref("orgId").withSchema(TableName.Project)
        )
        .first();

@ -3,6 +3,7 @@ import * as x509 from "@peculiar/x509";
import bcrypt from "bcrypt";

import { TCertificateTemplateEstConfigsUpdate } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { getConfig } from "@app/lib/config/env";
@ -32,6 +33,7 @@ type TCertificateTemplateServiceFactoryDep = {
  kmsService: Pick<TKmsServiceFactory, "generateKmsKey" | "encryptWithKmsKey" | "decryptWithKmsKey">;
  certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findById">;
  permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};

export type TCertificateTemplateServiceFactory = ReturnType<typeof certificateTemplateServiceFactory>;
@ -42,7 +44,8 @@ export const certificateTemplateServiceFactory = ({
  certificateAuthorityDAL,
  permissionService,
  kmsService,
  projectDAL
  projectDAL,
  licenseService
}: TCertificateTemplateServiceFactoryDep) => {
  const createCertTemplate = async ({
    caId,
@ -75,23 +78,28 @@ export const certificateTemplateServiceFactory = ({
      ProjectPermissionSub.CertificateTemplates
    );

    const { id } = await certificateTemplateDAL.create({
      caId,
      pkiCollectionId,
      name,
      commonName,
      subjectAlternativeName,
      ttl
    return certificateTemplateDAL.transaction(async (tx) => {
      const { id } = await certificateTemplateDAL.create(
        {
          caId,
          pkiCollectionId,
          name,
          commonName,
          subjectAlternativeName,
          ttl
        },
        tx
      );

      const certificateTemplate = await certificateTemplateDAL.getById(id, tx);
      if (!certificateTemplate) {
        throw new NotFoundError({
          message: "Certificate template not found"
        });
      }

      return certificateTemplate;
    });

    const certificateTemplate = await certificateTemplateDAL.getById(id);
    if (!certificateTemplate) {
      throw new NotFoundError({
        message: "Certificate template not found"
      });
    }

    return certificateTemplate;
  };

  const updateCertTemplate = async ({
@ -136,23 +144,29 @@ export const certificateTemplateServiceFactory = ({
    }
  }

    await certificateTemplateDAL.updateById(certTemplate.id, {
      caId,
      pkiCollectionId,
      commonName,
      subjectAlternativeName,
      name,
      ttl
    return certificateTemplateDAL.transaction(async (tx) => {
      await certificateTemplateDAL.updateById(
        certTemplate.id,
        {
          caId,
          pkiCollectionId,
          commonName,
          subjectAlternativeName,
          name,
          ttl
        },
        tx
      );

      const updatedTemplate = await certificateTemplateDAL.getById(id, tx);
      if (!updatedTemplate) {
        throw new NotFoundError({
          message: "Certificate template not found"
        });
      }

      return updatedTemplate;
    });

    const updatedTemplate = await certificateTemplateDAL.getById(id);
    if (!updatedTemplate) {
      throw new NotFoundError({
        message: "Certificate template not found"
      });
    }

    return updatedTemplate;
  };

  const deleteCertTemplate = async ({ id, actorId, actorAuthMethod, actor, actorOrgId }: TDeleteCertTemplateDTO) => {
@ -215,6 +229,13 @@ export const certificateTemplateServiceFactory = ({
    actor,
    actorOrgId
  }: TCreateEstConfigurationDTO) => {
    const plan = await licenseService.getPlan(actorOrgId);
    if (!plan.pkiEst) {
      throw new BadRequestError({
        message: "Failed to create EST configuration due to plan restriction. Upgrade to the Enterprise plan."
      });
    }

    const certTemplate = await certificateTemplateDAL.getById(certificateTemplateId);
    if (!certTemplate) {
      throw new NotFoundError({
@ -285,6 +306,13 @@ export const certificateTemplateServiceFactory = ({
    actor,
    actorOrgId
  }: TUpdateEstConfigurationDTO) => {
    const plan = await licenseService.getPlan(actorOrgId);
    if (!plan.pkiEst) {
      throw new BadRequestError({
        message: "Failed to update EST configuration due to plan restriction. Upgrade to the Enterprise plan."
      });
    }

    const certTemplate = await certificateTemplateDAL.getById(certificateTemplateId);
    if (!certTemplate) {
      throw new NotFoundError({
@ -416,7 +444,8 @@ export const certificateTemplateServiceFactory = ({
      isEnabled: estConfig.isEnabled,
      caChain: decryptedCaChain.toString(),
      hashedPassphrase: estConfig.hashedPassphrase,
      projectId: certTemplate.projectId
      projectId: certTemplate.projectId,
      orgId: certTemplate.orgId
    };
  };

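Why getById grew a tx parameter: create and update now read the row back inside the same transaction, so the lookup cannot miss the uncommitted write or race a lagging read replica. The pattern in miniature, sketched with Knex directly (table and column names are illustrative, not the project's schema):

import knex, { Knex } from "knex";

const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });

const template = await db.transaction(async (tx: Knex.Transaction) => {
  const [{ id }] = await tx("certificate_templates").insert({ name: "web-server" }, ["id"]);
  // a same-transaction read sees the uncommitted row; db.replicaNode() might not
  return tx("certificate_templates").where({ id }).first();
});
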
@ -4,6 +4,8 @@ import { TDbClient } from "@app/db";
import { IdentityAuthMethod, TableName, TIdentityAccessTokens } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";

export type TIdentityAccessTokenDALFactory = ReturnType<typeof identityAccessTokenDALFactory>;

@ -95,6 +97,7 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
  };

  const removeExpiredTokens = async (tx?: Knex) => {
    logger.info(`${QueueName.DailyResourceCleanUp}: remove expired access token started`);
    try {
      const docs = (tx || db)(TableName.IdentityAccessToken)
        .where({
@ -131,7 +134,8 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
          });
        })
        .delete();
      return await docs;
      await docs;
      logger.info(`${QueueName.DailyResourceCleanUp}: remove expired access token completed`);
    } catch (error) {
      throw new DatabaseError({ error, name: "IdentityAccessTokenPrune" });
    }
@ -5,6 +5,7 @@ import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";

export type TIdentityUaClientSecretDALFactory = ReturnType<typeof identityUaClientSecretDALFactory>;

@ -30,7 +31,9 @@ export const identityUaClientSecretDALFactory = (db: TDbClient) => {

  let deletedClientSecret: { id: string }[] = [];
  let numberOfRetryOnFailure = 0;
  let isRetrying = false;

  logger.info(`${QueueName.DailyResourceCleanUp}: remove expired universal auth client secret started`);
  do {
    try {
      const findExpiredClientSecretQuery = (tx || db)(TableName.IdentityUaClientSecret)
@ -39,7 +42,7 @@ export const identityUaClientSecretDALFactory = (db: TDbClient) => {
        })
        .orWhere((qb) => {
          void qb
            .where("clientSecretNumUses", ">", 0)
            .where("clientSecretNumUsesLimit", ">", 0)
            .andWhere(
              "clientSecretNumUses",
              ">=",
@ -71,7 +74,9 @@ export const identityUaClientSecretDALFactory = (db: TDbClient) => {
        setTimeout(resolve, 10); // give the db time to breathe between batches
      });
    }
  } while (deletedClientSecret.length > 0 || numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
    isRetrying = numberOfRetryOnFailure > 0;
  } while (deletedClientSecret.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
  logger.info(`${QueueName.DailyResourceCleanUp}: remove expired universal auth client secret completed`);
};

  return { ...uaClientSecretOrm, incrementUsage, removeExpiredClientSecrets };
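The loop-condition change above, reduced to its essence: before, the bare `|| numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE` kept even a successful empty pass spinning until the retry budget ran out; gating on `isRetrying` exits as soon as a pass deletes nothing and was not recovering from an error. A self-contained sketch, where deleteOneBatch stands in for the Knex delete query:

declare function deleteOneBatch(): Promise<{ id: string }[]>;

const MAX_RETRY_ON_FAILURE = 3;
let deleted: { id: string }[] = [];
let numberOfRetryOnFailure = 0;
let isRetrying = false;

do {
  try {
    deleted = await deleteOneBatch();
  } catch {
    numberOfRetryOnFailure += 1; // failed pass: spend one retry
    deleted = [];
  }
  isRetrying = numberOfRetryOnFailure > 0;
} while (deleted.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
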
@ -30,6 +30,7 @@ const getIntegrationSecretsV2 = async (
    environment: string;
    folderId: string;
    depth: number;
    secretPath: string;
    decryptor: (value: Buffer | null | undefined) => string;
  },
  secretV2BridgeDAL: Pick<TSecretV2BridgeDALFactory, "find" | "findByFolderId">,
@ -306,6 +307,7 @@ export const deleteIntegrationSecrets = async ({
    ? await getIntegrationSecretsV2(
        {
          environment: integration.environment.id,
          secretPath: integration.secretPath,
          projectId: integration.projectId,
          folderId: folder.id,
          depth: 1,
@ -1625,7 +1625,11 @@ const syncSecretsGitHub = async ({
      await octokit.request("PUT /orgs/{org}/actions/secrets/{secret_name}", {
        org: integration.owner as string,
        secret_name: key,
        visibility: "all",
        visibility: metadata.githubVisibility ?? "all",
        ...(metadata.githubVisibility === "selected" && {
          // we need to map the githubVisibilityRepoIds to numbers
          selected_repository_ids: metadata.githubVisibilityRepoIds?.map(Number) ?? []
        }),
        encrypted_value: encryptedSecret,
        key_id: repoPublicKey.key_id
      });
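A sample metadata payload exercising the new fields (shape taken from the IntegrationMetadataSchema hunk just below): with "selected", repo ids are stored as strings and mapped to the numbers GitHub expects at the API boundary, as the sync code above shows; "all" and "private" need no id list.

const metadata = {
  githubVisibility: "selected" as const,
  // numeric GitHub repository ids, kept as strings in Infisical's metadata
  githubVisibilityRepoIds: ["123456789", "987654321"]
};
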
@ -5,14 +5,18 @@ import { INTEGRATION } from "@app/lib/api-docs";
import { IntegrationMappingBehavior } from "../integration-auth/integration-list";

export const IntegrationMetadataSchema = z.object({
  initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir),

  secretPrefix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretPrefix),
  secretSuffix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretSuffix),
  initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir),

  mappingBehavior: z
    .nativeEnum(IntegrationMappingBehavior)
    .optional()
    .describe(INTEGRATION.CREATE.metadata.mappingBehavior),

  shouldAutoRedeploy: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldAutoRedeploy),

  secretGCPLabel: z
    .object({
      labelName: z.string(),
@ -20,6 +24,7 @@ export const IntegrationMetadataSchema = z.object({
    })
    .optional()
    .describe(INTEGRATION.CREATE.metadata.secretGCPLabel),

  secretAWSTag: z
    .array(
      z.object({
@ -29,7 +34,15 @@ export const IntegrationMetadataSchema = z.object({
    )
    .optional()
    .describe(INTEGRATION.CREATE.metadata.secretAWSTag),

  githubVisibility: z
    .union([z.literal("selected"), z.literal("private"), z.literal("all")])
    .optional()
    .describe(INTEGRATION.CREATE.metadata.githubVisibility),
  githubVisibilityRepoIds: z.array(z.string()).optional().describe(INTEGRATION.CREATE.metadata.githubVisibilityRepoIds),

  kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId),

  shouldDisableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete),
  shouldEnableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldEnableDelete),
  shouldMaskSecrets: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldMaskSecrets),
@ -27,6 +27,10 @@ export type TCreateIntegrationDTO = {
    key: string;
    value: string;
  }[];

  githubVisibility?: string;
  githubVisibilityRepoIds?: string[];

  kmsKeyId?: string;
  shouldDisableDelete?: boolean;
  shouldMaskSecrets?: boolean;
@ -12,6 +12,7 @@ import {
} from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols, TFindFilter, TFindOpt, withTransaction } from "@app/lib/knex";
import { generateKnexQueryFromScim } from "@app/lib/knex/scim";

export type TOrgDALFactory = ReturnType<typeof orgDALFactory>;

@ -280,6 +281,67 @@ export const orgDALFactory = (db: TDbClient) => {
        .select(
          selectAllTableCols(TableName.OrgMembership),
          db.ref("email").withSchema(TableName.Users),
          db.ref("isEmailVerified").withSchema(TableName.Users),
          db.ref("username").withSchema(TableName.Users),
          db.ref("firstName").withSchema(TableName.Users),
          db.ref("lastName").withSchema(TableName.Users),
          db.ref("scimEnabled").withSchema(TableName.Organization),
          db.ref("externalId").withSchema(TableName.UserAliases)
        )
        .where({ isGhost: false });

      if (limit) void query.limit(limit);
      if (offset) void query.offset(offset);
      if (sort) {
        void query.orderBy(sort.map(([column, order, nulls]) => ({ column: column as string, order, nulls })));
      }
      const res = await query;
      return res;
    } catch (error) {
      throw new DatabaseError({ error, name: "Find one" });
    }
  };

  const findMembershipWithScimFilter = async (
    orgId: string,
    scimFilter: string | undefined,
    { offset, limit, sort, tx }: TFindOpt<TOrgMemberships> = {}
  ) => {
    try {
      const query = (tx || db.replicaNode())(TableName.OrgMembership)
        // eslint-disable-next-line
        .where(`${TableName.OrgMembership}.orgId`, orgId)
        .where((qb) => {
          if (scimFilter) {
            void generateKnexQueryFromScim(qb, scimFilter, (attrPath) => {
              switch (attrPath) {
                case "active":
                  return `${TableName.OrgMembership}.isActive`;
                case "userName":
                  return `${TableName.UserAliases}.externalId`;
                case "name.givenName":
                  return `${TableName.Users}.firstName`;
                case "name.familyName":
                  return `${TableName.Users}.lastName`;
                case "email.value":
                  return `${TableName.Users}.email`;
                default:
                  return null;
              }
            });
          }
        })
        .join(TableName.Users, `${TableName.Users}.id`, `${TableName.OrgMembership}.userId`)
        .join(TableName.Organization, `${TableName.Organization}.id`, `${TableName.OrgMembership}.orgId`)
        .leftJoin(TableName.UserAliases, function joinUserAlias() {
          this.on(`${TableName.UserAliases}.userId`, "=", `${TableName.OrgMembership}.userId`)
            .andOn(`${TableName.UserAliases}.orgId`, "=", `${TableName.OrgMembership}.orgId`)
            .andOn(`${TableName.UserAliases}.aliasType`, "=", (tx || db).raw("?", ["saml"]));
        })
        .select(
          selectAllTableCols(TableName.OrgMembership),
          db.ref("email").withSchema(TableName.Users),
          db.ref("isEmailVerified").withSchema(TableName.Users),
          db.ref("username").withSchema(TableName.Users),
          db.ref("firstName").withSchema(TableName.Users),
          db.ref("lastName").withSchema(TableName.Users),
@ -314,6 +376,7 @@ export const orgDALFactory = (db: TDbClient) => {
    updateById,
    deleteById,
    findMembership,
    findMembershipWithScimFilter,
    createMembership,
    updateMembershipById,
    deleteMembershipById,
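Hedged usage sketch for the new DAL method, assuming an orgDAL built by orgDALFactory(db): the SCIM filter string arrives verbatim from a GET /scim/Users?filter=... request, and the attrPath switch above confines it to the whitelisted columns.

declare const orgDAL: {
  findMembershipWithScimFilter: (
    orgId: string,
    scimFilter: string | undefined,
    opts?: { offset?: number; limit?: number }
  ) => Promise<unknown[]>;
};
declare const orgId: string;

const members = await orgDAL.findMembershipWithScimFilter(
  orgId, // the organization scoping every SCIM query
  'email.value co "@acme.com" and active eq true',
  { limit: 20, offset: 0 }
);
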
@ -4,6 +4,8 @@ import { TDbClient } from "@app/db";
import { TableName, TSecretFolderVersions } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";

export type TSecretFolderVersionDALFactory = ReturnType<typeof secretFolderVersionDALFactory>;

@ -65,6 +67,7 @@ export const secretFolderVersionDALFactory = (db: TDbClient) => {
  };

  const pruneExcessVersions = async () => {
    logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret folder versions started`);
    try {
      await db(TableName.SecretFolderVersion)
        .with("folder_cte", (qb) => {
@ -89,6 +92,7 @@ export const secretFolderVersionDALFactory = (db: TDbClient) => {
          name: "Secret Folder Version Prune"
        });
    }
    logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret folder versions completed`);
  };

  return { ...secretFolderVerOrm, findLatestFolderVersions, findLatestVersionByFolderId, pruneExcessVersions };
@ -158,9 +158,12 @@ export const fnSecretsV2FromImports = async ({
  depth?: number;
  cyclicDetector?: Set<string>;
  decryptor: (value?: Buffer | null) => string;
  expandSecretReferences?: (
    secrets: Record<string, { value?: string; comment?: string; skipMultilineEncoding?: boolean | null }>
  ) => Promise<Record<string, { value?: string; comment?: string; skipMultilineEncoding?: boolean | null }>>;
  expandSecretReferences?: (inputSecret: {
    value?: string;
    skipMultilineEncoding?: boolean | null;
    secretPath: string;
    environment: string;
  }) => Promise<string | undefined>;
}) => {
  // avoid recursing deeper than the maximum allowed import depth
  if (depth >= LEVEL_BREAK) return [];
@ -244,26 +247,21 @@ export const fnSecretsV2FromImports = async ({
  });

  if (expandSecretReferences) {
    await Promise.all(
      processedImports.map(async (processedImport) => {
        const secretsGroupByKey = processedImport.secrets.reduce(
          (acc, item) => {
            acc[item.secretKey] = {
              value: item.secretValue,
              comment: item.secretComment,
              skipMultilineEncoding: item.skipMultilineEncoding
            };
            return acc;
          },
          {} as Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean | null }>
        );
        // eslint-disable-next-line
        await expandSecretReferences(secretsGroupByKey);
        processedImport.secrets.forEach((decryptedSecret) => {
          // eslint-disable-next-line no-param-reassign
          decryptedSecret.secretValue = secretsGroupByKey[decryptedSecret.secretKey].value;
        });
      })
    await Promise.allSettled(
      processedImports.map((processedImport) =>
        Promise.allSettled(
          processedImport.secrets.map(async (decryptedSecret, index) => {
            const expandedSecretValue = await expandSecretReferences({
              value: decryptedSecret.secretValue,
              secretPath: processedImport.secretPath,
              environment: processedImport.environment,
              skipMultilineEncoding: decryptedSecret.skipMultilineEncoding
            });
            // eslint-disable-next-line no-param-reassign
            processedImport.secrets[index].secretValue = expandedSecretValue || "";
          })
        )
      )
    );
  }

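The new expander contract in isolation: one secret in, one expanded string out, replacing the old API that mutated a whole Record of secrets in place. The reference syntax follows the ${KEY} / ${env.path.KEY} forms handled by the expansion code later in this diff; the concrete values below are illustrative.

type TExpandSecretReferences = (inputSecret: {
  value?: string;
  skipMultilineEncoding?: boolean | null;
  secretPath: string;
  environment: string;
}) => Promise<string | undefined>;

declare const expandSecretReferences: TExpandSecretReferences; // e.g. built by expandSecretReferencesFactory

const expanded = await expandSecretReferences({
  // ${DB_USER} resolves in the same folder; ${prod.db.DB_PASS} is env "prod", path "/db", key "DB_PASS"
  value: "postgres://${DB_USER}:${prod.db.DB_PASS}@db.internal:5432/app",
  secretPath: "/api",
  environment: "prod",
  skipMultilineEncoding: false
});
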
@ -4,6 +4,8 @@ import { TDbClient } from "@app/db";
import { TableName, TSecretSharing } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";

export type TSecretSharingDALFactory = ReturnType<typeof secretSharingDALFactory>;

@ -30,6 +32,7 @@ export const secretSharingDALFactory = (db: TDbClient) => {
  };

  const pruneExpiredSharedSecrets = async (tx?: Knex) => {
    logger.info(`${QueueName.DailyResourceCleanUp}: pruning expired shared secret started`);
    try {
      const today = new Date();
      const docs = await (tx || db)(TableName.SecretSharing)
@ -40,6 +43,7 @@ export const secretSharingDALFactory = (db: TDbClient) => {
          tag: "",
          iv: ""
        });
      logger.info(`${QueueName.DailyResourceCleanUp}: pruning expired shared secret completed`);
      return docs;
    } catch (error) {
      throw new DatabaseError({ error, name: "pruneExpiredSharedSecrets" });
@ -1,3 +1,6 @@
import bcrypt from "bcrypt";

import { TSecretSharing } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { SecretSharingAccessType } from "@app/lib/types";
@ -36,6 +39,7 @@ export const secretSharingServiceFactory = ({
    iv,
    tag,
    name,
    password,
    accessType,
    expiresAt,
    expiresAfterViews
@ -60,8 +64,10 @@ export const secretSharingServiceFactory = ({
      throw new BadRequestError({ message: "Shared secret value too long" });
    }

    const hashedPassword = password ? await bcrypt.hash(password, 10) : null;
    const newSharedSecret = await secretSharingDAL.create({
      name,
      password: hashedPassword,
      encryptedValue,
      hashedHex,
      iv,
@ -77,6 +83,7 @@ export const secretSharingServiceFactory = ({
  };

  const createPublicSharedSecret = async ({
    password,
    encryptedValue,
    hashedHex,
    iv,
@ -102,7 +109,9 @@ export const secretSharingServiceFactory = ({
      throw new BadRequestError({ message: "Shared secret value too long" });
    }

    const hashedPassword = password ? await bcrypt.hash(password, 10) : null;
    const newSharedSecret = await secretSharingDAL.create({
      password: hashedPassword,
      encryptedValue,
      hashedHex,
      iv,
@ -111,6 +120,7 @@ export const secretSharingServiceFactory = ({
      expiresAfterViews,
      accessType
    });

    return { id: newSharedSecret.id };
  };

@ -152,7 +162,21 @@ export const secretSharingServiceFactory = ({
    };
  };

  const getActiveSharedSecretById = async ({ sharedSecretId, hashedHex, orgId }: TGetActiveSharedSecretByIdDTO) => {
  const $decrementSecretViewCount = async (sharedSecret: TSecretSharing, sharedSecretId: string) => {
    const { expiresAfterViews } = sharedSecret;

    if (expiresAfterViews) {
      // decrement the view count if a view-count expiry is set
      await secretSharingDAL.updateById(sharedSecretId, { $decr: { expiresAfterViews: 1 } });
    }

    await secretSharingDAL.updateById(sharedSecretId, {
      lastViewedAt: new Date()
    });
  };

  /** Gets a passwordless secret; validates every requested secret (it must be fresh, i.e. not expired). */
  const getSharedSecretById = async ({ sharedSecretId, hashedHex, orgId, password }: TGetActiveSharedSecretByIdDTO) => {
    const sharedSecret = await secretSharingDAL.findOne({
      id: sharedSecretId,
      hashedHex
@ -169,6 +193,8 @@ export const secretSharingServiceFactory = ({
    if (accessType === SecretSharingAccessType.Organization && orgId !== sharedSecret.orgId)
      throw new UnauthorizedError();

    // every secret passes through here: check whether it is expired first, then decide
    // whether it needs password verification or can be safely sent to the client
    if (expiresAt !== null && expiresAt < new Date()) {
      // check lifetime expiry
      await secretSharingDAL.softDeleteById(sharedSecretId);
@ -185,21 +211,29 @@ export const secretSharingServiceFactory = ({
      });
    }

    if (expiresAfterViews) {
      // decrement view count if view count expiry set
      await secretSharingDAL.updateById(sharedSecretId, { $decr: { expiresAfterViews: 1 } });
    const isPasswordProtected = Boolean(sharedSecret.password);
    const hasProvidedPassword = Boolean(password);
    if (isPasswordProtected) {
      if (hasProvidedPassword) {
        const isMatch = await bcrypt.compare(password as string, sharedSecret.password as string);
        if (!isMatch) throw new UnauthorizedError({ message: "Invalid credentials" });
      } else {
        return { isPasswordProtected };
      }
    }

    await secretSharingDAL.updateById(sharedSecretId, {
      lastViewedAt: new Date()
    });
    // decrement only once we are sure the user will actually view the secret
    await $decrementSecretViewCount(sharedSecret, sharedSecretId);

    return {
      ...sharedSecret,
      orgName:
        sharedSecret.accessType === SecretSharingAccessType.Organization && orgId === sharedSecret.orgId
          ? orgName
          : undefined
      isPasswordProtected,
      secret: {
        ...sharedSecret,
        orgName:
          sharedSecret.accessType === SecretSharingAccessType.Organization && orgId === sharedSecret.orgId
            ? orgName
            : undefined
      }
    };
  };

@ -216,6 +250,6 @@ export const secretSharingServiceFactory = ({
    createPublicSharedSecret,
    getSharedSecrets,
    deleteSharedSecretById,
    getActiveSharedSecretById
    getSharedSecretById
  };
};
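The password handling above in two calls: a cost-10 bcrypt hash is stored at creation time and compared at read time, so the plaintext never touches the database.

import bcrypt from "bcrypt";

const hashedPassword = await bcrypt.hash("hunter2", 10); // persisted on create
const isMatch = await bcrypt.compare("hunter2", hashedPassword); // checked on read; false => UnauthorizedError
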
@ -15,6 +15,7 @@ export type TSharedSecretPermission = {
  orgId: string;
  accessType?: SecretSharingAccessType;
  name?: string;
  password?: string;
};

export type TCreatePublicSharedSecretDTO = {
@ -24,6 +25,7 @@ export type TCreatePublicSharedSecretDTO = {
  tag: string;
  expiresAt: string;
  expiresAfterViews?: number;
  password?: string;
  accessType: SecretSharingAccessType;
};

@ -31,6 +33,11 @@ export type TGetActiveSharedSecretByIdDTO = {
  sharedSecretId: string;
  hashedHex: string;
  orgId?: string;
  password?: string;
};

export type TValidateActiveSharedSecretDTO = TGetActiveSharedSecretByIdDTO & {
  password: string;
};

export type TCreateSharedSecretDTO = TSharedSecretPermission & TCreatePublicSharedSecretDTO;
@ -377,150 +377,116 @@ type TInterpolateSecretArg = {
  folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath">;
};

const MAX_SECRET_REFERENCE_DEPTH = 10;
export const expandSecretReferencesFactory = ({
  projectId,
  decryptSecretValue: decryptSecret,
  secretDAL,
  folderDAL
}: TInterpolateSecretArg) => {
  const fetchSecretFactory = () => {
    const secretCache: Record<string, Record<string, string>> = {};
  const secretCache: Record<string, Record<string, string>> = {};
  const getCacheUniqueKey = (environment: string, secretPath: string) => `${environment}-${secretPath}`;

    return async (secRefEnv: string, secRefPath: string[], secRefKey: string) => {
      const referredSecretPathURL = path.join("/", ...secRefPath);
      const uniqueKey = `${secRefEnv}-${referredSecretPathURL}`;
  const fetchSecret = async (environment: string, secretPath: string, secretKey: string) => {
    const cacheKey = getCacheUniqueKey(environment, secretPath);

      if (secretCache?.[uniqueKey]) {
        return secretCache[uniqueKey][secRefKey];
      }
    if (secretCache?.[cacheKey]) {
      return secretCache[cacheKey][secretKey] || "";
    }

      const folder = await folderDAL.findBySecretPath(projectId, secRefEnv, referredSecretPathURL);
      if (!folder) return "";
      const secrets = await secretDAL.findByFolderId(folder.id);
    const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
    if (!folder) return "";
    const secrets = await secretDAL.findByFolderId(folder.id);

      const decryptedSecret = secrets.reduce<Record<string, string>>((prev, secret) => {
        // eslint-disable-next-line
        prev[secret.key] = decryptSecret(secret.encryptedValue) || "";
        return prev;
      }, {});
    const decryptedSecret = secrets.reduce<Record<string, string>>((prev, secret) => {
      // eslint-disable-next-line no-param-reassign
      prev[secret.key] = decryptSecret(secret.encryptedValue) || "";
      return prev;
    }, {});

      secretCache[uniqueKey] = decryptedSecret;
    secretCache[cacheKey] = decryptedSecret;

      return secretCache[uniqueKey][secRefKey];
    };
    return secretCache[cacheKey][secretKey] || "";
  };

  const recursivelyExpandSecret = async (
    expandedSec: Record<string, string | undefined>,
    interpolatedSec: Record<string, string | undefined>,
    fetchSecret: (env: string, secPath: string[], secKey: string) => Promise<string>,
    recursionChainBreaker: Record<string, boolean>,
    key: string
  ): Promise<string | undefined> => {
    if (expandedSec?.[key] !== undefined) {
      return expandedSec[key];
    }
    if (recursionChainBreaker?.[key]) {
      return "";
    }
    // eslint-disable-next-line
    recursionChainBreaker[key] = true;
  const recursivelyExpandSecret = async (dto: { value?: string; secretPath: string; environment: string }) => {
    if (!dto.value) return "";

    let interpolatedValue = interpolatedSec[key];
    if (!interpolatedValue) {
      // eslint-disable-next-line no-console
      console.error(`Couldn't find referenced value - ${key}`);
      return "";
    }
    const stack = [{ ...dto, depth: 0 }];
    let expandedValue = dto.value;

    const refs = interpolatedValue.match(INTERPOLATION_SYNTAX_REG);
    if (refs) {
      for (const interpolationSyntax of refs) {
        const interpolationKey = interpolationSyntax.slice(2, interpolationSyntax.length - 1);
        const entities = interpolationKey.trim().split(".");
    while (stack.length) {
      const { value, secretPath, environment, depth } = stack.pop()!;
      // eslint-disable-next-line no-continue
      if (depth > MAX_SECRET_REFERENCE_DEPTH) continue;
      const refs = value?.match(INTERPOLATION_SYNTAX_REG);

        if (entities.length === 1) {
          // eslint-disable-next-line
          const val = await recursivelyExpandSecret(
            expandedSec,
            interpolatedSec,
            fetchSecret,
            recursionChainBreaker,
            interpolationKey
          );
          if (val) {
            interpolatedValue = interpolatedValue.replaceAll(interpolationSyntax, val);
          }
          // eslint-disable-next-line
          continue;
        }
      if (refs) {
        for (const interpolationSyntax of refs) {
          const interpolationKey = interpolationSyntax.slice(2, interpolationSyntax.length - 1);
          const entities = interpolationKey.trim().split(".");

        if (entities.length > 1) {
          const secRefEnv = entities[0];
          const secRefPath = entities.slice(1, entities.length - 1);
          const secRefKey = entities[entities.length - 1];
          // eslint-disable-next-line no-continue
          if (!entities.length) continue;

          // eslint-disable-next-line
          const val = await fetchSecret(secRefEnv, secRefPath, secRefKey);
          if (val) {
            interpolatedValue = interpolatedValue.replaceAll(interpolationSyntax, val);
          if (entities.length === 1) {
            const [secretKey] = entities;

            // eslint-disable-next-line no-continue,no-await-in-loop
            const referedValue = await fetchSecret(environment, secretPath, secretKey);
            const cacheKey = getCacheUniqueKey(environment, secretPath);
            secretCache[cacheKey][secretKey] = referedValue;
            if (INTERPOLATION_SYNTAX_REG.test(referedValue)) {
              stack.push({
                value: referedValue,
                secretPath,
                environment,
                depth: depth + 1
              });
            }
            expandedValue = expandedValue.replaceAll(interpolationSyntax, referedValue);
          } else {
            const secretReferenceEnvironment = entities[0];
            const secretReferencePath = path.join("/", ...entities.slice(1, entities.length - 1));
            const secretReferenceKey = entities[entities.length - 1];

            // eslint-disable-next-line no-await-in-loop
            const referedValue = await fetchSecret(secretReferenceEnvironment, secretReferencePath, secretReferenceKey);
            const cacheKey = getCacheUniqueKey(secretReferenceEnvironment, secretReferencePath);
            secretCache[cacheKey][secretReferenceKey] = referedValue;
            if (INTERPOLATION_SYNTAX_REG.test(referedValue)) {
              stack.push({
                value: referedValue,
                secretPath: secretReferencePath,
                environment: secretReferenceEnvironment,
                depth: depth + 1
              });
            }

            expandedValue = expandedValue.replaceAll(interpolationSyntax, referedValue);
          }
        }
      }
    }

    // eslint-disable-next-line
    expandedSec[key] = interpolatedValue;
    return interpolatedValue;
    return expandedValue;
  };

  const fetchSecret = fetchSecretFactory();
  const expandSecrets = async (
    inputSecrets: Record<string, { value?: string; comment?: string; skipMultilineEncoding?: boolean | null }>
  ) => {
    const expandedSecrets: Record<string, string | undefined> = {};
    const toBeExpandedSecrets: Record<string, string | undefined> = {};
  const expandSecret = async (inputSecret: {
    value?: string;
    skipMultilineEncoding?: boolean | null;
    secretPath: string;
    environment: string;
  }) => {
    if (!inputSecret.value) return inputSecret.value;

    Object.keys(inputSecrets).forEach((key) => {
      if (inputSecrets[key].value?.match(INTERPOLATION_SYNTAX_REG)) {
        toBeExpandedSecrets[key] = inputSecrets[key].value;
      } else {
        expandedSecrets[key] = inputSecrets[key].value;
      }
    });
    const shouldExpand = Boolean(inputSecret.value?.match(INTERPOLATION_SYNTAX_REG));
    if (!shouldExpand) return inputSecret.value;

    for (const key of Object.keys(inputSecrets)) {
      if (expandedSecrets?.[key]) {
        // skip multi-line encoding when the user has opted out of it
        // eslint-disable-next-line
        inputSecrets[key].value = inputSecrets[key].skipMultilineEncoding
          ? formatMultiValueEnv(expandedSecrets[key])
          : expandedSecrets[key];
        // eslint-disable-next-line
        continue;
      }

      // this avoids a recursion loop: the reference graph must stay acyclic,
      // so if the same key appears twice in one expansion chain it would loop forever
      const recursionChainBreaker: Record<string, boolean> = {};
      // eslint-disable-next-line
      const expandedVal = await recursivelyExpandSecret(
        expandedSecrets,
        toBeExpandedSecrets,
        fetchSecret,
        recursionChainBreaker,
        key
      );

      // eslint-disable-next-line
      inputSecrets[key].value = inputSecrets[key].skipMultilineEncoding
        ? formatMultiValueEnv(expandedVal)
        : expandedVal;
    }

    return inputSecrets;
    const expandedSecretValue = await recursivelyExpandSecret(inputSecret);
    return inputSecret.skipMultilineEncoding ? formatMultiValueEnv(expandedSecretValue) : expandedSecretValue;
  };
  return expandSecrets;
  return expandSecret;
};

export const reshapeBridgeSecret = (
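A hedged wiring sketch for the factory above; the DAL shapes are stubbed here only to show the call shape (real callers pass the project's Knex-backed DALs, as the secret-queue hunk at the end of this diff does).

declare const expandSecretReferencesFactory: (deps: {
  projectId: string;
  decryptSecretValue: (value: Buffer | null | undefined) => string;
  secretDAL: unknown;
  folderDAL: unknown;
}) => (inputSecret: { value?: string; secretPath: string; environment: string }) => Promise<string | undefined>;
declare const secretDAL: unknown, folderDAL: unknown;
declare const kmsDecrypt: (value: Buffer | null | undefined) => string;

const expandSecretReferences = expandSecretReferencesFactory({
  projectId: "project-id",
  decryptSecretValue: kmsDecrypt,
  secretDAL,
  folderDAL
});

// expands ${...} references iteratively, up to MAX_SECRET_REFERENCE_DEPTH levels
const value = await expandSecretReferences({
  value: "https://${API_HOST}/v1",
  secretPath: "/",
  environment: "dev"
});
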
@ -521,27 +521,22 @@ export const secretV2BridgeServiceFactory = ({

    if (shouldExpandSecretReferences) {
      const secretsGroupByPath = groupBy(filteredSecrets, (i) => i.secretPath);
      for (const secretPathKey in secretsGroupByPath) {
        if (Object.hasOwn(secretsGroupByPath, secretPathKey)) {
          const secretsGroupByKey = secretsGroupByPath[secretPathKey].reduce(
            (acc, item) => {
              acc[item.secretKey] = {
                value: item.secretValue,
                comment: item.secretComment,
                skipMultilineEncoding: item.skipMultilineEncoding
              };
              return acc;
            },
            {} as Record<string, { value?: string; comment?: string; skipMultilineEncoding?: boolean | null }>
          );
          // eslint-disable-next-line
          await expandSecretReferences(secretsGroupByKey);
          secretsGroupByPath[secretPathKey].forEach((decryptedSecret) => {
            // eslint-disable-next-line no-param-reassign
            decryptedSecret.secretValue = secretsGroupByKey[decryptedSecret.secretKey].value || "";
          });
        }
      }
      await Promise.allSettled(
        Object.keys(secretsGroupByPath).map((groupedPath) =>
          Promise.allSettled(
            secretsGroupByPath[groupedPath].map(async (decryptedSecret, index) => {
              const expandedSecretValue = await expandSecretReferences({
                value: decryptedSecret.secretValue,
                secretPath: groupedPath,
                environment,
                skipMultilineEncoding: decryptedSecret.skipMultilineEncoding
              });
              // eslint-disable-next-line no-param-reassign
              secretsGroupByPath[groupedPath][index].secretValue = expandedSecretValue || "";
            })
          )
        )
      );
    }

    if (!includeImports) {
@ -693,12 +688,14 @@ export const secretV2BridgeServiceFactory = ({
        ? secretManagerDecryptor({ cipherTextBlob: secret.encryptedValue }).toString()
        : "";
    if (shouldExpandSecretReferences && secretValue) {
      const secretReferenceExpandedRecord = {
        [secret.key]: { value: secretValue }
      };
      // eslint-disable-next-line
      await expandSecretReferences(secretReferenceExpandedRecord);
      secretValue = secretReferenceExpandedRecord[secret.key].value;
      const expandedSecretValue = await expandSecretReferences({
        environment,
        secretPath: path,
        value: secretValue,
        skipMultilineEncoding: secret.skipMultilineEncoding
      });
      secretValue = expandedSecretValue || "";
    }

    return reshapeBridgeSecret(projectId, environment, path, {
@ -4,6 +4,8 @@ import { TDbClient } from "@app/db";
import { TableName, TSecretVersionsV2, TSecretVersionsV2Update } from "@app/db/schemas";
import { BadRequestError, DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";

export type TSecretVersionV2DALFactory = ReturnType<typeof secretVersionV2BridgeDALFactory>;

@ -87,6 +89,7 @@ export const secretVersionV2BridgeDALFactory = (db: TDbClient) => {
  };

  const pruneExcessVersions = async () => {
    logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret version v2 started`);
    try {
      await db(TableName.SecretVersionV2)
        .with("version_cte", (qb) => {
@ -112,6 +115,7 @@ export const secretVersionV2BridgeDALFactory = (db: TDbClient) => {
          name: "Secret Version Prune"
        });
    }
    logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret version v2 completed`);
  };

  return {
@ -196,6 +196,13 @@ export const recursivelyGetSecretPaths = ({
  return getPaths;
};

// converts multi-line values into quoted single-line values with literal \n escapes
const formatMultiValueEnv = (val?: string) => {
  if (!val) return "";
  if (!val.match("\n")) return val;
  return `"${val.replace(/\n/g, "\\n")}"`;
};

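Behaviour of the helper above at a glance: single-line values pass through untouched, while multi-line values are collapsed to one quoted line so they survive .env-style output.

formatMultiValueEnv("PLAIN_VALUE");
// => PLAIN_VALUE
formatMultiValueEnv("-----BEGIN PRIVATE KEY-----\nMIIE...");
// => "-----BEGIN PRIVATE KEY-----\nMIIE..."  (one line, quoted, \n escaped)
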
type TInterpolateSecretArg = {
|
||||
projectId: string;
|
||||
secretEncKey: string;
|
||||
@ -203,162 +210,128 @@ type TInterpolateSecretArg = {
|
||||
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath">;
|
||||
};
|
||||
|
||||
const MAX_SECRET_REFERENCE_DEPTH = 5;
|
||||
const INTERPOLATION_SYNTAX_REG = /\${([^}]+)}/g;
|
||||
export const interpolateSecrets = ({ projectId, secretEncKey, secretDAL, folderDAL }: TInterpolateSecretArg) => {
|
||||
const fetchSecretsCrossEnv = () => {
|
||||
const fetchCache: Record<string, Record<string, string>> = {};
|
||||
const secretCache: Record<string, Record<string, string>> = {};
|
||||
const getCacheUniqueKey = (environment: string, secretPath: string) => `${environment}-${secretPath}`;
|
||||
|
||||
return async (secRefEnv: string, secRefPath: string[], secRefKey: string) => {
|
||||
const secRefPathUrl = path.join("/", ...secRefPath);
|
||||
const uniqKey = `${secRefEnv}-${secRefPathUrl}`;
|
||||
const fetchSecret = async (environment: string, secretPath: string, secretKey: string) => {
|
||||
const cacheKey = getCacheUniqueKey(environment, secretPath);
|
||||
const uniqKey = `${environment}-${cacheKey}`;
|
||||
|
||||
if (fetchCache?.[uniqKey]) {
|
||||
return fetchCache[uniqKey][secRefKey];
|
||||
}
|
||||
if (secretCache?.[uniqKey]) {
|
||||
return secretCache[uniqKey][secretKey] || "";
|
||||
}
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, secRefEnv, secRefPathUrl);
|
||||
if (!folder) return "";
|
||||
const secrets = await secretDAL.findByFolderId(folder.id);
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
|
||||
if (!folder) return "";
|
||||
const secrets = await secretDAL.findByFolderId(folder.id);
|
||||
|
||||
const decryptedSec = secrets.reduce<Record<string, string>>((prev, secret) => {
|
||||
const secretKey = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretKeyCiphertext,
|
||||
iv: secret.secretKeyIV,
|
||||
tag: secret.secretKeyTag,
|
||||
key: secretEncKey
|
||||
});
|
||||
const secretValue = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretValueCiphertext,
|
||||
iv: secret.secretValueIV,
|
||||
tag: secret.secretValueTag,
|
||||
key: secretEncKey
|
||||
});
|
||||
const decryptedSec = secrets.reduce<Record<string, string>>((prev, secret) => {
|
||||
const decryptedSecretKey = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretKeyCiphertext,
|
||||
iv: secret.secretKeyIV,
|
||||
tag: secret.secretKeyTag,
|
||||
key: secretEncKey
|
||||
});
|
||||
const decryptedSecretValue = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretValueCiphertext,
|
||||
iv: secret.secretValueIV,
|
||||
tag: secret.secretValueTag,
|
||||
key: secretEncKey
|
||||
});
|
||||
|
||||
// eslint-disable-next-line
|
||||
prev[secretKey] = secretValue;
|
||||
return prev;
|
||||
}, {});
|
||||
// eslint-disable-next-line
|
||||
prev[decryptedSecretKey] = decryptedSecretValue;
|
||||
return prev;
|
||||
}, {});
|
||||
|
||||
fetchCache[uniqKey] = decryptedSec;
|
||||
secretCache[uniqKey] = decryptedSec;
|
||||
|
||||
return fetchCache[uniqKey][secRefKey];
|
||||
};
|
||||
return secretCache[uniqKey][secretKey] || "";
|
||||
};
|
||||
|
||||
const recursivelyExpandSecret = async (
|
||||
expandedSec: Record<string, string>,
|
||||
interpolatedSec: Record<string, string>,
|
||||
fetchCrossEnv: (env: string, secPath: string[], secKey: string) => Promise<string>,
|
||||
recursionChainBreaker: Record<string, boolean>,
|
||||
key: string
|
||||
) => {
|
||||
if (expandedSec?.[key] !== undefined) {
|
||||
return expandedSec[key];
|
||||
}
|
||||
if (recursionChainBreaker?.[key]) {
|
||||
return "";
|
||||
}
|
||||
// eslint-disable-next-line
|
||||
recursionChainBreaker[key] = true;
|
||||
const recursivelyExpandSecret = async ({
|
||||
value,
|
||||
secretPath,
|
||||
environment,
|
||||
depth = 0
|
||||
}: {
|
||||
value?: string;
|
||||
secretPath: string;
|
||||
environment: string;
|
||||
depth?: number;
|
||||
}) => {
|
||||
if (!value) return "";
|
||||
if (depth > MAX_SECRET_REFERENCE_DEPTH) return "";
|
||||
|
||||
let interpolatedValue = interpolatedSec[key];
|
||||
if (!interpolatedValue) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.error(`Couldn't find referenced value - ${key}`);
|
||||
return "";
|
||||
}
|
||||
|
||||
const refs = interpolatedValue.match(INTERPOLATION_SYNTAX_REG);
|
||||
const refs = value.match(INTERPOLATION_SYNTAX_REG);
|
||||
let expandedValue = value;
|
||||
if (refs) {
|
||||
for (const interpolationSyntax of refs) {
|
||||
const interpolationKey = interpolationSyntax.slice(2, interpolationSyntax.length - 1);
|
||||
const entities = interpolationKey.trim().split(".");
|
||||
|
||||
if (entities.length === 1) {
|
||||
const val = await recursivelyExpandSecret(
|
||||
expandedSec,
|
||||
interpolatedSec,
|
||||
fetchCrossEnv,
|
||||
recursionChainBreaker,
|
||||
interpolationKey
|
||||
);
|
||||
if (val) {
|
||||
interpolatedValue = interpolatedValue.replaceAll(interpolationSyntax, val);
|
||||
}
|
||||
const [secretKey] = entities;
|
||||
// eslint-disable-next-line
|
||||
continue;
|
||||
let referenceValue = await fetchSecret(environment, secretPath, secretKey);
|
||||
if (INTERPOLATION_SYNTAX_REG.test(referenceValue)) {
|
||||
// eslint-disable-next-line
|
||||
referenceValue = await recursivelyExpandSecret({
|
||||
environment,
|
||||
secretPath,
|
||||
value: referenceValue,
|
||||
depth: depth + 1
|
||||
});
|
||||
}
|
||||
const cacheKey = getCacheUniqueKey(environment, secretPath);
|
||||
secretCache[cacheKey][secretKey] = referenceValue;
|
||||
expandedValue = expandedValue.replaceAll(interpolationSyntax, referenceValue);
|
||||
}
|
||||
|
||||
if (entities.length > 1) {
|
||||
const secRefEnv = entities[0];
|
||||
const secRefPath = entities.slice(1, entities.length - 1);
|
||||
const secRefKey = entities[entities.length - 1];
|
||||
const secretReferenceEnvironment = entities[0];
|
||||
const secretReferencePath = path.join("/", ...entities.slice(1, entities.length - 1));
|
||||
const secretReferenceKey = entities[entities.length - 1];
|
||||
|
||||
const val = await fetchCrossEnv(secRefEnv, secRefPath, secRefKey);
|
||||
if (val) {
|
||||
interpolatedValue = interpolatedValue.replaceAll(interpolationSyntax, val);
|
||||
// eslint-disable-next-line
|
||||
let referenceValue = await fetchSecret(secretReferenceEnvironment, secretReferencePath, secretReferenceKey);
|
||||
if (INTERPOLATION_SYNTAX_REG.test(referenceValue)) {
|
||||
// eslint-disable-next-line
|
||||
referenceValue = await recursivelyExpandSecret({
|
||||
environment: secretReferenceEnvironment,
|
||||
secretPath: secretReferencePath,
|
||||
value: referenceValue,
|
||||
depth: depth + 1
|
||||
});
|
||||
}
|
||||
const cacheKey = getCacheUniqueKey(secretReferenceEnvironment, secretReferencePath);
|
||||
secretCache[cacheKey][secretReferenceKey] = referenceValue;
|
||||
expandedValue = expandedValue.replaceAll(interpolationSyntax, referenceValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// eslint-disable-next-line
|
||||
expandedSec[key] = interpolatedValue;
|
||||
return interpolatedValue;
|
||||
return expandedValue;
|
||||
};
|
||||
|
||||
// used to convert multi line ones to quotes ones with \n
|
||||
const formatMultiValueEnv = (val?: string) => {
|
||||
if (!val) return "";
|
||||
if (!val.match("\n")) return val;
|
||||
return `"${val.replace(/\n/g, "\\n")}"`;
|
||||
const expandSecret = async (inputSecret: {
value?: string;
skipMultilineEncoding?: boolean | null;
secretPath: string;
environment: string;
}) => {
if (!inputSecret.value) return inputSecret.value;

const shouldExpand = Boolean(inputSecret.value?.match(INTERPOLATION_SYNTAX_REG));
if (!shouldExpand) return inputSecret.value;

const expandedSecretValue = await recursivelyExpandSecret(inputSecret);
return inputSecret.skipMultilineEncoding ? formatMultiValueEnv(expandedSecretValue) : expandedSecretValue;
};

const expandSecrets = async (
secrets: Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean | null }>
) => {
const expandedSec: Record<string, string> = {};
const interpolatedSec: Record<string, string> = {};

const crossSecEnvFetch = fetchSecretsCrossEnv();

Object.keys(secrets).forEach((key) => {
if (secrets[key].value.match(INTERPOLATION_SYNTAX_REG)) {
interpolatedSec[key] = secrets[key].value;
} else {
expandedSec[key] = secrets[key].value;
}
});

for (const key of Object.keys(secrets)) {
if (expandedSec?.[key]) {
// should not do multi-line encoding if the user has set it to skip
// eslint-disable-next-line
secrets[key].value = secrets[key].skipMultilineEncoding
? formatMultiValueEnv(expandedSec[key])
: expandedSec[key];
// eslint-disable-next-line
continue;
}

// this avoids a recursion loop: the reference graph must be a directed acyclic graph rather than cyclic
// if the same key is visited twice while expanding, the references form a loop
const recursionChainBreaker: Record<string, boolean> = {};
const expandedVal = await recursivelyExpandSecret(
expandedSec,
interpolatedSec,
crossSecEnvFetch,
recursionChainBreaker,
key
);

// eslint-disable-next-line
secrets[key].value = secrets[key].skipMultilineEncoding ? formatMultiValueEnv(expandedVal) : expandedVal;
}

return secrets;
};
return expandSecrets;
return expandSecret;
};
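A minimal usage sketch of the factory above, assuming it is exported as `interpolateSecrets` (as the service code further down calls it); the DAL instances and bot key below are placeholders:

```ts
// Placeholders for dependencies that the service layer normally wires up.
declare const folderDAL: any;
declare const secretDAL: any;
declare const botKey: string;

const expandSecret = interpolateSecrets({ projectId: "my-project-id", secretEncKey: botKey, folderDAL, secretDAL });

// `${DB_HOST}` references a key on the same path; `${dev.DB_HOST}` references the
// `dev` environment at the root path, per the entity parsing above.
const expanded = await expandSecret({
  environment: "dev",
  secretPath: "/",
  value: "postgres://${DB_HOST}:5432/app",
  skipMultilineEncoding: false
});
```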

export const decryptSecretRaw = (
@@ -258,6 +258,7 @@ export const secretQueueFactory = ({
const getIntegrationSecretsV2 = async (dto: {
projectId: string;
environment: string;
secretPath: string;
folderId: string;
depth: number;
decryptor: (value: Buffer | null | undefined) => string;
@@ -269,30 +270,36 @@ export const secretQueueFactory = ({
);
return content;
}

// process secrets in current folder
const secrets = await secretV2BridgeDAL.findByFolderId(dto.folderId);
secrets.forEach((secret) => {
const secretKey = secret.key;
const secretValue = dto.decryptor(secret.encryptedValue);
content[secretKey] = { value: secretValue };

if (secret.encryptedComment) {
const commentValue = dto.decryptor(secret.encryptedComment);
content[secretKey].comment = commentValue;
}

content[secretKey].skipMultilineEncoding = Boolean(secret.skipMultilineEncoding);
});

const expandSecretReferences = expandSecretReferencesFactory({
decryptSecretValue: dto.decryptor,
secretDAL: secretV2BridgeDAL,
folderDAL,
projectId: dto.projectId
});
// process secrets in current folder
const secrets = await secretV2BridgeDAL.findByFolderId(dto.folderId);

await Promise.allSettled(
secrets.map(async (secret) => {
const secretKey = secret.key;
const secretValue = dto.decryptor(secret.encryptedValue);
const expandedSecretValue = await expandSecretReferences({
environment: dto.environment,
secretPath: dto.secretPath,
skipMultilineEncoding: secret.skipMultilineEncoding,
value: secretValue
});
content[secretKey] = { value: expandedSecretValue || "" };

if (secret.encryptedComment) {
const commentValue = dto.decryptor(secret.encryptedComment);
content[secretKey].comment = commentValue;
}

content[secretKey].skipMultilineEncoding = Boolean(secret.skipMultilineEncoding);
})
);

await expandSecretReferences(content);
// check if current folder has any imports from other folders
const secretImports = await secretImportDAL.find({ folderId: dto.folderId, isReplication: false });

@@ -329,6 +336,7 @@ export const secretQueueFactory = ({
const getIntegrationSecrets = async (dto: {
projectId: string;
environment: string;
secretPath: string;
folderId: string;
key: string;
depth: number;
@@ -341,46 +349,52 @@ export const secretQueueFactory = ({
return content;
}

// process secrets in current folder
const secrets = await secretDAL.findByFolderId(dto.folderId);
secrets.forEach((secret) => {
const secretKey = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretKeyCiphertext,
iv: secret.secretKeyIV,
tag: secret.secretKeyTag,
key: dto.key
});

const secretValue = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretValueCiphertext,
iv: secret.secretValueIV,
tag: secret.secretValueTag,
key: dto.key
});

content[secretKey] = { value: secretValue };

if (secret.secretCommentCiphertext && secret.secretCommentIV && secret.secretCommentTag) {
const commentValue = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretCommentCiphertext,
iv: secret.secretCommentIV,
tag: secret.secretCommentTag,
key: dto.key
});
content[secretKey].comment = commentValue;
}

content[secretKey].skipMultilineEncoding = Boolean(secret.skipMultilineEncoding);
});

const expandSecrets = interpolateSecrets({
const expandSecretReferences = interpolateSecrets({
projectId: dto.projectId,
secretEncKey: dto.key,
folderDAL,
secretDAL
});

await expandSecrets(content);
// process secrets in current folder
const secrets = await secretDAL.findByFolderId(dto.folderId);
await Promise.allSettled(
secrets.map(async (secret) => {
const secretKey = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretKeyCiphertext,
iv: secret.secretKeyIV,
tag: secret.secretKeyTag,
key: dto.key
});

const secretValue = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretValueCiphertext,
iv: secret.secretValueIV,
tag: secret.secretValueTag,
key: dto.key
});
const expandedSecretValue = await expandSecretReferences({
environment: dto.environment,
secretPath: dto.secretPath,
skipMultilineEncoding: secret.skipMultilineEncoding,
value: secretValue
});

content[secretKey] = { value: expandedSecretValue || "" };

if (secret.secretCommentCiphertext && secret.secretCommentIV && secret.secretCommentTag) {
const commentValue = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretCommentCiphertext,
iv: secret.secretCommentIV,
tag: secret.secretCommentTag,
key: dto.key
});
content[secretKey].comment = commentValue;
}

content[secretKey].skipMultilineEncoding = Boolean(secret.skipMultilineEncoding);
})
);

// check if current folder has any imports from other folders
const secretImport = await secretImportDAL.find({ folderId: dto.folderId, isReplication: false });
@@ -404,7 +418,8 @@ export const secretQueueFactory = ({
projectId: dto.projectId,
folderId: folder.id,
key: dto.key,
depth: dto.depth + 1
depth: dto.depth + 1,
secretPath: dto.secretPath
});

// add the imported secrets to the current folder secrets
@@ -686,6 +701,7 @@ export const secretQueueFactory = ({
projectId,
folderId: folder.id,
depth: 1,
secretPath,
decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : "")
})
: await getIntegrationSecrets({
@@ -693,7 +709,8 @@ export const secretQueueFactory = ({
projectId,
folderId: folder.id,
key: botKey as string,
depth: 1
depth: 1,
secretPath
});

for (const integration of toBeSyncedIntegrations) {
@@ -482,7 +482,7 @@ export const secretServiceFactory = ({
projectId,
environmentSlug: folder.environment.slug
});
// TODO(akhilmhdh-pg): licence check, posthog service and snapshot
// TODO(akhilmhdh-pg): license check, posthog service and snapshot
return { ...deletedSecret[0], _id: deletedSecret[0].id, workspace: projectId, environment, secretPath: path };
};

@@ -1047,74 +1047,47 @@ export const secretServiceFactory = ({
};
});

const expandSecret = interpolateSecrets({
folderDAL,
projectId,
secretDAL,
secretEncKey: botKey
});

if (expandSecretReferences) {
const expandSecrets = interpolateSecrets({
folderDAL,
projectId,
secretDAL,
secretEncKey: botKey
});

const batchSecretsExpand = async (
secretBatch: {
secretKey: string;
secretValue: string;
secretComment?: string;
secretPath: string;
skipMultilineEncoding: boolean | null | undefined;
}[]
) => {
// Group secrets by secretPath
const secretsByPath: Record<
string,
{
secretKey: string;
secretValue: string;
secretComment?: string;
skipMultilineEncoding: boolean | null | undefined;
}[]
> = {};

secretBatch.forEach((secret) => {
if (!secretsByPath[secret.secretPath]) {
secretsByPath[secret.secretPath] = [];
}
secretsByPath[secret.secretPath].push(secret);
});

// Expand secrets for each group
for (const secPath in secretsByPath) {
if (!Object.hasOwn(secretsByPath, secPath)) {
// eslint-disable-next-line no-continue
continue;
}

const secretRecord: Record<
string,
{ value: string; comment?: string; skipMultilineEncoding: boolean | null | undefined }
> = {};
secretsByPath[secPath].forEach((decryptedSecret) => {
secretRecord[decryptedSecret.secretKey] = {
value: decryptedSecret.secretValue,
comment: decryptedSecret.secretComment,
skipMultilineEncoding: decryptedSecret.skipMultilineEncoding
};
});

await expandSecrets(secretRecord);

secretsByPath[secPath].forEach((decryptedSecret) => {
// eslint-disable-next-line no-param-reassign
decryptedSecret.secretValue = secretRecord[decryptedSecret.secretKey].value;
});
}
};

// expand secrets
await batchSecretsExpand(filteredSecrets);

// expand imports by batch
await Promise.all(processedImports.map((processedImport) => batchSecretsExpand(processedImport.secrets)));
const secretsGroupByPath = groupBy(filteredSecrets, (i) => i.secretPath);
await Promise.allSettled(
Object.keys(secretsGroupByPath).map((groupedPath) =>
Promise.allSettled(
secretsGroupByPath[groupedPath].map(async (decryptedSecret, index) => {
const expandedSecretValue = await expandSecret({
value: decryptedSecret.secretValue,
secretPath: groupedPath,
environment,
skipMultilineEncoding: decryptedSecret.skipMultilineEncoding
});
// eslint-disable-next-line no-param-reassign
secretsGroupByPath[groupedPath][index].secretValue = expandedSecretValue || "";
})
)
)
);
await Promise.allSettled(
processedImports.map((processedImport) =>
Promise.allSettled(
processedImport.secrets.map(async (decryptedSecret, index) => {
const expandedSecretValue = await expandSecret({
value: decryptedSecret.secretValue,
secretPath: path,
environment,
skipMultilineEncoding: decryptedSecret.skipMultilineEncoding
});
// eslint-disable-next-line no-param-reassign
processedImport.secrets[index].secretValue = expandedSecretValue || "";
})
)
)
);
}
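The rewrite above leans on `groupBy` returning a record keyed by each secret's path; a sketch of the assumed behavior with made-up data:

```ts
const grouped = groupBy(
  [
    { secretPath: "/", secretKey: "API_URL" },
    { secretPath: "/billing", secretKey: "STRIPE_KEY" }
  ],
  (i) => i.secretPath
);
// grouped => {
//   "/": [{ secretPath: "/", secretKey: "API_URL" }],
//   "/billing": [{ secretPath: "/billing", secretKey: "STRIPE_KEY" }]
// }
```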

return {
@@ -1177,40 +1150,19 @@ export const secretServiceFactory = ({
const decryptedSecret = decryptSecretRaw(encryptedSecret, botKey);

if (expandSecretReferences) {
const expandSecrets = interpolateSecrets({
const expandSecret = interpolateSecrets({
folderDAL,
projectId,
secretDAL,
secretEncKey: botKey
});

const expandSingleSecret = async (secret: {
secretKey: string;
secretValue: string;
secretComment?: string;
secretPath: string;
skipMultilineEncoding: boolean | null | undefined;
}) => {
const secretRecord: Record<
string,
{ value: string; comment?: string; skipMultilineEncoding: boolean | null | undefined }
> = {
[secret.secretKey]: {
value: secret.secretValue,
comment: secret.secretComment,
skipMultilineEncoding: secret.skipMultilineEncoding
}
};

await expandSecrets(secretRecord);

// Update the secret with the expanded value
// eslint-disable-next-line no-param-reassign
secret.secretValue = secretRecord[secret.secretKey].value;
};

// Expand the secret
await expandSingleSecret(decryptedSecret);
const expandedSecretValue = await expandSecret({
environment,
secretPath: path,
value: decryptedSecret.secretValue,
skipMultilineEncoding: decryptedSecret.skipMultilineEncoding
});
decryptedSecret.secretValue = expandedSecretValue || "";
}

return decryptedSecret;
@@ -4,6 +4,8 @@ import { TDbClient } from "@app/db";
import { TableName, TSecretVersions, TSecretVersionsUpdate } from "@app/db/schemas";
import { BadRequestError, DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";

export type TSecretVersionDALFactory = ReturnType<typeof secretVersionDALFactory>;

@@ -112,6 +114,7 @@ export const secretVersionDALFactory = (db: TDbClient) => {
};

const pruneExcessVersions = async () => {
logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret version v1 started`);
try {
await db(TableName.SecretVersion)
.with("version_cte", (qb) => {
@@ -137,6 +140,7 @@ export const secretVersionDALFactory = (db: TDbClient) => {
name: "Secret Version Prune"
});
}
logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret version v1 completed`);
};

return {
@@ -1,4 +1,4 @@
import tsconfigPaths from "vite-tsconfig-paths"; // only if you are using custom tsconfig paths
import path from "path";
import { defineConfig } from "vitest/config";

export default defineConfig({
@@ -15,7 +15,14 @@ export default defineConfig({
useAtomics: true,
isolate: false
}
},
alias: {
"./license-fns": path.resolve(__dirname, "./src/ee/services/license/__mocks__/license-fns")
}
},
plugins: [tsconfigPaths()], // only if you are using custom tsconfig paths
resolve: {
alias: {
"@app": path.resolve(__dirname, "./src")
}
}
});
@@ -11,12 +11,25 @@ sinks:
    config:
      path: "access-token"
templates:
  - source-path: my-dot-ev-secret-template
  - template-content: |
      {{- with secret "202f04d7-e4cb-43d4-a292-e893712d61fc" "dev" "/" }}
      {{- range . }}
      {{ .Key }}={{ .Value }}
      {{- end }}
      {{- end }}
    destination-path: my-dot-env-0.env
    config:
      polling-interval: 60s
      execute:
        command: docker-compose -f docker-compose.prod.yml down && docker-compose -f docker-compose.prod.yml up -d

  - base64-template-content: e3stIHdpdGggc2VjcmV0ICIyMDJmMDRkNy1lNGNiLTQzZDQtYTI5Mi1lODkzNzEyZDYxZmMiICJkZXYiICIvIiB9fQp7ey0gcmFuZ2UgLiB9fQp7eyAuS2V5IH19PXt7IC5WYWx1ZSB9fQp7ey0gZW5kIH19Cnt7LSBlbmQgfX0=
    destination-path: my-dot-env.env
    config:
      polling-interval: 60s
      execute:
        command: docker-compose -f docker-compose.prod.yml down && docker-compose -f docker-compose.prod.yml up -d
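The `base64-template-content` value is simply the template body encoded as base64. One way to produce it, sketched with Node.js (the file name below is hypothetical):

```ts
import { readFileSync } from "fs";

// Encode a local agent template for use as base64-template-content.
const encoded = readFileSync("my-secret-template.tmpl").toString("base64");
console.log(encoded);
```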
  - source-path: my-dot-ev-secret-template1
    destination-path: my-dot-env-1.env
    config:
@@ -95,6 +95,7 @@ type Template struct {
SourcePath string `yaml:"source-path"`
Base64TemplateContent string `yaml:"base64-template-content"`
DestinationPath string `yaml:"destination-path"`
TemplateContent string `yaml:"template-content"`

Config struct { // Configurations for the template
PollingInterval string `yaml:"polling-interval"` // How often to poll for changes in the secret
@@ -432,6 +433,30 @@ func ProcessBase64Template(templateId int, encodedTemplate string, data interface{}, ...
return &buf, nil
}

func ProcessLiteralTemplate(templateId int, templateString string, data interface{}, accessToken string, existingEtag string, currentEtag *string, dynamicSecretLeaser *DynamicSecretLeaseManager) (*bytes.Buffer, error) {
secretFunction := secretTemplateFunction(accessToken, existingEtag, currentEtag) // TODO: Fix this
dynamicSecretFunction := dynamicSecretTemplateFunction(accessToken, dynamicSecretLeaser, templateId)
funcs := template.FuncMap{
"secret": secretFunction,
"dynamic_secret": dynamicSecretFunction,
}

templateName := "literalTemplate"

tmpl, err := template.New(templateName).Funcs(funcs).Parse(templateString)
if err != nil {
return nil, err
}

var buf bytes.Buffer
if err := tmpl.Execute(&buf, data); err != nil {
return nil, err
}

return &buf, nil
}

type AgentManager struct {
accessToken string
accessTokenTTL time.Duration
@@ -820,6 +845,8 @@ func (tm *AgentManager) MonitorSecretChanges(secretTemplate Template, templateId

if secretTemplate.SourcePath != "" {
processedTemplate, err = ProcessTemplate(templateId, secretTemplate.SourcePath, nil, token, existingEtag, &currentEtag, tm.dynamicSecretLeases)
} else if secretTemplate.TemplateContent != "" {
processedTemplate, err = ProcessLiteralTemplate(templateId, secretTemplate.TemplateContent, nil, token, existingEtag, &currentEtag, tm.dynamicSecretLeases)
} else {
processedTemplate, err = ProcessBase64Template(templateId, secretTemplate.Base64TemplateContent, nil, token, existingEtag, &currentEtag, tm.dynamicSecretLeases)
}
@@ -19,10 +19,11 @@ Every new joiner has an onboarding buddy who should ideally be in the same t
1. Join the weekly all-hands meeting. It typically happens on Mondays at 8:30am PT.
2. Ship something together on day one – even if tiny! It feels great to hit the ground running, with a development environment all ready to go.
3. Check out the [Areas of Responsibility (AoR) Table](https://docs.google.com/spreadsheets/d/1RnXlGFg83Sgu0dh7ycuydsSobmFfI3A0XkGw7vrVxEI/edit?usp=sharing). This is helpful to know who you can ask about particular areas of Infisical. Feel free to add yourself to the areas you'd be most interested in diving into.
4. Read the [Infisical Strategy Doc](https://docs.google.com/document/d/1oy_NP1Q_Zt1oqxLpyNkLIGmhAI3N28AmZq6dDIOONSQ/edit?usp=sharing).
4. Read the [Infisical Strategy Doc](https://docs.google.com/document/d/1RaJd3RoS2QpWLFHlgfHaXnHqCCwRt6mCGZkbJ75J_D0/edit?usp=sharing).
5. Update your LinkedIn profile with one of [Infisical's official banners](https://drive.google.com/drive/u/0/folders/1oSNWjbpRl9oNYwxM_98IqzKs9fAskrb2) (if you want to). You can also coordinate your social posts in the #marketing Slack channel, so that we can boost it from Infisical's official social media accounts.
6. Over the first few weeks, feel free to schedule 1:1s with folks on the team to get to know them a bit better.
7. Change your Slack username in the users channel to `[NAME] (Infisical)`.
8. Go through the [technical overview](https://infisical.com/docs/internals/overview) of Infisical.
9. Request a company credit card (Maidul will be able to help with that).
docs/documentation/platform/dynamic-secrets/aws-elasticache.mdx (new file, 144 lines)
@@ -0,0 +1,144 @@
---
title: "AWS ElastiCache"
description: "Learn how to dynamically generate AWS ElastiCache user credentials."
---

The Infisical AWS ElastiCache dynamic secret allows you to generate AWS ElastiCache credentials on demand based on a configured role.

## Prerequisites

2. Create an AWS IAM user with the following permissions:
```json
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Sid": "",
      "Effect": "Allow",
      "Action": [
        "elasticache:DescribeUsers",
        "elasticache:ModifyUser",
        "elasticache:CreateUser",
        "elasticache:CreateUserGroup",
        "elasticache:DeleteUser",
        "elasticache:DescribeReplicationGroups",
        "elasticache:DescribeUserGroups",
        "elasticache:ModifyReplicationGroup",
        "elasticache:ModifyUserGroup"
      ],
      "Resource": "arn:aws:elasticache:<region>:<account-id>:user:*"
    }
  ]
}
```

3. Create an access key ID and secret access key for the user you created in the previous step. You will need these to configure the Infisical dynamic secret.

<Note>
New leases may take up to a couple of minutes before ElastiCache finishes configuring them.
It is recommended to use a retry strategy when establishing new ElastiCache connections.
This can prevent errors when trying to use a password that isn't yet live on the targeted ElastiCache cluster.

While a lease is being created, you will be unable to create new leases for the same dynamic secret.
</Note>
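As the note suggests, a brief retry loop around the initial connection absorbs that propagation delay. A minimal sketch in TypeScript, assuming the `ioredis` client and placeholder connection details:

```ts
import Redis from "ioredis";

// Retry a new ElastiCache (Redis-compatible) connection until the lease's
// credentials become live. Host and credentials below are placeholders.
async function connectWithRetry(attempts = 5, delayMs = 10_000): Promise<Redis> {
  for (let i = 1; i <= attempts; i += 1) {
    const client = new Redis({
      host: "your-cluster.cache.amazonaws.com",
      port: 6379,
      username: "<leased-username>",
      password: "<leased-password>",
      tls: {}, // transit encryption must be enabled on the cluster
      lazyConnect: true
    });
    try {
      await client.connect();
      await client.ping();
      return client; // credentials are live
    } catch (err) {
      client.disconnect();
      if (i === attempts) throw err;
      await new Promise((resolve) => setTimeout(resolve, delayMs));
    }
  }
  throw new Error("unreachable");
}
```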

<Note>
Please ensure that your ElastiCache cluster has transit encryption enabled and set to required. This is necessary for the dynamic secret to work.
</Note>

## Set up Dynamic Secrets with AWS ElastiCache

<Steps>
<Step title="Open Secret Overview Dashboard">
Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
</Step>
<Step title="Click on the 'Add Dynamic Secret' button">

</Step>
<Step title="Select 'AWS ElastiCache'">

</Step>
<Step title="Provide the inputs for dynamic secret parameters">
<ParamField path="Secret Name" type="string" required>
Name by which you want the secret to be referenced
</ParamField>

<ParamField path="Default TTL" type="string" required>
Default time-to-live for a generated secret (it is possible to modify this value when a secret is generated)
</ParamField>

<ParamField path="Max TTL" type="string" required>
Maximum time-to-live for a generated secret.
</ParamField>

<ParamField path="Region" type="string" required>
The region that the ElastiCache cluster is located in. _(e.g. us-east-1)_
</ParamField>

<ParamField path="Access Key ID" type="string" required>
This is the access key ID of the AWS IAM user you created in the prerequisites. It will be used to provision and manage the dynamic secret leases.
</ParamField>

<ParamField path="Secret Access Key" type="string" required>
This is the secret access key of the AWS IAM user you created in the prerequisites. It will be used to provision and manage the dynamic secret leases.
</ParamField>

<ParamField path="CA(SSL)" type="string">
A CA may be required if your DB requires it for incoming connections. This is often the case when connecting to a managed service.
</ParamField>

</Step>
<Step title="(Optional) Modify ElastiCache Statements">
If you want to provide specific privileges for the generated dynamic credentials, you can modify the ElastiCache statement to your needs. This is useful if you want to give access to only a specific table or tables.

</Step>
<Step title="Click `Submit`">
After submitting the form, you will see a dynamic secret created in the dashboard.

<Note>
If this step fails, you may have to add the CA certificate.
</Note>

</Step>
<Step title="Generate dynamic secrets">
Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.

When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.

<Tip>
Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret.
</Tip>

Once you click the `Submit` button, a new secret lease will be generated and the credentials from it will be shown to you.

</Step>
</Steps>

## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you to see the expiration time of each lease and delete a lease before its time to live is up.

## Renew Leases
To extend the life of generated dynamic secret leases past their initial time to live, simply click **Renew** as illustrated below.

<Warning>
Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret.
</Warning>
@@ -1,6 +1,6 @@
---
title: "AWS IAM"
description: "How to dynamically generate AWS IAM Users."
description: "Learn how to dynamically generate AWS IAM Users."
---

The Infisical AWS IAM dynamic secret allows you to generate AWS IAM Users on demand based on a configured AWS policy.

@@ -1,6 +1,6 @@
---
title: "Cassandra"
description: "How to dynamically generate Cassandra database users."
description: "Learn how to dynamically generate Cassandra database user credentials."
---

The Infisical Cassandra dynamic secret allows you to generate Cassandra database credentials on demand based on a configured role.
docs/documentation/platform/dynamic-secrets/elastic-search.mdx (new file, 127 lines)
@@ -0,0 +1,127 @@
---
title: "Elasticsearch"
description: "Learn how to dynamically generate Elasticsearch user credentials."
---

The Infisical Elasticsearch dynamic secret allows you to generate Elasticsearch credentials on demand based on a configured role.

## Prerequisites

1. Create a role with at least `manage_security` and `monitor` permissions.
2. Assign the newly created role to the API key or user that you'll use later in the dynamic secret configuration.

<Note>
For testing purposes, you can also use a highly privileged role like `superuser`, which has full control over the cluster. Following the principle of least privilege, this is not recommended in production environments.
</Note>
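A sketch of creating such a role through Elasticsearch's `_security/role` API (the role name and credentials below are hypothetical):

```ts
// Create a minimal role for Infisical's dynamic secret provisioning.
const res = await fetch("https://your-cluster-ip:9200/_security/role/infisical-provisioner", {
  method: "PUT",
  headers: {
    "Content-Type": "application/json",
    Authorization: `Basic ${Buffer.from("elastic:changeme").toString("base64")}`
  },
  body: JSON.stringify({ cluster: ["manage_security", "monitor"] })
});
console.log(res.status, await res.json());
```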

## Set up Dynamic Secrets with Elasticsearch

<Steps>
<Step title="Open Secret Overview Dashboard">
Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
</Step>
<Step title="Click on the 'Add Dynamic Secret' button">

</Step>
<Step title="Select 'Elasticsearch'">

</Step>
<Step title="Provide the inputs for dynamic secret parameters">
<ParamField path="Secret Name" type="string" required>
Name by which you want the secret to be referenced
</ParamField>

<ParamField path="Default TTL" type="string" required>
Default time-to-live for a generated secret (it is possible to modify this value when a secret is generated)
</ParamField>

<ParamField path="Max TTL" type="string" required>
Maximum time-to-live for a generated secret.
</ParamField>

<ParamField path="Host" type="string" required>
Your Elasticsearch host. This is the endpoint that your instance runs on. _(Example: https://your-cluster-ip)_
</ParamField>

<ParamField path="Port" type="string" required>
The port that your Elasticsearch instance is running on. _(Example: 9200)_
</ParamField>

<ParamField path="Roles" type="string[]" required>
The roles that will be assigned to the new user created when a lease is provisioned. This field is required and defaults to `superuser`, which is highly privileged. It is recommended to create a new role with the least privileges required for the lease.
</ParamField>

<ParamField path="Authentication Method" type="API Key | Username/Password" required>
Select the authentication method you want to use to connect to your Elasticsearch instance.
</ParamField>

<ParamField path="Username" type="string" required>
The username of the user that will be used to provision new dynamic secret leases. Only required if you selected the `Username/Password` authentication method.
</ParamField>

<ParamField path="Password" type="string" required>
The password of the user that will be used to provision new dynamic secret leases. Only required if you selected the `Username/Password` authentication method.
</ParamField>

<ParamField path="API Key ID" required>
The ID of the API key that will be used to provision new dynamic secret leases. Only required if you selected the `API Key` authentication method.
</ParamField>

<ParamField path="API Key" required>
The API key that will be used to provision new dynamic secret leases. Only required if you selected the `API Key` authentication method.
</ParamField>

<ParamField path="CA(SSL)" type="string">
A CA may be required if your DB requires it for incoming connections. This is often the case when connecting to a managed service.
</ParamField>

</Step>
<Step title="Click `Submit`">
After submitting the form, you will see a dynamic secret created in the dashboard.

<Note>
If this step fails, you may have to add the CA certificate.
</Note>

</Step>
<Step title="Generate dynamic secrets">
Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.

When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.

<Tip>
Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret.
</Tip>

Once you click the `Submit` button, a new secret lease will be generated and the credentials from it will be shown to you.

</Step>
</Steps>

## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you to see the expiration time of each lease and delete a lease before its time to live is up.

## Renew Leases
To extend the life of generated dynamic secret leases past their initial time to live, simply click **Renew** as illustrated below.

<Warning>
Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret.
</Warning>
docs/documentation/platform/dynamic-secrets/mongo-atlas.mdx (new file, 114 lines)
@@ -0,0 +1,114 @@
---
title: "Mongo Atlas"
description: "Learn how to dynamically generate Mongo Atlas Database user credentials."
---

The Infisical Mongo Atlas dynamic secret allows you to generate Mongo Atlas Database credentials on demand based on a configured role.

## Prerequisite
Create a project-scoped API key with the required permissions in your Mongo Atlas project by following the [official doc](https://www.mongodb.com/docs/atlas/configure-api-access/#grant-programmatic-access-to-a-project).

<Info>
The API key must have permission to manage users in the project.
</Info>

## Set up Dynamic Secrets with Mongo Atlas

<Steps>
<Step title="Open Secret Overview Dashboard">
Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
</Step>
<Step title="Click on the 'Add Dynamic Secret' button">

</Step>
<Step title="Select Mongo Atlas">

</Step>
<Step title="Provide the inputs for dynamic secret parameters">
<ParamField path="Secret Name" type="string" required>
Name by which you want the secret to be referenced
</ParamField>

<ParamField path="Default TTL" type="string" required>
Default time-to-live for a generated secret (it is possible to modify this value when a secret is generated)
</ParamField>

<ParamField path="Max TTL" type="string" required>
Maximum time-to-live for a generated secret
</ParamField>

<ParamField path="Admin public key" type="string" required>
The public key of your generated Atlas API key. This acts as a username.
</ParamField>

<ParamField path="Admin private key" type="string" required>
The private key of your generated Atlas API key. This acts as a password.
</ParamField>

<ParamField path="Group ID" type="number" required>
Unique 24-hexadecimal-digit string that identifies your project. This is the same as the project ID.
</ParamField>

<ParamField path="Roles" type="string" required>
List that provides the pairings of one role with one applicable database; see the sketch after this field.
- **Database Name**: Database to which the user is granted access privileges.
- **Collection**: Collection on which this role applies.
- **Role Name**: Human-readable label that identifies a group of privileges assigned to a database user. This value can either be a built-in role or a custom role.
- Enum: `atlasAdmin` `backup` `clusterMonitor` `dbAdmin` `dbAdminAnyDatabase` `enableSharding` `read` `readAnyDatabase` `readWrite` `readWriteAnyDatabase` `<a custom role name>`.
</ParamField>
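For reference, a single pairing as it would appear in the underlying Atlas database user payload (field names assume the Atlas Admin API's schema; values are hypothetical):

```ts
const roles = [
  {
    databaseName: "myAppDb",  // database the user may access
    collectionName: "orders", // optional: restrict the role to one collection
    roleName: "readWrite"     // built-in or custom role
  }
];
```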

</Step>
<Step title="(Optional) Modify Access Scope">
List that contains clusters, MongoDB Atlas Data Lakes, and MongoDB Atlas Streams Instances that this database user can access. If omitted, MongoDB Cloud grants the database user access to all the clusters, MongoDB Atlas Data Lakes, and MongoDB Atlas Streams Instances in the project.

- **Label**: Human-readable label that identifies the cluster or MongoDB Atlas Data Lake that this database user can access.
- **Type**: Category of resource that this database user can access.
</Step>
<Step title="Click 'Submit'">
After submitting the form, you will see a dynamic secret created in the dashboard.

<Note>
If this step fails, you may have to add the CA certificate.
</Note>

</Step>
<Step title="Generate dynamic secrets">
Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.

When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.

<Tip>
Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret.
</Tip>

Once you click the `Submit` button, a new secret lease will be generated and the credentials for it will be shown to you.

</Step>
</Steps>

## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you to see the expiration time of each lease and delete a lease before its time to live is up.

## Renew Leases
To extend the life of generated dynamic secret leases past their initial time to live, simply click **Renew** as illustrated below.

<Warning>
Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret.
</Warning>
@@ -1,6 +1,6 @@
---
title: "MS SQL"
description: "How to dynamically generate MS SQL database users."
description: "Learn how to dynamically generate MS SQL database user credentials."
---

The Infisical MS SQL dynamic secret allows you to generate Microsoft SQL server database credentials on demand based on a configured role.

@@ -1,6 +1,6 @@
---
title: "MySQL"
description: "Learn how to dynamically generate MySQL Database user passwords."
description: "Learn how to dynamically generate MySQL Database user credentials."
---

The Infisical MySQL dynamic secret allows you to generate MySQL Database credentials on demand based on a configured role.

@@ -1,6 +1,6 @@
---
title: "Oracle"
description: "Learn how to dynamically generate Oracle Database user passwords."
description: "Learn how to dynamically generate Oracle Database user credentials."
---

The Infisical Oracle dynamic secret allows you to generate Oracle Database credentials on demand based on a configured role.
@@ -32,4 +32,5 @@ Dynamic secrets are particularly useful in environments with stringent security
2. [MySQL](./mysql)
3. [Cassandra](./cassandra)
4. [Oracle](./oracle)
5. [AWS IAM](./aws-iam)
6. [Redis](./redis)
@@ -1,6 +1,6 @@
---
title: "PostgreSQL"
description: "How to dynamically generate PostgreSQL database users."
description: "Learn how to dynamically generate PostgreSQL database users."
---

The Infisical PostgreSQL dynamic secret allows you to generate PostgreSQL database credentials on demand based on a configured role.
docs/documentation/platform/dynamic-secrets/redis.mdx (new file, 106 lines)
@@ -0,0 +1,106 @@
---
title: "Redis"
description: "Learn how to dynamically generate Redis Database user credentials."
---

The Infisical Redis dynamic secret allows you to generate Redis Database credentials on demand based on a configured role.

## Prerequisite
Create a user with the required permissions in your Redis instance. This user will be used to create new accounts on demand.
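A sketch of creating such a provisioning user with Redis ACLs, shown here with node-redis's generic command API (the username and passwords are placeholders):

```ts
import { createClient } from "redis";

const client = createClient({ url: "redis://default:admin-password@localhost:6379" });
await client.connect();

// Grant a dedicated admin user broad rights so it can manage other users;
// scope this down further for production.
await client.sendCommand([
  "ACL", "SETUSER", "infisical-provisioner",
  "on", ">strong-placeholder-password", "~*", "&*", "+@all"
]);
await client.quit();
```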

## Set up Dynamic Secrets with Redis

<Steps>
<Step title="Open Secret Overview Dashboard">
Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
</Step>
<Step title="Click on the 'Add Dynamic Secret' button">

</Step>
<Step title="Select 'Redis'">

</Step>
<Step title="Provide the inputs for dynamic secret parameters">
<ParamField path="Secret Name" type="string" required>
Name by which you want the secret to be referenced
</ParamField>

<ParamField path="Default TTL" type="string" required>
Default time-to-live for a generated secret (it is possible to modify this value when a secret is generated)
</ParamField>

<ParamField path="Max TTL" type="string" required>
Maximum time-to-live for a generated secret.
</ParamField>

<ParamField path="Host" type="string" required>
The database host. This can be an IP address or a domain name, as long as Infisical can reach it.
</ParamField>

<ParamField path="Port" type="number" required>
The database port. This is the port that the Redis instance is listening on.
</ParamField>

<ParamField path="User" type="string" required>
Redis username that will be used to create new users on demand. This is often 'default' or 'admin'.
</ParamField>

<ParamField path="Password" type="string" optional>
Password that will be used to create dynamic secrets. This is required if your Redis instance is password protected.
</ParamField>

<ParamField path="CA(SSL)" type="string">
A CA may be required if your DB requires it for incoming connections. This is often the case when connecting to a managed service.
</ParamField>

</Step>
<Step title="(Optional) Modify Redis Statements">
If you want to provide specific privileges for the generated dynamic credentials, you can modify the Redis statement to your needs. This is useful if you want to give access to only a specific table or tables.

</Step>
<Step title="Click `Submit`">
After submitting the form, you will see a dynamic secret created in the dashboard.

<Note>
If this step fails, you may have to add the CA certificate.
</Note>

</Step>
<Step title="Generate dynamic secrets">
Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.

When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.

<Tip>
Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret.
</Tip>

Once you click the `Submit` button, a new secret lease will be generated and the credentials from it will be shown to you.

</Step>
</Steps>

## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you to see the expiration time of each lease and delete a lease before its time to live is up.

## Renew Leases
To extend the life of generated dynamic secret leases past their initial time to live, simply click **Renew** as illustrated below.

<Warning>
Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret.
</Warning>
@@ -21,7 +21,8 @@ These endpoints are exposed on port 8443 under the .well-known/est path e.g.

- You need to have an existing [CA hierarchy](/documentation/platform/pki/private-ca).
- The client devices need to have a bootstrap/pre-installed certificate.
- The client devices must trust the server certificates used by Infisical's EST server. If the devices are new or lack existing trust configurations, you need to manually establish trust for the appropriate certificates. When using Infisical Cloud, this means establishing trust for certificates issued by AWS.
- The client devices must trust the server certificates used by Infisical's EST server. If the devices are new or lack existing trust configurations, you need to manually establish trust for the appropriate certificates.
- For Infisical Cloud users, the devices must be configured to trust the [Amazon root CA certificates](https://www.amazontrust.com/repository).

## Guide to configuring EST

@@ -42,7 +43,7 @@ These endpoints are exposed on port 8443 under the .well-known/est path e.g.
4. Once the configuration of enrollment options is completed, a new **EST Label** field appears in the enrollment settings. This is the value to use as the label in the URL when configuring the connection of EST clients to Infisical.

For demonstration, the complete URL of the supported EST endpoints will look like the following:
The complete URL of the supported EST endpoints will look like the following:

- https://app.infisical.com:8443/.well-known/est/f110f308-9888-40ab-b228-237b12de8b96/cacerts
- https://app.infisical.com:8443/.well-known/est/f110f308-9888-40ab-b228-237b12de8b96/simpleenroll
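A quick way to sanity-check an EST deployment is to fetch the CA certificates endpoint; per RFC 7030 it returns a base64-encoded PKCS#7 bundle. A sketch (the label below is the example from the docs; substitute your own):

```ts
// Fetch the EST /cacerts endpoint and print the PKCS#7 CA bundle (base64).
const estLabel = "f110f308-9888-40ab-b228-237b12de8b96"; // example label from above
const res = await fetch(`https://app.infisical.com:8443/.well-known/est/${estLabel}/cacerts`);
console.log(res.status, res.headers.get("content-type")); // expect application/pkcs7-mime
console.log(await res.text());
```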
BIN docs/images/integrations/azure-devops/create-new-token.png (new file)
BIN docs/images/integrations/azure-devops/new-token-created.png (new file)
BIN docs/images/integrations/azure-devops/overview-page.png (new file)
BIN docs/images/platform/dynamic-secrets/advanced-option-atlas.png (new file)
(Additional binary images were added or modified in this range; their file names were not captured in this view.)