Compare commits
133 Commits
daniel/env...daniel/ref
| Author | SHA1 | Date |
| --- | --- | --- |
| | da561e37c5 | |
| | a29fb613b9 | |
| | 8f3d328b9a | |
| | b7d683ee1b | |
| | 302e068c74 | |
| | 95b92caff3 | |
| | 5d894b6d43 | |
| | dab3e2efad | |
| | 04cbbccd25 | |
| | 7f48e9d62e | |
| | 8a0018eff2 | |
| | e6a920caa3 | |
| | 11411ca4eb | |
| | b7c79fa45b | |
| | 18951b99de | |
| | bd05c440c3 | |
| | 9ca5013a59 | |
| | b65b8bc362 | |
| | f494c182ff | |
| | 2fae822e1f | |
| | 5df140cbd5 | |
| | d93cbb023d | |
| | 9056d1be0c | |
| | 5f503949eb | |
| | 9cf917de07 | |
| | ce7bb82f02 | |
| | 7cd092c0cf | |
| | cbfb9af0b9 | |
| | ef236106b4 | |
| | 773a338397 | |
| | afb5820113 | |
| | 5acc0fc243 | |
| | c56469ecdb | |
| | c59a53180c | |
| | f56d265e62 | |
| | cc0ff98d4f | |
| | 4a14c3efd2 | |
| | b2d2297914 | |
| | 836bb6d835 | |
| | 177eb2afee | |
| | 594df18611 | |
| | 3bcb8bf6fc | |
| | a74c37c18b | |
| | 3ece81d663 | |
| | f6d87ebf32 | |
| | 23483ab7e1 | |
| | fe31d44d22 | |
| | 58bab4d163 | |
| | 8f48a64fd6 | |
| | 929dc059c3 | |
| | 45e471b16a | |
| | 7c540b6be8 | |
| | 7dbe8dd3c9 | |
| | 0dec602729 | |
| | 66ded779fc | |
| | 01d24291f2 | |
| | 55b36b033e | |
| | 8f461bf50c | |
| | 1847491cb3 | |
| | 541c7b63cd | |
| | 7e5e177680 | |
| | 40f552e4f1 | |
| | ecb54ee3b3 | |
| | b975996158 | |
| | 122f789cdf | |
| | c9911aa841 | |
| | 32cd0d8af8 | |
| | 585f0d9f1b | |
| | d0292aa139 | |
| | 4e9be8ca3c | |
| | ad49e9eaf1 | |
| | fed60f7c03 | |
| | 1bc0e3087a | |
| | 80a4f838a1 | |
| | d31ec44f50 | |
| | d0caef37ce | |
| | 2d26febe58 | |
| | c23ad8ebf2 | |
| | bad068ef19 | |
| | 53430608a8 | |
| | a556c02df6 | |
| | 3c39bf6a0f | |
| | 5611b9aba1 | |
| | 53075d503a | |
| | e47cfa262a | |
| | 0ab7a4e713 | |
| | 5138d588db | |
| | 7e2d093e29 | |
| | 2d780e0566 | |
| | 8eb234a12f | |
| | 85590af99e | |
| | 5c7cec0c81 | |
| | 68f768749b | |
| | 2c7e342b18 | |
| | 632900e516 | |
| | 5fd975b1d7 | |
| | d45ac66064 | |
| | 47cba8ec3c | |
| | d4aab66da2 | |
| | 0dc4c92c89 | |
| | f49c963367 | |
| | fe11b8e57e | |
| | 0401f55bc3 | |
| | 403e0d2d9d | |
| | e73d3f87f3 | |
| | b53607f8e4 | |
| | 8f79d3210a | |
| | 3ddb4cd27a | |
| | a5555c3816 | |
| | 8479c406a5 | |
| | 8e0b4254b1 | |
| | 069651bdb4 | |
| | 9061ec2dff | |
| | b0a5023723 | |
| | 69fe5bf71d | |
| | f12d4d80c6 | |
| | 56f2a3afa4 | |
| | 406da1b5f0 | |
| | da45e132a3 | |
| | fb719a9383 | |
| | 3c64359597 | |
| | e420973dd2 | |
| | 15cc157c5f | |
| | ad89ffe94d | |
| | 4de1713a18 | |
| | 1917e0fdb7 | |
| | 4b07234997 | |
| | 6a402950c3 | |
| | 63333159ca | |
| | ce4ba24ef2 | |
| | f606e31b98 | |
| | ecdbb3eb53 | |
| | 0321ec32fb | |
@@ -6,9 +6,15 @@ permissions:
  contents: read

jobs:
  infisical-tests:
    name: Run tests before deployment
    # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
    uses: ./.github/workflows/run-backend-tests.yml

  infisical-image:
    name: Build backend image
    runs-on: ubuntu-latest
    needs: [infisical-tests]
    steps:
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
35
backend/e2e-test/routes/v1/secret-approval-policy.spec.ts
Normal file
@@ -0,0 +1,35 @@
import { seedData1 } from "@app/db/seed-data";

const createPolicy = async (dto: { name: string; secretPath: string; approvers: string[]; approvals: number }) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/secret-approvals`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: {
      workspaceId: seedData1.project.id,
      environment: seedData1.environment.slug,
      name: dto.name,
      secretPath: dto.secretPath,
      approvers: dto.approvers,
      approvals: dto.approvals
    }
  });

  expect(res.statusCode).toBe(200);
  return res.json().approval;
};

describe("Secret approval policy router", async () => {
  test("Create policy", async () => {
    const policy = await createPolicy({
      secretPath: "/",
      approvals: 1,
      approvers: [seedData1.id],
      name: "test-policy"
    });

    expect(policy.name).toBe("test-policy");
  });
});
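Every spec in this compare follows the same request shape: inject a request into the in-process Fastify `testServer` with the seeded `jwtAuthToken`, assert a 200, and unwrap one property of the JSON body. A minimal sketch of that pattern, assuming the `testServer` and `jwtAuthToken` globals provided by the e2e setup; the `injectAuthed` helper itself is illustrative and not part of this branch:

```ts
// Illustrative wrapper over the inject-and-unwrap pattern the specs repeat.
type InjectOpts = {
  method: "GET" | "POST" | "PATCH" | "DELETE";
  url: string;
  body?: Record<string, unknown>;
  query?: Record<string, string>;
};

const injectAuthed = async <T>(opts: InjectOpts, unwrapKey: string): Promise<T> => {
  const res = await testServer.inject({
    ...opts,
    headers: { authorization: `Bearer ${jwtAuthToken}` }
  });
  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty(unwrapKey);
  return payload[unwrapKey] as T;
};
```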
@@ -1,73 +1,61 @@
|
||||
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
|
||||
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
|
||||
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
|
||||
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
const createSecretImport = async (importPath: string, importEnv: string) => {
|
||||
const res = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v1/secret-imports`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/",
|
||||
import: {
|
||||
environment: importEnv,
|
||||
path: importPath
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("secretImport");
|
||||
return payload.secretImport;
|
||||
};
|
||||
|
||||
const deleteSecretImport = async (id: string) => {
|
||||
const res = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v1/secret-imports/${id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/"
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("secretImport");
|
||||
return payload.secretImport;
|
||||
};
|
||||
|
||||
describe("Secret Import Router", async () => {
|
||||
test.each([
|
||||
{ importEnv: "prod", importPath: "/" }, // one in root
|
||||
{ importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones
|
||||
])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
|
||||
// check for default environments
|
||||
const payload = await createSecretImport(importPath, importEnv);
|
||||
const payload = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath,
|
||||
importEnv
|
||||
});
|
||||
expect(payload).toEqual(
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
importPath: expect.any(String),
|
||||
importPath,
|
||||
importEnv: expect.objectContaining({
|
||||
name: expect.any(String),
|
||||
slug: expect.any(String),
|
||||
slug: importEnv,
|
||||
id: expect.any(String)
|
||||
})
|
||||
})
|
||||
);
|
||||
await deleteSecretImport(payload.id);
|
||||
|
||||
await deleteSecretImport({
|
||||
id: payload.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
});
|
||||
|
||||
test("Get secret imports", async () => {
|
||||
const createdImport1 = await createSecretImport("/", "prod");
|
||||
const createdImport2 = await createSecretImport("/", "staging");
|
||||
const createdImport1 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: "/",
|
||||
importEnv: "prod"
|
||||
});
|
||||
const createdImport2 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: "/",
|
||||
importEnv: "staging"
|
||||
});
|
||||
const res = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/secret-imports`,
|
||||
@@ -89,25 +77,60 @@ describe("Secret Import Router", async () => {
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
importPath: expect.any(String),
|
||||
importPath: "/",
|
||||
importEnv: expect.objectContaining({
|
||||
name: expect.any(String),
|
||||
slug: expect.any(String),
|
||||
slug: "prod",
|
||||
id: expect.any(String)
|
||||
})
|
||||
}),
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
importPath: "/",
|
||||
importEnv: expect.objectContaining({
|
||||
name: expect.any(String),
|
||||
slug: "staging",
|
||||
id: expect.any(String)
|
||||
})
|
||||
})
|
||||
])
|
||||
);
|
||||
await deleteSecretImport(createdImport1.id);
|
||||
await deleteSecretImport(createdImport2.id);
|
||||
await deleteSecretImport({
|
||||
id: createdImport1.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
await deleteSecretImport({
|
||||
id: createdImport2.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
});
|
||||
|
||||
test("Update secret import position", async () => {
|
||||
const prodImportDetails = { path: "/", envSlug: "prod" };
|
||||
const stagingImportDetails = { path: "/", envSlug: "staging" };
|
||||
|
||||
const createdImport1 = await createSecretImport(prodImportDetails.path, prodImportDetails.envSlug);
|
||||
const createdImport2 = await createSecretImport(stagingImportDetails.path, stagingImportDetails.envSlug);
|
||||
const createdImport1 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: prodImportDetails.path,
|
||||
importEnv: prodImportDetails.envSlug
|
||||
});
|
||||
const createdImport2 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: stagingImportDetails.path,
|
||||
importEnv: stagingImportDetails.envSlug
|
||||
});
|
||||
|
||||
const updateImportRes = await testServer.inject({
|
||||
method: "PATCH",
|
||||
@@ -161,22 +184,55 @@ describe("Secret Import Router", async () => {
|
||||
expect(secretImportList.secretImports[1].id).toEqual(createdImport1.id);
|
||||
expect(secretImportList.secretImports[0].id).toEqual(createdImport2.id);
|
||||
|
||||
await deleteSecretImport(createdImport1.id);
|
||||
await deleteSecretImport(createdImport2.id);
|
||||
await deleteSecretImport({
|
||||
id: createdImport1.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
await deleteSecretImport({
|
||||
id: createdImport2.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
});
|
||||
|
||||
test("Delete secret import position", async () => {
|
||||
const createdImport1 = await createSecretImport("/", "prod");
|
||||
const createdImport2 = await createSecretImport("/", "staging");
|
||||
const deletedImport = await deleteSecretImport(createdImport1.id);
|
||||
const createdImport1 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: "/",
|
||||
importEnv: "prod"
|
||||
});
|
||||
const createdImport2 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: "/",
|
||||
importEnv: "staging"
|
||||
});
|
||||
const deletedImport = await deleteSecretImport({
|
||||
id: createdImport1.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
// check for default environments
|
||||
expect(deletedImport).toEqual(
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
importPath: expect.any(String),
|
||||
importPath: "/",
|
||||
importEnv: expect.objectContaining({
|
||||
name: expect.any(String),
|
||||
slug: expect.any(String),
|
||||
slug: "prod",
|
||||
id: expect.any(String)
|
||||
})
|
||||
})
|
||||
@@ -201,6 +257,552 @@ describe("Secret Import Router", async () => {
|
||||
expect(secretImportList.secretImports.length).toEqual(1);
|
||||
expect(secretImportList.secretImports[0].position).toEqual(1);
|
||||
|
||||
await deleteSecretImport(createdImport2.id);
|
||||
await deleteSecretImport({
|
||||
id: createdImport2.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// dev <- stage <- prod
|
||||
describe.each([{ path: "/" }, { path: "/deep" }])(
|
||||
"Secret import waterfall pattern testing - %path",
|
||||
({ path: testSuitePath }) => {
|
||||
beforeAll(async () => {
|
||||
let prodFolder: { id: string };
|
||||
let stagingFolder: { id: string };
|
||||
let devFolder: { id: string };
|
||||
|
||||
if (testSuitePath !== "/") {
|
||||
prodFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
stagingFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
devFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
}
|
||||
|
||||
const devImportFromStage = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "staging"
|
||||
});
|
||||
|
||||
const stageImportFromProd = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "prod"
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteSecretImport({
|
||||
id: stageImportFromProd.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging",
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
await deleteSecretImport({
|
||||
id: devImportFromStage.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
if (prodFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
}
|
||||
|
||||
if (stagingFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: stagingFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging"
|
||||
});
|
||||
}
|
||||
|
||||
if (devFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: devFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
test("Check one level imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY",
|
||||
value: "stage-value"
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("STAGING_KEY");
|
||||
expect(secret.secretValue).toBe("stage-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "STAGING_KEY",
|
||||
secretValue: "stage-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
});
|
||||
|
||||
test("Check two level imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY",
|
||||
value: "prod-value"
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("PROD_KEY");
|
||||
expect(secret.secretValue).toBe("prod-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "PROD_KEY",
|
||||
secretValue: "prod-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
});
|
||||
}
|
||||
);
|
||||
|
||||
// dev <- stage, dev <- prod
|
||||
describe.each([{ path: "/" }, { path: "/deep" }])(
|
||||
"Secret import multiple destination to one source pattern testing - %path",
|
||||
({ path: testSuitePath }) => {
|
||||
beforeAll(async () => {
|
||||
let prodFolder: { id: string };
|
||||
let stagingFolder: { id: string };
|
||||
let devFolder: { id: string };
|
||||
|
||||
if (testSuitePath !== "/") {
|
||||
prodFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
stagingFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
devFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
}
|
||||
|
||||
const devImportFromStage = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "staging"
|
||||
});
|
||||
|
||||
const devImportFromProd = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "prod"
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteSecretImport({
|
||||
id: devImportFromProd.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
await deleteSecretImport({
|
||||
id: devImportFromStage.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
if (prodFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
}
|
||||
|
||||
if (stagingFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: stagingFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging"
|
||||
});
|
||||
}
|
||||
|
||||
if (devFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: devFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
test("Check imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY",
|
||||
value: "stage-value"
|
||||
});
|
||||
|
||||
await createSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY",
|
||||
value: "prod-value"
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("STAGING_KEY");
|
||||
expect(secret.secretValue).toBe("stage-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "STAGING_KEY",
|
||||
secretValue: "stage-value"
|
||||
})
|
||||
])
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "PROD_KEY",
|
||||
secretValue: "prod-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
});
|
||||
}
|
||||
);
|
||||
|
||||
// dev -> stage, prod
|
||||
describe.each([{ path: "/" }, { path: "/deep" }])(
|
||||
"Secret import one source to multiple destination pattern testing - %path",
|
||||
({ path: testSuitePath }) => {
|
||||
beforeAll(async () => {
|
||||
let prodFolder: { id: string };
|
||||
let stagingFolder: { id: string };
|
||||
let devFolder: { id: string };
|
||||
|
||||
if (testSuitePath !== "/") {
|
||||
prodFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
stagingFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
devFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
}
|
||||
|
||||
const stageImportFromDev = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: seedData1.environment.slug
|
||||
});
|
||||
|
||||
const prodImportFromDev = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: seedData1.environment.slug
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteSecretImport({
|
||||
id: prodImportFromDev.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod",
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
await deleteSecretImport({
|
||||
id: stageImportFromDev.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging",
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
if (prodFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
}
|
||||
|
||||
if (stagingFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: stagingFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging"
|
||||
});
|
||||
}
|
||||
|
||||
if (devFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: devFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
test("Check imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY",
|
||||
value: "stage-value"
|
||||
});
|
||||
|
||||
await createSecretV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY",
|
||||
value: "prod-value"
|
||||
});
|
||||
|
||||
const stagingSecret = await getSecretByNameV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
|
||||
expect(stagingSecret.secretKey).toBe("STAGING_KEY");
|
||||
expect(stagingSecret.secretValue).toBe("stage-value");
|
||||
|
||||
const prodSecret = await getSecretByNameV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
|
||||
expect(prodSecret.secretKey).toBe("PROD_KEY");
|
||||
expect(prodSecret.secretValue).toBe("prod-value");
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
await deleteSecretV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
});
|
||||
}
|
||||
);
|
||||
|
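The suites above create their fixtures in `beforeAll` and return an async closure, which the runner (Vitest, judging by `describe.each`, `test.each`, and the per-suite timeout options) registers as the suite teardown. A compact sketch of that lifecycle built only on the shared test utilities from this branch; the fixture below is illustrative rather than one of the suites in the changeset:

```ts
import { createSecretImport, deleteSecretImport, listSecretImport } from "e2e-test/testUtils/secret-imports";

import { seedData1 } from "@app/db/seed-data";

describe("teardown-from-beforeAll sketch", () => {
  beforeAll(async () => {
    const created = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.projectV3.id,
      importPath: "/",
      importEnv: "staging"
    });

    // The returned closure runs once the suite finishes, like an afterAll.
    return async () => {
      await deleteSecretImport({
        id: created.id,
        workspaceId: seedData1.projectV3.id,
        environmentSlug: seedData1.environment.slug,
        secretPath: "/",
        authToken: jwtAuthToken
      });
    };
  });

  test("the import exists while the suite runs", async () => {
    const imports = await listSecretImport({
      workspaceId: seedData1.projectV3.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    expect(imports.length).toBeGreaterThan(0);
  });
});
```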
406
backend/e2e-test/routes/v1/secret-replication.spec.ts
Normal file
@@ -0,0 +1,406 @@
|
||||
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
|
||||
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
|
||||
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
|
||||
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
// dev <- stage <- prod
|
||||
describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])(
|
||||
"Secret replication waterfall pattern testing - %secretPath",
|
||||
({ secretPath: testSuitePath }) => {
|
||||
beforeAll(async () => {
|
||||
let prodFolder: { id: string };
|
||||
let stagingFolder: { id: string };
|
||||
let devFolder: { id: string };
|
||||
|
||||
if (testSuitePath !== "/") {
|
||||
prodFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
stagingFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
devFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
}
|
||||
|
||||
const devImportFromStage = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "staging",
|
||||
isReplication: true
|
||||
});
|
||||
|
||||
const stageImportFromProd = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "prod",
|
||||
isReplication: true
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteSecretImport({
|
||||
id: stageImportFromProd.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging",
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
await deleteSecretImport({
|
||||
id: devImportFromStage.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
if (prodFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
}
|
||||
|
||||
if (stagingFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: stagingFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging"
|
||||
});
|
||||
}
|
||||
|
||||
if (devFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: devFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
test("Check one level imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY",
|
||||
value: "stage-value"
|
||||
});
|
||||
|
||||
// wait for 5 second for replication to finish
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 5000); // time to breathe for db
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("STAGING_KEY");
|
||||
expect(secret.secretValue).toBe("stage-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "STAGING_KEY",
|
||||
secretValue: "stage-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
});
|
||||
|
||||
test("Check two level imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY",
|
||||
value: "prod-value"
|
||||
});
|
||||
|
||||
// wait for 5 second for replication to finish
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 5000); // time to breathe for db
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("PROD_KEY");
|
||||
expect(secret.secretValue).toBe("prod-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "PROD_KEY",
|
||||
secretValue: "prod-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
});
|
||||
},
|
||||
{ timeout: 30000 }
|
||||
);
|
||||
|
||||
// dev <- stage, dev <- prod
|
||||
describe.each([{ path: "/" }, { path: "/deep" }])(
|
||||
"Secret replication 1-N pattern testing - %path",
|
||||
({ path: testSuitePath }) => {
|
||||
beforeAll(async () => {
|
||||
let prodFolder: { id: string };
|
||||
let stagingFolder: { id: string };
|
||||
let devFolder: { id: string };
|
||||
|
||||
if (testSuitePath !== "/") {
|
||||
prodFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
stagingFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
devFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
}
|
||||
|
||||
const devImportFromStage = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "staging",
|
||||
isReplication: true
|
||||
});
|
||||
|
||||
const devImportFromProd = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "prod",
|
||||
isReplication: true
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteSecretImport({
|
||||
id: devImportFromProd.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
await deleteSecretImport({
|
||||
id: devImportFromStage.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
if (prodFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
}
|
||||
|
||||
if (stagingFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: stagingFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging"
|
||||
});
|
||||
}
|
||||
|
||||
if (devFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: devFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
test("Check imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY",
|
||||
value: "stage-value"
|
||||
});
|
||||
|
||||
await createSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY",
|
||||
value: "prod-value"
|
||||
});
|
||||
|
||||
// wait for 5 second for replication to finish
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 5000); // time to breathe for db
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("STAGING_KEY");
|
||||
expect(secret.secretValue).toBe("stage-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "STAGING_KEY",
|
||||
secretValue: "stage-value"
|
||||
})
|
||||
])
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "PROD_KEY",
|
||||
secretValue: "prod-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
});
|
||||
},
|
||||
{ timeout: 30000 }
|
||||
);
|
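The replication suites above pause for a flat five seconds while the replication queue catches up. A bounded polling helper is one alternative; the sketch below is hypothetical (not part of the changeset) and relies only on the `getSecretByNameV2` utility introduced in this branch, whose internal `expect` throws until the replicated secret is readable:

```ts
import { getSecretByNameV2 } from "e2e-test/testUtils/secrets";

// Retry the same lookup the specs already use until it succeeds or a deadline passes.
const waitForReplicatedSecret = async (
  dto: Parameters<typeof getSecretByNameV2>[0],
  timeoutMs = 10_000,
  intervalMs = 500
) => {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    try {
      return await getSecretByNameV2(dto);
    } catch {
      // Not replicated yet; back off briefly before the next attempt.
      await new Promise((resolve) => {
        setTimeout(resolve, intervalMs);
      });
    }
  }
  throw new Error(`Secret ${dto.key} was not replicated within ${timeoutMs}ms`);
};
```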
330
backend/e2e-test/routes/v3/secret-reference.spec.ts
Normal file
@@ -0,0 +1,330 @@
|
||||
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
|
||||
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
|
||||
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
|
||||
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
describe("Secret expansion", () => {
|
||||
const projectId = seedData1.projectV3.id;
|
||||
|
||||
beforeAll(async () => {
|
||||
const prodRootFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
name: "nested"
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodRootFolder.id,
|
||||
workspaceId: projectId,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
};
|
||||
});
|
||||
|
||||
test("Local secret reference", async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "HELLO",
|
||||
value: "world"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "TEST",
|
||||
// eslint-disable-next-line
|
||||
value: "hello ${HELLO}"
|
||||
}
|
||||
];
|
||||
|
||||
await Promise.all(secrets.map((el) => createSecretV2(el)));
|
||||
|
||||
const expandedSecret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "TEST"
|
||||
});
|
||||
expect(expandedSecret.secretValue).toBe("hello world");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "TEST",
|
||||
secretValue: "hello world"
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
});
|
||||
|
||||
test("Cross environment secret reference", async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
authToken: jwtAuthToken,
|
||||
key: "DEEP_KEY_1",
|
||||
value: "testing"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_1",
|
||||
value: "reference"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_2",
|
||||
// eslint-disable-next-line
|
||||
value: "secret ${NESTED_KEY_1}"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY",
|
||||
// eslint-disable-next-line
|
||||
value: "hello ${prod.deep.DEEP_KEY_1} ${prod.deep.nested.NESTED_KEY_2}"
|
||||
}
|
||||
];
|
||||
|
||||
await Promise.all(secrets.map((el) => createSecretV2(el)));
|
||||
|
||||
const expandedSecret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY"
|
||||
});
|
||||
expect(expandedSecret.secretValue).toBe("hello testing secret reference");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "KEY",
|
||||
secretValue: "hello testing secret reference"
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
});
|
||||
|
||||
test("Non replicated secret import secret expansion on local reference and nested reference", async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
authToken: jwtAuthToken,
|
||||
key: "DEEP_KEY_1",
|
||||
value: "testing"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_1",
|
||||
value: "reference"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_2",
|
||||
// eslint-disable-next-line
|
||||
value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY",
|
||||
// eslint-disable-next-line
|
||||
value: "hello world"
|
||||
}
|
||||
];
|
||||
|
||||
await Promise.all(secrets.map((el) => createSecretV2(el)));
|
||||
const secretImportFromProdToDev = await createSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
importEnv: "prod",
|
||||
importPath: "/deep/nested"
|
||||
});
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretPath: "/deep/nested",
|
||||
environment: "prod",
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_1",
|
||||
secretValue: "reference"
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_2",
|
||||
secretValue: "secret reference testing"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
await deleteSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
authToken: jwtAuthToken,
|
||||
id: secretImportFromProdToDev.id,
|
||||
secretPath: "/"
|
||||
});
|
||||
});
|
||||
|
||||
test(
|
||||
"Replicated secret import secret expansion on local reference and nested reference",
|
||||
async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
authToken: jwtAuthToken,
|
||||
key: "DEEP_KEY_1",
|
||||
value: "testing"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_1",
|
||||
value: "reference"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_2",
|
||||
// eslint-disable-next-line
|
||||
value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY",
|
||||
// eslint-disable-next-line
|
||||
value: "hello world"
|
||||
}
|
||||
];
|
||||
|
||||
await Promise.all(secrets.map((el) => createSecretV2(el)));
|
||||
const secretImportFromProdToDev = await createSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
importEnv: "prod",
|
||||
importPath: "/deep/nested",
|
||||
isReplication: true
|
||||
});
|
||||
|
||||
// wait for 5 second for replication to finish
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 5000); // time to breathe for db
|
||||
});
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretPath: `/__reserve_replication_${secretImportFromProdToDev.id}`,
|
||||
environment: seedData1.environment.slug,
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_1",
|
||||
secretValue: "reference"
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_2",
|
||||
secretValue: "secret reference testing"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
await deleteSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
authToken: jwtAuthToken,
|
||||
id: secretImportFromProdToDev.id,
|
||||
secretPath: "/"
|
||||
});
|
||||
},
|
||||
{ timeout: 10000 }
|
||||
);
|
||||
});
|
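The expansion tests exercise two reference forms inside secret values: `${KEY}` for a secret in the same environment and path, and `${env.folder.KEY}` for a cross-environment, cross-path reference. The small parser below only illustrates that syntax as it appears in the test data; it is not Infisical's actual expansion logic:

```ts
// Illustrative breakdown of the ${...} reference syntax used by the spec's seed values.
type SecretReference =
  | { kind: "local"; key: string }
  | { kind: "cross"; environment: string; secretPath: string; key: string };

const parseReferences = (value: string): SecretReference[] =>
  [...value.matchAll(/\$\{([^}]+)\}/g)].map(([, inner]): SecretReference => {
    const parts = inner.split(".");
    if (parts.length === 1) return { kind: "local", key: parts[0] };
    const [environment, ...rest] = parts;
    const key = rest.pop() as string;
    return { kind: "cross", environment, secretPath: `/${rest.join("/")}`, key };
  });

// From the test data, "hello ${prod.deep.DEEP_KEY_1} ${prod.deep.nested.NESTED_KEY_2}"
// parses to two cross-environment references targeting /deep and /deep/nested in prod.
```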
@@ -8,6 +8,7 @@ type TRawSecret = {
  secretComment?: string;
  version: number;
};

const createSecret = async (dto: { path: string; key: string; value: string; comment: string; type?: SecretType }) => {
  const createSecretReqBody = {
    workspaceId: seedData1.projectV3.id,
73
backend/e2e-test/testUtils/folders.ts
Normal file
@@ -0,0 +1,73 @@
type TFolder = {
  id: string;
  name: string;
};

export const createFolder = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  name: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/folders`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      name: dto.name,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folder as TFolder;
};

export const deleteFolder = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  id: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "DELETE",
    url: `/api/v1/folders/${dto.id}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folder as TFolder;
};

export const listFolders = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "GET",
    url: `/api/v1/folders`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folders as TFolder[];
};
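A short usage sketch for the folder helpers above, runnable from any spec that has the `testServer` and `jwtAuthToken` globals; the folder name is a placeholder:

```ts
import { createFolder, deleteFolder, listFolders } from "e2e-test/testUtils/folders";

import { seedData1 } from "@app/db/seed-data";

test("folder helpers round-trip", async () => {
  const folder = await createFolder({
    authToken: jwtAuthToken,
    workspaceId: seedData1.projectV3.id,
    environmentSlug: seedData1.environment.slug,
    secretPath: "/",
    name: "example-folder"
  });

  const folders = await listFolders({
    authToken: jwtAuthToken,
    workspaceId: seedData1.projectV3.id,
    environmentSlug: seedData1.environment.slug,
    secretPath: "/"
  });
  expect(folders.map((el) => el.name)).toContain("example-folder");

  await deleteFolder({
    authToken: jwtAuthToken,
    workspaceId: seedData1.projectV3.id,
    environmentSlug: seedData1.environment.slug,
    secretPath: "/",
    id: folder.id
  });
});
```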
93
backend/e2e-test/testUtils/secret-imports.ts
Normal file
@@ -0,0 +1,93 @@
|
||||
type TSecretImport = {
|
||||
id: string;
|
||||
importEnv: {
|
||||
name: string;
|
||||
slug: string;
|
||||
id: string;
|
||||
};
|
||||
importPath: string;
|
||||
};
|
||||
|
||||
export const createSecretImport = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
isReplication?: boolean;
|
||||
secretPath: string;
|
||||
importPath: string;
|
||||
importEnv: string;
|
||||
authToken: string;
|
||||
}) => {
|
||||
const res = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v1/secret-imports`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
isReplication: dto.isReplication,
|
||||
path: dto.secretPath,
|
||||
import: {
|
||||
environment: dto.importEnv,
|
||||
path: dto.importPath
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("secretImport");
|
||||
return payload.secretImport as TSecretImport;
|
||||
};
|
||||
|
||||
export const deleteSecretImport = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
secretPath: string;
|
||||
authToken: string;
|
||||
id: string;
|
||||
}) => {
|
||||
const res = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v1/secret-imports/${dto.id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
path: dto.secretPath
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("secretImport");
|
||||
return payload.secretImport as TSecretImport;
|
||||
};
|
||||
|
||||
export const listSecretImport = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
secretPath: string;
|
||||
authToken: string;
|
||||
}) => {
|
||||
const res = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/secret-imports`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
query: {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
path: dto.secretPath
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("secretImports");
|
||||
return payload.secretImports as TSecretImport[];
|
||||
};
|
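The only difference between a plain import and a replicated one in these helpers is the optional `isReplication` flag. A hedged sketch of its use, assuming the `prod` `/deep/nested` folders created by the secret-reference spec's setup already exist:

```ts
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";

import { seedData1 } from "@app/db/seed-data";

test("create and remove a replicated import", async () => {
  const replicatedImport = await createSecretImport({
    authToken: jwtAuthToken,
    workspaceId: seedData1.projectV3.id,
    environmentSlug: seedData1.environment.slug,
    secretPath: "/",
    importPath: "/deep/nested",
    importEnv: "prod",
    isReplication: true
  });

  // Shape comes from the TSecretImport type above.
  expect(replicatedImport.importPath).toBe("/deep/nested");
  expect(replicatedImport.importEnv.slug).toBe("prod");

  await deleteSecretImport({
    authToken: jwtAuthToken,
    workspaceId: seedData1.projectV3.id,
    environmentSlug: seedData1.environment.slug,
    secretPath: "/",
    id: replicatedImport.id
  });
});
```

As the secret-reference spec asserts, secrets brought in by a replicated import are then served from a reserved path derived from the import id, `/__reserve_replication_<import id>`, in the importing environment.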
128
backend/e2e-test/testUtils/secrets.ts
Normal file
@@ -0,0 +1,128 @@
import { SecretType } from "@app/db/schemas";

type TRawSecret = {
  secretKey: string;
  secretValue: string;
  secretComment?: string;
  version: number;
};

export const createSecretV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  key: string;
  value: string;
  comment?: string;
  authToken: string;
  type?: SecretType;
}) => {
  const createSecretReqBody = {
    workspaceId: dto.workspaceId,
    environment: dto.environmentSlug,
    type: dto.type || SecretType.Shared,
    secretPath: dto.secretPath,
    secretKey: dto.key,
    secretValue: dto.value,
    secretComment: dto.comment
  };
  const createSecRes = await testServer.inject({
    method: "POST",
    url: `/api/v3/secrets/raw/${dto.key}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: createSecretReqBody
  });
  expect(createSecRes.statusCode).toBe(200);
  const createdSecretPayload = JSON.parse(createSecRes.payload);
  expect(createdSecretPayload).toHaveProperty("secret");
  return createdSecretPayload.secret as TRawSecret;
};

export const deleteSecretV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  key: string;
  authToken: string;
}) => {
  const deleteSecRes = await testServer.inject({
    method: "DELETE",
    url: `/api/v3/secrets/raw/${dto.key}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      secretPath: dto.secretPath
    }
  });
  expect(deleteSecRes.statusCode).toBe(200);
  const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
  expect(updatedSecretPayload).toHaveProperty("secret");
  return updatedSecretPayload.secret as TRawSecret;
};

export const getSecretByNameV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  key: string;
  authToken: string;
}) => {
  const response = await testServer.inject({
    method: "GET",
    url: `/api/v3/secrets/raw/${dto.key}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    query: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      secretPath: dto.secretPath,
      expandSecretReferences: "true",
      include_imports: "true"
    }
  });
  expect(response.statusCode).toBe(200);
  const payload = JSON.parse(response.payload);
  expect(payload).toHaveProperty("secret");
  return payload.secret as TRawSecret;
};

export const getSecretsV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
}) => {
  const getSecretsResponse = await testServer.inject({
    method: "GET",
    url: `/api/v3/secrets/raw`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    query: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      secretPath: dto.secretPath,
      expandSecretReferences: "true",
      include_imports: "true"
    }
  });
  expect(getSecretsResponse.statusCode).toBe(200);
  const getSecretsPayload = JSON.parse(getSecretsResponse.payload);
  expect(getSecretsPayload).toHaveProperty("secrets");
  expect(getSecretsPayload).toHaveProperty("imports");
  return getSecretsPayload as {
    secrets: TRawSecret[];
    imports: {
      secretPath: string;
      environment: string;
      folderId: string;
      secrets: TRawSecret[];
    }[];
  };
};
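A matching sketch for the raw-secret helpers above; as before, the relative import path and every concrete value are placeholders rather than part of this change:

// Hypothetical spec using the v3 raw-secret helpers (import path and values are assumptions).
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "../../testUtils/secrets";

it("round-trips a raw secret", async () => {
  const ctx = {
    workspaceId: "<seeded-project-id>", // placeholder
    environmentSlug: "dev",             // placeholder
    secretPath: "/",
    authToken: "<jwt-from-test-setup>"  // placeholder
  };

  await createSecretV2({ ...ctx, key: "DB_HOST", value: "localhost", comment: "created by e2e sketch" });

  const fetched = await getSecretByNameV2({ ...ctx, key: "DB_HOST" });
  expect(fetched.secretValue).toBe("localhost");

  const { secrets } = await getSecretsV2(ctx);
  expect(secrets.map((s) => s.secretKey)).toContain("DB_HOST");

  await deleteSecretV2({ ...ctx, key: "DB_HOST" });
});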
@@ -11,10 +11,11 @@ import { initLogger } from "@app/lib/logger";
import { main } from "@app/server/app";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";

import { mockQueue } from "./mocks/queue";
import { mockSmtpServer } from "./mocks/smtp";
import { mockKeyStore } from "./mocks/keystore";
import { initDbConnection } from "@app/db";
import { queueServiceFactory } from "@app/queue";
import { keyStoreFactory } from "@app/keystore/keystore";
import { Redis } from "ioredis";

dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
@@ -28,19 +29,31 @@ export default {
      dbRootCert: cfg.DB_ROOT_CERT
    });

    const redis = new Redis(cfg.REDIS_URL);
    await redis.flushdb("SYNC");

    try {
      await db.migrate.rollback(
        {
          directory: path.join(__dirname, "../src/db/migrations"),
          extension: "ts",
          tableName: "infisical_migrations"
        },
        true
      );
      await db.migrate.latest({
        directory: path.join(__dirname, "../src/db/migrations"),
        extension: "ts",
        tableName: "infisical_migrations"
      });

      await db.seed.run({
        directory: path.join(__dirname, "../src/db/seeds"),
        extension: "ts"
      });
      const smtp = mockSmtpServer();
      const queue = mockQueue();
      const keyStore = mockKeyStore();
      const queue = queueServiceFactory(cfg.REDIS_URL);
      const keyStore = keyStoreFactory(cfg.REDIS_URL);
      const server = await main({ db, smtp, logger, queue, keyStore });
      // @ts-expect-error type
      globalThis.testServer = server;
@@ -58,10 +71,12 @@ export default {
        { expiresIn: cfg.JWT_AUTH_LIFETIME }
      );
    } catch (error) {
      // eslint-disable-next-line
      console.log("[TEST] Error setting up environment", error);
      await db.destroy();
      throw error;
    }

    // custom setup
    return {
      async teardown() {
@@ -80,6 +95,9 @@ export default {
          },
          true
        );

        await redis.flushdb("ASYNC");
        redis.disconnect();
        await db.destroy();
      }
    };
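The setup above now boots the real Redis-backed queue and keystore instead of the mocks, flushes Redis, rolls migrations back and forward, seeds, and exposes the booted Fastify server as globalThis.testServer. A rough sketch of how a Vitest config could point the e2e suite at such a custom environment file; the file name, paths and option values here are assumptions, not taken from this diff:

// vitest.config.ts (sketch; environment path and globs are assumptions)
import { defineConfig } from "vitest/config";
import tsconfigPaths from "vite-tsconfig-paths";

export default defineConfig({
  plugins: [tsconfigPaths()],
  test: {
    globals: true,
    // run the DB-backed e2e specs inside the custom environment sketched above
    environment: "./e2e-test/vitest-environment-knex.ts",
    include: ["./e2e-test/**/*.spec.ts"],
    poolOptions: { threads: { singleThread: true } } // avoid parallel tests sharing one database
  }
});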
436
backend/package-lock.json
generated
@@ -15,6 +15,7 @@
|
||||
"@aws-sdk/client-secrets-manager": "^3.504.0",
|
||||
"@aws-sdk/client-sts": "^3.600.0",
|
||||
"@casl/ability": "^6.5.0",
|
||||
"@elastic/elasticsearch": "^8.15.0",
|
||||
"@fastify/cookie": "^9.3.1",
|
||||
"@fastify/cors": "^8.5.0",
|
||||
"@fastify/etag": "^5.1.0",
|
||||
@@ -79,6 +80,8 @@
|
||||
"posthog-node": "^3.6.2",
|
||||
"probot": "^13.0.0",
|
||||
"safe-regex": "^2.1.1",
|
||||
"scim-patch": "^0.8.3",
|
||||
"scim2-parse-filter": "^0.2.10",
|
||||
"smee-client": "^2.0.0",
|
||||
"tedious": "^18.2.1",
|
||||
"tweetnacl": "^1.0.3",
|
||||
@@ -134,7 +137,6 @@
|
||||
"tsup": "^8.0.1",
|
||||
"tsx": "^4.4.0",
|
||||
"typescript": "^5.3.2",
|
||||
"vite-tsconfig-paths": "^4.2.2",
|
||||
"vitest": "^1.2.2"
|
||||
}
|
||||
},
|
||||
@@ -299,16 +301,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@aws-sdk/client-cloudwatch-logs": {
|
||||
"version": "3.635.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/client-cloudwatch-logs/-/client-cloudwatch-logs-3.635.0.tgz",
|
||||
"integrity": "sha512-M2SGf0B/WmHYNxUhUWKIYI5NW4Si7cyokB6Lt3RtDof3WVHA8L0LLl+EEo1URUkpxH8/F8VH2fTES7ODm2c+7g==",
|
||||
"version": "3.632.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/client-cloudwatch-logs/-/client-cloudwatch-logs-3.632.0.tgz",
|
||||
"integrity": "sha512-QrG04Ss2/KDsvGmoBH9QHjaC/wx7Gf9U2F5o8gYbHVU5ZGDW+zMX2Sj/6jjSyZ4qLD4sxK7sRHwK+fYA21OQQA==",
|
||||
"dependencies": {
|
||||
"@aws-crypto/sha256-browser": "5.2.0",
|
||||
"@aws-crypto/sha256-js": "5.2.0",
|
||||
"@aws-sdk/client-sso-oidc": "3.635.0",
|
||||
"@aws-sdk/client-sts": "3.635.0",
|
||||
"@aws-sdk/core": "3.635.0",
|
||||
"@aws-sdk/credential-provider-node": "3.635.0",
|
||||
"@aws-sdk/client-sso-oidc": "3.632.0",
|
||||
"@aws-sdk/client-sts": "3.632.0",
|
||||
"@aws-sdk/core": "3.629.0",
|
||||
"@aws-sdk/credential-provider-node": "3.632.0",
|
||||
"@aws-sdk/middleware-host-header": "3.620.0",
|
||||
"@aws-sdk/middleware-logger": "3.609.0",
|
||||
"@aws-sdk/middleware-recursion-detection": "3.620.0",
|
||||
@@ -319,7 +321,7 @@
|
||||
"@aws-sdk/util-user-agent-browser": "3.609.0",
|
||||
"@aws-sdk/util-user-agent-node": "3.614.0",
|
||||
"@smithy/config-resolver": "^3.0.5",
|
||||
"@smithy/core": "^2.4.0",
|
||||
"@smithy/core": "^2.3.2",
|
||||
"@smithy/eventstream-serde-browser": "^3.0.6",
|
||||
"@smithy/eventstream-serde-config-resolver": "^3.0.3",
|
||||
"@smithy/eventstream-serde-node": "^3.0.5",
|
||||
@@ -328,20 +330,20 @@
|
||||
"@smithy/invalid-dependency": "^3.0.3",
|
||||
"@smithy/middleware-content-length": "^3.0.5",
|
||||
"@smithy/middleware-endpoint": "^3.1.0",
|
||||
"@smithy/middleware-retry": "^3.0.15",
|
||||
"@smithy/middleware-retry": "^3.0.14",
|
||||
"@smithy/middleware-serde": "^3.0.3",
|
||||
"@smithy/middleware-stack": "^3.0.3",
|
||||
"@smithy/node-config-provider": "^3.1.4",
|
||||
"@smithy/node-http-handler": "^3.1.4",
|
||||
"@smithy/protocol-http": "^4.1.0",
|
||||
"@smithy/smithy-client": "^3.2.0",
|
||||
"@smithy/smithy-client": "^3.1.12",
|
||||
"@smithy/types": "^3.3.0",
|
||||
"@smithy/url-parser": "^3.0.3",
|
||||
"@smithy/util-base64": "^3.0.0",
|
||||
"@smithy/util-body-length-browser": "^3.0.0",
|
||||
"@smithy/util-body-length-node": "^3.0.0",
|
||||
"@smithy/util-defaults-mode-browser": "^3.0.15",
|
||||
"@smithy/util-defaults-mode-node": "^3.0.15",
|
||||
"@smithy/util-defaults-mode-browser": "^3.0.14",
|
||||
"@smithy/util-defaults-mode-node": "^3.0.14",
|
||||
"@smithy/util-endpoints": "^2.0.5",
|
||||
"@smithy/util-middleware": "^3.0.3",
|
||||
"@smithy/util-retry": "^3.0.3",
|
||||
@@ -559,6 +561,45 @@
|
||||
"node": ">=16.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@aws-sdk/client-elasticache/node_modules/@aws-sdk/core": {
|
||||
"version": "3.635.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.635.0.tgz",
|
||||
"integrity": "sha512-i1x/E/sgA+liUE1XJ7rj1dhyXpAKO1UKFUcTTHXok2ARjWTvszHnSXMOsB77aPbmn0fUp1JTx2kHUAZ1LVt5Bg==",
|
||||
"dependencies": {
|
||||
"@smithy/core": "^2.4.0",
|
||||
"@smithy/node-config-provider": "^3.1.4",
|
||||
"@smithy/property-provider": "^3.1.3",
|
||||
"@smithy/protocol-http": "^4.1.0",
|
||||
"@smithy/signature-v4": "^4.1.0",
|
||||
"@smithy/smithy-client": "^3.2.0",
|
||||
"@smithy/types": "^3.3.0",
|
||||
"@smithy/util-middleware": "^3.0.3",
|
||||
"fast-xml-parser": "4.4.1",
|
||||
"tslib": "^2.6.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@aws-sdk/client-elasticache/node_modules/@aws-sdk/credential-provider-http": {
|
||||
"version": "3.635.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.635.0.tgz",
|
||||
"integrity": "sha512-iJyRgEjOCQlBMXqtwPLIKYc7Bsc6nqjrZybdMDenPDa+kmLg7xh8LxHsu9088e+2/wtLicE34FsJJIfzu3L82g==",
|
||||
"dependencies": {
|
||||
"@aws-sdk/types": "3.609.0",
|
||||
"@smithy/fetch-http-handler": "^3.2.4",
|
||||
"@smithy/node-http-handler": "^3.1.4",
|
||||
"@smithy/property-provider": "^3.1.3",
|
||||
"@smithy/protocol-http": "^4.1.0",
|
||||
"@smithy/smithy-client": "^3.2.0",
|
||||
"@smithy/types": "^3.3.0",
|
||||
"@smithy/util-stream": "^3.1.3",
|
||||
"tslib": "^2.6.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@aws-sdk/client-elasticache/node_modules/@aws-sdk/credential-provider-ini": {
|
||||
"version": "3.637.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.637.0.tgz",
|
||||
@@ -657,16 +698,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@aws-sdk/client-iam": {
|
||||
"version": "3.635.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/client-iam/-/client-iam-3.635.0.tgz",
|
||||
"integrity": "sha512-sflTv6XcwO5UX+U9x31+T6TBEgVIzG61giLgRV51kkFGErni++GpxUcc6O1mpDwb3jpbntJf7QPjrkkj9wsTPA==",
|
||||
"version": "3.632.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/client-iam/-/client-iam-3.632.0.tgz",
|
||||
"integrity": "sha512-iwivASUliVxCEbT/mu5s03SCyqQKNXbJUpG17ywT4taA2xvLisGRI5iNV3OYT1qDmK9DOLMSJYpeX2GWCijPxw==",
|
||||
"dependencies": {
|
||||
"@aws-crypto/sha256-browser": "5.2.0",
|
||||
"@aws-crypto/sha256-js": "5.2.0",
|
||||
"@aws-sdk/client-sso-oidc": "3.635.0",
|
||||
"@aws-sdk/client-sts": "3.635.0",
|
||||
"@aws-sdk/core": "3.635.0",
|
||||
"@aws-sdk/credential-provider-node": "3.635.0",
|
||||
"@aws-sdk/client-sso-oidc": "3.632.0",
|
||||
"@aws-sdk/client-sts": "3.632.0",
|
||||
"@aws-sdk/core": "3.629.0",
|
||||
"@aws-sdk/credential-provider-node": "3.632.0",
|
||||
"@aws-sdk/middleware-host-header": "3.620.0",
|
||||
"@aws-sdk/middleware-logger": "3.609.0",
|
||||
"@aws-sdk/middleware-recursion-detection": "3.620.0",
|
||||
@@ -677,26 +718,26 @@
|
||||
"@aws-sdk/util-user-agent-browser": "3.609.0",
|
||||
"@aws-sdk/util-user-agent-node": "3.614.0",
|
||||
"@smithy/config-resolver": "^3.0.5",
|
||||
"@smithy/core": "^2.4.0",
|
||||
"@smithy/core": "^2.3.2",
|
||||
"@smithy/fetch-http-handler": "^3.2.4",
|
||||
"@smithy/hash-node": "^3.0.3",
|
||||
"@smithy/invalid-dependency": "^3.0.3",
|
||||
"@smithy/middleware-content-length": "^3.0.5",
|
||||
"@smithy/middleware-endpoint": "^3.1.0",
|
||||
"@smithy/middleware-retry": "^3.0.15",
|
||||
"@smithy/middleware-retry": "^3.0.14",
|
||||
"@smithy/middleware-serde": "^3.0.3",
|
||||
"@smithy/middleware-stack": "^3.0.3",
|
||||
"@smithy/node-config-provider": "^3.1.4",
|
||||
"@smithy/node-http-handler": "^3.1.4",
|
||||
"@smithy/protocol-http": "^4.1.0",
|
||||
"@smithy/smithy-client": "^3.2.0",
|
||||
"@smithy/smithy-client": "^3.1.12",
|
||||
"@smithy/types": "^3.3.0",
|
||||
"@smithy/url-parser": "^3.0.3",
|
||||
"@smithy/util-base64": "^3.0.0",
|
||||
"@smithy/util-body-length-browser": "^3.0.0",
|
||||
"@smithy/util-body-length-node": "^3.0.0",
|
||||
"@smithy/util-defaults-mode-browser": "^3.0.15",
|
||||
"@smithy/util-defaults-mode-node": "^3.0.15",
|
||||
"@smithy/util-defaults-mode-browser": "^3.0.14",
|
||||
"@smithy/util-defaults-mode-node": "^3.0.14",
|
||||
"@smithy/util-endpoints": "^2.0.5",
|
||||
"@smithy/util-middleware": "^3.0.3",
|
||||
"@smithy/util-retry": "^3.0.3",
|
||||
@@ -709,16 +750,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@aws-sdk/client-kms": {
|
||||
"version": "3.635.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/client-kms/-/client-kms-3.635.0.tgz",
|
||||
"integrity": "sha512-H2qJVXiz3WbBQwtxqfEvuJ9pCKJdqEWkzQ8I4knkXbQkyy78GktfMwBWqFyw3eap/s9rQmsyXbuuhBIozbemOg==",
|
||||
"version": "3.632.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/client-kms/-/client-kms-3.632.0.tgz",
|
||||
"integrity": "sha512-uMm1fAIdImaBKwKXnpcD1cpRlTAbLisbRbNJqzJdH+snN0jAkukLNUMUheb0XKaczk7eQrp5w4inlWrRvEmjSA==",
|
||||
"dependencies": {
|
||||
"@aws-crypto/sha256-browser": "5.2.0",
|
||||
"@aws-crypto/sha256-js": "5.2.0",
|
||||
"@aws-sdk/client-sso-oidc": "3.635.0",
|
||||
"@aws-sdk/client-sts": "3.635.0",
|
||||
"@aws-sdk/core": "3.635.0",
|
||||
"@aws-sdk/credential-provider-node": "3.635.0",
|
||||
"@aws-sdk/client-sso-oidc": "3.632.0",
|
||||
"@aws-sdk/client-sts": "3.632.0",
|
||||
"@aws-sdk/core": "3.629.0",
|
||||
"@aws-sdk/credential-provider-node": "3.632.0",
|
||||
"@aws-sdk/middleware-host-header": "3.620.0",
|
||||
"@aws-sdk/middleware-logger": "3.609.0",
|
||||
"@aws-sdk/middleware-recursion-detection": "3.620.0",
|
||||
@@ -729,26 +770,26 @@
|
||||
"@aws-sdk/util-user-agent-browser": "3.609.0",
|
||||
"@aws-sdk/util-user-agent-node": "3.614.0",
|
||||
"@smithy/config-resolver": "^3.0.5",
|
||||
"@smithy/core": "^2.4.0",
|
||||
"@smithy/core": "^2.3.2",
|
||||
"@smithy/fetch-http-handler": "^3.2.4",
|
||||
"@smithy/hash-node": "^3.0.3",
|
||||
"@smithy/invalid-dependency": "^3.0.3",
|
||||
"@smithy/middleware-content-length": "^3.0.5",
|
||||
"@smithy/middleware-endpoint": "^3.1.0",
|
||||
"@smithy/middleware-retry": "^3.0.15",
|
||||
"@smithy/middleware-retry": "^3.0.14",
|
||||
"@smithy/middleware-serde": "^3.0.3",
|
||||
"@smithy/middleware-stack": "^3.0.3",
|
||||
"@smithy/node-config-provider": "^3.1.4",
|
||||
"@smithy/node-http-handler": "^3.1.4",
|
||||
"@smithy/protocol-http": "^4.1.0",
|
||||
"@smithy/smithy-client": "^3.2.0",
|
||||
"@smithy/smithy-client": "^3.1.12",
|
||||
"@smithy/types": "^3.3.0",
|
||||
"@smithy/url-parser": "^3.0.3",
|
||||
"@smithy/util-base64": "^3.0.0",
|
||||
"@smithy/util-body-length-browser": "^3.0.0",
|
||||
"@smithy/util-body-length-node": "^3.0.0",
|
||||
"@smithy/util-defaults-mode-browser": "^3.0.15",
|
||||
"@smithy/util-defaults-mode-node": "^3.0.15",
|
||||
"@smithy/util-defaults-mode-browser": "^3.0.14",
|
||||
"@smithy/util-defaults-mode-node": "^3.0.14",
|
||||
"@smithy/util-endpoints": "^2.0.5",
|
||||
"@smithy/util-middleware": "^3.0.3",
|
||||
"@smithy/util-retry": "^3.0.3",
|
||||
@@ -760,16 +801,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@aws-sdk/client-secrets-manager": {
|
||||
"version": "3.635.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/client-secrets-manager/-/client-secrets-manager-3.635.0.tgz",
|
||||
"integrity": "sha512-taa+sa8xFym7ZYzybqkOVy5MAdedcIt2pKEVOReEaNkUuOwMUo+wF4QhJeyhaLPTs2l0rHR1bnwYOG+0fW0Kvg==",
|
||||
"version": "3.632.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/client-secrets-manager/-/client-secrets-manager-3.632.0.tgz",
|
||||
"integrity": "sha512-WsQhPHHK1yPfALcP1B7nBSGDzky6vFTUEXnUdfzb5Xy2cT+JTBTS6ChtQGqqOuGHDP/3t/9soqZ+L6rUCYBb/Q==",
|
||||
"dependencies": {
|
||||
"@aws-crypto/sha256-browser": "5.2.0",
|
||||
"@aws-crypto/sha256-js": "5.2.0",
|
||||
"@aws-sdk/client-sso-oidc": "3.635.0",
|
||||
"@aws-sdk/client-sts": "3.635.0",
|
||||
"@aws-sdk/core": "3.635.0",
|
||||
"@aws-sdk/credential-provider-node": "3.635.0",
|
||||
"@aws-sdk/client-sso-oidc": "3.632.0",
|
||||
"@aws-sdk/client-sts": "3.632.0",
|
||||
"@aws-sdk/core": "3.629.0",
|
||||
"@aws-sdk/credential-provider-node": "3.632.0",
|
||||
"@aws-sdk/middleware-host-header": "3.620.0",
|
||||
"@aws-sdk/middleware-logger": "3.609.0",
|
||||
"@aws-sdk/middleware-recursion-detection": "3.620.0",
|
||||
@@ -780,26 +821,26 @@
|
||||
"@aws-sdk/util-user-agent-browser": "3.609.0",
|
||||
"@aws-sdk/util-user-agent-node": "3.614.0",
|
||||
"@smithy/config-resolver": "^3.0.5",
|
||||
"@smithy/core": "^2.4.0",
|
||||
"@smithy/core": "^2.3.2",
|
||||
"@smithy/fetch-http-handler": "^3.2.4",
|
||||
"@smithy/hash-node": "^3.0.3",
|
||||
"@smithy/invalid-dependency": "^3.0.3",
|
||||
"@smithy/middleware-content-length": "^3.0.5",
|
||||
"@smithy/middleware-endpoint": "^3.1.0",
|
||||
"@smithy/middleware-retry": "^3.0.15",
|
||||
"@smithy/middleware-retry": "^3.0.14",
|
||||
"@smithy/middleware-serde": "^3.0.3",
|
||||
"@smithy/middleware-stack": "^3.0.3",
|
||||
"@smithy/node-config-provider": "^3.1.4",
|
||||
"@smithy/node-http-handler": "^3.1.4",
|
||||
"@smithy/protocol-http": "^4.1.0",
|
||||
"@smithy/smithy-client": "^3.2.0",
|
||||
"@smithy/smithy-client": "^3.1.12",
|
||||
"@smithy/types": "^3.3.0",
|
||||
"@smithy/url-parser": "^3.0.3",
|
||||
"@smithy/util-base64": "^3.0.0",
|
||||
"@smithy/util-body-length-browser": "^3.0.0",
|
||||
"@smithy/util-body-length-node": "^3.0.0",
|
||||
"@smithy/util-defaults-mode-browser": "^3.0.15",
|
||||
"@smithy/util-defaults-mode-node": "^3.0.15",
|
||||
"@smithy/util-defaults-mode-browser": "^3.0.14",
|
||||
"@smithy/util-defaults-mode-node": "^3.0.14",
|
||||
"@smithy/util-endpoints": "^2.0.5",
|
||||
"@smithy/util-middleware": "^3.0.3",
|
||||
"@smithy/util-retry": "^3.0.3",
|
||||
@@ -812,13 +853,13 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@aws-sdk/client-sso": {
|
||||
"version": "3.635.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.635.0.tgz",
|
||||
"integrity": "sha512-/Hl69+JpFUo9JNVmh2gSvMgYkE4xjd+1okiRoPBbQqjI7YBP2JWCUDP8IoEkNq3wj0vNTq0OWfn6RpZycIkAXQ==",
|
||||
"version": "3.632.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.632.0.tgz",
|
||||
"integrity": "sha512-iYWHiKBz44m3chCFvtvHnvCpL2rALzyr1e6tOZV3dLlOKtQtDUlPy6OtnXDu4y+wyJCniy8ivG3+LAe4klzn1Q==",
|
||||
"dependencies": {
|
||||
"@aws-crypto/sha256-browser": "5.2.0",
|
||||
"@aws-crypto/sha256-js": "5.2.0",
|
||||
"@aws-sdk/core": "3.635.0",
|
||||
"@aws-sdk/core": "3.629.0",
|
||||
"@aws-sdk/middleware-host-header": "3.620.0",
|
||||
"@aws-sdk/middleware-logger": "3.609.0",
|
||||
"@aws-sdk/middleware-recursion-detection": "3.620.0",
|
||||
@@ -829,26 +870,26 @@
|
||||
"@aws-sdk/util-user-agent-browser": "3.609.0",
|
||||
"@aws-sdk/util-user-agent-node": "3.614.0",
|
||||
"@smithy/config-resolver": "^3.0.5",
|
||||
"@smithy/core": "^2.4.0",
|
||||
"@smithy/core": "^2.3.2",
|
||||
"@smithy/fetch-http-handler": "^3.2.4",
|
||||
"@smithy/hash-node": "^3.0.3",
|
||||
"@smithy/invalid-dependency": "^3.0.3",
|
||||
"@smithy/middleware-content-length": "^3.0.5",
|
||||
"@smithy/middleware-endpoint": "^3.1.0",
|
||||
"@smithy/middleware-retry": "^3.0.15",
|
||||
"@smithy/middleware-retry": "^3.0.14",
|
||||
"@smithy/middleware-serde": "^3.0.3",
|
||||
"@smithy/middleware-stack": "^3.0.3",
|
||||
"@smithy/node-config-provider": "^3.1.4",
|
||||
"@smithy/node-http-handler": "^3.1.4",
|
||||
"@smithy/protocol-http": "^4.1.0",
|
||||
"@smithy/smithy-client": "^3.2.0",
|
||||
"@smithy/smithy-client": "^3.1.12",
|
||||
"@smithy/types": "^3.3.0",
|
||||
"@smithy/url-parser": "^3.0.3",
|
||||
"@smithy/util-base64": "^3.0.0",
|
||||
"@smithy/util-body-length-browser": "^3.0.0",
|
||||
"@smithy/util-body-length-node": "^3.0.0",
|
||||
"@smithy/util-defaults-mode-browser": "^3.0.15",
|
||||
"@smithy/util-defaults-mode-node": "^3.0.15",
|
||||
"@smithy/util-defaults-mode-browser": "^3.0.14",
|
||||
"@smithy/util-defaults-mode-node": "^3.0.14",
|
||||
"@smithy/util-endpoints": "^2.0.5",
|
||||
"@smithy/util-middleware": "^3.0.3",
|
||||
"@smithy/util-retry": "^3.0.3",
|
||||
@@ -860,14 +901,14 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@aws-sdk/client-sso-oidc": {
|
||||
"version": "3.635.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.635.0.tgz",
|
||||
"integrity": "sha512-RIwDlhzAFttB1vbpznewnPqz7h1H/2UhQLwB38yfZBwYQOxyxVfLV5j5VoUUX3jY4i4qH9wiHc7b02qeAOZY6g==",
|
||||
"version": "3.632.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.632.0.tgz",
|
||||
"integrity": "sha512-Oh1fIWaoZluihOCb/zDEpRTi+6an82fgJz7fyRBugyLhEtDjmvpCQ3oKjzaOhoN+4EvXAm1ZS/ZgpvXBlIRTgw==",
|
||||
"dependencies": {
|
||||
"@aws-crypto/sha256-browser": "5.2.0",
|
||||
"@aws-crypto/sha256-js": "5.2.0",
|
||||
"@aws-sdk/core": "3.635.0",
|
||||
"@aws-sdk/credential-provider-node": "3.635.0",
|
||||
"@aws-sdk/core": "3.629.0",
|
||||
"@aws-sdk/credential-provider-node": "3.632.0",
|
||||
"@aws-sdk/middleware-host-header": "3.620.0",
|
||||
"@aws-sdk/middleware-logger": "3.609.0",
|
||||
"@aws-sdk/middleware-recursion-detection": "3.620.0",
|
||||
@@ -878,26 +919,26 @@
|
||||
"@aws-sdk/util-user-agent-browser": "3.609.0",
|
||||
"@aws-sdk/util-user-agent-node": "3.614.0",
|
||||
"@smithy/config-resolver": "^3.0.5",
|
||||
"@smithy/core": "^2.4.0",
|
||||
"@smithy/core": "^2.3.2",
|
||||
"@smithy/fetch-http-handler": "^3.2.4",
|
||||
"@smithy/hash-node": "^3.0.3",
|
||||
"@smithy/invalid-dependency": "^3.0.3",
|
||||
"@smithy/middleware-content-length": "^3.0.5",
|
||||
"@smithy/middleware-endpoint": "^3.1.0",
|
||||
"@smithy/middleware-retry": "^3.0.15",
|
||||
"@smithy/middleware-retry": "^3.0.14",
|
||||
"@smithy/middleware-serde": "^3.0.3",
|
||||
"@smithy/middleware-stack": "^3.0.3",
|
||||
"@smithy/node-config-provider": "^3.1.4",
|
||||
"@smithy/node-http-handler": "^3.1.4",
|
||||
"@smithy/protocol-http": "^4.1.0",
|
||||
"@smithy/smithy-client": "^3.2.0",
|
||||
"@smithy/smithy-client": "^3.1.12",
|
||||
"@smithy/types": "^3.3.0",
|
||||
"@smithy/url-parser": "^3.0.3",
|
||||
"@smithy/util-base64": "^3.0.0",
|
||||
"@smithy/util-body-length-browser": "^3.0.0",
|
||||
"@smithy/util-body-length-node": "^3.0.0",
|
||||
"@smithy/util-defaults-mode-browser": "^3.0.15",
|
||||
"@smithy/util-defaults-mode-node": "^3.0.15",
|
||||
"@smithy/util-defaults-mode-browser": "^3.0.14",
|
||||
"@smithy/util-defaults-mode-node": "^3.0.14",
|
||||
"@smithy/util-endpoints": "^2.0.5",
|
||||
"@smithy/util-middleware": "^3.0.3",
|
||||
"@smithy/util-retry": "^3.0.3",
|
||||
@@ -908,19 +949,19 @@
|
||||
"node": ">=16.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@aws-sdk/client-sts": "^3.635.0"
|
||||
"@aws-sdk/client-sts": "^3.632.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@aws-sdk/client-sts": {
|
||||
"version": "3.635.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.635.0.tgz",
|
||||
"integrity": "sha512-Al2ytE69+cbA44qHlelqhzWwbURikfF13Zkal9utIG5Q6T2c7r8p6sePN92n8l/x1v0FhJ5VTxKak+cPTE0CZQ==",
|
||||
"version": "3.632.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.632.0.tgz",
|
||||
"integrity": "sha512-Ss5cBH09icpTvT+jtGGuQlRdwtO7RyE9BF4ZV/CEPATdd9whtJt4Qxdya8BUnkWR7h5HHTrQHqai3YVYjku41A==",
|
||||
"dependencies": {
|
||||
"@aws-crypto/sha256-browser": "5.2.0",
|
||||
"@aws-crypto/sha256-js": "5.2.0",
|
||||
"@aws-sdk/client-sso-oidc": "3.635.0",
|
||||
"@aws-sdk/core": "3.635.0",
|
||||
"@aws-sdk/credential-provider-node": "3.635.0",
|
||||
"@aws-sdk/client-sso-oidc": "3.632.0",
|
||||
"@aws-sdk/core": "3.629.0",
|
||||
"@aws-sdk/credential-provider-node": "3.632.0",
|
||||
"@aws-sdk/middleware-host-header": "3.620.0",
|
||||
"@aws-sdk/middleware-logger": "3.609.0",
|
||||
"@aws-sdk/middleware-recursion-detection": "3.620.0",
|
||||
@@ -931,26 +972,26 @@
|
||||
"@aws-sdk/util-user-agent-browser": "3.609.0",
|
||||
"@aws-sdk/util-user-agent-node": "3.614.0",
|
||||
"@smithy/config-resolver": "^3.0.5",
|
||||
"@smithy/core": "^2.4.0",
|
||||
"@smithy/core": "^2.3.2",
|
||||
"@smithy/fetch-http-handler": "^3.2.4",
|
||||
"@smithy/hash-node": "^3.0.3",
|
||||
"@smithy/invalid-dependency": "^3.0.3",
|
||||
"@smithy/middleware-content-length": "^3.0.5",
|
||||
"@smithy/middleware-endpoint": "^3.1.0",
|
||||
"@smithy/middleware-retry": "^3.0.15",
|
||||
"@smithy/middleware-retry": "^3.0.14",
|
||||
"@smithy/middleware-serde": "^3.0.3",
|
||||
"@smithy/middleware-stack": "^3.0.3",
|
||||
"@smithy/node-config-provider": "^3.1.4",
|
||||
"@smithy/node-http-handler": "^3.1.4",
|
||||
"@smithy/protocol-http": "^4.1.0",
|
||||
"@smithy/smithy-client": "^3.2.0",
|
||||
"@smithy/smithy-client": "^3.1.12",
|
||||
"@smithy/types": "^3.3.0",
|
||||
"@smithy/url-parser": "^3.0.3",
|
||||
"@smithy/util-base64": "^3.0.0",
|
||||
"@smithy/util-body-length-browser": "^3.0.0",
|
||||
"@smithy/util-body-length-node": "^3.0.0",
|
||||
"@smithy/util-defaults-mode-browser": "^3.0.15",
|
||||
"@smithy/util-defaults-mode-node": "^3.0.15",
|
||||
"@smithy/util-defaults-mode-browser": "^3.0.14",
|
||||
"@smithy/util-defaults-mode-node": "^3.0.14",
|
||||
"@smithy/util-endpoints": "^2.0.5",
|
||||
"@smithy/util-middleware": "^3.0.3",
|
||||
"@smithy/util-retry": "^3.0.3",
|
||||
@@ -962,16 +1003,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@aws-sdk/core": {
|
||||
"version": "3.635.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.635.0.tgz",
|
||||
"integrity": "sha512-i1x/E/sgA+liUE1XJ7rj1dhyXpAKO1UKFUcTTHXok2ARjWTvszHnSXMOsB77aPbmn0fUp1JTx2kHUAZ1LVt5Bg==",
|
||||
"version": "3.629.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.629.0.tgz",
|
||||
"integrity": "sha512-+/ShPU/tyIBM3oY1cnjgNA/tFyHtlWq+wXF9xEKRv19NOpYbWQ+xzNwVjGq8vR07cCRqy/sDQLWPhxjtuV/FiQ==",
|
||||
"dependencies": {
|
||||
"@smithy/core": "^2.4.0",
|
||||
"@smithy/core": "^2.3.2",
|
||||
"@smithy/node-config-provider": "^3.1.4",
|
||||
"@smithy/property-provider": "^3.1.3",
|
||||
"@smithy/protocol-http": "^4.1.0",
|
||||
"@smithy/signature-v4": "^4.1.0",
|
||||
"@smithy/smithy-client": "^3.2.0",
|
||||
"@smithy/smithy-client": "^3.1.12",
|
||||
"@smithy/types": "^3.3.0",
|
||||
"@smithy/util-middleware": "^3.0.3",
|
||||
"fast-xml-parser": "4.4.1",
|
||||
@@ -996,16 +1037,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@aws-sdk/credential-provider-http": {
|
||||
"version": "3.635.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.635.0.tgz",
|
||||
"integrity": "sha512-iJyRgEjOCQlBMXqtwPLIKYc7Bsc6nqjrZybdMDenPDa+kmLg7xh8LxHsu9088e+2/wtLicE34FsJJIfzu3L82g==",
|
||||
"version": "3.622.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.622.0.tgz",
|
||||
"integrity": "sha512-VUHbr24Oll1RK3WR8XLUugLpgK9ZuxEm/NVeVqyFts1Ck9gsKpRg1x4eH7L7tW3SJ4TDEQNMbD7/7J+eoL2svg==",
|
||||
"dependencies": {
|
||||
"@aws-sdk/types": "3.609.0",
|
||||
"@smithy/fetch-http-handler": "^3.2.4",
|
||||
"@smithy/node-http-handler": "^3.1.4",
|
||||
"@smithy/property-provider": "^3.1.3",
|
||||
"@smithy/protocol-http": "^4.1.0",
|
||||
"@smithy/smithy-client": "^3.2.0",
|
||||
"@smithy/smithy-client": "^3.1.12",
|
||||
"@smithy/types": "^3.3.0",
|
||||
"@smithy/util-stream": "^3.1.3",
|
||||
"tslib": "^2.6.2"
|
||||
@@ -1015,14 +1056,14 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@aws-sdk/credential-provider-ini": {
|
||||
"version": "3.635.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.635.0.tgz",
|
||||
"integrity": "sha512-+OqcNhhOFFY08YHLjO9/Y1n37RKAO7LADnsJ7VTXca7IfvYh27BVBn+FdlqnyEb1MQ5ArHTY4pq3pKRIg6RW4Q==",
|
||||
"version": "3.632.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.632.0.tgz",
|
||||
"integrity": "sha512-m6epoW41xa1ajU5OiHcmQHoGVtrbXBaRBOUhlCLZmcaqMLYsboM4iD/WZP8aatKEON5tTnVXh/4StV8D/+wemw==",
|
||||
"dependencies": {
|
||||
"@aws-sdk/credential-provider-env": "3.620.1",
|
||||
"@aws-sdk/credential-provider-http": "3.635.0",
|
||||
"@aws-sdk/credential-provider-http": "3.622.0",
|
||||
"@aws-sdk/credential-provider-process": "3.620.1",
|
||||
"@aws-sdk/credential-provider-sso": "3.635.0",
|
||||
"@aws-sdk/credential-provider-sso": "3.632.0",
|
||||
"@aws-sdk/credential-provider-web-identity": "3.621.0",
|
||||
"@aws-sdk/types": "3.609.0",
|
||||
"@smithy/credential-provider-imds": "^3.2.0",
|
||||
@@ -1035,19 +1076,19 @@
|
||||
"node": ">=16.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@aws-sdk/client-sts": "^3.635.0"
|
||||
"@aws-sdk/client-sts": "^3.632.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@aws-sdk/credential-provider-node": {
|
||||
"version": "3.635.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.635.0.tgz",
|
||||
"integrity": "sha512-bmd23mnb94S6AxmWPgqJTnvT9ONKlTx7EPafE1RNO+vUl6mHih4iyqX6ZPaRcSfaPx4U1R7H1RM8cSnafXgaBg==",
|
||||
"version": "3.632.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.632.0.tgz",
|
||||
"integrity": "sha512-cL8fuJWm/xQBO4XJPkeuZzl3XinIn9EExWgzpG48NRMKR5us1RI/ucv7xFbBBaG+r/sDR2HpYBIA3lVIpm1H3Q==",
|
||||
"dependencies": {
|
||||
"@aws-sdk/credential-provider-env": "3.620.1",
|
||||
"@aws-sdk/credential-provider-http": "3.635.0",
|
||||
"@aws-sdk/credential-provider-ini": "3.635.0",
|
||||
"@aws-sdk/credential-provider-http": "3.622.0",
|
||||
"@aws-sdk/credential-provider-ini": "3.632.0",
|
||||
"@aws-sdk/credential-provider-process": "3.620.1",
|
||||
"@aws-sdk/credential-provider-sso": "3.635.0",
|
||||
"@aws-sdk/credential-provider-sso": "3.632.0",
|
||||
"@aws-sdk/credential-provider-web-identity": "3.621.0",
|
||||
"@aws-sdk/types": "3.609.0",
|
||||
"@smithy/credential-provider-imds": "^3.2.0",
|
||||
@@ -1076,11 +1117,11 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@aws-sdk/credential-provider-sso": {
|
||||
"version": "3.635.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.635.0.tgz",
|
||||
"integrity": "sha512-hO/fKyvUaGpK9zyvCnmJz70EputvGWDr2UTOn/RzvcR6UB4yXoFf0QcCMubEsE3v67EsAv6PadgOeJ0vz6IazA==",
|
||||
"version": "3.632.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.632.0.tgz",
|
||||
"integrity": "sha512-P/4wB6j7ym5QCPTL2xlMfvf2NcXSh+z0jmsZP4WW/tVwab4hvgabPPbLeEZDSWZ0BpgtxKGvRq0GSHuGeirQbA==",
|
||||
"dependencies": {
|
||||
"@aws-sdk/client-sso": "3.635.0",
|
||||
"@aws-sdk/client-sso": "3.632.0",
|
||||
"@aws-sdk/token-providers": "3.614.0",
|
||||
"@aws-sdk/types": "3.609.0",
|
||||
"@smithy/property-provider": "^3.1.3",
|
||||
@@ -3723,6 +3764,60 @@
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@elastic/elasticsearch": {
|
||||
"version": "8.15.0",
|
||||
"resolved": "https://registry.npmjs.org/@elastic/elasticsearch/-/elasticsearch-8.15.0.tgz",
|
||||
"integrity": "sha512-mG90EMdTDoT6GFSdqpUAhWK9LGuiJo6tOWqs0Usd/t15mPQDj7ZqHXfCBqNkASZpwPZpbAYVjd57S6nbUBINCg==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@elastic/transport": "^8.7.0",
|
||||
"tslib": "^2.4.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
}
|
||||
},
|
||||
"node_modules/@elastic/transport": {
|
||||
"version": "8.7.1",
|
||||
"resolved": "https://registry.npmjs.org/@elastic/transport/-/transport-8.7.1.tgz",
|
||||
"integrity": "sha512-2eeMVkz57Ayxv+UAZkIKzzrUu7nm96jr3+N3kLfbBqALYe2jwDpLr9pR0jc/x9HyJKAM909YGaNlHFDZeb0+Mw==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@opentelemetry/api": "1.x",
|
||||
"debug": "^4.3.4",
|
||||
"hpagent": "^1.0.0",
|
||||
"ms": "^2.1.3",
|
||||
"secure-json-parse": "^2.4.0",
|
||||
"tslib": "^2.4.0",
|
||||
"undici": "^6.12.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
}
|
||||
},
|
||||
"node_modules/@elastic/transport/node_modules/debug": {
|
||||
"version": "4.3.6",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz",
|
||||
"integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"ms": "2.1.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"supports-color": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/@elastic/transport/node_modules/debug/node_modules/ms": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@esbuild/aix-ppc64": {
|
||||
"version": "0.20.2",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.20.2.tgz",
|
||||
@@ -5219,6 +5314,15 @@
|
||||
"resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-7.1.0.tgz",
|
||||
"integrity": "sha512-y92CpG4kFFtBBjni8LHoV12IegJ+KFxLgKRengrVjKmGE5XMeCuGvlfRe75lTRrgXaG6XIWJlFpIDTlkoJsU8w=="
|
||||
},
|
||||
"node_modules/@opentelemetry/api": {
|
||||
"version": "1.9.0",
|
||||
"resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz",
|
||||
"integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==",
|
||||
"license": "Apache-2.0",
|
||||
"engines": {
|
||||
"node": ">=8.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@peculiar/asn1-cms": {
|
||||
"version": "2.3.8",
|
||||
"resolved": "https://registry.npmjs.org/@peculiar/asn1-cms/-/asn1-cms-2.3.8.tgz",
|
||||
@@ -11287,12 +11391,6 @@
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/globrex": {
|
||||
"version": "0.1.2",
|
||||
"resolved": "https://registry.npmjs.org/globrex/-/globrex-0.1.2.tgz",
|
||||
"integrity": "sha512-uHJgbwAMwNFf5mLst7IWLNg14x1CkeqglJb/K3doi4dw6q2IvAAmM/Y81kevy83wP+Sst+nutFTYOGg3d1lsxg==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/google-auth-library": {
|
||||
"version": "9.9.0",
|
||||
"resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.9.0.tgz",
|
||||
@@ -11584,6 +11682,15 @@
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/hpagent": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/hpagent/-/hpagent-1.2.0.tgz",
|
||||
"integrity": "sha512-A91dYTeIB6NoXG+PxTQpCCDDnfHsW9kc06Lvpu1TEe9gnd6ZFeiBoRO9JvzEv6xK7EX97/dUE8g/vBMTqTS3CA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
}
|
||||
},
|
||||
"node_modules/http-errors": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz",
|
||||
@@ -13040,12 +13147,12 @@
|
||||
}
|
||||
},
|
||||
"node_modules/micromatch": {
|
||||
"version": "4.0.5",
|
||||
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz",
|
||||
"integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==",
|
||||
"version": "4.0.8",
|
||||
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
|
||||
"integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"braces": "^3.0.2",
|
||||
"braces": "^3.0.3",
|
||||
"picomatch": "^2.3.1"
|
||||
},
|
||||
"engines": {
|
||||
@@ -15495,6 +15602,34 @@
|
||||
"resolved": "https://registry.npmjs.org/sax/-/sax-1.3.0.tgz",
|
||||
"integrity": "sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA=="
|
||||
},
|
||||
"node_modules/scim-patch": {
|
||||
"version": "0.8.3",
|
||||
"resolved": "https://registry.npmjs.org/scim-patch/-/scim-patch-0.8.3.tgz",
|
||||
"integrity": "sha512-3d0wD4THAt03Zgi08kCQwc+lvPJ2v4wwk41b0xViVa4gLYSgRUCmGkJNBsaE+yoKg0fufTOJCcSrufZaqYn/og==",
|
||||
"dependencies": {
|
||||
"@types/node": "^22.0.0",
|
||||
"fast-deep-equal": "3.1.3",
|
||||
"scim2-parse-filter": "0.2.10"
|
||||
}
|
||||
},
|
||||
"node_modules/scim-patch/node_modules/@types/node": {
|
||||
"version": "22.5.1",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.5.1.tgz",
|
||||
"integrity": "sha512-KkHsxej0j9IW1KKOOAA/XBA0z08UFSrRQHErzEfA3Vgq57eXIMYboIlHJuYIfd+lwCQjtKqUu3UnmKbtUc9yRw==",
|
||||
"dependencies": {
|
||||
"undici-types": "~6.19.2"
|
||||
}
|
||||
},
|
||||
"node_modules/scim-patch/node_modules/undici-types": {
|
||||
"version": "6.19.8",
|
||||
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz",
|
||||
"integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw=="
|
||||
},
|
||||
"node_modules/scim2-parse-filter": {
|
||||
"version": "0.2.10",
|
||||
"resolved": "https://registry.npmjs.org/scim2-parse-filter/-/scim2-parse-filter-0.2.10.tgz",
|
||||
"integrity": "sha512-k5TgGSuQEbR4jXRgw/GPAYVL9fMp1pWA2abLF5z3q9IGWSuZTqbrZBOSUezvc+rtViXr+czSZjg3eAN4QSTvxQ=="
|
||||
},
|
||||
"node_modules/secure-json-parse": {
|
||||
"version": "2.7.0",
|
||||
"resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-2.7.0.tgz",
|
||||
@@ -16573,26 +16708,6 @@
|
||||
"node": "^12.20.0 || >=14"
|
||||
}
|
||||
},
|
||||
"node_modules/tsconfck": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/tsconfck/-/tsconfck-2.1.2.tgz",
|
||||
"integrity": "sha512-ghqN1b0puy3MhhviwO2kGF8SeMDNhEbnKxjK7h6+fvY9JAxqvXi8y5NAHSQv687OVboS2uZIByzGd45/YxrRHg==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"tsconfck": "bin/tsconfck.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^14.13.1 || ^16 || >=18"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"typescript": "^4.3.5 || ^5.0.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"typescript": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/tsconfig-paths": {
|
||||
"version": "4.2.0",
|
||||
"resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-4.2.0.tgz",
|
||||
@@ -17412,6 +17527,15 @@
|
||||
"integrity": "sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/undici": {
|
||||
"version": "6.19.8",
|
||||
"resolved": "https://registry.npmjs.org/undici/-/undici-6.19.8.tgz",
|
||||
"integrity": "sha512-U8uCCl2x9TK3WANvmBavymRzxbfFYG+tAu+fgx3zxQy3qdagQqBLwJVrdyO1TBfUXvfKveMKJZhpvUYoOjM+4g==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=18.17"
|
||||
}
|
||||
},
|
||||
"node_modules/undici-types": {
|
||||
"version": "5.26.5",
|
||||
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
|
||||
@@ -17749,48 +17873,6 @@
|
||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/vite-tsconfig-paths": {
|
||||
"version": "4.2.2",
|
||||
"resolved": "https://registry.npmjs.org/vite-tsconfig-paths/-/vite-tsconfig-paths-4.2.2.tgz",
|
||||
"integrity": "sha512-dq0FjyxHHDnp0uS3P12WEOX2W7NeuLzX9AWP38D7Zw2CTbFErapwQVlCiT5DMJcVWKQ1MMdTe92PZl/rBQ7qcw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"debug": "^4.1.1",
|
||||
"globrex": "^0.1.2",
|
||||
"tsconfck": "^2.1.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"vite": "*"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"vite": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/vite-tsconfig-paths/node_modules/debug": {
|
||||
"version": "4.3.4",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
|
||||
"integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"ms": "2.1.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"supports-color": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/vite-tsconfig-paths/node_modules/ms": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/vite/node_modules/@esbuild/android-arm": {
|
||||
"version": "0.20.2",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.20.2.tgz",
|
||||
|
@@ -50,6 +50,7 @@
|
||||
"migration:down": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
|
||||
"migration:list": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
|
||||
"migration:latest": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
|
||||
"migration:status": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
|
||||
"migration:rollback": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
|
||||
"seed:new": "tsx ./scripts/create-seed-file.ts",
|
||||
"seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
|
||||
@@ -102,7 +103,6 @@
|
||||
"tsup": "^8.0.1",
|
||||
"tsx": "^4.4.0",
|
||||
"typescript": "^5.3.2",
|
||||
"vite-tsconfig-paths": "^4.2.2",
|
||||
"vitest": "^1.2.2"
|
||||
},
|
||||
"dependencies": {
|
||||
@@ -112,6 +112,7 @@
|
||||
"@aws-sdk/client-secrets-manager": "^3.504.0",
|
||||
"@aws-sdk/client-sts": "^3.600.0",
|
||||
"@casl/ability": "^6.5.0",
|
||||
"@elastic/elasticsearch": "^8.15.0",
|
||||
"@fastify/cookie": "^9.3.1",
|
||||
"@fastify/cors": "^8.5.0",
|
||||
"@fastify/etag": "^5.1.0",
|
||||
@@ -176,6 +177,8 @@
|
||||
"posthog-node": "^3.6.2",
|
||||
"probot": "^13.0.0",
|
||||
"safe-regex": "^2.1.1",
|
||||
"scim-patch": "^0.8.3",
|
||||
"scim2-parse-filter": "^0.2.10",
|
||||
"smee-client": "^2.0.0",
|
||||
"tedious": "^18.2.1",
|
||||
"tweetnacl": "^1.0.3",
|
||||
|
2
backend/src/@types/fastify.d.ts
vendored
@@ -7,6 +7,7 @@ import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-se
|
||||
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
|
||||
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
|
||||
import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
|
||||
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
|
||||
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
|
||||
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
|
||||
@@ -36,7 +37,6 @@ import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
|
||||
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
|
||||
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
|
||||
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
|
||||
import { TCertificateEstServiceFactory } from "@app/services/certificate-est/certificate-est-service";
|
||||
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
|
||||
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
|
||||
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
|
||||
|
@@ -115,7 +115,14 @@ export async function down(knex: Knex): Promise<void> {
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
approverId: knex(TableName.ProjectMembership)
|
||||
.select("id")
|
||||
.join(
|
||||
TableName.SecretApprovalPolicy,
|
||||
`${TableName.SecretApprovalPolicy}.id`,
|
||||
`${TableName.SecretApprovalPolicyApprover}.policyId`
|
||||
)
|
||||
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
|
||||
.select(knex.ref("id").withSchema(TableName.ProjectMembership))
|
||||
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
|
||||
.where("userId", knex.raw("??", [`${TableName.SecretApprovalPolicyApprover}.approverUserId`]))
|
||||
});
|
||||
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
|
||||
@@ -147,13 +154,27 @@ export async function down(knex: Knex): Promise<void> {
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
committerId: knex(TableName.ProjectMembership)
|
||||
.select("id")
|
||||
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`])),
|
||||
.join(
|
||||
TableName.SecretApprovalPolicy,
|
||||
`${TableName.SecretApprovalPolicy}.id`,
|
||||
`${TableName.SecretApprovalRequest}.policyId`
|
||||
)
|
||||
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
|
||||
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
|
||||
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`]))
|
||||
.select(knex.ref("id").withSchema(TableName.ProjectMembership)),
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
statusChangeBy: knex(TableName.ProjectMembership)
|
||||
.select("id")
|
||||
.join(
|
||||
TableName.SecretApprovalPolicy,
|
||||
`${TableName.SecretApprovalPolicy}.id`,
|
||||
`${TableName.SecretApprovalRequest}.policyId`
|
||||
)
|
||||
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
|
||||
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
|
||||
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.statusChangedByUserId`]))
|
||||
.select(knex.ref("id").withSchema(TableName.ProjectMembership))
|
||||
});
|
||||
|
||||
await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
|
||||
@@ -177,8 +198,20 @@ export async function down(knex: Knex): Promise<void> {
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
member: knex(TableName.ProjectMembership)
|
||||
.select("id")
|
||||
.join(
|
||||
TableName.SecretApprovalRequest,
|
||||
`${TableName.SecretApprovalRequest}.id`,
|
||||
`${TableName.SecretApprovalRequestReviewer}.requestId`
|
||||
)
|
||||
.join(
|
||||
TableName.SecretApprovalPolicy,
|
||||
`${TableName.SecretApprovalPolicy}.id`,
|
||||
`${TableName.SecretApprovalRequest}.policyId`
|
||||
)
|
||||
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
|
||||
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
|
||||
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`]))
|
||||
.select(knex.ref("id").withSchema(TableName.ProjectMembership))
|
||||
});
|
||||
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => {
|
||||
tb.uuid("member").notNullable().alter();
|
||||
|
@@ -0,0 +1,25 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    const doesPasswordExist = await knex.schema.hasColumn(TableName.SecretSharing, "password");
    if (!doesPasswordExist) {
      await knex.schema.alterTable(TableName.SecretSharing, (t) => {
        t.string("password").nullable();
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    const doesPasswordExist = await knex.schema.hasColumn(TableName.SecretSharing, "password");
    if (doesPasswordExist) {
      await knex.schema.alterTable(TableName.SecretSharing, (t) => {
        t.dropColumn("password");
      });
    }
  }
}
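The migration above adds a nullable password column to the secret-sharing table, and the schema change below exposes it as an optional field. A hedged sketch of how a service layer could gate access with it; the hashing library and the function name are illustrative assumptions, not part of these commits:

// Illustrative only: verify an optional share password against the new column.
import bcrypt from "bcrypt"; // assumption: a bcrypt-compatible hashing library is available

export const isSharedSecretPasswordValid = async (
  storedPasswordHash: string | null | undefined,
  providedPassword?: string
): Promise<boolean> => {
  if (!storedPasswordHash) return true; // share was created without a password
  if (!providedPassword) return false;  // password required but not supplied
  return bcrypt.compare(providedPassword, storedPasswordHash);
};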
@@ -21,6 +21,7 @@ export const SecretSharingSchema = z.object({
|
||||
expiresAfterViews: z.number().nullable().optional(),
|
||||
accessType: z.string().default("anyone"),
|
||||
name: z.string().nullable().optional(),
|
||||
password: z.string().nullable().optional(),
|
||||
lastViewedAt: z.date().nullable().optional()
|
||||
});
|
||||
|
||||
|
@@ -17,11 +17,11 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
name: z.string().optional(),
|
||||
secretPath: z.string().trim().default("/"),
|
||||
environment: z.string(),
|
||||
approverUserIds: z.string().array().min(1),
|
||||
approvers: z.string().array().min(1),
|
||||
approvals: z.number().min(1).default(1),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
|
||||
})
|
||||
.refine((data) => data.approvals <= data.approverUserIds.length, {
|
||||
.refine((data) => data.approvals <= data.approvers.length, {
|
||||
path: ["approvals"],
|
||||
message: "The number of approvals should be lower than the number of approvers."
|
||||
}),
|
||||
@@ -127,11 +127,11 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
.trim()
|
||||
.optional()
|
||||
.transform((val) => (val === "" ? "/" : val)),
|
||||
approverUserIds: z.string().array().min(1),
|
||||
approvers: z.string().array().min(1),
|
||||
approvals: z.number().min(1).default(1),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
|
||||
})
|
||||
.refine((data) => data.approvals <= data.approverUserIds.length, {
|
||||
.refine((data) => data.approvals <= data.approvers.length, {
|
||||
path: ["approvals"],
|
||||
message: "The number of approvals should be lower than the number of approvers."
|
||||
}),
|
||||
|
@@ -1,6 +1,6 @@
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-return */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
|
||||
// TODO(akhilmhdh): Fix this when licence service gets it type
|
||||
// TODO(akhilmhdh): Fix this when license service gets it type
|
||||
import { z } from "zod";
|
||||
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
|
@@ -5,22 +5,47 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
server.addContentTypeParser("application/scim+json", { parseAs: "string" }, (_, body, done) => {
|
||||
try {
|
||||
const strBody = body instanceof Buffer ? body.toString() : body;
|
||||
if (!strBody) {
|
||||
done(null, undefined);
|
||||
return;
|
||||
}
|
||||
const json: unknown = JSON.parse(strBody);
|
||||
done(null, json);
|
||||
} catch (err) {
|
||||
const error = err as Error;
|
||||
done(error, undefined);
|
||||
}
|
||||
});
|
||||
const ScimUserSchema = z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
userName: z.string().trim(),
|
||||
name: z
|
||||
.object({
|
||||
familyName: z.string().trim().optional(),
|
||||
givenName: z.string().trim().optional()
|
||||
})
|
||||
.optional(),
|
||||
emails: z
|
||||
.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string().email(),
|
||||
type: z.string().trim()
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
displayName: z.string().trim(),
|
||||
active: z.boolean()
|
||||
});
|
||||
|
||||
const ScimGroupSchema = z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
displayName: z.string().trim(),
|
||||
members: z
|
||||
.array(
|
||||
z.object({
|
||||
value: z.string(),
|
||||
display: z.string().optional()
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
meta: z.object({
|
||||
resourceType: z.string().trim()
|
||||
})
|
||||
});
|
||||
|
||||
export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
url: "/scim-tokens",
|
||||
method: "POST",
|
||||
@@ -127,25 +152,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
Resources: z.array(
|
||||
z.object({
|
||||
id: z.string().trim(),
|
||||
userName: z.string().trim(),
|
||||
name: z.object({
|
||||
familyName: z.string().trim(),
|
||||
givenName: z.string().trim()
|
||||
}),
|
||||
emails: z.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string(),
|
||||
type: z.string().trim()
|
||||
})
|
||||
),
|
||||
displayName: z.string().trim(),
|
||||
active: z.boolean()
|
||||
})
|
||||
),
|
||||
Resources: z.array(ScimUserSchema),
|
||||
itemsPerPage: z.number(),
|
||||
schemas: z.array(z.string()),
|
||||
startIndex: z.number(),
|
||||
@@ -173,30 +180,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
orgMembershipId: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
201: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
userName: z.string().trim(),
|
||||
name: z.object({
|
||||
familyName: z.string().trim(),
|
||||
givenName: z.string().trim()
|
||||
}),
|
||||
emails: z.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string(),
|
||||
type: z.string().trim()
|
||||
})
|
||||
),
|
||||
displayName: z.string().trim(),
|
||||
active: z.boolean(),
|
||||
groups: z.array(
|
||||
z.object({
|
||||
value: z.string().trim(),
|
||||
display: z.string().trim()
|
||||
})
|
||||
)
|
||||
})
|
||||
200: ScimUserSchema
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
@@ -216,10 +200,12 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
body: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
userName: z.string().trim(),
|
||||
name: z.object({
|
||||
familyName: z.string().trim(),
|
||||
givenName: z.string().trim()
|
||||
}),
|
||||
name: z
|
||||
.object({
|
||||
familyName: z.string().trim().optional(),
|
||||
givenName: z.string().trim().optional()
|
||||
})
|
||||
.optional(),
|
||||
emails: z
|
||||
.array(
|
||||
z.object({
|
||||
@@ -229,28 +215,10 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
// displayName: z.string().trim(),
|
||||
active: z.boolean()
|
||||
active: z.boolean().default(true)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
userName: z.string().trim(),
|
||||
name: z.object({
|
||||
familyName: z.string().trim(),
|
||||
givenName: z.string().trim()
|
||||
}),
|
||||
emails: z.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string().email(),
|
||||
type: z.string().trim()
|
||||
})
|
||||
),
|
||||
displayName: z.string().trim(),
|
||||
active: z.boolean()
|
||||
})
|
||||
200: ScimUserSchema
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
@@ -260,8 +228,8 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
const user = await req.server.services.scim.createScimUser({
|
||||
externalId: req.body.userName,
|
||||
email: primaryEmail,
|
||||
firstName: req.body.name.givenName,
|
||||
lastName: req.body.name.familyName,
|
||||
firstName: req.body?.name?.givenName,
|
||||
lastName: req.body?.name?.familyName,
|
||||
orgId: req.permission.orgId
|
||||
});
|
||||
|
||||
@@ -291,6 +259,116 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/Users/:orgMembershipId",
|
||||
method: "PUT",
|
||||
schema: {
|
||||
params: z.object({
|
||||
orgMembershipId: z.string().trim()
|
||||
}),
|
||||
body: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
userName: z.string().trim(),
|
||||
name: z
|
||||
.object({
|
||||
familyName: z.string().trim().optional(),
|
||||
givenName: z.string().trim().optional()
|
||||
})
|
||||
.optional(),
|
||||
displayName: z.string().trim(),
|
||||
emails: z
|
||||
.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string().email(),
|
||||
type: z.string().trim()
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
active: z.boolean()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
userName: z.string().trim(),
|
||||
name: z.object({
|
||||
familyName: z.string().trim(),
|
||||
givenName: z.string().trim()
|
||||
}),
|
||||
emails: z.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string().email(),
|
||||
type: z.string().trim()
|
||||
})
|
||||
),
|
||||
displayName: z.string().trim(),
|
||||
active: z.boolean()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const primaryEmail = req.body.emails?.find((email) => email.primary)?.value;
|
||||
const user = await req.server.services.scim.replaceScimUser({
|
||||
orgMembershipId: req.params.orgMembershipId,
|
||||
orgId: req.permission.orgId,
|
||||
firstName: req.body?.name?.givenName,
|
||||
lastName: req.body?.name?.familyName,
|
||||
active: req.body?.active,
|
||||
email: primaryEmail,
|
||||
externalId: req.body.userName
|
||||
});
|
||||
return user;
|
||||
}
|
||||
});
|
||||
|
||||
  server.route({
    url: "/Users/:orgMembershipId",
    method: "PATCH",
    schema: {
      params: z.object({
        orgMembershipId: z.string().trim()
      }),
      body: z.object({
        schemas: z.array(z.string()),
        Operations: z.array(
          z.union([
            z.object({
              op: z.union([z.literal("remove"), z.literal("Remove")]),
              path: z.string().trim(),
              value: z
                .object({
                  value: z.string()
                })
                .array()
                .optional()
            }),
            z.object({
              op: z.union([z.literal("add"), z.literal("Add"), z.literal("replace"), z.literal("Replace")]),
              path: z.string().trim().optional(),
              value: z.any().optional()
            })
          ])
        )
      }),
      response: {
        200: ScimUserSchema
      }
    },
    onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
    handler: async (req) => {
      const user = await req.server.services.scim.updateScimUser({
        orgMembershipId: req.params.orgMembershipId,
        orgId: req.permission.orgId,
        operations: req.body.Operations
      });

      return user;
    }
  });
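For reference, a payload shape that this relaxed Operations union accepts; the identifiers and values below are illustrative and not part of the changeset.

// Illustrative SCIM PATCH body for /Users/:orgMembershipId (placeholder values).
const patchBody = {
  schemas: ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
  Operations: [
    // Azure AD style: replace a single attribute via an explicit path
    { op: "replace", path: "active", value: "False" },
    // Okta style: replace with an object value and no path
    { op: "replace", value: { active: false } },
    // remove with an optional value array, as allowed by the first union member
    { op: "remove", path: "members", value: [{ value: "org-membership-id" }] }
  ]
};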
server.route({
|
||||
url: "/Groups",
|
||||
method: "POST",
|
||||
@@ -305,25 +383,10 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
display: z.string()
|
||||
})
|
||||
)
|
||||
.optional() // okta-specific
|
||||
.optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
displayName: z.string().trim(),
|
||||
members: z
|
||||
.array(
|
||||
z.object({
|
||||
value: z.string(),
|
||||
display: z.string()
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
meta: z.object({
|
||||
resourceType: z.string().trim()
|
||||
})
|
||||
})
|
||||
200: ScimGroupSchema
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
@@ -344,26 +407,12 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
querystring: z.object({
|
||||
startIndex: z.coerce.number().default(1),
|
||||
count: z.coerce.number().default(20),
|
||||
filter: z.string().trim().optional()
|
||||
filter: z.string().trim().optional(),
|
||||
excludedAttributes: z.string().trim().optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
Resources: z.array(
|
||||
z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
displayName: z.string().trim(),
|
||||
members: z.array(
|
||||
z.object({
|
||||
value: z.string(),
|
||||
display: z.string()
|
||||
})
|
||||
),
|
||||
meta: z.object({
|
||||
resourceType: z.string().trim()
|
||||
})
|
||||
})
|
||||
),
|
||||
Resources: z.array(ScimGroupSchema),
|
||||
itemsPerPage: z.number(),
|
||||
schemas: z.array(z.string()),
|
||||
startIndex: z.number(),
|
||||
@@ -377,7 +426,8 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
orgId: req.permission.orgId,
|
||||
startIndex: req.query.startIndex,
|
||||
filter: req.query.filter,
|
||||
limit: req.query.count
|
||||
limit: req.query.count,
|
||||
isMembersExcluded: req.query.excludedAttributes === "members"
|
||||
});
|
||||
|
||||
return groups;
|
||||
@@ -392,20 +442,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
groupId: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
displayName: z.string().trim(),
|
||||
members: z.array(
|
||||
z.object({
|
||||
value: z.string(),
|
||||
display: z.string()
|
||||
})
|
||||
),
|
||||
meta: z.object({
|
||||
resourceType: z.string().trim()
|
||||
})
|
||||
})
|
||||
200: ScimGroupSchema
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
@@ -414,6 +451,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
groupId: req.params.groupId,
|
||||
orgId: req.permission.orgId
|
||||
});
|
||||
|
||||
return group;
|
||||
}
|
||||
});
|
||||
@@ -437,25 +475,12 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
displayName: z.string().trim(),
|
||||
members: z.array(
|
||||
z.object({
|
||||
value: z.string(),
|
||||
display: z.string()
|
||||
})
|
||||
),
|
||||
meta: z.object({
|
||||
resourceType: z.string().trim()
|
||||
})
|
||||
})
|
||||
200: ScimGroupSchema
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const group = await req.server.services.scim.updateScimGroupNamePut({
|
||||
const group = await req.server.services.scim.replaceScimGroup({
|
||||
groupId: req.params.groupId,
|
||||
orgId: req.permission.orgId,
|
||||
...req.body
|
||||
@@ -476,55 +501,35 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
schemas: z.array(z.string()),
|
||||
Operations: z.array(
|
||||
z.union([
|
||||
z.object({
|
||||
op: z.union([z.literal("replace"), z.literal("Replace")]),
|
||||
value: z.object({
|
||||
id: z.string().trim(),
|
||||
displayName: z.string().trim()
|
||||
})
|
||||
}),
|
||||
z.object({
|
||||
op: z.union([z.literal("remove"), z.literal("Remove")]),
|
||||
path: z.string().trim()
|
||||
path: z.string().trim(),
|
||||
value: z
|
||||
.object({
|
||||
value: z.string()
|
||||
})
|
||||
.array()
|
||||
.optional()
|
||||
}),
|
||||
z.object({
|
||||
op: z.union([z.literal("add"), z.literal("Add")]),
|
||||
path: z.string().trim(),
|
||||
value: z.array(
|
||||
z.object({
|
||||
value: z.string().trim(),
|
||||
display: z.string().trim().optional()
|
||||
})
|
||||
)
|
||||
op: z.union([z.literal("add"), z.literal("Add"), z.literal("replace"), z.literal("Replace")]),
|
||||
path: z.string().trim().optional(),
|
||||
value: z.any()
|
||||
})
|
||||
])
|
||||
)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
displayName: z.string().trim(),
|
||||
members: z.array(
|
||||
z.object({
|
||||
value: z.string(),
|
||||
display: z.string()
|
||||
})
|
||||
),
|
||||
meta: z.object({
|
||||
resourceType: z.string().trim()
|
||||
})
|
||||
})
|
||||
200: ScimGroupSchema
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const group = await req.server.services.scim.updateScimGroupNamePatch({
|
||||
const group = await req.server.services.scim.updateScimGroup({
|
||||
groupId: req.params.groupId,
|
||||
orgId: req.permission.orgId,
|
||||
operations: req.body.Operations
|
||||
});
|
||||
|
||||
return group;
|
||||
}
|
||||
});
|
||||
@@ -550,60 +555,4 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
return group;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/Users/:orgMembershipId",
|
||||
method: "PUT",
|
||||
schema: {
|
||||
params: z.object({
|
||||
orgMembershipId: z.string().trim()
|
||||
}),
|
||||
body: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
userName: z.string().trim(),
|
||||
name: z.object({
|
||||
familyName: z.string().trim(),
|
||||
givenName: z.string().trim()
|
||||
}),
|
||||
displayName: z.string().trim(),
|
||||
active: z.boolean()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
userName: z.string().trim(),
|
||||
name: z.object({
|
||||
familyName: z.string().trim(),
|
||||
givenName: z.string().trim()
|
||||
}),
|
||||
emails: z.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string().email(),
|
||||
type: z.string().trim()
|
||||
})
|
||||
),
|
||||
displayName: z.string().trim(),
|
||||
active: z.boolean(),
|
||||
groups: z.array(
|
||||
z.object({
|
||||
value: z.string().trim(),
|
||||
display: z.string().trim()
|
||||
})
|
||||
)
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const user = await req.server.services.scim.replaceScimUser({
|
||||
orgMembershipId: req.params.orgMembershipId,
|
||||
orgId: req.permission.orgId,
|
||||
active: req.body.active
|
||||
});
|
||||
return user;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@@ -44,7 +44,7 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
secretPath,
|
||||
actorAuthMethod,
|
||||
approvals,
|
||||
approverUserIds,
|
||||
approvers,
|
||||
projectSlug,
|
||||
environment,
|
||||
enforcementLevel
|
||||
@@ -52,7 +52,7 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) throw new BadRequestError({ message: "Project not found" });
|
||||
|
||||
if (approvals > approverUserIds.length)
|
||||
if (approvals > approvers.length)
|
||||
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
@@ -76,7 +76,7 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
secretPath,
|
||||
actorAuthMethod,
|
||||
permissionService,
|
||||
userIds: approverUserIds
|
||||
userIds: approvers
|
||||
});
|
||||
|
||||
const accessApproval = await accessApprovalPolicyDAL.transaction(async (tx) => {
|
||||
@@ -91,7 +91,7 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
tx
|
||||
);
|
||||
await accessApprovalPolicyApproverDAL.insertMany(
|
||||
approverUserIds.map((userId) => ({
|
||||
approvers.map((userId) => ({
|
||||
approverUserId: userId,
|
||||
policyId: doc.id
|
||||
})),
|
||||
@@ -128,7 +128,7 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
|
||||
const updateAccessApprovalPolicy = async ({
|
||||
policyId,
|
||||
approverUserIds,
|
||||
approvers,
|
||||
secretPath,
|
||||
name,
|
||||
actorId,
|
||||
@@ -161,7 +161,7 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
},
|
||||
tx
|
||||
);
|
||||
if (approverUserIds) {
|
||||
if (approvers) {
|
||||
await verifyApprovers({
|
||||
projectId: accessApprovalPolicy.projectId,
|
||||
orgId: actorOrgId,
|
||||
@@ -169,12 +169,12 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
secretPath: doc.secretPath!,
|
||||
actorAuthMethod,
|
||||
permissionService,
|
||||
userIds: approverUserIds
|
||||
userIds: approvers
|
||||
});
|
||||
|
||||
await accessApprovalPolicyApproverDAL.delete({ policyId: doc.id }, tx);
|
||||
await accessApprovalPolicyApproverDAL.insertMany(
|
||||
approverUserIds.map((userId) => ({
|
||||
approvers.map((userId) => ({
|
||||
approverUserId: userId,
|
||||
policyId: doc.id
|
||||
})),
|
||||
|
@@ -17,7 +17,7 @@ export type TCreateAccessApprovalPolicy = {
  approvals: number;
  secretPath: string;
  environment: string;
  approverUserIds: string[];
  approvers: string[];
  projectSlug: string;
  name: string;
  enforcementLevel: EnforcementLevel;
@@ -26,7 +26,7 @@ export type TCreateAccessApprovalPolicy = {
export type TUpdateAccessApprovalPolicy = {
  policyId: string;
  approvals?: number;
  approverUserIds?: string[];
  approvers?: string[];
  secretPath?: string;
  name?: string;
  enforcementLevel?: EnforcementLevel;
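A sketch of the renamed field in use; only the fields visible in this hunk are shown, the actor/permission fields of the DTO are omitted, and the enum member is an assumption.

const policyInput: Pick<
  TCreateAccessApprovalPolicy,
  "name" | "projectSlug" | "environment" | "secretPath" | "approvers" | "approvals" | "enforcementLevel"
> = {
  name: "prod-access-policy",
  projectSlug: "example-project",
  environment: "prod",
  secretPath: "/",
  approvers: ["user-id-1", "user-id-2"], // user IDs, previously named approverUserIds
  approvals: 2, // must not exceed approvers.length (checked in the service)
  enforcementLevel: EnforcementLevel.Hard // assuming a Hard member; adjust to the enum's actual values
};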
|
||||
|
@@ -5,6 +5,7 @@ import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, stripUndefinedInWhere } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";

export type TAuditLogDALFactory = ReturnType<typeof auditLogDALFactory>;

@@ -62,7 +63,9 @@ export const auditLogDALFactory = (db: TDbClient) => {
    const today = new Date();
    let deletedAuditLogIds: { id: string }[] = [];
    let numberOfRetryOnFailure = 0;
    let isRetrying = false;

    logger.info(`${QueueName.DailyResourceCleanUp}: audit log started`);
    do {
      try {
        const findExpiredLogSubQuery = (tx || db)(TableName.AuditLog)
@@ -84,7 +87,9 @@ export const auditLogDALFactory = (db: TDbClient) => {
          setTimeout(resolve, 10); // time to breathe for db
        });
      }
    } while (deletedAuditLogIds.length > 0 || numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
      isRetrying = numberOfRetryOnFailure > 0;
    } while (deletedAuditLogIds.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
    logger.info(`${QueueName.DailyResourceCleanUp}: audit log completed`);
  };

  return { ...auditLogOrm, pruneAuditLog, find };
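The loop condition change means pruning now only keeps going while a batch actually deleted rows, or while a failed attempt is being retried within the retry budget; the old condition could keep spinning on the retry budget alone. A minimal sketch of that shape, with deleteBatch and the default budget as stand-ins rather than the real DAL internals:

const pruneWithBoundedRetry = async (deleteBatch: () => Promise<number>, MAX_RETRY_ON_FAILURE = 3) => {
  let deletedCount = 0;
  let numberOfRetryOnFailure = 0;
  let isRetrying = false;
  do {
    try {
      deletedCount = await deleteBatch(); // delete one expired batch, return affected row count
    } catch {
      numberOfRetryOnFailure += 1;
      deletedCount = 0;
      await new Promise((resolve) => {
        setTimeout(resolve, 10); // time to breathe for db
      });
    }
    isRetrying = numberOfRetryOnFailure > 0;
  } while (deletedCount > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
};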
@@ -1,16 +1,17 @@
|
||||
import * as x509 from "@peculiar/x509";
|
||||
|
||||
import { BadRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { isCertChainValid } from "@app/services/certificate/certificate-fns";
|
||||
import { TCertificateAuthorityCertDALFactory } from "@app/services/certificate-authority/certificate-authority-cert-dal";
|
||||
import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal";
|
||||
import { getCaCertChain, getCaCertChains } from "@app/services/certificate-authority/certificate-authority-fns";
|
||||
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
|
||||
import { TCertificateTemplateDALFactory } from "@app/services/certificate-template/certificate-template-dal";
|
||||
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
|
||||
import { isCertChainValid } from "../certificate/certificate-fns";
|
||||
import { TCertificateAuthorityCertDALFactory } from "../certificate-authority/certificate-authority-cert-dal";
|
||||
import { TCertificateAuthorityDALFactory } from "../certificate-authority/certificate-authority-dal";
|
||||
import { getCaCertChain, getCaCertChains } from "../certificate-authority/certificate-authority-fns";
|
||||
import { TCertificateAuthorityServiceFactory } from "../certificate-authority/certificate-authority-service";
|
||||
import { TCertificateTemplateDALFactory } from "../certificate-template/certificate-template-dal";
|
||||
import { TCertificateTemplateServiceFactory } from "../certificate-template/certificate-template-service";
|
||||
import { TKmsServiceFactory } from "../kms/kms-service";
|
||||
import { TProjectDALFactory } from "../project/project-dal";
|
||||
import { TLicenseServiceFactory } from "../license/license-service";
|
||||
import { convertRawCertsToPkcs7 } from "./certificate-est-fns";
|
||||
|
||||
type TCertificateEstServiceFactoryDep = {
|
||||
@@ -21,6 +22,7 @@ type TCertificateEstServiceFactoryDep = {
|
||||
certificateAuthorityCertDAL: Pick<TCertificateAuthorityCertDALFactory, "find" | "findById">;
|
||||
projectDAL: Pick<TProjectDALFactory, "findOne" | "updateById" | "transaction">;
|
||||
kmsService: Pick<TKmsServiceFactory, "decryptWithKmsKey" | "generateKmsKey">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
};
|
||||
|
||||
export type TCertificateEstServiceFactory = ReturnType<typeof certificateEstServiceFactory>;
|
||||
@@ -32,7 +34,8 @@ export const certificateEstServiceFactory = ({
|
||||
certificateAuthorityCertDAL,
|
||||
certificateAuthorityDAL,
|
||||
projectDAL,
|
||||
kmsService
|
||||
kmsService,
|
||||
licenseService
|
||||
}: TCertificateEstServiceFactoryDep) => {
|
||||
const simpleReenroll = async ({
|
||||
csr,
|
||||
@@ -48,6 +51,14 @@ export const certificateEstServiceFactory = ({
|
||||
certificateTemplateId
|
||||
});
|
||||
|
||||
const plan = await licenseService.getPlan(estConfig.orgId);
|
||||
if (!plan.pkiEst) {
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to perform EST operation - simpleReenroll due to plan restriction. Upgrade to the Enterprise plan."
|
||||
});
|
||||
}
|
||||
|
||||
if (!estConfig.isEnabled) {
|
||||
throw new BadRequestError({
|
||||
message: "EST is disabled"
|
||||
@@ -146,6 +157,14 @@ export const certificateEstServiceFactory = ({
|
||||
certificateTemplateId
|
||||
});
|
||||
|
||||
const plan = await licenseService.getPlan(estConfig.orgId);
|
||||
if (!plan.pkiEst) {
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to perform EST operation - simpleEnroll due to plan restriction. Upgrade to the Enterprise plan."
|
||||
});
|
||||
}
|
||||
|
||||
if (!estConfig.isEnabled) {
|
||||
throw new BadRequestError({
|
||||
message: "EST is disabled"
|
||||
@@ -196,6 +215,24 @@ export const certificateEstServiceFactory = ({
|
||||
});
|
||||
}
|
||||
|
||||
const estConfig = await certificateTemplateService.getEstConfiguration({
|
||||
isInternal: true,
|
||||
certificateTemplateId
|
||||
});
|
||||
|
||||
const plan = await licenseService.getPlan(estConfig.orgId);
|
||||
if (!plan.pkiEst) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to perform EST operation - caCerts due to plan restriction. Upgrade to the Enterprise plan."
|
||||
});
|
||||
}
|
||||
|
||||
if (!estConfig.isEnabled) {
|
||||
throw new BadRequestError({
|
||||
message: "EST is disabled"
|
||||
});
|
||||
}
|
||||
|
||||
const ca = await certificateAuthorityDAL.findById(certTemplate.caId);
|
||||
if (!ca) {
|
||||
throw new NotFoundError({
|
@@ -0,0 +1,126 @@
|
||||
import { Client as ElasticSearchClient } from "@elastic/elasticsearch";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models";
|
||||
|
||||
const generatePassword = () => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
return customAlphabet(charset, 64)();
|
||||
};
|
||||
|
||||
const generateUsername = () => {
|
||||
return alphaNumericNanoId(32);
|
||||
};
|
||||
|
||||
export const ElasticSearchDatabaseProvider = (): TDynamicProviderFns => {
|
||||
const validateProviderInputs = async (inputs: unknown) => {
|
||||
const appCfg = getConfig();
|
||||
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not
|
||||
|
||||
const providerInputs = await DynamicSecretElasticSearchSchema.parseAsync(inputs);
|
||||
if (
|
||||
isCloud &&
|
||||
// localhost
|
||||
// internal ips
|
||||
(providerInputs.host === "host.docker.internal" ||
|
||||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
|
||||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
|
||||
) {
|
||||
throw new BadRequestError({ message: "Invalid db host" });
|
||||
}
|
||||
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
|
||||
throw new BadRequestError({ message: "Invalid db host" });
|
||||
}
|
||||
|
||||
return providerInputs;
|
||||
};
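Restating the host guard above as a small predicate, for clarity only (this helper is illustrative and not part of the changeset): loopback hosts are always rejected, and Docker-internal or private-range hosts are rejected only when the instance runs on cloud.

const isDisallowedDbHost = (host: string, isCloud: boolean) => {
  if (host === "localhost" || host === "127.0.0.1") return true; // always rejected
  if (!isCloud) return false; // self-hosted instances may target internal hosts
  return (
    host === "host.docker.internal" ||
    /^10\.\d+\.\d+\.\d+/.test(host) || // 10.0.0.0/8
    /^192\.168\.\d+\.\d+/.test(host) // 192.168.0.0/16
  );
};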
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
|
||||
const connection = new ElasticSearchClient({
|
||||
node: {
|
||||
url: new URL(`${providerInputs.host}:${providerInputs.port}`),
|
||||
...(providerInputs.ca && {
|
||||
ssl: {
|
||||
rejectUnauthorized: false,
|
||||
ca: providerInputs.ca
|
||||
}
|
||||
})
|
||||
},
|
||||
auth: {
|
||||
...(providerInputs.auth.type === ElasticSearchAuthTypes.ApiKey
|
||||
? {
|
||||
apiKey: {
|
||||
api_key: providerInputs.auth.apiKey,
|
||||
id: providerInputs.auth.apiKeyId
|
||||
}
|
||||
}
|
||||
: {
|
||||
username: providerInputs.auth.username,
|
||||
password: providerInputs.auth.password
|
||||
})
|
||||
}
|
||||
});
|
||||
|
||||
return connection;
|
||||
};
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
|
||||
const infoResponse = await connection
|
||||
.info()
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
|
||||
return infoResponse;
|
||||
};
|
||||
|
||||
const create = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const password = generatePassword();
|
||||
|
||||
await connection.security.putUser({
|
||||
username,
|
||||
password,
|
||||
full_name: "Managed by Infisical.com",
|
||||
roles: providerInputs.roles
|
||||
});
|
||||
|
||||
await connection.close();
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
|
||||
await connection.security.deleteUser({
|
||||
username: entityId
|
||||
});
|
||||
|
||||
await connection.close();
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string) => {
|
||||
// Do nothing
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
return {
|
||||
validateProviderInputs,
|
||||
validateConnection,
|
||||
create,
|
||||
revoke,
|
||||
renew
|
||||
};
|
||||
};
|
@@ -1,7 +1,9 @@
import { AwsElastiCacheDatabaseProvider } from "./aws-elasticache";
import { AwsIamProvider } from "./aws-iam";
import { CassandraProvider } from "./cassandra";
import { ElasticSearchDatabaseProvider } from "./elastic-search";
import { DynamicSecretProviders } from "./models";
import { MongoAtlasProvider } from "./mongo-atlas";
import { RedisDatabaseProvider } from "./redis";
import { SqlDatabaseProvider } from "./sql-database";

@@ -10,5 +12,7 @@ export const buildDynamicSecretProviders = () => ({
  [DynamicSecretProviders.Cassandra]: CassandraProvider(),
  [DynamicSecretProviders.AwsIam]: AwsIamProvider(),
  [DynamicSecretProviders.Redis]: RedisDatabaseProvider(),
  [DynamicSecretProviders.AwsElastiCache]: AwsElastiCacheDatabaseProvider()
  [DynamicSecretProviders.AwsElastiCache]: AwsElastiCacheDatabaseProvider(),
  [DynamicSecretProviders.MongoAtlas]: MongoAtlasProvider(),
  [DynamicSecretProviders.ElasticSearch]: ElasticSearchDatabaseProvider()
});
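A brief usage sketch of the factory map: callers look up an implementation by the DynamicSecretProviders member; esInputs below is a stand-in for the untrusted provider configuration being checked.

const providers = buildDynamicSecretProviders();
const elasticSearchProvider = providers[DynamicSecretProviders.ElasticSearch];
const ok = await elasticSearchProvider.validateConnection(esInputs); // true when the cluster responds to info()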
|
||||
|
@@ -7,6 +7,11 @@ export enum SqlProviders {
  MsSQL = "mssql"
}

export enum ElasticSearchAuthTypes {
  User = "user",
  ApiKey = "api-key"
}

export const DynamicSecretRedisDBSchema = z.object({
  host: z.string().trim().toLowerCase(),
  port: z.number(),
@@ -30,6 +35,28 @@ export const DynamicSecretAwsElastiCacheSchema = z.object({
  ca: z.string().optional()
});

export const DynamicSecretElasticSearchSchema = z.object({
  host: z.string().trim().min(1),
  port: z.number(),
  roles: z.array(z.string().trim().min(1)).min(1),

  // two auth types "user, apikey"
  auth: z.discriminatedUnion("type", [
    z.object({
      type: z.literal(ElasticSearchAuthTypes.User),
      username: z.string().trim(),
      password: z.string().trim()
    }),
    z.object({
      type: z.literal(ElasticSearchAuthTypes.ApiKey),
      apiKey: z.string().trim(),
      apiKeyId: z.string().trim()
    })
  ]),

  ca: z.string().optional()
});
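An illustrative input that the schema above would accept, using the API-key variant of the discriminated union; host, key and role values are placeholders.

const esInputs = DynamicSecretElasticSearchSchema.parse({
  host: "https://es.example.internal", // scheme included, since the provider builds a URL from host:port
  port: 9200,
  roles: ["superuser"],
  auth: {
    type: ElasticSearchAuthTypes.ApiKey,
    apiKey: "<api-key>",
    apiKeyId: "<api-key-id>"
  }
  // ca is optional; supply it only when a custom CA certificate is needed
});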
|
||||
|
||||
export const DynamicSecretSqlDBSchema = z.object({
|
||||
client: z.nativeEnum(SqlProviders),
|
||||
host: z.string().trim().toLowerCase(),
|
||||
@@ -67,12 +94,51 @@ export const DynamicSecretAwsIamSchema = z.object({
|
||||
policyArns: z.string().trim().optional()
|
||||
});
|
||||
|
||||
export const DynamicSecretMongoAtlasSchema = z.object({
|
||||
adminPublicKey: z.string().trim().min(1).describe("Admin user public api key"),
|
||||
adminPrivateKey: z.string().trim().min(1).describe("Admin user private api key"),
|
||||
groupId: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.describe("Unique 24-hexadecimal digit string that identifies your project. This is same as project id"),
|
||||
roles: z
|
||||
.object({
|
||||
collectionName: z.string().optional().describe("Collection on which this role applies."),
|
||||
databaseName: z.string().min(1).describe("Database to which the user is granted access privileges."),
|
||||
roleName: z
|
||||
.string()
|
||||
.min(1)
|
||||
.describe(
|
||||
' Enum: "atlasAdmin" "backup" "clusterMonitor" "dbAdmin" "dbAdminAnyDatabase" "enableSharding" "read" "readAnyDatabase" "readWrite" "readWriteAnyDatabase" "<a custom role name>".Human-readable label that identifies a group of privileges assigned to a database user. This value can either be a built-in role or a custom role.'
|
||||
)
|
||||
})
|
||||
.array()
|
||||
.min(1),
|
||||
scopes: z
|
||||
.object({
|
||||
name: z
|
||||
.string()
|
||||
.min(1)
|
||||
.describe(
|
||||
"Human-readable label that identifies the cluster or MongoDB Atlas Data Lake that this database user can access."
|
||||
),
|
||||
type: z
|
||||
.string()
|
||||
.min(1)
|
||||
.describe("Category of resource that this database user can access. Enum: CLUSTER, DATA_LAKE, STREAM")
|
||||
})
|
||||
.array()
|
||||
});
|
||||
|
||||
export enum DynamicSecretProviders {
  SqlDatabase = "sql-database",
  Cassandra = "cassandra",
  AwsIam = "aws-iam",
  Redis = "redis",
  AwsElastiCache = "aws-elasticache"
  AwsElastiCache = "aws-elasticache",
  MongoAtlas = "mongo-db-atlas",
  ElasticSearch = "elastic-search"
}

export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
@@ -80,7 +146,9 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
  z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.Redis), inputs: DynamicSecretRedisDBSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.AwsElastiCache), inputs: DynamicSecretAwsElastiCacheSchema })
  z.object({ type: z.literal(DynamicSecretProviders.AwsElastiCache), inputs: DynamicSecretAwsElastiCacheSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.MongoAtlas), inputs: DynamicSecretMongoAtlasSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.ElasticSearch), inputs: DynamicSecretElasticSearchSchema })
]);
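For illustration, the `type` literal is what routes a payload to the matching input schema; the key and group values below are placeholders.

const providerConfig = DynamicSecretProviderSchema.parse({
  type: DynamicSecretProviders.MongoAtlas, // "mongo-db-atlas"
  inputs: {
    adminPublicKey: "<public-key>",
    adminPrivateKey: "<private-key>",
    groupId: "0123456789abcdef01234567", // 24-hex project id
    roles: [{ databaseName: "admin", roleName: "readWrite" }],
    scopes: [] // required by the schema; empty when the user is not scoped to a cluster
  }
});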
|
||||
|
||||
export type TDynamicProviderFns = {
|
||||
|
146
backend/src/ee/services/dynamic-secret/providers/mongo-atlas.ts
Normal file
@@ -0,0 +1,146 @@
|
||||
import axios, { AxiosError } from "axios";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { createDigestAuthRequestInterceptor } from "@app/lib/axios/digest-auth";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { DynamicSecretMongoAtlasSchema, TDynamicProviderFns } from "./models";
|
||||
|
||||
const generatePassword = (size = 48) => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
return customAlphabet(charset, 48)(size);
|
||||
};
|
||||
|
||||
const generateUsername = () => {
|
||||
return alphaNumericNanoId(32);
|
||||
};
|
||||
|
||||
export const MongoAtlasProvider = (): TDynamicProviderFns => {
|
||||
const validateProviderInputs = async (inputs: unknown) => {
|
||||
const providerInputs = await DynamicSecretMongoAtlasSchema.parseAsync(inputs);
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
|
||||
const client = axios.create({
|
||||
baseURL: "https://cloud.mongodb.com/api/atlas",
|
||||
headers: {
|
||||
Accept: "application/vnd.atlas.2023-02-01+json",
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
});
|
||||
const digestAuth = createDigestAuthRequestInterceptor(
|
||||
client,
|
||||
providerInputs.adminPublicKey,
|
||||
providerInputs.adminPrivateKey
|
||||
);
|
||||
return digestAuth;
|
||||
};
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const isConnected = await client({
|
||||
method: "GET",
|
||||
url: `v2/groups/${providerInputs.groupId}/databaseUsers`,
|
||||
params: { itemsPerPage: 1 }
|
||||
})
|
||||
.then(() => true)
|
||||
.catch((error) => {
|
||||
if ((error as AxiosError).response) {
|
||||
throw new Error(JSON.stringify((error as AxiosError).response?.data));
|
||||
}
|
||||
throw error;
|
||||
});
|
||||
return isConnected;
|
||||
};
|
||||
|
||||
const create = async (inputs: unknown, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const password = generatePassword();
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
await client({
|
||||
method: "POST",
|
||||
url: `/v2/groups/${providerInputs.groupId}/databaseUsers`,
|
||||
data: {
|
||||
roles: providerInputs.roles,
|
||||
scopes: providerInputs.scopes,
|
||||
deleteAfterDate: expiration,
|
||||
username,
|
||||
password,
|
||||
databaseName: "admin",
|
||||
groupId: providerInputs.groupId
|
||||
}
|
||||
}).catch((error) => {
|
||||
if ((error as AxiosError).response) {
|
||||
throw new Error(JSON.stringify((error as AxiosError).response?.data));
|
||||
}
|
||||
throw error;
|
||||
});
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
const isExisting = await client({
|
||||
method: "GET",
|
||||
url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`
|
||||
}).catch((err) => {
|
||||
if ((err as AxiosError).response?.status === 404) return false;
|
||||
throw err;
|
||||
});
|
||||
if (isExisting) {
|
||||
await client({
|
||||
method: "DELETE",
|
||||
url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`
|
||||
}).catch((error) => {
|
||||
if ((error as AxiosError).response) {
|
||||
throw new Error(JSON.stringify((error as AxiosError).response?.data));
|
||||
}
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
|
||||
await client({
|
||||
method: "PATCH",
|
||||
url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`,
|
||||
data: {
|
||||
deleteAfterDate: expiration,
|
||||
databaseName: "admin",
|
||||
groupId: providerInputs.groupId
|
||||
}
|
||||
}).catch((error) => {
|
||||
if ((error as AxiosError).response) {
|
||||
throw new Error(JSON.stringify((error as AxiosError).response?.data));
|
||||
}
|
||||
throw error;
|
||||
});
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
return {
|
||||
validateProviderInputs,
|
||||
validateConnection,
|
||||
create,
|
||||
revoke,
|
||||
renew
|
||||
};
|
||||
};
|
@@ -1,4 +1,3 @@
/* eslint-disable no-console */
import handlebars from "handlebars";
import { Redis } from "ioredis";
import { customAlphabet } from "nanoid";

@@ -26,8 +26,10 @@ export const getDefaultOnPremFeatures = () => {
    status: null,
    trial_end: null,
    has_used_trial: true,
    secretApproval: false,
    secretApproval: true,
    secretRotation: true,
    caCrl: false
  };
};

export const setupLicenseRequestWithStore = () => {};
@@ -45,18 +45,19 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
      readLimit: 60,
      writeLimit: 200,
      secretsLimit: 40
    }
  },
  pkiEst: false
});

export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {
export const setupLicenseRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {
  let token: string;
  const licenceReq = axios.create({
  const licenseReq = axios.create({
    baseURL,
    timeout: 35 * 1000
    // signal: AbortSignal.timeout(60 * 1000)
  });

  const refreshLicence = async () => {
  const refreshLicense = async () => {
    const appCfg = getConfig();
    const {
      data: { token: authToken }
@@ -74,7 +75,7 @@ export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string
    return token;
  };

  licenceReq.interceptors.request.use(
  licenseReq.interceptors.request.use(
    (config) => {
      if (token && config.headers) {
        // eslint-disable-next-line no-param-reassign
@@ -85,7 +86,7 @@ export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string
    (err) => Promise.reject(err)
  );

  licenceReq.interceptors.response.use(
  licenseReq.interceptors.response.use(
    (response) => response,
    async (err) => {
      const originalRequest = (err as AxiosError).config;
@@ -96,15 +97,15 @@ export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string
      (originalRequest as any)._retry = true; // injected

      // refresh
      await refreshLicence();
      await refreshLicense();

      licenceReq.defaults.headers.common.Authorization = `Bearer ${token}`;
      return licenceReq(originalRequest!);
      licenseReq.defaults.headers.common.Authorization = `Bearer ${token}`;
      return licenseReq(originalRequest!);
    }

      return Promise.reject(err);
    }
  );

  return { request: licenceReq, refreshLicence };
  return { request: licenseReq, refreshLicense };
};
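The response interceptor above follows the usual refresh-and-replay pattern. A generic, simplified sketch of that pattern under the same axios assumptions (the real interceptor also guards on the response status and the refresh URL, which are not fully shown in this hunk):

import axios, { AxiosError } from "axios";

const withTokenRefresh = (client: ReturnType<typeof axios.create>, refresh: () => Promise<string>) => {
  client.interceptors.response.use(
    (response) => response,
    async (err) => {
      const originalRequest = (err as AxiosError).config;
      if (originalRequest && !(originalRequest as any)._retry) {
        (originalRequest as any)._retry = true; // replay the failed request at most once
        const token = await refresh(); // e.g. refreshLicense() above
        client.defaults.headers.common.Authorization = `Bearer ${token}`;
        return client(originalRequest);
      }
      return Promise.reject(err);
    }
  );
};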
@@ -16,8 +16,8 @@ import { TOrgDALFactory } from "@app/services/org/org-dal";
|
||||
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
import { getDefaultOnPremFeatures, setupLicenceRequestWithStore } from "./licence-fns";
|
||||
import { TLicenseDALFactory } from "./license-dal";
|
||||
import { getDefaultOnPremFeatures, setupLicenseRequestWithStore } from "./license-fns";
|
||||
import {
|
||||
InstanceType,
|
||||
TAddOrgPmtMethodDTO,
|
||||
@@ -64,13 +64,13 @@ export const licenseServiceFactory = ({
|
||||
let onPremFeatures: TFeatureSet = getDefaultOnPremFeatures();
|
||||
|
||||
const appCfg = getConfig();
|
||||
const licenseServerCloudApi = setupLicenceRequestWithStore(
|
||||
const licenseServerCloudApi = setupLicenseRequestWithStore(
|
||||
appCfg.LICENSE_SERVER_URL || "",
|
||||
LICENSE_SERVER_CLOUD_LOGIN,
|
||||
appCfg.LICENSE_SERVER_KEY || ""
|
||||
);
|
||||
|
||||
const licenseServerOnPremApi = setupLicenceRequestWithStore(
|
||||
const licenseServerOnPremApi = setupLicenseRequestWithStore(
|
||||
appCfg.LICENSE_SERVER_URL || "",
|
||||
LICENSE_SERVER_ON_PREM_LOGIN,
|
||||
appCfg.LICENSE_KEY || ""
|
||||
@@ -79,7 +79,7 @@ export const licenseServiceFactory = ({
|
||||
const init = async () => {
|
||||
try {
|
||||
if (appCfg.LICENSE_SERVER_KEY) {
|
||||
const token = await licenseServerCloudApi.refreshLicence();
|
||||
const token = await licenseServerCloudApi.refreshLicense();
|
||||
if (token) instanceType = InstanceType.Cloud;
|
||||
logger.info(`Instance type: ${InstanceType.Cloud}`);
|
||||
isValidLicense = true;
|
||||
@@ -87,7 +87,7 @@ export const licenseServiceFactory = ({
|
||||
}
|
||||
|
||||
if (appCfg.LICENSE_KEY) {
|
||||
const token = await licenseServerOnPremApi.refreshLicence();
|
||||
const token = await licenseServerOnPremApi.refreshLicense();
|
||||
if (token) {
|
||||
const {
|
||||
data: { currentPlan }
|
||||
|
@@ -63,6 +63,7 @@ export type TFeatureSet = {
    writeLimit: number;
    secretsLimit: number;
  };
  pkiEst: boolean;
};
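With pkiEst on TFeatureSet, the EST endpoints can be plan-gated. A condensed restatement of the checks added earlier in certificate-est-service; this helper is illustrative only, and licenseService / BadRequestError refer to the dependencies already imported in that service.

const assertPkiEstAllowed = async (orgId: string) => {
  const plan = await licenseService.getPlan(orgId);
  if (!plan.pkiEst) {
    throw new BadRequestError({
      message: "Failed to perform EST operation due to plan restriction. Upgrade to the Enterprise plan."
    });
  }
};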
|
||||
|
||||
export type TOrgPlansTableDTO = {
|
||||
|
@@ -44,19 +44,18 @@ export const buildScimUser = ({
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
groups = [],
|
||||
active
|
||||
active,
|
||||
createdAt,
|
||||
updatedAt
|
||||
}: {
|
||||
orgMembershipId: string;
|
||||
username: string;
|
||||
email?: string | null;
|
||||
firstName: string | null | undefined;
|
||||
lastName: string | null | undefined;
|
||||
groups?: {
|
||||
value: string;
|
||||
display: string;
|
||||
}[];
|
||||
active: boolean;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
}): TScimUser => {
|
||||
const scimUser = {
|
||||
schemas: ["urn:ietf:params:scim:schemas:core:2.0:User"],
|
||||
@@ -78,10 +77,10 @@ export const buildScimUser = ({
|
||||
]
|
||||
: [],
|
||||
active,
|
||||
groups,
|
||||
meta: {
|
||||
resourceType: "User",
|
||||
location: null
|
||||
created: createdAt,
|
||||
lastModified: updatedAt
|
||||
}
|
||||
};
|
||||
|
||||
@@ -109,14 +108,18 @@ export const buildScimGroupList = ({
|
||||
export const buildScimGroup = ({
|
||||
groupId,
|
||||
name,
|
||||
members
|
||||
members,
|
||||
updatedAt,
|
||||
createdAt
|
||||
}: {
|
||||
groupId: string;
|
||||
name: string;
|
||||
members: {
|
||||
value: string;
|
||||
display: string;
|
||||
display?: string;
|
||||
}[];
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
}): TScimGroup => {
|
||||
const scimGroup = {
|
||||
schemas: ["urn:ietf:params:scim:schemas:core:2.0:Group"],
|
||||
@@ -125,7 +128,8 @@ export const buildScimGroup = ({
|
||||
members,
|
||||
meta: {
|
||||
resourceType: "Group",
|
||||
location: null
|
||||
created: createdAt,
|
||||
lastModified: updatedAt
|
||||
}
|
||||
};
|
||||
|
||||
|
@@ -1,6 +1,7 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import jwt from "jsonwebtoken";
|
||||
import { scimPatch } from "scim-patch";
|
||||
|
||||
import { OrgMembershipRole, OrgMembershipStatus, TableName, TOrgMemberships, TUsers } from "@app/db/schemas";
|
||||
import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
|
||||
@@ -9,7 +10,6 @@ import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-grou
|
||||
import { TScimDALFactory } from "@app/ee/services/scim/scim-dal";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, ScimRequestError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { TOrgPermission } from "@app/lib/types";
|
||||
import { AuthTokenType } from "@app/services/auth/auth-type";
|
||||
@@ -32,14 +32,7 @@ import { TLicenseServiceFactory } from "../license/license-service";
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
import { TProjectUserAdditionalPrivilegeDALFactory } from "../project-user-additional-privilege/project-user-additional-privilege-dal";
|
||||
import {
|
||||
buildScimGroup,
|
||||
buildScimGroupList,
|
||||
buildScimUser,
|
||||
buildScimUserList,
|
||||
extractScimValueFromPath,
|
||||
parseScimFilter
|
||||
} from "./scim-fns";
|
||||
import { buildScimGroup, buildScimGroupList, buildScimUser, buildScimUserList, parseScimFilter } from "./scim-fns";
|
||||
import {
|
||||
TCreateScimGroupDTO,
|
||||
TCreateScimTokenDTO,
|
||||
@@ -64,12 +57,18 @@ type TScimServiceFactoryDep = {
|
||||
scimDAL: Pick<TScimDALFactory, "create" | "find" | "findById" | "deleteById">;
|
||||
userDAL: Pick<
|
||||
TUserDALFactory,
|
||||
"find" | "findOne" | "create" | "transaction" | "findUserEncKeyByUserIdsBatch" | "findById"
|
||||
"find" | "findOne" | "create" | "transaction" | "findUserEncKeyByUserIdsBatch" | "findById" | "updateById"
|
||||
>;
|
||||
userAliasDAL: Pick<TUserAliasDALFactory, "findOne" | "create" | "delete">;
|
||||
userAliasDAL: Pick<TUserAliasDALFactory, "findOne" | "create" | "delete" | "update">;
|
||||
orgDAL: Pick<
|
||||
TOrgDALFactory,
|
||||
"createMembership" | "findById" | "findMembership" | "deleteMembershipById" | "transaction" | "updateMembershipById"
|
||||
| "createMembership"
|
||||
| "findById"
|
||||
| "findMembership"
|
||||
| "findMembershipWithScimFilter"
|
||||
| "deleteMembershipById"
|
||||
| "transaction"
|
||||
| "updateMembershipById"
|
||||
>;
|
||||
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "find" | "findOne" | "create" | "updateById" | "findById">;
|
||||
projectDAL: Pick<TProjectDALFactory, "find" | "findProjectGhostUser">;
|
||||
@@ -193,7 +192,12 @@ export const scimServiceFactory = ({
|
||||
};
|
||||
|
||||
// SCIM server endpoints
|
||||
const listScimUsers = async ({ startIndex, limit, filter, orgId }: TListScimUsersDTO): Promise<TListScimUsers> => {
|
||||
const listScimUsers = async ({
|
||||
startIndex = 0,
|
||||
limit = 100,
|
||||
filter,
|
||||
orgId
|
||||
}: TListScimUsersDTO): Promise<TListScimUsers> => {
|
||||
const org = await orgDAL.findById(orgId);
|
||||
|
||||
if (!org.scimEnabled)
|
||||
@@ -207,23 +211,20 @@ export const scimServiceFactory = ({
|
||||
...(limit && { limit })
|
||||
};
|
||||
|
||||
const users = await orgDAL.findMembership(
|
||||
{
|
||||
[`${TableName.OrgMembership}.orgId` as "id"]: orgId,
|
||||
...parseScimFilter(filter)
|
||||
},
|
||||
findOpts
|
||||
);
|
||||
const users = await orgDAL.findMembershipWithScimFilter(orgId, filter, findOpts);
|
||||
|
||||
const scimUsers = users.map(({ id, externalId, username, firstName, lastName, email, isActive }) =>
|
||||
buildScimUser({
|
||||
orgMembershipId: id ?? "",
|
||||
username: externalId ?? username,
|
||||
firstName: firstName ?? "",
|
||||
lastName: lastName ?? "",
|
||||
email,
|
||||
active: isActive
|
||||
})
|
||||
const scimUsers = users.map(
|
||||
({ id, externalId, username, firstName, lastName, email, isActive, createdAt, updatedAt }) =>
|
||||
buildScimUser({
|
||||
orgMembershipId: id ?? "",
|
||||
username: externalId ?? username,
|
||||
firstName: firstName ?? "",
|
||||
lastName: lastName ?? "",
|
||||
email,
|
||||
active: isActive,
|
||||
createdAt,
|
||||
updatedAt
|
||||
})
|
||||
);
|
||||
|
||||
return buildScimUserList({
|
||||
@@ -258,11 +259,6 @@ export const scimServiceFactory = ({
|
||||
status: 403
|
||||
});
|
||||
|
||||
const groupMembershipsInOrg = await userGroupMembershipDAL.findGroupMembershipsByUserIdInOrg(
|
||||
membership.userId,
|
||||
orgId
|
||||
);
|
||||
|
||||
return buildScimUser({
|
||||
orgMembershipId: membership.id,
|
||||
username: membership.externalId ?? membership.username,
|
||||
@@ -270,10 +266,8 @@ export const scimServiceFactory = ({
|
||||
firstName: membership.firstName,
|
||||
lastName: membership.lastName,
|
||||
active: membership.isActive,
|
||||
groups: groupMembershipsInOrg.map((group) => ({
|
||||
value: group.groupId,
|
||||
display: group.groupName
|
||||
}))
|
||||
createdAt: membership.createdAt,
|
||||
updatedAt: membership.updatedAt
|
||||
});
|
||||
};
|
||||
|
||||
@@ -322,7 +316,7 @@ export const scimServiceFactory = ({
|
||||
userId: userAlias.userId,
|
||||
inviteEmail: email,
|
||||
orgId,
|
||||
role: OrgMembershipRole.Member,
|
||||
role: OrgMembershipRole.NoAccess,
|
||||
status: user.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited, // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
|
||||
isActive: true
|
||||
},
|
||||
@@ -349,7 +343,11 @@ export const scimServiceFactory = ({
|
||||
}
|
||||
|
||||
if (!user) {
|
||||
const uniqueUsername = await normalizeUsername(`${firstName}-${lastName}`, userDAL);
|
||||
const uniqueUsername = await normalizeUsername(
|
||||
// external id is username
|
||||
`${firstName}-${lastName}`,
|
||||
userDAL
|
||||
);
|
||||
user = await userDAL.create(
|
||||
{
|
||||
username: serverCfg.trustSamlEmails ? email : uniqueUsername,
|
||||
@@ -430,10 +428,13 @@ export const scimServiceFactory = ({
|
||||
firstName: createdUser.firstName,
|
||||
lastName: createdUser.lastName,
|
||||
email: createdUser.email ?? "",
|
||||
active: createdOrgMembership.isActive
|
||||
active: createdOrgMembership.isActive,
|
||||
createdAt: createdOrgMembership.createdAt,
|
||||
updatedAt: createdOrgMembership.updatedAt
|
||||
});
|
||||
};
|
||||
|
||||
// partial
|
||||
const updateScimUser = async ({ orgMembershipId, orgId, operations }: TUpdateScimUserDTO) => {
|
||||
const [membership] = await orgDAL
|
||||
.findMembership({
|
||||
@@ -459,37 +460,52 @@ export const scimServiceFactory = ({
|
||||
status: 403
|
||||
});
|
||||
|
||||
let active = true;
|
||||
|
||||
operations.forEach((operation) => {
|
||||
if (operation.op.toLowerCase() === "replace") {
|
||||
if (operation.path === "active" && operation.value === "False") {
|
||||
// azure scim op format
|
||||
active = false;
|
||||
} else if (typeof operation.value === "object" && operation.value.active === false) {
|
||||
// okta scim op format
|
||||
active = false;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (!active) {
|
||||
await orgMembershipDAL.updateById(membership.id, {
|
||||
isActive: false
|
||||
});
|
||||
}
|
||||
|
||||
return buildScimUser({
|
||||
const scimUser = buildScimUser({
|
||||
orgMembershipId: membership.id,
|
||||
username: membership.externalId ?? membership.username,
|
||||
email: membership.email,
|
||||
firstName: membership.firstName,
|
||||
lastName: membership.lastName,
|
||||
active
|
||||
firstName: membership.firstName,
|
||||
active: membership.isActive,
|
||||
username: membership.externalId ?? membership.username,
|
||||
createdAt: membership.createdAt,
|
||||
updatedAt: membership.updatedAt
|
||||
});
|
||||
scimPatch(scimUser, operations);
|
||||
|
||||
const serverCfg = await getServerCfg();
|
||||
await userDAL.transaction(async (tx) => {
|
||||
await orgMembershipDAL.updateById(
|
||||
membership.id,
|
||||
{
|
||||
isActive: scimUser.active
|
||||
},
|
||||
tx
|
||||
);
|
||||
const hasEmailChanged = scimUser.emails[0].value !== membership.email;
|
||||
await userDAL.updateById(
|
||||
membership.userId,
|
||||
{
|
||||
firstName: scimUser.name.givenName,
|
||||
email: scimUser.emails[0].value,
|
||||
lastName: scimUser.name.familyName,
|
||||
isEmailVerified: hasEmailChanged ? serverCfg.trustSamlEmails : true
|
||||
},
|
||||
tx
|
||||
);
|
||||
});
|
||||
|
||||
return scimUser;
|
||||
};
|
||||
|
||||
const replaceScimUser = async ({ orgMembershipId, active, orgId }: TReplaceScimUserDTO) => {
|
||||
const replaceScimUser = async ({
|
||||
orgMembershipId,
|
||||
active,
|
||||
orgId,
|
||||
lastName,
|
||||
firstName,
|
||||
email,
|
||||
externalId
|
||||
}: TReplaceScimUserDTO) => {
|
||||
const [membership] = await orgDAL
|
||||
.findMembership({
|
||||
[`${TableName.OrgMembership}.id` as "id"]: orgMembershipId,
|
||||
@@ -514,26 +530,47 @@ export const scimServiceFactory = ({
|
||||
status: 403
|
||||
});
|
||||
|
||||
await orgMembershipDAL.updateById(membership.id, {
|
||||
isActive: active
|
||||
const serverCfg = await getServerCfg();
|
||||
await userDAL.transaction(async (tx) => {
|
||||
await userAliasDAL.update(
|
||||
{
|
||||
orgId,
|
||||
aliasType: UserAliasType.SAML,
|
||||
userId: membership.userId
|
||||
},
|
||||
{
|
||||
externalId
|
||||
},
|
||||
tx
|
||||
);
|
||||
await orgMembershipDAL.updateById(
|
||||
membership.id,
|
||||
{
|
||||
isActive: active
|
||||
},
|
||||
tx
|
||||
);
|
||||
await userDAL.updateById(
|
||||
membership.userId,
|
||||
{
|
||||
firstName,
|
||||
email,
|
||||
lastName,
|
||||
isEmailVerified: serverCfg.trustSamlEmails
|
||||
},
|
||||
tx
|
||||
);
|
||||
});
|
||||
|
||||
const groupMembershipsInOrg = await userGroupMembershipDAL.findGroupMembershipsByUserIdInOrg(
|
||||
membership.userId,
|
||||
orgId
|
||||
);
|
||||
|
||||
return buildScimUser({
|
||||
orgMembershipId: membership.id,
|
||||
username: membership.externalId ?? membership.username,
|
||||
username: externalId,
|
||||
email: membership.email,
|
||||
firstName: membership.firstName,
|
||||
lastName: membership.lastName,
|
||||
active,
|
||||
groups: groupMembershipsInOrg.map((group) => ({
|
||||
value: group.groupId,
|
||||
display: group.groupName
|
||||
}))
|
||||
createdAt: membership.createdAt,
|
||||
updatedAt: membership.updatedAt
|
||||
});
|
||||
};
|
||||
|
||||
@@ -570,7 +607,7 @@ export const scimServiceFactory = ({
|
||||
return {}; // intentionally return empty object upon success
|
||||
};
|
||||
|
||||
const listScimGroups = async ({ orgId, startIndex, limit, filter }: TListScimGroupsDTO) => {
|
||||
const listScimGroups = async ({ orgId, startIndex, limit, filter, isMembersExcluded }: TListScimGroupsDTO) => {
|
||||
const plan = await licenseService.getPlan(orgId);
|
||||
if (!plan.groups)
|
||||
throw new BadRequestError({
|
||||
@@ -603,6 +640,21 @@ export const scimServiceFactory = ({
|
||||
);
|
||||
|
||||
const scimGroups: TScimGroup[] = [];
|
||||
if (isMembersExcluded) {
|
||||
return buildScimGroupList({
|
||||
scimGroups: groups.map((group) =>
|
||||
buildScimGroup({
|
||||
groupId: group.id,
|
||||
name: group.name,
|
||||
members: [],
|
||||
createdAt: group.createdAt,
|
||||
updatedAt: group.updatedAt
|
||||
})
|
||||
),
|
||||
startIndex,
|
||||
limit
|
||||
});
|
||||
}
|
||||
|
||||
for await (const group of groups) {
|
||||
const members = await userGroupMembershipDAL.findGroupMembershipsByGroupIdInOrg(group.id, orgId);
|
||||
@@ -612,7 +664,9 @@ export const scimServiceFactory = ({
|
||||
members: members.map((member) => ({
|
||||
value: member.orgMembershipId,
|
||||
display: `${member.firstName ?? ""} ${member.lastName ?? ""}`
|
||||
}))
|
||||
})),
|
||||
createdAt: group.createdAt,
|
||||
updatedAt: group.updatedAt
|
||||
});
|
||||
scimGroups.push(scimGroup);
|
||||
}
|
||||
@@ -696,7 +750,9 @@ export const scimServiceFactory = ({
|
||||
members: orgMemberships.map(({ id, firstName, lastName }) => ({
|
||||
value: id,
|
||||
display: `${firstName} ${lastName}`
|
||||
}))
|
||||
})),
|
||||
createdAt: newGroup.group.createdAt,
|
||||
updatedAt: newGroup.group.updatedAt
|
||||
});
|
||||
};
|
||||
|
||||
@@ -739,31 +795,17 @@ export const scimServiceFactory = ({
|
||||
members: orgMemberships.map(({ id, firstName, lastName }) => ({
|
||||
value: id,
|
||||
display: `${firstName} ${lastName}`
|
||||
}))
|
||||
})),
|
||||
createdAt: group.createdAt,
|
||||
updatedAt: group.updatedAt
|
||||
});
|
||||
};
|
||||
|
||||
const updateScimGroupNamePut = async ({ groupId, orgId, displayName, members }: TUpdateScimGroupNamePutDTO) => {
|
||||
const plan = await licenseService.getPlan(orgId);
|
||||
if (!plan.groups)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to update SCIM group due to plan restriction. Upgrade plan to update SCIM group."
|
||||
});
|
||||
|
||||
const org = await orgDAL.findById(orgId);
|
||||
if (!org) {
|
||||
throw new ScimRequestError({
|
||||
detail: "Organization Not Found",
|
||||
status: 404
|
||||
});
|
||||
}
|
||||
|
||||
if (!org.scimEnabled)
|
||||
throw new ScimRequestError({
|
||||
detail: "SCIM is disabled for the organization",
|
||||
status: 403
|
||||
});
|
||||
|
||||
const $replaceGroupDAL = async (
|
||||
groupId: string,
|
||||
orgId: string,
|
||||
{ displayName, members = [] }: { displayName: string; members: { value: string }[] }
|
||||
) => {
|
||||
const updatedGroup = await groupDAL.transaction(async (tx) => {
|
||||
const [group] = await groupDAL.update(
|
||||
{
|
||||
@@ -782,74 +824,96 @@ export const scimServiceFactory = ({
|
||||
});
|
||||
}
|
||||
|
||||
if (members) {
|
||||
const orgMemberships = await orgMembershipDAL.find({
|
||||
$in: {
|
||||
id: members.map((member) => member.value)
|
||||
}
|
||||
const orgMemberships = members.length
|
||||
? await orgMembershipDAL.find({
|
||||
$in: {
|
||||
id: members.map((member) => member.value)
|
||||
}
|
||||
})
|
||||
: [];
|
||||
|
||||
const membersIdsSet = new Set(orgMemberships.map((orgMembership) => orgMembership.userId));
|
||||
const userGroupMembers = await userGroupMembershipDAL.find({
|
||||
groupId: group.id
|
||||
});
|
||||
const directMemberUserIds = userGroupMembers.filter((el) => !el.isPending).map((membership) => membership.userId);
|
||||
|
||||
const pendingGroupAdditionsUserIds = userGroupMembers
|
||||
.filter((el) => el.isPending)
|
||||
.map((pendingGroupAddition) => pendingGroupAddition.userId);
|
||||
|
||||
const allMembersUserIds = directMemberUserIds.concat(pendingGroupAdditionsUserIds);
|
||||
const allMembersUserIdsSet = new Set(allMembersUserIds);
|
||||
|
||||
const toAddUserIds = orgMemberships.filter((member) => !allMembersUserIdsSet.has(member.userId as string));
|
||||
const toRemoveUserIds = allMembersUserIds.filter((userId) => !membersIdsSet.has(userId));
|
||||
|
||||
if (toAddUserIds.length) {
|
||||
await addUsersToGroupByUserIds({
|
||||
group,
|
||||
userIds: toAddUserIds.map((member) => member.userId as string),
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
orgDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL,
|
||||
projectDAL,
|
||||
projectBotDAL,
|
||||
tx
|
||||
});
|
||||
}
|
||||
|
||||
const membersIdsSet = new Set(orgMemberships.map((orgMembership) => orgMembership.userId));
|
||||
|
||||
const directMemberUserIds = (
|
||||
await userGroupMembershipDAL.find({
|
||||
groupId: group.id,
|
||||
isPending: false
|
||||
})
|
||||
).map((membership) => membership.userId);
|
||||
|
||||
const pendingGroupAdditionsUserIds = (
|
||||
await userGroupMembershipDAL.find({
|
||||
groupId: group.id,
|
||||
isPending: true
|
||||
})
|
||||
).map((pendingGroupAddition) => pendingGroupAddition.userId);
|
||||
|
||||
const allMembersUserIds = directMemberUserIds.concat(pendingGroupAdditionsUserIds);
|
||||
const allMembersUserIdsSet = new Set(allMembersUserIds);
|
||||
|
||||
const toAddUserIds = orgMemberships.filter((member) => !allMembersUserIdsSet.has(member.userId as string));
|
||||
const toRemoveUserIds = allMembersUserIds.filter((userId) => !membersIdsSet.has(userId));
|
||||
|
||||
if (toAddUserIds.length) {
|
||||
await addUsersToGroupByUserIds({
|
||||
group,
|
||||
userIds: toAddUserIds.map((member) => member.userId as string),
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
orgDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL,
|
||||
projectDAL,
|
||||
projectBotDAL,
|
||||
tx
|
||||
});
|
||||
}
|
||||
|
||||
if (toRemoveUserIds.length) {
|
||||
await removeUsersFromGroupByUserIds({
|
||||
group,
|
||||
userIds: toRemoveUserIds,
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL,
|
||||
tx
|
||||
});
|
||||
}
|
||||
if (toRemoveUserIds.length) {
|
||||
await removeUsersFromGroupByUserIds({
|
||||
group,
|
||||
userIds: toRemoveUserIds,
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL,
|
||||
tx
|
||||
});
|
||||
}
|
||||
|
||||
return group;
|
||||
});
|
||||
|
||||
return updatedGroup;
|
||||
};
|
||||
|
||||
const replaceScimGroup = async ({ groupId, orgId, displayName, members }: TUpdateScimGroupNamePutDTO) => {
|
||||
const plan = await licenseService.getPlan(orgId);
|
||||
if (!plan.groups)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to update SCIM group due to plan restriction. Upgrade plan to update SCIM group."
|
||||
});
|
||||
|
||||
const org = await orgDAL.findById(orgId);
|
||||
if (!org) {
|
||||
throw new ScimRequestError({
|
||||
detail: "Organization Not Found",
|
||||
status: 404
|
||||
});
|
||||
}
|
||||
|
||||
if (!org.scimEnabled)
|
||||
throw new ScimRequestError({
|
||||
detail: "SCIM is disabled for the organization",
|
||||
status: 403
|
||||
});
|
||||
|
||||
const updatedGroup = await $replaceGroupDAL(groupId, orgId, { displayName, members });
|
||||
|
||||
return buildScimGroup({
|
||||
groupId: updatedGroup.id,
|
||||
name: updatedGroup.name,
|
||||
members
|
||||
members,
|
||||
updatedAt: updatedGroup.updatedAt,
|
||||
createdAt: updatedGroup.createdAt
|
||||
});
|
||||
};
|
||||
|
||||
const updateScimGroupNamePatch = async ({ groupId, orgId, operations }: TUpdateScimGroupNamePatchDTO) => {
|
||||
const updateScimGroup = async ({ groupId, orgId, operations }: TUpdateScimGroupNamePatchDTO) => {
|
||||
const plan = await licenseService.getPlan(orgId);
|
||||
if (!plan.groups)
|
||||
throw new BadRequestError({
|
||||
@@ -871,7 +935,7 @@ export const scimServiceFactory = ({
|
||||
status: 403
|
||||
});
|
||||
|
||||
let group = await groupDAL.findOne({
|
||||
const group = await groupDAL.findOne({
|
||||
id: groupId,
|
||||
orgId
|
||||
});
|
||||
@@ -883,64 +947,28 @@ export const scimServiceFactory = ({
|
||||
});
|
||||
}
|
||||
|
||||
for await (const operation of operations) {
|
||||
if (operation.op === "replace" || operation.op === "Replace") {
|
||||
group = await groupDAL.updateById(group.id, {
|
||||
name: operation.value.displayName
|
||||
});
|
||||
} else if (operation.op === "add" || operation.op === "Add") {
|
||||
try {
|
||||
const orgMemberships = await orgMembershipDAL.find({
|
||||
$in: {
|
||||
id: operation.value.map((member) => member.value)
|
||||
}
|
||||
});
|
||||
|
||||
await addUsersToGroupByUserIds({
|
||||
group,
|
||||
userIds: orgMemberships.map((membership) => membership.userId as string),
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
orgDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL,
|
||||
projectDAL,
|
||||
projectBotDAL
|
||||
});
|
||||
} catch {
|
||||
logger.info("Repeat SCIM user-group add operation");
|
||||
}
|
||||
} else if (operation.op === "remove" || operation.op === "Remove") {
|
||||
const orgMembershipId = extractScimValueFromPath(operation.path);
|
||||
if (!orgMembershipId) throw new ScimRequestError({ detail: "Invalid path value", status: 400 });
|
||||
const orgMembership = await orgMembershipDAL.findById(orgMembershipId);
|
||||
if (!orgMembership) throw new ScimRequestError({ detail: "Org Membership Not Found", status: 400 });
|
||||
await removeUsersFromGroupByUserIds({
|
||||
group,
|
||||
userIds: [orgMembership.userId as string],
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL
|
||||
});
|
||||
} else {
|
||||
throw new ScimRequestError({
|
||||
detail: "Invalid Operation",
|
||||
status: 400
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const members = await userGroupMembershipDAL.findGroupMembershipsByGroupIdInOrg(group.id, orgId);
|
||||
|
||||
return buildScimGroup({
|
||||
const scimGroup = buildScimGroup({
|
||||
groupId: group.id,
|
||||
name: group.name,
|
||||
members: members.map((member) => ({
|
||||
value: member.orgMembershipId
|
||||
})),
|
||||
createdAt: group.createdAt,
|
||||
updatedAt: group.updatedAt
|
||||
});
|
||||
scimPatch(scimGroup, operations);
|
||||
// removing members is a special case that does not follow the SCIM convention
|
||||
await $replaceGroupDAL(groupId, orgId, { displayName: scimGroup.displayName, members: scimGroup.members });
|
||||
|
||||
const updatedScimMembers = await userGroupMembershipDAL.findGroupMembershipsByGroupIdInOrg(group.id, orgId);
|
||||
return {
|
||||
...scimGroup,
|
||||
members: updatedScimMembers.map((member) => ({
|
||||
value: member.orgMembershipId,
|
||||
display: `${member.firstName ?? ""} ${member.lastName ?? ""}`
|
||||
}))
|
||||
});
|
||||
};
|
||||
};
|
||||
|
||||
const deleteScimGroup = async ({ groupId, orgId }: TDeleteScimGroupDTO) => {
|
||||
@@ -1016,8 +1044,8 @@ export const scimServiceFactory = ({
|
||||
createScimGroup,
|
||||
getScimGroup,
|
||||
deleteScimGroup,
|
||||
updateScimGroupNamePut,
|
||||
updateScimGroupNamePatch,
|
||||
replaceScimGroup,
|
||||
updateScimGroup,
|
||||
fnValidateScimToken
|
||||
};
|
||||
};
|
||||
|
@@ -1,3 +1,5 @@
|
||||
import { ScimPatchOperation } from "scim-patch";
|
||||
|
||||
import { TOrgPermission } from "@app/lib/types";
|
||||
|
||||
export type TCreateScimTokenDTO = {
|
||||
@@ -34,29 +36,25 @@ export type TGetScimUserDTO = {
|
||||
export type TCreateScimUserDTO = {
|
||||
externalId: string;
|
||||
email?: string;
|
||||
firstName: string;
|
||||
lastName: string;
|
||||
firstName?: string;
|
||||
lastName?: string;
|
||||
orgId: string;
|
||||
};
|
||||
|
||||
export type TUpdateScimUserDTO = {
|
||||
orgMembershipId: string;
|
||||
orgId: string;
|
||||
operations: {
|
||||
op: string;
|
||||
path?: string;
|
||||
value?:
|
||||
| string
|
||||
| {
|
||||
active: boolean;
|
||||
};
|
||||
}[];
|
||||
operations: ScimPatchOperation[];
|
||||
};
|
||||
|
||||
export type TReplaceScimUserDTO = {
|
||||
orgMembershipId: string;
|
||||
active: boolean;
|
||||
orgId: string;
|
||||
email?: string;
|
||||
firstName?: string;
|
||||
lastName?: string;
|
||||
externalId: string;
|
||||
};
|
||||
|
||||
export type TDeleteScimUserDTO = {
|
||||
@@ -69,6 +67,7 @@ export type TListScimGroupsDTO = {
|
||||
filter?: string;
|
||||
limit: number;
|
||||
orgId: string;
|
||||
isMembersExcluded?: boolean;
|
||||
};
|
||||
|
||||
export type TListScimGroups = {
|
||||
@@ -107,31 +106,7 @@ export type TUpdateScimGroupNamePutDTO = {
|
||||
export type TUpdateScimGroupNamePatchDTO = {
|
||||
groupId: string;
|
||||
orgId: string;
|
||||
operations: (TRemoveOp | TReplaceOp | TAddOp)[];
|
||||
};
|
||||
|
||||
// akhilmhdh: I know, this is done due to lack of time. Need to change later to support as normalized rather than like this
|
||||
// Forgive akhil blame tony
|
||||
type TReplaceOp = {
|
||||
op: "replace" | "Replace";
|
||||
value: {
|
||||
id: string;
|
||||
displayName: string;
|
||||
};
|
||||
};
|
||||
|
||||
type TRemoveOp = {
|
||||
op: "remove" | "Remove";
|
||||
path: string;
|
||||
};
|
||||
|
||||
type TAddOp = {
|
||||
op: "add" | "Add";
|
||||
path: string;
|
||||
value: {
|
||||
value: string;
|
||||
display?: string;
|
||||
}[];
|
||||
operations: ScimPatchOperation[];
|
||||
};
|
||||
|
||||
export type TDeleteScimGroupDTO = {
|
||||
@@ -160,13 +135,10 @@ export type TScimUser = {
|
||||
type: string;
|
||||
}[];
|
||||
active: boolean;
|
||||
groups: {
|
||||
value: string;
|
||||
display: string;
|
||||
}[];
|
||||
meta: {
|
||||
resourceType: string;
|
||||
location: null;
|
||||
created: Date;
|
||||
lastModified: Date;
|
||||
};
|
||||
};
|
||||
|
||||
@@ -176,10 +148,11 @@ export type TScimGroup = {
|
||||
displayName: string;
|
||||
members: {
|
||||
value: string;
|
||||
display: string;
|
||||
display?: string;
|
||||
}[];
|
||||
meta: {
|
||||
resourceType: string;
|
||||
location: null;
|
||||
created: Date;
|
||||
lastModified: Date;
|
||||
};
|
||||
};
|
||||
|
@@ -16,6 +16,7 @@ import {
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { QueueName } from "@app/queue";
|
||||
|
||||
export type TSnapshotDALFactory = ReturnType<typeof snapshotDALFactory>;
|
||||
|
||||
@@ -599,6 +600,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
|
||||
const pruneExcessSnapshots = async () => {
|
||||
const PRUNE_FOLDER_BATCH_SIZE = 10000;
|
||||
|
||||
logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret snapshots started`);
|
||||
try {
|
||||
let uuidOffset = "00000000-0000-0000-0000-000000000000";
|
||||
// cleanup snapshots from current folders
|
||||
@@ -714,6 +716,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "SnapshotPrune" });
|
||||
}
|
||||
logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret snapshots completed`);
|
||||
};
|
||||
|
||||
// special query for migration for secret v2
|
||||
|
@@ -964,6 +964,10 @@ export const INTEGRATION = {
|
||||
shouldAutoRedeploy: "Used by Render to trigger auto deploy.",
|
||||
secretGCPLabel: "The label for GCP secrets.",
|
||||
secretAWSTag: "The tags for AWS secrets.",
|
||||
githubVisibility:
|
||||
"Define where the secrets from the Github Integration should be visible. Option 'selected' lets you directly define which repositories to sync secrets to.",
|
||||
githubVisibilityRepoIds:
|
||||
"The repository IDs to sync secrets to when using the Github Integration. Only applicable when using Organization scope, and visibility is set to 'selected'",
|
||||
kmsKeyId: "The ID of the encryption key from AWS KMS.",
|
||||
shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store.",
|
||||
shouldMaskSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Masked'.",
|
||||
|
backend/src/lib/axios/digest-auth.ts (new file, 57 lines)
@@ -0,0 +1,57 @@
import crypto from "node:crypto";

import { AxiosError, AxiosInstance, AxiosRequestConfig } from "axios";

export const createDigestAuthRequestInterceptor = (
  axiosInstance: AxiosInstance,
  username: string,
  password: string
) => {
  let nc = 0;

  return async (opts: AxiosRequestConfig) => {
    try {
      return await axiosInstance.request(opts);
    } catch (err) {
      const error = err as AxiosError;
      const authHeader = (error?.response?.headers?.["www-authenticate"] as string) || "";

      if (error?.response?.status !== 401 || !authHeader?.includes("nonce")) {
        return Promise.reject(error.message);
      }

      if (!error.config) {
        return Promise.reject(error);
      }

      const authDetails = authHeader.split(",").map((el) => el.split("="));
      nc += 1;
      const nonceCount = nc.toString(16).padStart(8, "0");
      const cnonce = crypto.randomBytes(24).toString("hex");
      const realm = authDetails.find((el) => el[0].toLowerCase().indexOf("realm") > -1)?.[1].replace(/"/g, "");
      const nonce = authDetails.find((el) => el[0].toLowerCase().indexOf("nonce") > -1)?.[1].replace(/"/g, "");
      const ha1 = crypto.createHash("md5").update(`${username}:${realm}:${password}`).digest("hex");
      const path = opts.url;

      const ha2 = crypto
        .createHash("md5")
        .update(`${opts.method ?? "GET"}:${path}`)
        .digest("hex");

      const response = crypto
        .createHash("md5")
        .update(`${ha1}:${nonce}:${nonceCount}:${cnonce}:auth:${ha2}`)
        .digest("hex");
      const authorization = `Digest username="${username}",realm="${realm}",nonce="${nonce}",uri="${path}",qop="auth",algorithm="MD5",response="${response}",nc="${nonceCount}",cnonce="${cnonce}"`;

      if (opts.headers) {
        // eslint-disable-next-line
        opts.headers.authorization = authorization;
      } else {
        // eslint-disable-next-line
        opts.headers = { authorization };
      }
      return axiosInstance.request(opts);
    }
  };
};
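A minimal usage sketch of createDigestAuthRequestInterceptor, assuming the file is importable through the repo's @app alias; the base URL, endpoint path, and credentials are hypothetical placeholders, and any digest-protected HTTP API would be driven the same way.

import axios from "axios";

import { createDigestAuthRequestInterceptor } from "@app/lib/axios/digest-auth";

// Hypothetical digest-protected API; URL, path, and credentials are placeholders.
const instance = axios.create({ baseURL: "https://cloud.example.com/api" });
const digestRequest = createDigestAuthRequestInterceptor(instance, "public-key", "private-key");

const listClusters = async () => {
  // If the first attempt returns 401 with a digest challenge, the wrapper retries once
  // with the computed Authorization header and returns the second response.
  const res = await digestRequest({ method: "GET", url: "/v1/clusters" });
  return res.data;
};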
backend/src/lib/knex/scim.ts (new file, 121 lines)
@@ -0,0 +1,121 @@
import { Knex } from "knex";
import { Compare, Filter, parse } from "scim2-parse-filter";

const appendParentToGroupingOperator = (parentPath: string, filter: Filter) => {
  if (filter.op !== "[]" && filter.op !== "and" && filter.op !== "or" && filter.op !== "not") {
    return { ...filter, attrPath: `${parentPath}.${(filter as Compare).attrPath}` };
  }
  return filter;
};

export const generateKnexQueryFromScim = (
  rootQuery: Knex.QueryBuilder,
  rootScimFilter: string,
  getAttributeField: (attr: string) => string | null
) => {
  const scimRootFilterAst = parse(rootScimFilter);
  const stack = [
    {
      scimFilterAst: scimRootFilterAst,
      query: rootQuery
    }
  ];

  while (stack.length) {
    const { scimFilterAst, query } = stack.pop()!;
    switch (scimFilterAst.op) {
      case "eq": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.where(attrPath, scimFilterAst.compValue);
        break;
      }
      case "pr": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.whereNotNull(attrPath);
        break;
      }
      case "gt": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.where(attrPath, ">", scimFilterAst.compValue);
        break;
      }
      case "ge": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.where(attrPath, ">=", scimFilterAst.compValue);
        break;
      }
      case "lt": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.where(attrPath, "<", scimFilterAst.compValue);
        break;
      }
      case "le": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.where(attrPath, "<=", scimFilterAst.compValue);
        break;
      }
      case "sw": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.whereILike(attrPath, `${scimFilterAst.compValue}%`);
        break;
      }
      case "ew": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.whereILike(attrPath, `%${scimFilterAst.compValue}`);
        break;
      }
      case "co": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.whereILike(attrPath, `%${scimFilterAst.compValue}%`);
        break;
      }
      case "ne": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.whereNot(attrPath, "=", scimFilterAst.compValue);
        break;
      }
      case "and": {
        void query.andWhere((subQueryBuilder) => {
          scimFilterAst.filters.forEach((el) => {
            stack.push({
              query: subQueryBuilder,
              scimFilterAst: el
            });
          });
        });
        break;
      }
      case "or": {
        void query.orWhere((subQueryBuilder) => {
          scimFilterAst.filters.forEach((el) => {
            stack.push({
              query: subQueryBuilder,
              scimFilterAst: el
            });
          });
        });
        break;
      }
      case "not": {
        void query.whereNot((subQueryBuilder) => {
          stack.push({
            query: subQueryBuilder,
            scimFilterAst: scimFilterAst.filter
          });
        });
        break;
      }
      case "[]": {
        void query.whereNot((subQueryBuilder) => {
          stack.push({
            query: subQueryBuilder,
            scimFilterAst: appendParentToGroupingOperator(scimFilterAst.attrPath, scimFilterAst.valFilter)
          });
        });
        break;
      }
      default:
        break;
    }
  }
};
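A short sketch of how generateKnexQueryFromScim might be wired into a query, assuming the @app/lib/knex/scim alias and illustrative table/column names; the actual attribute mapping for org memberships is the findMembershipWithScimFilter function added later in this diff.

import { Knex } from "knex";

import { generateKnexQueryFromScim } from "@app/lib/knex/scim";

// Wrap the generated clauses in a single where() group so they compose with other filters.
const applyScimFilter = (query: Knex.QueryBuilder, scimFilter: string) =>
  query.where((qb) => {
    void generateKnexQueryFromScim(qb, scimFilter, (attrPath) => {
      switch (attrPath) {
        case "userName":
          return "user_aliases.externalId"; // illustrative column names
        case "active":
          return "org_memberships.isActive";
        default:
          return null; // unmapped SCIM attributes are skipped rather than rejected
      }
    });
  });

// e.g. applyScimFilter(db("org_memberships"), 'userName sw "jane" and active eq true');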
@@ -49,6 +49,21 @@ export const main = async ({ db, smtp, logger, queue, keyStore }: TMain) => {
|
||||
server.setValidatorCompiler(validatorCompiler);
|
||||
server.setSerializerCompiler(serializerCompiler);
|
||||
|
||||
server.addContentTypeParser("application/scim+json", { parseAs: "string" }, (_, body, done) => {
|
||||
try {
|
||||
const strBody = body instanceof Buffer ? body.toString() : body;
|
||||
if (!strBody) {
|
||||
done(null, undefined);
|
||||
return;
|
||||
}
|
||||
const json: unknown = JSON.parse(strBody);
|
||||
done(null, json);
|
||||
} catch (err) {
|
||||
const error = err as Error;
|
||||
done(error, undefined);
|
||||
}
|
||||
});
|
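Rough illustration of the parser registered above: SCIM clients send Content-Type: application/scim+json, and once the parser is in place such bodies arrive in route handlers as parsed JSON. The route path, scimToken variable, and payload are assumptions for the sketch, reusing the server instance from this function.

// Illustrative only: exercise the SCIM content type parser via Fastify's inject API.
const checkScimParser = async () => {
  const res = await server.inject({
    method: "POST",
    url: "/api/v1/scim/Groups", // assumed SCIM route prefix
    headers: {
      "content-type": "application/scim+json",
      authorization: `Bearer ${scimToken}` // assumed pre-provisioned SCIM bearer token
    },
    payload: JSON.stringify({
      schemas: ["urn:ietf:params:scim:schemas:core:2.0:Group"],
      displayName: "Engineering"
    })
  });
  // With the parser registered, the handler sees req.body as an object, not a raw string.
  return res.statusCode;
};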
||||
|
||||
try {
|
||||
await server.register<FastifyCookieOptions>(cookie, {
|
||||
secret: appCfg.COOKIE_SECRET_SIGN_KEY
|
||||
|
@@ -3,6 +3,7 @@ import { Redis } from "ioredis";
|
||||
import { Knex } from "knex";
|
||||
import { z } from "zod";
|
||||
|
||||
import { registerCertificateEstRouter } from "@app/ee/routes/est/certificate-est-router";
|
||||
import { registerV1EERoutes } from "@app/ee/routes/v1";
|
||||
import { accessApprovalPolicyApproverDALFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-approver-dal";
|
||||
import { accessApprovalPolicyDALFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-dal";
|
||||
@@ -17,6 +18,7 @@ import { auditLogStreamDALFactory } from "@app/ee/services/audit-log-stream/audi
|
||||
import { auditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
|
||||
import { certificateAuthorityCrlDALFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-dal";
|
||||
import { certificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
|
||||
import { certificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
|
||||
import { dynamicSecretDALFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-dal";
|
||||
import { dynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
|
||||
import { buildDynamicSecretProviders } from "@app/ee/services/dynamic-secret/providers";
|
||||
@@ -92,7 +94,6 @@ import { certificateAuthorityDALFactory } from "@app/services/certificate-author
|
||||
import { certificateAuthorityQueueFactory } from "@app/services/certificate-authority/certificate-authority-queue";
|
||||
import { certificateAuthoritySecretDALFactory } from "@app/services/certificate-authority/certificate-authority-secret-dal";
|
||||
import { certificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
|
||||
import { certificateEstServiceFactory } from "@app/services/certificate-est/certificate-est-service";
|
||||
import { certificateTemplateDALFactory } from "@app/services/certificate-template/certificate-template-dal";
|
||||
import { certificateTemplateEstConfigDALFactory } from "@app/services/certificate-template/certificate-template-est-config-dal";
|
||||
import { certificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
|
||||
@@ -199,7 +200,6 @@ import { injectIdentity } from "../plugins/auth/inject-identity";
|
||||
import { injectPermission } from "../plugins/auth/inject-permission";
|
||||
import { injectRateLimits } from "../plugins/inject-rate-limits";
|
||||
import { registerSecretScannerGhApp } from "../plugins/secret-scanner";
|
||||
import { registerCertificateEstRouter } from "./est/certificate-est-router";
|
||||
import { registerV1Routes } from "./v1";
|
||||
import { registerV2Routes } from "./v2";
|
||||
import { registerV3Routes } from "./v3";
|
||||
@@ -667,7 +667,8 @@ export const registerRoutes = async (
|
||||
certificateAuthorityDAL,
|
||||
permissionService,
|
||||
kmsService,
|
||||
projectDAL
|
||||
projectDAL,
|
||||
licenseService
|
||||
});
|
||||
|
||||
const certificateEstService = certificateEstServiceFactory({
|
||||
@@ -677,7 +678,8 @@ export const registerRoutes = async (
|
||||
certificateAuthorityCertDAL,
|
||||
certificateAuthorityDAL,
|
||||
projectDAL,
|
||||
kmsService
|
||||
kmsService,
|
||||
licenseService
|
||||
});
|
||||
|
||||
const pkiAlertService = pkiAlertServiceFactory({
|
||||
|
@@ -48,7 +48,7 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
method: "POST",
|
||||
url: "/public/:id",
|
||||
config: {
|
||||
rateLimit: publicEndpointLimit
|
||||
@@ -57,38 +57,37 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
|
||||
params: z.object({
|
||||
id: z.string().uuid()
|
||||
}),
|
||||
querystring: z.object({
|
||||
hashedHex: z.string().min(1)
|
||||
body: z.object({
|
||||
hashedHex: z.string().min(1),
|
||||
password: z.string().optional()
|
||||
}),
|
||||
response: {
|
||||
200: SecretSharingSchema.pick({
|
||||
encryptedValue: true,
|
||||
iv: true,
|
||||
tag: true,
|
||||
expiresAt: true,
|
||||
expiresAfterViews: true,
|
||||
accessType: true
|
||||
}).extend({
|
||||
orgName: z.string().optional()
|
||||
200: z.object({
|
||||
isPasswordProtected: z.boolean(),
|
||||
secret: SecretSharingSchema.pick({
|
||||
encryptedValue: true,
|
||||
iv: true,
|
||||
tag: true,
|
||||
expiresAt: true,
|
||||
expiresAfterViews: true,
|
||||
accessType: true
|
||||
})
|
||||
.extend({
|
||||
orgName: z.string().optional()
|
||||
})
|
||||
.optional()
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const sharedSecret = await req.server.services.secretSharing.getActiveSharedSecretById({
|
||||
const sharedSecret = await req.server.services.secretSharing.getSharedSecretById({
|
||||
sharedSecretId: req.params.id,
|
||||
hashedHex: req.query.hashedHex,
|
||||
hashedHex: req.body.hashedHex,
|
||||
password: req.body.password,
|
||||
orgId: req.permission?.orgId
|
||||
});
|
||||
if (!sharedSecret) return undefined;
|
||||
return {
|
||||
encryptedValue: sharedSecret.encryptedValue,
|
||||
iv: sharedSecret.iv,
|
||||
tag: sharedSecret.tag,
|
||||
expiresAt: sharedSecret.expiresAt,
|
||||
expiresAfterViews: sharedSecret.expiresAfterViews,
|
||||
accessType: sharedSecret.accessType,
|
||||
orgName: sharedSecret.orgName
|
||||
};
|
||||
|
||||
return sharedSecret;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -101,6 +100,7 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
|
||||
schema: {
|
||||
body: z.object({
|
||||
encryptedValue: z.string(),
|
||||
password: z.string().optional(),
|
||||
hashedHex: z.string(),
|
||||
iv: z.string(),
|
||||
tag: z.string(),
|
||||
@@ -131,6 +131,7 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
|
||||
schema: {
|
||||
body: z.object({
|
||||
name: z.string().max(50).optional(),
|
||||
password: z.string().optional(),
|
||||
encryptedValue: z.string(),
|
||||
hashedHex: z.string(),
|
||||
iv: z.string(),
|
||||
|
@@ -1,3 +1,5 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TDbClient } from "@app/db";
|
||||
import { TableName } from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
@@ -30,20 +32,21 @@ export const certificateTemplateDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
};
|
||||
|
||||
const getById = async (id: string) => {
|
||||
const getById = async (id: string, tx?: Knex) => {
|
||||
try {
|
||||
const certTemplate = await db
|
||||
.replicaNode()(TableName.CertificateTemplate)
|
||||
const certTemplate = await (tx || db.replicaNode())(TableName.CertificateTemplate)
|
||||
.join(
|
||||
TableName.CertificateAuthority,
|
||||
`${TableName.CertificateAuthority}.id`,
|
||||
`${TableName.CertificateTemplate}.caId`
|
||||
)
|
||||
.join(TableName.Project, `${TableName.Project}.id`, `${TableName.CertificateAuthority}.projectId`)
|
||||
.where(`${TableName.CertificateTemplate}.id`, "=", id)
|
||||
.select(selectAllTableCols(TableName.CertificateTemplate))
|
||||
.select(
|
||||
db.ref("projectId").withSchema(TableName.CertificateAuthority),
|
||||
db.ref("friendlyName").as("caName").withSchema(TableName.CertificateAuthority)
|
||||
db.ref("friendlyName").as("caName").withSchema(TableName.CertificateAuthority),
|
||||
db.ref("orgId").withSchema(TableName.Project)
|
||||
)
|
||||
.first();
|
||||
|
||||
|
@@ -3,6 +3,7 @@ import * as x509 from "@peculiar/x509";
|
||||
import bcrypt from "bcrypt";
|
||||
|
||||
import { TCertificateTemplateEstConfigsUpdate } from "@app/db/schemas";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
@@ -32,6 +33,7 @@ type TCertificateTemplateServiceFactoryDep = {
|
||||
kmsService: Pick<TKmsServiceFactory, "generateKmsKey" | "encryptWithKmsKey" | "decryptWithKmsKey">;
|
||||
certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findById">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
};
|
||||
|
||||
export type TCertificateTemplateServiceFactory = ReturnType<typeof certificateTemplateServiceFactory>;
|
||||
@@ -42,7 +44,8 @@ export const certificateTemplateServiceFactory = ({
|
||||
certificateAuthorityDAL,
|
||||
permissionService,
|
||||
kmsService,
|
||||
projectDAL
|
||||
projectDAL,
|
||||
licenseService
|
||||
}: TCertificateTemplateServiceFactoryDep) => {
|
||||
const createCertTemplate = async ({
|
||||
caId,
|
||||
@@ -75,23 +78,28 @@ export const certificateTemplateServiceFactory = ({
|
||||
ProjectPermissionSub.CertificateTemplates
|
||||
);
|
||||
|
||||
const { id } = await certificateTemplateDAL.create({
|
||||
caId,
|
||||
pkiCollectionId,
|
||||
name,
|
||||
commonName,
|
||||
subjectAlternativeName,
|
||||
ttl
|
||||
return certificateTemplateDAL.transaction(async (tx) => {
|
||||
const { id } = await certificateTemplateDAL.create(
|
||||
{
|
||||
caId,
|
||||
pkiCollectionId,
|
||||
name,
|
||||
commonName,
|
||||
subjectAlternativeName,
|
||||
ttl
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
const certificateTemplate = await certificateTemplateDAL.getById(id, tx);
|
||||
if (!certificateTemplate) {
|
||||
throw new NotFoundError({
|
||||
message: "Certificate template not found"
|
||||
});
|
||||
}
|
||||
|
||||
return certificateTemplate;
|
||||
});
|
||||
|
||||
const certificateTemplate = await certificateTemplateDAL.getById(id);
|
||||
if (!certificateTemplate) {
|
||||
throw new NotFoundError({
|
||||
message: "Certificate template not found"
|
||||
});
|
||||
}
|
||||
|
||||
return certificateTemplate;
|
||||
};
|
||||
|
||||
const updateCertTemplate = async ({
|
||||
@@ -136,23 +144,29 @@ export const certificateTemplateServiceFactory = ({
|
||||
}
|
||||
}
|
||||
|
||||
await certificateTemplateDAL.updateById(certTemplate.id, {
|
||||
caId,
|
||||
pkiCollectionId,
|
||||
commonName,
|
||||
subjectAlternativeName,
|
||||
name,
|
||||
ttl
|
||||
return certificateTemplateDAL.transaction(async (tx) => {
|
||||
await certificateTemplateDAL.updateById(
|
||||
certTemplate.id,
|
||||
{
|
||||
caId,
|
||||
pkiCollectionId,
|
||||
commonName,
|
||||
subjectAlternativeName,
|
||||
name,
|
||||
ttl
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
const updatedTemplate = await certificateTemplateDAL.getById(id, tx);
|
||||
if (!updatedTemplate) {
|
||||
throw new NotFoundError({
|
||||
message: "Certificate template not found"
|
||||
});
|
||||
}
|
||||
|
||||
return updatedTemplate;
|
||||
});
|
||||
|
||||
const updatedTemplate = await certificateTemplateDAL.getById(id);
|
||||
if (!updatedTemplate) {
|
||||
throw new NotFoundError({
|
||||
message: "Certificate template not found"
|
||||
});
|
||||
}
|
||||
|
||||
return updatedTemplate;
|
||||
};
|
||||
|
||||
const deleteCertTemplate = async ({ id, actorId, actorAuthMethod, actor, actorOrgId }: TDeleteCertTemplateDTO) => {
|
||||
@@ -215,6 +229,13 @@ export const certificateTemplateServiceFactory = ({
|
||||
actor,
|
||||
actorOrgId
|
||||
}: TCreateEstConfigurationDTO) => {
|
||||
const plan = await licenseService.getPlan(actorOrgId);
|
||||
if (!plan.pkiEst) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to create EST configuration due to plan restriction. Upgrade to the Enterprise plan."
|
||||
});
|
||||
}
|
||||
|
||||
const certTemplate = await certificateTemplateDAL.getById(certificateTemplateId);
|
||||
if (!certTemplate) {
|
||||
throw new NotFoundError({
|
||||
@@ -285,6 +306,13 @@ export const certificateTemplateServiceFactory = ({
|
||||
actor,
|
||||
actorOrgId
|
||||
}: TUpdateEstConfigurationDTO) => {
|
||||
const plan = await licenseService.getPlan(actorOrgId);
|
||||
if (!plan.pkiEst) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to update EST configuration due to plan restriction. Upgrade to the Enterprise plan."
|
||||
});
|
||||
}
|
||||
|
||||
const certTemplate = await certificateTemplateDAL.getById(certificateTemplateId);
|
||||
if (!certTemplate) {
|
||||
throw new NotFoundError({
|
||||
@@ -416,7 +444,8 @@ export const certificateTemplateServiceFactory = ({
|
||||
isEnabled: estConfig.isEnabled,
|
||||
caChain: decryptedCaChain.toString(),
|
||||
hashedPassphrase: estConfig.hashedPassphrase,
|
||||
projectId: certTemplate.projectId
|
||||
projectId: certTemplate.projectId,
|
||||
orgId: certTemplate.orgId
|
||||
};
|
||||
};
|
||||
|
||||
|
@@ -4,6 +4,8 @@ import { TDbClient } from "@app/db";
|
||||
import { IdentityAuthMethod, TableName, TIdentityAccessTokens } from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import { ormify, selectAllTableCols } from "@app/lib/knex";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { QueueName } from "@app/queue";
|
||||
|
||||
export type TIdentityAccessTokenDALFactory = ReturnType<typeof identityAccessTokenDALFactory>;
|
||||
|
||||
@@ -95,6 +97,7 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
|
||||
};
|
||||
|
||||
const removeExpiredTokens = async (tx?: Knex) => {
|
||||
logger.info(`${QueueName.DailyResourceCleanUp}: remove expired access token started`);
|
||||
try {
|
||||
const docs = (tx || db)(TableName.IdentityAccessToken)
|
||||
.where({
|
||||
@@ -131,7 +134,8 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
|
||||
});
|
||||
})
|
||||
.delete();
|
||||
return await docs;
|
||||
await docs;
|
||||
logger.info(`${QueueName.DailyResourceCleanUp}: remove expired access token completed`);
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "IdentityAccessTokenPrune" });
|
||||
}
|
||||
|
@@ -5,6 +5,7 @@ import { TableName } from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import { ormify } from "@app/lib/knex";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { QueueName } from "@app/queue";
|
||||
|
||||
export type TIdentityUaClientSecretDALFactory = ReturnType<typeof identityUaClientSecretDALFactory>;
|
||||
|
||||
@@ -30,7 +31,9 @@ export const identityUaClientSecretDALFactory = (db: TDbClient) => {
|
||||
|
||||
let deletedClientSecret: { id: string }[] = [];
|
||||
let numberOfRetryOnFailure = 0;
|
||||
let isRetrying = false;
|
||||
|
||||
logger.info(`${QueueName.DailyResourceCleanUp}: remove expired universal auth client secret started`);
|
||||
do {
|
||||
try {
|
||||
const findExpiredClientSecretQuery = (tx || db)(TableName.IdentityUaClientSecret)
|
||||
@@ -71,7 +74,9 @@ export const identityUaClientSecretDALFactory = (db: TDbClient) => {
|
||||
setTimeout(resolve, 10); // time to breathe for db
|
||||
});
|
||||
}
|
||||
} while (deletedClientSecret.length > 0 || numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
|
||||
isRetrying = numberOfRetryOnFailure > 0;
|
||||
} while (deletedClientSecret.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
|
||||
logger.info(`${QueueName.DailyResourceCleanUp}: remove expired universal auth client secret completed`);
|
||||
};
|
||||
|
||||
return { ...uaClientSecretOrm, incrementUsage, removeExpiredClientSecrets };
|
||||
|
@@ -460,16 +460,21 @@ const getAppsFlyio = async ({ accessToken }: { accessToken: string }) => {
|
||||
*/
|
||||
const getAppsCircleCI = async ({ accessToken }: { accessToken: string }) => {
|
||||
const res = (
|
||||
await request.get<{ reponame: string }[]>(`${IntegrationUrls.CIRCLECI_API_URL}/v1.1/projects`, {
|
||||
headers: {
|
||||
"Circle-Token": accessToken,
|
||||
"Accept-Encoding": "application/json"
|
||||
await request.get<{ reponame: string; username: string; vcs_url: string }[]>(
|
||||
`${IntegrationUrls.CIRCLECI_API_URL}/v1.1/projects`,
|
||||
{
|
||||
headers: {
|
||||
"Circle-Token": accessToken,
|
||||
"Accept-Encoding": "application/json"
|
||||
}
|
||||
}
|
||||
})
|
||||
)
|
||||
).data;
|
||||
|
||||
const apps = res?.map((a) => ({
|
||||
name: a?.reponame
|
||||
const apps = res.map((a) => ({
|
||||
owner: a.username, // username maps to unique organization name in CircleCI
|
||||
name: a.reponame, // reponame maps to project name within an organization in CircleCI
|
||||
appId: a.vcs_url.split("/").pop() // vcs_url maps to the project id in CircleCI
|
||||
}));
|
||||
|
||||
return apps;
|
||||
|
@@ -30,6 +30,7 @@ const getIntegrationSecretsV2 = async (
|
||||
environment: string;
|
||||
folderId: string;
|
||||
depth: number;
|
||||
secretPath: string;
|
||||
decryptor: (value: Buffer | null | undefined) => string;
|
||||
},
|
||||
secretV2BridgeDAL: Pick<TSecretV2BridgeDALFactory, "find" | "findByFolderId">,
|
||||
@@ -306,6 +307,7 @@ export const deleteIntegrationSecrets = async ({
|
||||
? await getIntegrationSecretsV2(
|
||||
{
|
||||
environment: integration.environment.id,
|
||||
secretPath: integration.secretPath,
|
||||
projectId: integration.projectId,
|
||||
folderId: folder.id,
|
||||
depth: 1,
|
||||
|
@@ -1625,7 +1625,11 @@ const syncSecretsGitHub = async ({
|
||||
await octokit.request("PUT /orgs/{org}/actions/secrets/{secret_name}", {
|
||||
org: integration.owner as string,
|
||||
secret_name: key,
|
||||
visibility: "all",
|
||||
visibility: metadata.githubVisibility ?? "all",
|
||||
...(metadata.githubVisibility === "selected" && {
|
||||
// we need to map the githubVisibilityRepoIds to numbers
|
||||
selected_repository_ids: metadata.githubVisibilityRepoIds?.map(Number) ?? []
|
||||
}),
|
||||
encrypted_value: encryptedSecret,
|
||||
key_id: repoPublicKey.key_id
|
||||
});
|
||||
@@ -1925,22 +1929,62 @@ const syncSecretsCircleCI = async ({
|
||||
secrets: Record<string, { value: string; comment?: string }>;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
const circleciOrganizationDetail = (
|
||||
await request.get(`${IntegrationUrls.CIRCLECI_API_URL}/v2/me/collaborations`, {
|
||||
const getProjectSlug = async () => {
|
||||
const requestConfig = {
|
||||
headers: {
|
||||
"Circle-Token": accessToken,
|
||||
"Accept-Encoding": "application/json"
|
||||
}
|
||||
})
|
||||
).data[0];
|
||||
};
|
||||
|
||||
const { slug } = circleciOrganizationDetail;
|
||||
try {
|
||||
const projectDetails = (
|
||||
await request.get<{ slug: string }>(
|
||||
`${IntegrationUrls.CIRCLECI_API_URL}/v2/project/${integration.appId}`,
|
||||
requestConfig
|
||||
)
|
||||
).data;
|
||||
|
||||
return projectDetails.slug;
|
||||
} catch (err) {
|
||||
if (err instanceof AxiosError) {
|
||||
if (err.response?.data?.message !== "Not Found") {
|
||||
throw new Error("Failed to get project slug from CircleCI during first attempt.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// For backwards compatibility with old CircleCI integrations, where the organization name was not stored, we cannot filter by organization
|
||||
try {
|
||||
const circleCiOrganization = (
|
||||
await request.get<{ slug: string; name: string }[]>(
|
||||
`${IntegrationUrls.CIRCLECI_API_URL}/v2/me/collaborations`,
|
||||
requestConfig
|
||||
)
|
||||
).data;
|
||||
|
||||
// Case 1: This is a new integration where the organization name is stored under `integration.owner`
|
||||
if (integration.owner) {
|
||||
const org = circleCiOrganization.find((o) => o.name === integration.owner);
|
||||
if (org) {
|
||||
return `${org.slug}/${integration.app}`;
|
||||
}
|
||||
}
|
||||
|
||||
// Case 2: This is an old integration where the organization name is not stored, so we have to assume the first organization is the correct one
|
||||
return `${circleCiOrganization[0].slug}/${integration.app}`;
|
||||
} catch (err) {
|
||||
throw new Error("Failed to get project slug from CircleCI during second attempt.");
|
||||
}
|
||||
};
|
||||
|
||||
const projectSlug = await getProjectSlug();
|
||||
|
||||
// sync secrets to CircleCI
|
||||
await Promise.all(
|
||||
Object.keys(secrets).map(async (key) =>
|
||||
request.post(
|
||||
`${IntegrationUrls.CIRCLECI_API_URL}/v2/project/${slug}/${integration.app}/envvar`,
|
||||
`${IntegrationUrls.CIRCLECI_API_URL}/v2/project/${projectSlug}/envvar`,
|
||||
{
|
||||
name: key,
|
||||
value: secrets[key].value
|
||||
@@ -1958,7 +2002,7 @@ const syncSecretsCircleCI = async ({
|
||||
// get secrets from CircleCI
|
||||
const getSecretsRes = (
|
||||
await request.get<{ items: { name: string }[] }>(
|
||||
`${IntegrationUrls.CIRCLECI_API_URL}/v2/project/${slug}/${integration.app}/envvar`,
|
||||
`${IntegrationUrls.CIRCLECI_API_URL}/v2/project/${projectSlug}/envvar`,
|
||||
{
|
||||
headers: {
|
||||
"Circle-Token": accessToken,
|
||||
@@ -1972,15 +2016,12 @@ const syncSecretsCircleCI = async ({
|
||||
await Promise.all(
|
||||
getSecretsRes.map(async (sec) => {
|
||||
if (!(sec.name in secrets)) {
|
||||
return request.delete(
|
||||
`${IntegrationUrls.CIRCLECI_API_URL}/v2/project/${slug}/${integration.app}/envvar/${sec.name}`,
|
||||
{
|
||||
headers: {
|
||||
"Circle-Token": accessToken,
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
return request.delete(`${IntegrationUrls.CIRCLECI_API_URL}/v2/project/${projectSlug}/envvar/${sec.name}`, {
|
||||
headers: {
|
||||
"Circle-Token": accessToken,
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
})
|
||||
);
|
||||
|
@@ -5,14 +5,18 @@ import { INTEGRATION } from "@app/lib/api-docs";
|
||||
import { IntegrationMappingBehavior } from "../integration-auth/integration-list";
|
||||
|
||||
export const IntegrationMetadataSchema = z.object({
|
||||
initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir),
|
||||
|
||||
secretPrefix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretPrefix),
|
||||
secretSuffix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretSuffix),
|
||||
initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir),
|
||||
|
||||
mappingBehavior: z
|
||||
.nativeEnum(IntegrationMappingBehavior)
|
||||
.optional()
|
||||
.describe(INTEGRATION.CREATE.metadata.mappingBehavior),
|
||||
|
||||
shouldAutoRedeploy: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldAutoRedeploy),
|
||||
|
||||
secretGCPLabel: z
|
||||
.object({
|
||||
labelName: z.string(),
|
||||
@@ -20,6 +24,7 @@ export const IntegrationMetadataSchema = z.object({
|
||||
})
|
||||
.optional()
|
||||
.describe(INTEGRATION.CREATE.metadata.secretGCPLabel),
|
||||
|
||||
secretAWSTag: z
|
||||
.array(
|
||||
z.object({
|
||||
@@ -29,7 +34,15 @@ export const IntegrationMetadataSchema = z.object({
|
||||
)
|
||||
.optional()
|
||||
.describe(INTEGRATION.CREATE.metadata.secretAWSTag),
|
||||
|
||||
githubVisibility: z
|
||||
.union([z.literal("selected"), z.literal("private"), z.literal("all")])
|
||||
.optional()
|
||||
.describe(INTEGRATION.CREATE.metadata.githubVisibility),
|
||||
githubVisibilityRepoIds: z.array(z.string()).optional().describe(INTEGRATION.CREATE.metadata.githubVisibilityRepoIds),
|
||||
|
||||
kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId),
|
||||
|
||||
shouldDisableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete),
|
||||
shouldEnableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldEnableDelete),
|
||||
shouldMaskSecrets: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldMaskSecrets),
|
||||
|
@@ -27,6 +27,10 @@ export type TCreateIntegrationDTO = {
|
||||
key: string;
|
||||
value: string;
|
||||
}[];
|
||||
|
||||
githubVisibility?: string;
|
||||
githubVisibilityRepoIds?: string[];
|
||||
|
||||
kmsKeyId?: string;
|
||||
shouldDisableDelete?: boolean;
|
||||
shouldMaskSecrets?: boolean;
|
||||
|
@@ -12,6 +12,7 @@ import {
|
||||
} from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import { buildFindFilter, ormify, selectAllTableCols, TFindFilter, TFindOpt, withTransaction } from "@app/lib/knex";
|
||||
import { generateKnexQueryFromScim } from "@app/lib/knex/scim";
|
||||
|
||||
export type TOrgDALFactory = ReturnType<typeof orgDALFactory>;
|
||||
|
||||
@@ -280,6 +281,67 @@ export const orgDALFactory = (db: TDbClient) => {
|
||||
.select(
|
||||
selectAllTableCols(TableName.OrgMembership),
|
||||
db.ref("email").withSchema(TableName.Users),
|
||||
db.ref("isEmailVerified").withSchema(TableName.Users),
|
||||
db.ref("username").withSchema(TableName.Users),
|
||||
db.ref("firstName").withSchema(TableName.Users),
|
||||
db.ref("lastName").withSchema(TableName.Users),
|
||||
db.ref("scimEnabled").withSchema(TableName.Organization),
|
||||
db.ref("externalId").withSchema(TableName.UserAliases)
|
||||
)
|
||||
.where({ isGhost: false });
|
||||
|
||||
if (limit) void query.limit(limit);
|
||||
if (offset) void query.offset(offset);
|
||||
if (sort) {
|
||||
void query.orderBy(sort.map(([column, order, nulls]) => ({ column: column as string, order, nulls })));
|
||||
}
|
||||
const res = await query;
|
||||
return res;
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find one" });
|
||||
}
|
||||
};
|
||||
|
||||
const findMembershipWithScimFilter = async (
|
||||
orgId: string,
|
||||
scimFilter: string | undefined,
|
||||
{ offset, limit, sort, tx }: TFindOpt<TOrgMemberships> = {}
|
||||
) => {
|
||||
try {
|
||||
const query = (tx || db.replicaNode())(TableName.OrgMembership)
|
||||
// eslint-disable-next-line
|
||||
.where(`${TableName.OrgMembership}.orgId`, orgId)
|
||||
.where((qb) => {
|
||||
if (scimFilter) {
|
||||
void generateKnexQueryFromScim(qb, scimFilter, (attrPath) => {
|
||||
switch (attrPath) {
|
||||
case "active":
|
||||
return `${TableName.OrgMembership}.isActive`;
|
||||
case "userName":
|
||||
return `${TableName.UserAliases}.externalId`;
|
||||
case "name.givenName":
|
||||
return `${TableName.Users}.firstName`;
|
||||
case "name.familyName":
|
||||
return `${TableName.Users}.lastName`;
|
||||
case "email.value":
|
||||
return `${TableName.Users}.email`;
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
})
|
||||
.join(TableName.Users, `${TableName.Users}.id`, `${TableName.OrgMembership}.userId`)
|
||||
.join(TableName.Organization, `${TableName.Organization}.id`, `${TableName.OrgMembership}.orgId`)
|
||||
.leftJoin(TableName.UserAliases, function joinUserAlias() {
|
||||
this.on(`${TableName.UserAliases}.userId`, "=", `${TableName.OrgMembership}.userId`)
|
||||
.andOn(`${TableName.UserAliases}.orgId`, "=", `${TableName.OrgMembership}.orgId`)
|
||||
.andOn(`${TableName.UserAliases}.aliasType`, "=", (tx || db).raw("?", ["saml"]));
|
||||
})
|
||||
.select(
|
||||
selectAllTableCols(TableName.OrgMembership),
|
||||
db.ref("email").withSchema(TableName.Users),
|
||||
db.ref("isEmailVerified").withSchema(TableName.Users),
|
||||
db.ref("username").withSchema(TableName.Users),
|
||||
db.ref("firstName").withSchema(TableName.Users),
|
||||
db.ref("lastName").withSchema(TableName.Users),
|
||||
@@ -314,6 +376,7 @@ export const orgDALFactory = (db: TDbClient) => {
|
||||
updateById,
|
||||
deleteById,
|
||||
findMembership,
|
||||
findMembershipWithScimFilter,
|
||||
createMembership,
|
||||
updateMembershipById,
|
||||
deleteMembershipById,
|
||||
|
@@ -4,6 +4,8 @@ import { TDbClient } from "@app/db";
|
||||
import { TableName, TSecretFolderVersions } from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import { ormify, selectAllTableCols } from "@app/lib/knex";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { QueueName } from "@app/queue";
|
||||
|
||||
export type TSecretFolderVersionDALFactory = ReturnType<typeof secretFolderVersionDALFactory>;
|
||||
|
||||
@@ -65,6 +67,7 @@ export const secretFolderVersionDALFactory = (db: TDbClient) => {
|
||||
};
|
||||
|
||||
const pruneExcessVersions = async () => {
|
||||
logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret folder versions started`);
|
||||
try {
|
||||
await db(TableName.SecretFolderVersion)
|
||||
.with("folder_cte", (qb) => {
|
||||
@@ -89,6 +92,7 @@ export const secretFolderVersionDALFactory = (db: TDbClient) => {
|
||||
name: "Secret Folder Version Prune"
|
||||
});
|
||||
}
|
||||
logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret folder versions completed`);
|
||||
};
|
||||
|
||||
return { ...secretFolderVerOrm, findLatestFolderVersions, findLatestVersionByFolderId, pruneExcessVersions };
|
||||
|
@@ -158,9 +158,12 @@ export const fnSecretsV2FromImports = async ({
|
||||
depth?: number;
|
||||
cyclicDetector?: Set<string>;
|
||||
decryptor: (value?: Buffer | null) => string;
|
||||
expandSecretReferences?: (
|
||||
secrets: Record<string, { value?: string; comment?: string; skipMultilineEncoding?: boolean | null }>
|
||||
) => Promise<Record<string, { value?: string; comment?: string; skipMultilineEncoding?: boolean | null }>>;
|
||||
expandSecretReferences?: (inputSecret: {
|
||||
value?: string;
|
||||
skipMultilineEncoding?: boolean | null;
|
||||
secretPath: string;
|
||||
environment: string;
|
||||
}) => Promise<string | undefined>;
|
||||
}) => {
|
||||
// avoid going more than a depth
|
||||
if (depth >= LEVEL_BREAK) return [];
|
||||
@@ -244,26 +247,21 @@ export const fnSecretsV2FromImports = async ({
|
||||
});
|
||||
|
||||
if (expandSecretReferences) {
|
||||
await Promise.all(
|
||||
processedImports.map(async (processedImport) => {
|
||||
const secretsGroupByKey = processedImport.secrets.reduce(
|
||||
(acc, item) => {
|
||||
acc[item.secretKey] = {
|
||||
value: item.secretValue,
|
||||
comment: item.secretComment,
|
||||
skipMultilineEncoding: item.skipMultilineEncoding
|
||||
};
|
||||
return acc;
|
||||
},
|
||||
{} as Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean | null }>
|
||||
);
|
||||
// eslint-disable-next-line
|
||||
await expandSecretReferences(secretsGroupByKey);
|
||||
processedImport.secrets.forEach((decryptedSecret) => {
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
decryptedSecret.secretValue = secretsGroupByKey[decryptedSecret.secretKey].value;
|
||||
});
|
||||
})
|
||||
await Promise.allSettled(
|
||||
processedImports.map((processedImport) =>
|
||||
Promise.allSettled(
|
||||
processedImport.secrets.map(async (decryptedSecret, index) => {
|
||||
const expandedSecretValue = await expandSecretReferences({
|
||||
value: decryptedSecret.secretValue,
|
||||
secretPath: processedImport.secretPath,
|
||||
environment: processedImport.environment,
|
||||
skipMultilineEncoding: decryptedSecret.skipMultilineEncoding
|
||||
});
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
processedImport.secrets[index].secretValue = expandedSecretValue || "";
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
|
@@ -4,6 +4,8 @@ import { TDbClient } from "@app/db";
|
||||
import { TableName, TSecretSharing } from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import { ormify, selectAllTableCols } from "@app/lib/knex";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { QueueName } from "@app/queue";
|
||||
|
||||
export type TSecretSharingDALFactory = ReturnType<typeof secretSharingDALFactory>;
|
||||
|
||||
@@ -30,6 +32,7 @@ export const secretSharingDALFactory = (db: TDbClient) => {
|
||||
};
|
||||
|
||||
const pruneExpiredSharedSecrets = async (tx?: Knex) => {
|
||||
logger.info(`${QueueName.DailyResourceCleanUp}: pruning expired shared secret started`);
|
||||
try {
|
||||
const today = new Date();
|
||||
const docs = await (tx || db)(TableName.SecretSharing)
|
||||
@@ -40,6 +43,7 @@ export const secretSharingDALFactory = (db: TDbClient) => {
|
||||
tag: "",
|
||||
iv: ""
|
||||
});
|
||||
logger.info(`${QueueName.DailyResourceCleanUp}: pruning expired shared secret completed`);
|
||||
return docs;
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "pruneExpiredSharedSecrets" });
|
||||
|
@@ -1,3 +1,6 @@
|
||||
import bcrypt from "bcrypt";
|
||||
|
||||
import { TSecretSharing } from "@app/db/schemas";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { SecretSharingAccessType } from "@app/lib/types";
|
||||
@@ -36,6 +39,7 @@ export const secretSharingServiceFactory = ({
|
||||
iv,
|
||||
tag,
|
||||
name,
|
||||
password,
|
||||
accessType,
|
||||
expiresAt,
|
||||
expiresAfterViews
|
||||
@@ -60,8 +64,10 @@ export const secretSharingServiceFactory = ({
|
||||
throw new BadRequestError({ message: "Shared secret value too long" });
|
||||
}
|
||||
|
||||
const hashedPassword = password ? await bcrypt.hash(password, 10) : null;
|
||||
const newSharedSecret = await secretSharingDAL.create({
|
||||
name,
|
||||
password: hashedPassword,
|
||||
encryptedValue,
|
||||
hashedHex,
|
||||
iv,
|
||||
@@ -77,6 +83,7 @@ export const secretSharingServiceFactory = ({
|
||||
};
|
||||
|
||||
const createPublicSharedSecret = async ({
|
||||
password,
|
||||
encryptedValue,
|
||||
hashedHex,
|
||||
iv,
|
||||
@@ -102,7 +109,9 @@ export const secretSharingServiceFactory = ({
|
||||
throw new BadRequestError({ message: "Shared secret value too long" });
|
||||
}
|
||||
|
||||
const hashedPassword = password ? await bcrypt.hash(password, 10) : null;
|
||||
const newSharedSecret = await secretSharingDAL.create({
|
||||
password: hashedPassword,
|
||||
encryptedValue,
|
||||
hashedHex,
|
||||
iv,
|
||||
@@ -111,6 +120,7 @@ export const secretSharingServiceFactory = ({
|
||||
expiresAfterViews,
|
||||
accessType
|
||||
});
|
||||
|
||||
return { id: newSharedSecret.id };
|
||||
};
|
||||
|
||||
@@ -152,7 +162,21 @@ export const secretSharingServiceFactory = ({
|
||||
};
|
||||
};
|
||||
|
||||
const getActiveSharedSecretById = async ({ sharedSecretId, hashedHex, orgId }: TGetActiveSharedSecretByIdDTO) => {
|
||||
const $decrementSecretViewCount = async (sharedSecret: TSecretSharing, sharedSecretId: string) => {
|
||||
const { expiresAfterViews } = sharedSecret;
|
||||
|
||||
if (expiresAfterViews) {
|
||||
// decrement view count if view count expiry set
|
||||
await secretSharingDAL.updateById(sharedSecretId, { $decr: { expiresAfterViews: 1 } });
|
||||
}
|
||||
|
||||
await secretSharingDAL.updateById(sharedSecretId, {
|
||||
lastViewedAt: new Date()
|
||||
});
|
||||
};
|
||||
|
||||
/** Gets a shared secret. Validates every requested secret (must be fresh). */
|
||||
const getSharedSecretById = async ({ sharedSecretId, hashedHex, orgId, password }: TGetActiveSharedSecretByIdDTO) => {
|
||||
const sharedSecret = await secretSharingDAL.findOne({
|
||||
id: sharedSecretId,
|
||||
hashedHex
|
||||
@@ -169,6 +193,8 @@ export const secretSharingServiceFactory = ({
|
||||
if (accessType === SecretSharingAccessType.Organization && orgId !== sharedSecret.orgId)
|
||||
throw new UnauthorizedError();
|
||||
|
||||
// all secrets pass through here, meaning we check if its expired first and then check if it needs verification
|
||||
// or can be safely sent to the client.
|
||||
if (expiresAt !== null && expiresAt < new Date()) {
|
||||
// check lifetime expiry
|
||||
await secretSharingDAL.softDeleteById(sharedSecretId);
|
||||
@@ -185,21 +211,29 @@ export const secretSharingServiceFactory = ({
|
||||
});
|
||||
}
|
||||
|
||||
if (expiresAfterViews) {
|
||||
// decrement view count if view count expiry set
|
||||
await secretSharingDAL.updateById(sharedSecretId, { $decr: { expiresAfterViews: 1 } });
|
||||
const isPasswordProtected = Boolean(sharedSecret.password);
|
||||
const hasProvidedPassword = Boolean(password);
|
||||
if (isPasswordProtected) {
|
||||
if (hasProvidedPassword) {
|
||||
const isMatch = await bcrypt.compare(password as string, sharedSecret.password as string);
|
||||
if (!isMatch) throw new UnauthorizedError({ message: "Invalid credentials" });
|
||||
} else {
|
||||
return { isPasswordProtected };
|
||||
}
|
||||
}
|
||||
|
||||
await secretSharingDAL.updateById(sharedSecretId, {
|
||||
lastViewedAt: new Date()
|
||||
});
|
||||
// decrement only when we are sure the user will view the secret.
|
||||
await $decrementSecretViewCount(sharedSecret, sharedSecretId);
|
||||
|
||||
return {
|
||||
...sharedSecret,
|
||||
orgName:
|
||||
sharedSecret.accessType === SecretSharingAccessType.Organization && orgId === sharedSecret.orgId
|
||||
? orgName
|
||||
: undefined
|
||||
isPasswordProtected,
|
||||
secret: {
|
||||
...sharedSecret,
|
||||
orgName:
|
||||
sharedSecret.accessType === SecretSharingAccessType.Organization && orgId === sharedSecret.orgId
|
||||
? orgName
|
||||
: undefined
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
@@ -216,6 +250,6 @@ export const secretSharingServiceFactory = ({
|
||||
createPublicSharedSecret,
|
||||
getSharedSecrets,
|
||||
deleteSharedSecretById,
|
||||
getActiveSharedSecretById
|
||||
getSharedSecretById
|
||||
};
|
||||
};
|
||||
|
@@ -15,6 +15,7 @@ export type TSharedSecretPermission = {
|
||||
orgId: string;
|
||||
accessType?: SecretSharingAccessType;
|
||||
name?: string;
|
||||
password?: string;
|
||||
};
|
||||
|
||||
export type TCreatePublicSharedSecretDTO = {
|
||||
@@ -24,6 +25,7 @@ export type TCreatePublicSharedSecretDTO = {
|
||||
tag: string;
|
||||
expiresAt: string;
|
||||
expiresAfterViews?: number;
|
||||
password?: string;
|
||||
accessType: SecretSharingAccessType;
|
||||
};
|
||||
|
||||
@@ -31,6 +33,11 @@ export type TGetActiveSharedSecretByIdDTO = {
|
||||
sharedSecretId: string;
|
||||
hashedHex: string;
|
||||
orgId?: string;
|
||||
password?: string;
|
||||
};
|
||||
|
||||
export type TValidateActiveSharedSecretDTO = TGetActiveSharedSecretByIdDTO & {
|
||||
password: string;
|
||||
};
|
||||
|
||||
export type TCreateSharedSecretDTO = TSharedSecretPermission & TCreatePublicSharedSecretDTO;
|
||||
|
@@ -377,150 +377,116 @@ type TInterpolateSecretArg = {
|
||||
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath">;
|
||||
};
|
||||
|
||||
const MAX_SECRET_REFERENCE_DEPTH = 10;
|
||||
export const expandSecretReferencesFactory = ({
|
||||
projectId,
|
||||
decryptSecretValue: decryptSecret,
|
||||
secretDAL,
|
||||
folderDAL
|
||||
}: TInterpolateSecretArg) => {
|
||||
const fetchSecretFactory = () => {
|
||||
const secretCache: Record<string, Record<string, string>> = {};
|
||||
const secretCache: Record<string, Record<string, string>> = {};
|
||||
const getCacheUniqueKey = (environment: string, secretPath: string) => `${environment}-${secretPath}`;
|
||||
|
||||
return async (secRefEnv: string, secRefPath: string[], secRefKey: string) => {
|
||||
const referredSecretPathURL = path.join("/", ...secRefPath);
|
||||
const uniqueKey = `${secRefEnv}-${referredSecretPathURL}`;
|
||||
const fetchSecret = async (environment: string, secretPath: string, secretKey: string) => {
|
||||
const cacheKey = getCacheUniqueKey(environment, secretPath);
|
||||
|
||||
if (secretCache?.[uniqueKey]) {
|
||||
return secretCache[uniqueKey][secRefKey];
|
||||
}
|
||||
if (secretCache?.[cacheKey]) {
|
||||
return secretCache[cacheKey][secretKey] || "";
|
||||
}
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, secRefEnv, referredSecretPathURL);
|
||||
if (!folder) return "";
|
||||
const secrets = await secretDAL.findByFolderId(folder.id);
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
|
||||
if (!folder) return "";
|
||||
const secrets = await secretDAL.findByFolderId(folder.id);
|
||||
|
||||
const decryptedSecret = secrets.reduce<Record<string, string>>((prev, secret) => {
|
||||
// eslint-disable-next-line
|
||||
prev[secret.key] = decryptSecret(secret.encryptedValue) || "";
|
||||
return prev;
|
||||
}, {});
|
||||
const decryptedSecret = secrets.reduce<Record<string, string>>((prev, secret) => {
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
prev[secret.key] = decryptSecret(secret.encryptedValue) || "";
|
||||
return prev;
|
||||
}, {});
|
||||
|
||||
secretCache[uniqueKey] = decryptedSecret;
|
||||
secretCache[cacheKey] = decryptedSecret;
|
||||
|
||||
return secretCache[uniqueKey][secRefKey];
|
||||
};
|
||||
return secretCache[cacheKey][secretKey] || "";
|
||||
};
|
||||
|
||||
const recursivelyExpandSecret = async (
|
||||
expandedSec: Record<string, string | undefined>,
|
||||
interpolatedSec: Record<string, string | undefined>,
|
||||
fetchSecret: (env: string, secPath: string[], secKey: string) => Promise<string>,
|
||||
recursionChainBreaker: Record<string, boolean>,
|
||||
key: string
|
||||
): Promise<string | undefined> => {
|
||||
if (expandedSec?.[key] !== undefined) {
|
||||
return expandedSec[key];
|
||||
}
|
||||
if (recursionChainBreaker?.[key]) {
|
||||
return "";
|
||||
}
|
||||
// eslint-disable-next-line
|
||||
recursionChainBreaker[key] = true;
|
||||
const recursivelyExpandSecret = async (dto: { value?: string; secretPath: string; environment: string }) => {
|
||||
if (!dto.value) return "";
|
||||
|
||||
let interpolatedValue = interpolatedSec[key];
|
||||
if (!interpolatedValue) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.error(`Couldn't find referenced value - ${key}`);
|
||||
return "";
|
||||
}
|
||||
const stack = [{ ...dto, depth: 0 }];
|
||||
let expandedValue = dto.value;
|
||||
|
||||
const refs = interpolatedValue.match(INTERPOLATION_SYNTAX_REG);
|
||||
if (refs) {
|
||||
for (const interpolationSyntax of refs) {
|
||||
const interpolationKey = interpolationSyntax.slice(2, interpolationSyntax.length - 1);
|
||||
const entities = interpolationKey.trim().split(".");
|
||||
while (stack.length) {
|
||||
const { value, secretPath, environment, depth } = stack.pop()!;
|
||||
// eslint-disable-next-line no-continue
|
||||
if (depth > MAX_SECRET_REFERENCE_DEPTH) continue;
|
||||
const refs = value?.match(INTERPOLATION_SYNTAX_REG);
|
||||
|
||||
if (entities.length === 1) {
|
||||
// eslint-disable-next-line
|
||||
const val = await recursivelyExpandSecret(
|
||||
expandedSec,
|
||||
interpolatedSec,
|
||||
fetchSecret,
|
||||
recursionChainBreaker,
|
||||
interpolationKey
|
||||
);
|
||||
if (val) {
|
||||
interpolatedValue = interpolatedValue.replaceAll(interpolationSyntax, val);
|
||||
}
|
||||
// eslint-disable-next-line
|
||||
continue;
|
||||
}
|
||||
if (refs) {
|
||||
for (const interpolationSyntax of refs) {
|
||||
const interpolationKey = interpolationSyntax.slice(2, interpolationSyntax.length - 1);
|
||||
const entities = interpolationKey.trim().split(".");
|
||||
|
||||
if (entities.length > 1) {
|
||||
const secRefEnv = entities[0];
|
||||
const secRefPath = entities.slice(1, entities.length - 1);
|
||||
const secRefKey = entities[entities.length - 1];
|
||||
// eslint-disable-next-line no-continue
|
||||
if (!entities.length) continue;
|
||||
|
||||
// eslint-disable-next-line
|
||||
const val = await fetchSecret(secRefEnv, secRefPath, secRefKey);
|
||||
if (val) {
|
||||
interpolatedValue = interpolatedValue.replaceAll(interpolationSyntax, val);
|
||||
if (entities.length === 1) {
|
||||
const [secretKey] = entities;
|
||||
|
||||
// eslint-disable-next-line no-continue,no-await-in-loop
|
||||
const referedValue = await fetchSecret(environment, secretPath, secretKey);
|
||||
const cacheKey = getCacheUniqueKey(environment, secretPath);
|
||||
secretCache[cacheKey][secretKey] = referedValue;
|
||||
if (INTERPOLATION_SYNTAX_REG.test(referedValue)) {
|
||||
stack.push({
|
||||
value: referedValue,
|
||||
secretPath,
|
||||
environment,
|
||||
depth: depth + 1
|
||||
});
|
||||
}
|
||||
expandedValue = expandedValue.replaceAll(interpolationSyntax, referedValue);
|
||||
} else {
|
||||
const secretReferenceEnvironment = entities[0];
|
||||
const secretReferencePath = path.join("/", ...entities.slice(1, entities.length - 1));
|
||||
const secretReferenceKey = entities[entities.length - 1];
|
||||
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
const referedValue = await fetchSecret(secretReferenceEnvironment, secretReferencePath, secretReferenceKey);
|
||||
const cacheKey = getCacheUniqueKey(secretReferenceEnvironment, secretReferencePath);
|
||||
secretCache[cacheKey][secretReferenceKey] = referedValue;
|
||||
if (INTERPOLATION_SYNTAX_REG.test(referedValue)) {
|
||||
stack.push({
|
||||
value: referedValue,
|
||||
secretPath: secretReferencePath,
|
||||
environment: secretReferenceEnvironment,
|
||||
depth: depth + 1
|
||||
});
|
||||
}
|
||||
|
||||
expandedValue = expandedValue.replaceAll(interpolationSyntax, referedValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// eslint-disable-next-line
|
||||
expandedSec[key] = interpolatedValue;
|
||||
return interpolatedValue;
|
||||
return expandedValue;
|
||||
};
|
||||
|
||||
const fetchSecret = fetchSecretFactory();
|
||||
const expandSecrets = async (
|
||||
inputSecrets: Record<string, { value?: string; comment?: string; skipMultilineEncoding?: boolean | null }>
|
||||
) => {
|
||||
const expandedSecrets: Record<string, string | undefined> = {};
|
||||
const toBeExpandedSecrets: Record<string, string | undefined> = {};
|
||||
const expandSecret = async (inputSecret: {
|
||||
value?: string;
|
||||
skipMultilineEncoding?: boolean | null;
|
||||
secretPath: string;
|
||||
environment: string;
|
||||
}) => {
|
||||
if (!inputSecret.value) return inputSecret.value;
|
||||
|
||||
Object.keys(inputSecrets).forEach((key) => {
|
||||
if (inputSecrets[key].value?.match(INTERPOLATION_SYNTAX_REG)) {
|
||||
toBeExpandedSecrets[key] = inputSecrets[key].value;
|
||||
} else {
|
||||
expandedSecrets[key] = inputSecrets[key].value;
|
||||
}
|
||||
});
|
||||
const shouldExpand = Boolean(inputSecret.value?.match(INTERPOLATION_SYNTAX_REG));
|
||||
if (!shouldExpand) return inputSecret.value;
|
||||
|
||||
for (const key of Object.keys(inputSecrets)) {
|
||||
if (expandedSecrets?.[key]) {
|
||||
// should not do multi line encoding if user has set it to skip
|
||||
// eslint-disable-next-line
|
||||
inputSecrets[key].value = inputSecrets[key].skipMultilineEncoding
|
||||
? formatMultiValueEnv(expandedSecrets[key])
|
||||
: expandedSecrets[key];
|
||||
// eslint-disable-next-line
|
||||
continue;
|
||||
}
|
||||
|
||||
// this is to avoid recursion loop. So the graph should be direct graph rather than cyclic
|
||||
// so for any recursion building if there is an entity two times same key meaning it will be looped
|
||||
const recursionChainBreaker: Record<string, boolean> = {};
|
||||
// eslint-disable-next-line
|
||||
const expandedVal = await recursivelyExpandSecret(
|
||||
expandedSecrets,
|
||||
toBeExpandedSecrets,
|
||||
fetchSecret,
|
||||
recursionChainBreaker,
|
||||
key
|
||||
);
|
||||
|
||||
// eslint-disable-next-line
|
||||
inputSecrets[key].value = inputSecrets[key].skipMultilineEncoding
|
||||
? formatMultiValueEnv(expandedVal)
|
||||
: expandedVal;
|
||||
}
|
||||
|
||||
return inputSecrets;
|
||||
const expandedSecretValue = await recursivelyExpandSecret(inputSecret);
|
||||
return inputSecret.skipMultilineEncoding ? formatMultiValueEnv(expandedSecretValue) : expandedSecretValue;
|
||||
};
|
||||
return expandSecrets;
|
||||
return expandSecret;
|
||||
};
|
||||
|
||||
export const reshapeBridgeSecret = (
|
||||
|
@@ -521,27 +521,22 @@ export const secretV2BridgeServiceFactory = ({
|
||||
|
||||
if (shouldExpandSecretReferences) {
|
||||
const secretsGroupByPath = groupBy(filteredSecrets, (i) => i.secretPath);
|
||||
for (const secretPathKey in secretsGroupByPath) {
|
||||
if (Object.hasOwn(secretsGroupByPath, secretPathKey)) {
|
||||
const secretsGroupByKey = secretsGroupByPath[secretPathKey].reduce(
|
||||
(acc, item) => {
|
||||
acc[item.secretKey] = {
|
||||
value: item.secretValue,
|
||||
comment: item.secretComment,
|
||||
skipMultilineEncoding: item.skipMultilineEncoding
|
||||
};
|
||||
return acc;
|
||||
},
|
||||
{} as Record<string, { value?: string; comment?: string; skipMultilineEncoding?: boolean | null }>
|
||||
);
|
||||
// eslint-disable-next-line
|
||||
await expandSecretReferences(secretsGroupByKey);
|
||||
secretsGroupByPath[secretPathKey].forEach((decryptedSecret) => {
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
decryptedSecret.secretValue = secretsGroupByKey[decryptedSecret.secretKey].value || "";
|
||||
});
|
||||
}
|
||||
}
|
||||
await Promise.allSettled(
|
||||
Object.keys(secretsGroupByPath).map((groupedPath) =>
|
||||
Promise.allSettled(
|
||||
secretsGroupByPath[groupedPath].map(async (decryptedSecret, index) => {
|
||||
const expandedSecretValue = await expandSecretReferences({
|
||||
value: decryptedSecret.secretValue,
|
||||
secretPath: groupedPath,
|
||||
environment,
|
||||
skipMultilineEncoding: decryptedSecret.skipMultilineEncoding
|
||||
});
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
secretsGroupByPath[groupedPath][index].secretValue = expandedSecretValue || "";
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
if (!includeImports) {
|
||||
@@ -693,12 +688,14 @@ export const secretV2BridgeServiceFactory = ({
|
||||
? secretManagerDecryptor({ cipherTextBlob: secret.encryptedValue }).toString()
|
||||
: "";
|
||||
if (shouldExpandSecretReferences && secretValue) {
|
||||
const secretReferenceExpandedRecord = {
|
||||
[secret.key]: { value: secretValue }
|
||||
};
|
||||
// eslint-disable-next-line
|
||||
await expandSecretReferences(secretReferenceExpandedRecord);
|
||||
secretValue = secretReferenceExpandedRecord[secret.key].value;
|
||||
const expandedSecretValue = await expandSecretReferences({
|
||||
environment,
|
||||
secretPath: path,
|
||||
value: secretValue,
|
||||
skipMultilineEncoding: secret.skipMultilineEncoding
|
||||
});
|
||||
secretValue = expandedSecretValue || "";
|
||||
}
|
||||
|
||||
return reshapeBridgeSecret(projectId, environment, path, {
|
||||
|
@@ -4,6 +4,8 @@ import { TDbClient } from "@app/db";
|
||||
import { TableName, TSecretVersionsV2, TSecretVersionsV2Update } from "@app/db/schemas";
|
||||
import { BadRequestError, DatabaseError } from "@app/lib/errors";
|
||||
import { ormify, selectAllTableCols } from "@app/lib/knex";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { QueueName } from "@app/queue";
|
||||
|
||||
export type TSecretVersionV2DALFactory = ReturnType<typeof secretVersionV2BridgeDALFactory>;
|
||||
|
||||
@@ -87,6 +89,7 @@ export const secretVersionV2BridgeDALFactory = (db: TDbClient) => {
|
||||
};
|
||||
|
||||
const pruneExcessVersions = async () => {
|
||||
logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret version v2 started`);
|
||||
try {
|
||||
await db(TableName.SecretVersionV2)
|
||||
.with("version_cte", (qb) => {
|
||||
@@ -112,6 +115,7 @@ export const secretVersionV2BridgeDALFactory = (db: TDbClient) => {
|
||||
name: "Secret Version Prune"
|
||||
});
|
||||
}
|
||||
logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret version v2 completed`);
|
||||
};
|
||||
|
||||
return {
|
||||
|
@@ -196,6 +196,13 @@ export const recursivelyGetSecretPaths = ({
|
||||
return getPaths;
|
||||
};
|
||||
|
||||
// used to convert multi-line values to quoted single-line ones with \n
|
||||
const formatMultiValueEnv = (val?: string) => {
|
||||
if (!val) return "";
|
||||
if (!val.match("\n")) return val;
|
||||
return `"${val.replace(/\n/g, "\\n")}"`;
|
||||
};
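// Illustrative usage of the helper above (annotation only, not part of the change set):
//   formatMultiValueEnv("line1\nline2")  -> "\"line1\\nline2\""  (quoted, newlines escaped)
//   formatMultiValueEnv("single-line")   -> "single-line"        (returned unchanged)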
|
||||
|
||||
type TInterpolateSecretArg = {
|
||||
projectId: string;
|
||||
secretEncKey: string;
|
||||
@@ -203,162 +210,128 @@ type TInterpolateSecretArg = {
|
||||
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath">;
|
||||
};
|
||||
|
||||
const MAX_SECRET_REFERENCE_DEPTH = 5;
|
||||
const INTERPOLATION_SYNTAX_REG = /\${([^}]+)}/g;
|
||||
export const interpolateSecrets = ({ projectId, secretEncKey, secretDAL, folderDAL }: TInterpolateSecretArg) => {
|
||||
const fetchSecretsCrossEnv = () => {
|
||||
const fetchCache: Record<string, Record<string, string>> = {};
|
||||
const secretCache: Record<string, Record<string, string>> = {};
|
||||
const getCacheUniqueKey = (environment: string, secretPath: string) => `${environment}-${secretPath}`;
|
||||
|
||||
return async (secRefEnv: string, secRefPath: string[], secRefKey: string) => {
|
||||
const secRefPathUrl = path.join("/", ...secRefPath);
|
||||
const uniqKey = `${secRefEnv}-${secRefPathUrl}`;
|
||||
const fetchSecret = async (environment: string, secretPath: string, secretKey: string) => {
|
||||
const cacheKey = getCacheUniqueKey(environment, secretPath);
|
||||
const uniqKey = `${environment}-${cacheKey}`;
|
||||
|
||||
if (fetchCache?.[uniqKey]) {
|
||||
return fetchCache[uniqKey][secRefKey];
|
||||
}
|
||||
if (secretCache?.[uniqKey]) {
|
||||
return secretCache[uniqKey][secretKey] || "";
|
||||
}
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, secRefEnv, secRefPathUrl);
|
||||
if (!folder) return "";
|
||||
const secrets = await secretDAL.findByFolderId(folder.id);
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
|
||||
if (!folder) return "";
|
||||
const secrets = await secretDAL.findByFolderId(folder.id);
|
||||
|
||||
const decryptedSec = secrets.reduce<Record<string, string>>((prev, secret) => {
|
||||
const secretKey = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretKeyCiphertext,
|
||||
iv: secret.secretKeyIV,
|
||||
tag: secret.secretKeyTag,
|
||||
key: secretEncKey
|
||||
});
|
||||
const secretValue = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretValueCiphertext,
|
||||
iv: secret.secretValueIV,
|
||||
tag: secret.secretValueTag,
|
||||
key: secretEncKey
|
||||
});
|
||||
const decryptedSec = secrets.reduce<Record<string, string>>((prev, secret) => {
|
||||
const decryptedSecretKey = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretKeyCiphertext,
|
||||
iv: secret.secretKeyIV,
|
||||
tag: secret.secretKeyTag,
|
||||
key: secretEncKey
|
||||
});
|
||||
const decryptedSecretValue = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretValueCiphertext,
|
||||
iv: secret.secretValueIV,
|
||||
tag: secret.secretValueTag,
|
||||
key: secretEncKey
|
||||
});
|
||||
|
||||
// eslint-disable-next-line
|
||||
prev[secretKey] = secretValue;
|
||||
return prev;
|
||||
}, {});
|
||||
// eslint-disable-next-line
|
||||
prev[decryptedSecretKey] = decryptedSecretValue;
|
||||
return prev;
|
||||
}, {});
|
||||
|
||||
fetchCache[uniqKey] = decryptedSec;
|
||||
secretCache[uniqKey] = decryptedSec;
|
||||
|
||||
return fetchCache[uniqKey][secRefKey];
|
||||
};
|
||||
return secretCache[uniqKey][secretKey] || "";
|
||||
};
|
||||
|
||||
const recursivelyExpandSecret = async (
|
||||
expandedSec: Record<string, string>,
|
||||
interpolatedSec: Record<string, string>,
|
||||
fetchCrossEnv: (env: string, secPath: string[], secKey: string) => Promise<string>,
|
||||
recursionChainBreaker: Record<string, boolean>,
|
||||
key: string
|
||||
) => {
|
||||
if (expandedSec?.[key] !== undefined) {
|
||||
return expandedSec[key];
|
||||
}
|
||||
if (recursionChainBreaker?.[key]) {
|
||||
return "";
|
||||
}
|
||||
// eslint-disable-next-line
|
||||
recursionChainBreaker[key] = true;
|
||||
const recursivelyExpandSecret = async ({
|
||||
value,
|
||||
secretPath,
|
||||
environment,
|
||||
depth = 0
|
||||
}: {
|
||||
value?: string;
|
||||
secretPath: string;
|
||||
environment: string;
|
||||
depth?: number;
|
||||
}) => {
|
||||
if (!value) return "";
|
||||
if (depth > MAX_SECRET_REFERENCE_DEPTH) return "";
|
||||
|
||||
let interpolatedValue = interpolatedSec[key];
|
||||
if (!interpolatedValue) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.error(`Couldn't find referenced value - ${key}`);
|
||||
return "";
|
||||
}
|
||||
|
||||
const refs = interpolatedValue.match(INTERPOLATION_SYNTAX_REG);
|
||||
const refs = value.match(INTERPOLATION_SYNTAX_REG);
|
||||
let expandedValue = value;
|
||||
if (refs) {
|
||||
for (const interpolationSyntax of refs) {
|
||||
const interpolationKey = interpolationSyntax.slice(2, interpolationSyntax.length - 1);
|
||||
const entities = interpolationKey.trim().split(".");
|
||||
|
||||
if (entities.length === 1) {
|
||||
const val = await recursivelyExpandSecret(
|
||||
expandedSec,
|
||||
interpolatedSec,
|
||||
fetchCrossEnv,
|
||||
recursionChainBreaker,
|
||||
interpolationKey
|
||||
);
|
||||
if (val) {
|
||||
interpolatedValue = interpolatedValue.replaceAll(interpolationSyntax, val);
|
||||
}
|
||||
const [secretKey] = entities;
|
||||
// eslint-disable-next-line
|
||||
continue;
|
||||
let referenceValue = await fetchSecret(environment, secretPath, secretKey);
|
||||
if (INTERPOLATION_SYNTAX_REG.test(referenceValue)) {
|
||||
// eslint-disable-next-line
|
||||
referenceValue = await recursivelyExpandSecret({
|
||||
environment,
|
||||
secretPath,
|
||||
value: referenceValue,
|
||||
depth: depth + 1
|
||||
});
|
||||
}
|
||||
const cacheKey = getCacheUniqueKey(environment, secretPath);
|
||||
secretCache[cacheKey][secretKey] = referenceValue;
|
||||
expandedValue = expandedValue.replaceAll(interpolationSyntax, referenceValue);
|
||||
}
|
||||
|
||||
if (entities.length > 1) {
|
||||
const secRefEnv = entities[0];
|
||||
const secRefPath = entities.slice(1, entities.length - 1);
|
||||
const secRefKey = entities[entities.length - 1];
|
||||
const secretReferenceEnvironment = entities[0];
|
||||
const secretReferencePath = path.join("/", ...entities.slice(1, entities.length - 1));
|
||||
const secretReferenceKey = entities[entities.length - 1];
|
||||
|
||||
const val = await fetchCrossEnv(secRefEnv, secRefPath, secRefKey);
|
||||
if (val) {
|
||||
interpolatedValue = interpolatedValue.replaceAll(interpolationSyntax, val);
|
||||
// eslint-disable-next-line
|
||||
let referenceValue = await fetchSecret(secretReferenceEnvironment, secretReferencePath, secretReferenceKey);
|
||||
if (INTERPOLATION_SYNTAX_REG.test(referenceValue)) {
|
||||
// eslint-disable-next-line
|
||||
referenceValue = await recursivelyExpandSecret({
|
||||
environment: secretReferenceEnvironment,
|
||||
secretPath: secretReferencePath,
|
||||
value: referenceValue,
|
||||
depth: depth + 1
|
||||
});
|
||||
}
|
||||
const cacheKey = getCacheUniqueKey(secretReferenceEnvironment, secretReferencePath);
|
||||
secretCache[cacheKey][secretReferenceKey] = referenceValue;
|
||||
expandedValue = expandedValue.replaceAll(interpolationSyntax, referenceValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// eslint-disable-next-line
|
||||
expandedSec[key] = interpolatedValue;
|
||||
return interpolatedValue;
|
||||
return expandedValue;
|
||||
};
|
||||
|
||||
// used to convert multi-line values to quoted single-line ones with \n
|
||||
const formatMultiValueEnv = (val?: string) => {
|
||||
if (!val) return "";
|
||||
if (!val.match("\n")) return val;
|
||||
return `"${val.replace(/\n/g, "\\n")}"`;
|
||||
const expandSecret = async (inputSecret: {
|
||||
value?: string;
|
||||
skipMultilineEncoding?: boolean | null;
|
||||
secretPath: string;
|
||||
environment: string;
|
||||
}) => {
|
||||
if (!inputSecret.value) return inputSecret.value;
|
||||
|
||||
const shouldExpand = Boolean(inputSecret.value?.match(INTERPOLATION_SYNTAX_REG));
|
||||
if (!shouldExpand) return inputSecret.value;
|
||||
|
||||
const expandedSecretValue = await recursivelyExpandSecret(inputSecret);
|
||||
return inputSecret.skipMultilineEncoding ? formatMultiValueEnv(expandedSecretValue) : expandedSecretValue;
|
||||
};
|
||||
|
||||
const expandSecrets = async (
|
||||
secrets: Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean | null }>
|
||||
) => {
|
||||
const expandedSec: Record<string, string> = {};
|
||||
const interpolatedSec: Record<string, string> = {};
|
||||
|
||||
const crossSecEnvFetch = fetchSecretsCrossEnv();
|
||||
|
||||
Object.keys(secrets).forEach((key) => {
|
||||
if (secrets[key].value.match(INTERPOLATION_SYNTAX_REG)) {
|
||||
interpolatedSec[key] = secrets[key].value;
|
||||
} else {
|
||||
expandedSec[key] = secrets[key].value;
|
||||
}
|
||||
});
|
||||
|
||||
for (const key of Object.keys(secrets)) {
|
||||
if (expandedSec?.[key]) {
|
||||
// should not do multi line encoding if user has set it to skip
|
||||
// eslint-disable-next-line
|
||||
secrets[key].value = secrets[key].skipMultilineEncoding
|
||||
? formatMultiValueEnv(expandedSec[key])
|
||||
: expandedSec[key];
|
||||
// eslint-disable-next-line
|
||||
continue;
|
||||
}
|
||||
|
||||
// this is to avoid recursion loop. So the graph should be direct graph rather than cyclic
|
||||
// so for any recursion building if there is an entity two times same key meaning it will be looped
|
||||
const recursionChainBreaker: Record<string, boolean> = {};
|
||||
const expandedVal = await recursivelyExpandSecret(
|
||||
expandedSec,
|
||||
interpolatedSec,
|
||||
crossSecEnvFetch,
|
||||
recursionChainBreaker,
|
||||
key
|
||||
);
|
||||
|
||||
// eslint-disable-next-line
|
||||
secrets[key].value = secrets[key].skipMultilineEncoding ? formatMultiValueEnv(expandedVal) : expandedVal;
|
||||
}
|
||||
|
||||
return secrets;
|
||||
};
|
||||
return expandSecrets;
|
||||
return expandSecret;
|
||||
};
|
||||
|
||||
export const decryptSecretRaw = (
|
||||
|
@@ -258,6 +258,7 @@ export const secretQueueFactory = ({
|
||||
const getIntegrationSecretsV2 = async (dto: {
|
||||
projectId: string;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
folderId: string;
|
||||
depth: number;
|
||||
decryptor: (value: Buffer | null | undefined) => string;
|
||||
@@ -269,30 +270,36 @@ export const secretQueueFactory = ({
|
||||
);
|
||||
return content;
|
||||
}
|
||||
|
||||
// process secrets in current folder
|
||||
const secrets = await secretV2BridgeDAL.findByFolderId(dto.folderId);
|
||||
secrets.forEach((secret) => {
|
||||
const secretKey = secret.key;
|
||||
const secretValue = dto.decryptor(secret.encryptedValue);
|
||||
content[secretKey] = { value: secretValue };
|
||||
|
||||
if (secret.encryptedComment) {
|
||||
const commentValue = dto.decryptor(secret.encryptedComment);
|
||||
content[secretKey].comment = commentValue;
|
||||
}
|
||||
|
||||
content[secretKey].skipMultilineEncoding = Boolean(secret.skipMultilineEncoding);
|
||||
});
|
||||
|
||||
const expandSecretReferences = expandSecretReferencesFactory({
|
||||
decryptSecretValue: dto.decryptor,
|
||||
secretDAL: secretV2BridgeDAL,
|
||||
folderDAL,
|
||||
projectId: dto.projectId
|
||||
});
|
||||
// process secrets in current folder
|
||||
const secrets = await secretV2BridgeDAL.findByFolderId(dto.folderId);
|
||||
|
||||
await Promise.allSettled(
|
||||
secrets.map(async (secret) => {
|
||||
const secretKey = secret.key;
|
||||
const secretValue = dto.decryptor(secret.encryptedValue);
|
||||
const expandedSecretValue = await expandSecretReferences({
|
||||
environment: dto.environment,
|
||||
secretPath: dto.secretPath,
|
||||
skipMultilineEncoding: secret.skipMultilineEncoding,
|
||||
value: secretValue
|
||||
});
|
||||
content[secretKey] = { value: expandedSecretValue || "" };
|
||||
|
||||
if (secret.encryptedComment) {
|
||||
const commentValue = dto.decryptor(secret.encryptedComment);
|
||||
content[secretKey].comment = commentValue;
|
||||
}
|
||||
|
||||
content[secretKey].skipMultilineEncoding = Boolean(secret.skipMultilineEncoding);
|
||||
})
|
||||
);
|
||||
|
||||
await expandSecretReferences(content);
|
||||
// check if current folder has any imports from other folders
|
||||
const secretImports = await secretImportDAL.find({ folderId: dto.folderId, isReplication: false });
|
||||
|
||||
@@ -329,6 +336,7 @@ export const secretQueueFactory = ({
|
||||
const getIntegrationSecrets = async (dto: {
|
||||
projectId: string;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
folderId: string;
|
||||
key: string;
|
||||
depth: number;
|
||||
@@ -341,46 +349,52 @@ export const secretQueueFactory = ({
|
||||
return content;
|
||||
}
|
||||
|
||||
// process secrets in current folder
|
||||
const secrets = await secretDAL.findByFolderId(dto.folderId);
|
||||
secrets.forEach((secret) => {
|
||||
const secretKey = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretKeyCiphertext,
|
||||
iv: secret.secretKeyIV,
|
||||
tag: secret.secretKeyTag,
|
||||
key: dto.key
|
||||
});
|
||||
|
||||
const secretValue = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretValueCiphertext,
|
||||
iv: secret.secretValueIV,
|
||||
tag: secret.secretValueTag,
|
||||
key: dto.key
|
||||
});
|
||||
|
||||
content[secretKey] = { value: secretValue };
|
||||
|
||||
if (secret.secretCommentCiphertext && secret.secretCommentIV && secret.secretCommentTag) {
|
||||
const commentValue = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretCommentCiphertext,
|
||||
iv: secret.secretCommentIV,
|
||||
tag: secret.secretCommentTag,
|
||||
key: dto.key
|
||||
});
|
||||
content[secretKey].comment = commentValue;
|
||||
}
|
||||
|
||||
content[secretKey].skipMultilineEncoding = Boolean(secret.skipMultilineEncoding);
|
||||
});
|
||||
|
||||
const expandSecrets = interpolateSecrets({
|
||||
const expandSecretReferences = interpolateSecrets({
|
||||
projectId: dto.projectId,
|
||||
secretEncKey: dto.key,
|
||||
folderDAL,
|
||||
secretDAL
|
||||
});
|
||||
|
||||
await expandSecrets(content);
|
||||
// process secrets in current folder
|
||||
const secrets = await secretDAL.findByFolderId(dto.folderId);
|
||||
await Promise.allSettled(
|
||||
secrets.map(async (secret) => {
|
||||
const secretKey = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretKeyCiphertext,
|
||||
iv: secret.secretKeyIV,
|
||||
tag: secret.secretKeyTag,
|
||||
key: dto.key
|
||||
});
|
||||
|
||||
const secretValue = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretValueCiphertext,
|
||||
iv: secret.secretValueIV,
|
||||
tag: secret.secretValueTag,
|
||||
key: dto.key
|
||||
});
|
||||
const expandedSecretValue = await expandSecretReferences({
|
||||
environment: dto.environment,
|
||||
secretPath: dto.secretPath,
|
||||
skipMultilineEncoding: secret.skipMultilineEncoding,
|
||||
value: secretValue
|
||||
});
|
||||
|
||||
content[secretKey] = { value: expandedSecretValue || "" };
|
||||
|
||||
if (secret.secretCommentCiphertext && secret.secretCommentIV && secret.secretCommentTag) {
|
||||
const commentValue = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretCommentCiphertext,
|
||||
iv: secret.secretCommentIV,
|
||||
tag: secret.secretCommentTag,
|
||||
key: dto.key
|
||||
});
|
||||
content[secretKey].comment = commentValue;
|
||||
}
|
||||
|
||||
content[secretKey].skipMultilineEncoding = Boolean(secret.skipMultilineEncoding);
|
||||
})
|
||||
);
|
||||
|
||||
// check if current folder has any imports from other folders
|
||||
const secretImport = await secretImportDAL.find({ folderId: dto.folderId, isReplication: false });
|
||||
@@ -404,7 +418,8 @@ export const secretQueueFactory = ({
|
||||
projectId: dto.projectId,
|
||||
folderId: folder.id,
|
||||
key: dto.key,
|
||||
depth: dto.depth + 1
|
||||
depth: dto.depth + 1,
|
||||
secretPath: dto.secretPath
|
||||
});
|
||||
|
||||
// add the imported secrets to the current folder secrets
|
||||
@@ -686,6 +701,7 @@ export const secretQueueFactory = ({
|
||||
projectId,
|
||||
folderId: folder.id,
|
||||
depth: 1,
|
||||
secretPath,
|
||||
decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : "")
|
||||
})
|
||||
: await getIntegrationSecrets({
|
||||
@@ -693,7 +709,8 @@ export const secretQueueFactory = ({
|
||||
projectId,
|
||||
folderId: folder.id,
|
||||
key: botKey as string,
|
||||
depth: 1
|
||||
depth: 1,
|
||||
secretPath
|
||||
});
|
||||
|
||||
for (const integration of toBeSyncedIntegrations) {
|
||||
|
@@ -482,7 +482,7 @@ export const secretServiceFactory = ({
|
||||
projectId,
|
||||
environmentSlug: folder.environment.slug
|
||||
});
|
||||
// TODO(akhilmhdh-pg): licence check, posthog service and snapshot
|
||||
// TODO(akhilmhdh-pg): license check, posthog service and snapshot
|
||||
return { ...deletedSecret[0], _id: deletedSecret[0].id, workspace: projectId, environment, secretPath: path };
|
||||
};
|
||||
|
||||
@@ -1047,74 +1047,47 @@ export const secretServiceFactory = ({
|
||||
};
|
||||
});
|
||||
|
||||
const expandSecret = interpolateSecrets({
|
||||
folderDAL,
|
||||
projectId,
|
||||
secretDAL,
|
||||
secretEncKey: botKey
|
||||
});
|
||||
|
||||
if (expandSecretReferences) {
|
||||
const expandSecrets = interpolateSecrets({
|
||||
folderDAL,
|
||||
projectId,
|
||||
secretDAL,
|
||||
secretEncKey: botKey
|
||||
});
|
||||
|
||||
const batchSecretsExpand = async (
|
||||
secretBatch: {
|
||||
secretKey: string;
|
||||
secretValue: string;
|
||||
secretComment?: string;
|
||||
secretPath: string;
|
||||
skipMultilineEncoding: boolean | null | undefined;
|
||||
}[]
|
||||
) => {
|
||||
// Group secrets by secretPath
|
||||
const secretsByPath: Record<
|
||||
string,
|
||||
{
|
||||
secretKey: string;
|
||||
secretValue: string;
|
||||
secretComment?: string;
|
||||
skipMultilineEncoding: boolean | null | undefined;
|
||||
}[]
|
||||
> = {};
|
||||
|
||||
secretBatch.forEach((secret) => {
|
||||
if (!secretsByPath[secret.secretPath]) {
|
||||
secretsByPath[secret.secretPath] = [];
|
||||
}
|
||||
secretsByPath[secret.secretPath].push(secret);
|
||||
});
|
||||
|
||||
// Expand secrets for each group
|
||||
for (const secPath in secretsByPath) {
|
||||
if (!Object.hasOwn(secretsByPath, path)) {
|
||||
// eslint-disable-next-line no-continue
|
||||
continue;
|
||||
}
|
||||
|
||||
const secretRecord: Record<
|
||||
string,
|
||||
{ value: string; comment?: string; skipMultilineEncoding: boolean | null | undefined }
|
||||
> = {};
|
||||
secretsByPath[secPath].forEach((decryptedSecret) => {
|
||||
secretRecord[decryptedSecret.secretKey] = {
|
||||
value: decryptedSecret.secretValue,
|
||||
comment: decryptedSecret.secretComment,
|
||||
skipMultilineEncoding: decryptedSecret.skipMultilineEncoding
|
||||
};
|
||||
});
|
||||
|
||||
await expandSecrets(secretRecord);
|
||||
|
||||
secretsByPath[secPath].forEach((decryptedSecret) => {
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
decryptedSecret.secretValue = secretRecord[decryptedSecret.secretKey].value;
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
// expand secrets
|
||||
await batchSecretsExpand(filteredSecrets);
|
||||
|
||||
// expand imports by batch
|
||||
await Promise.all(processedImports.map((processedImport) => batchSecretsExpand(processedImport.secrets)));
|
||||
const secretsGroupByPath = groupBy(filteredSecrets, (i) => i.secretPath);
|
||||
await Promise.allSettled(
|
||||
Object.keys(secretsGroupByPath).map((groupedPath) =>
|
||||
Promise.allSettled(
|
||||
secretsGroupByPath[groupedPath].map(async (decryptedSecret, index) => {
|
||||
const expandedSecretValue = await expandSecret({
|
||||
value: decryptedSecret.secretValue,
|
||||
secretPath: groupedPath,
|
||||
environment,
|
||||
skipMultilineEncoding: decryptedSecret.skipMultilineEncoding
|
||||
});
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
secretsGroupByPath[groupedPath][index].secretValue = expandedSecretValue || "";
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
await Promise.allSettled(
|
||||
processedImports.map((processedImport) =>
|
||||
Promise.allSettled(
|
||||
processedImport.secrets.map(async (decryptedSecret, index) => {
|
||||
const expandedSecretValue = await expandSecret({
|
||||
value: decryptedSecret.secretValue,
|
||||
secretPath: path,
|
||||
environment,
|
||||
skipMultilineEncoding: decryptedSecret.skipMultilineEncoding
|
||||
});
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
processedImport.secrets[index].secretValue = expandedSecretValue || "";
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
return {
|
||||
@@ -1177,40 +1150,19 @@ export const secretServiceFactory = ({
|
||||
const decryptedSecret = decryptSecretRaw(encryptedSecret, botKey);
|
||||
|
||||
if (expandSecretReferences) {
|
||||
const expandSecrets = interpolateSecrets({
|
||||
const expandSecret = interpolateSecrets({
|
||||
folderDAL,
|
||||
projectId,
|
||||
secretDAL,
|
||||
secretEncKey: botKey
|
||||
});
|
||||
|
||||
const expandSingleSecret = async (secret: {
|
||||
secretKey: string;
|
||||
secretValue: string;
|
||||
secretComment?: string;
|
||||
secretPath: string;
|
||||
skipMultilineEncoding: boolean | null | undefined;
|
||||
}) => {
|
||||
const secretRecord: Record<
|
||||
string,
|
||||
{ value: string; comment?: string; skipMultilineEncoding: boolean | null | undefined }
|
||||
> = {
|
||||
[secret.secretKey]: {
|
||||
value: secret.secretValue,
|
||||
comment: secret.secretComment,
|
||||
skipMultilineEncoding: secret.skipMultilineEncoding
|
||||
}
|
||||
};
|
||||
|
||||
await expandSecrets(secretRecord);
|
||||
|
||||
// Update the secret with the expanded value
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
secret.secretValue = secretRecord[secret.secretKey].value;
|
||||
};
|
||||
|
||||
// Expand the secret
|
||||
await expandSingleSecret(decryptedSecret);
|
||||
const expandedSecretValue = await expandSecret({
|
||||
environment,
|
||||
secretPath: path,
|
||||
value: decryptedSecret.secretValue,
|
||||
skipMultilineEncoding: decryptedSecret.skipMultilineEncoding
|
||||
});
|
||||
decryptedSecret.secretValue = expandedSecretValue || "";
|
||||
}
|
||||
|
||||
return decryptedSecret;
|
||||
|
@@ -4,6 +4,8 @@ import { TDbClient } from "@app/db";
|
||||
import { TableName, TSecretVersions, TSecretVersionsUpdate } from "@app/db/schemas";
|
||||
import { BadRequestError, DatabaseError } from "@app/lib/errors";
|
||||
import { ormify, selectAllTableCols } from "@app/lib/knex";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { QueueName } from "@app/queue";
|
||||
|
||||
export type TSecretVersionDALFactory = ReturnType<typeof secretVersionDALFactory>;
|
||||
|
||||
@@ -112,6 +114,7 @@ export const secretVersionDALFactory = (db: TDbClient) => {
|
||||
};
|
||||
|
||||
const pruneExcessVersions = async () => {
|
||||
logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret version v1 started`);
|
||||
try {
|
||||
await db(TableName.SecretVersion)
|
||||
.with("version_cte", (qb) => {
|
||||
@@ -137,6 +140,7 @@ export const secretVersionDALFactory = (db: TDbClient) => {
|
||||
name: "Secret Version Prune"
|
||||
});
|
||||
}
|
||||
logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret version v1 completed`);
|
||||
};
|
||||
|
||||
return {
|
||||
|
@@ -1,4 +1,4 @@
|
||||
import tsconfigPaths from "vite-tsconfig-paths"; // only if you are using custom tsconfig paths
|
||||
import path from "path";
|
||||
import { defineConfig } from "vitest/config";
|
||||
|
||||
export default defineConfig({
|
||||
@@ -15,7 +15,14 @@ export default defineConfig({
|
||||
useAtomics: true,
|
||||
isolate: false
|
||||
}
|
||||
},
|
||||
alias: {
|
||||
"./license-fns": path.resolve(__dirname, "./src/ee/services/license/__mocks__/license-fns")
|
||||
}
|
||||
},
|
||||
plugins: [tsconfigPaths()] // only if you are using custom tsconfig paths,
|
||||
resolve: {
|
||||
alias: {
|
||||
"@app": path.resolve(__dirname, "./src")
|
||||
}
|
||||
}
|
||||
});
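For context, the `./license-fns` alias added above points Vitest at a manual mock so license checks don't reach a real license server during tests. A purely hypothetical sketch of such a mock (the exported name and returned shape are assumptions, not the real module's API):

```typescript
// src/ee/services/license/__mocks__/license-fns.ts (hypothetical contents)
// Pretend every gated feature is available so enterprise code paths can run in tests.
export const getDefaultOnPremFeatures = () => ({
  auditLogs: true,
  secretApproval: true,
  dynamicSecret: true
});
```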
|
||||
|
@@ -11,12 +11,25 @@ sinks:
|
||||
config:
|
||||
path: "access-token"
|
||||
templates:
|
||||
- source-path: my-dot-ev-secret-template
|
||||
- template-content: |
|
||||
{{- with secret "202f04d7-e4cb-43d4-a292-e893712d61fc" "dev" "/" }}
|
||||
{{- range . }}
|
||||
{{ .Key }}={{ .Value }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
destination-path: my-dot-env-0.env
|
||||
config:
|
||||
polling-interval: 60s
|
||||
execute:
|
||||
command: docker-compose -f docker-compose.prod.yml down && docker-compose -f docker-compose.prod.yml up -d
|
||||
|
||||
- base64-template-content: e3stIHdpdGggc2VjcmV0ICIyMDJmMDRkNy1lNGNiLTQzZDQtYTI5Mi1lODkzNzEyZDYxZmMiICJkZXYiICIvIiB9fQp7ey0gcmFuZ2UgLiB9fQp7eyAuS2V5IH19PXt7IC5WYWx1ZSB9fQp7ey0gZW5kIH19Cnt7LSBlbmQgfX0=
|
||||
destination-path: my-dot-env.env
|
||||
config:
|
||||
polling-interval: 60s
|
||||
execute:
|
||||
command: docker-compose -f docker-compose.prod.yml down && docker-compose -f docker-compose.prod.yml up -d
|
||||
|
||||
- source-path: my-dot-ev-secret-template1
|
||||
destination-path: my-dot-env-1.env
|
||||
config:
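For the `base64-template-content` variant shown above, the value is simply the raw template base64-encoded. A minimal sketch for producing it (Node.js assumed; the template file name is a placeholder):

```typescript
import { readFileSync } from "fs";

// Read the raw agent template and print the value to paste into base64-template-content.
const template = readFileSync("my-secret-template.tpl", "utf8");
console.log(Buffer.from(template, "utf8").toString("base64"));
```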
|
||||
|
@@ -95,6 +95,7 @@ type Template struct {
|
||||
SourcePath string `yaml:"source-path"`
|
||||
Base64TemplateContent string `yaml:"base64-template-content"`
|
||||
DestinationPath string `yaml:"destination-path"`
|
||||
TemplateContent string `yaml:"template-content"`
|
||||
|
||||
Config struct { // Configurations for the template
|
||||
PollingInterval string `yaml:"polling-interval"` // How often to poll for changes in the secret
|
||||
@@ -432,6 +433,30 @@ func ProcessBase64Template(templateId int, encodedTemplate string, data interfac
|
||||
return &buf, nil
|
||||
}
|
||||
|
||||
func ProcessLiteralTemplate(templateId int, templateString string, data interface{}, accessToken string, existingEtag string, currentEtag *string, dynamicSecretLeaser *DynamicSecretLeaseManager) (*bytes.Buffer, error) {
|
||||
secretFunction := secretTemplateFunction(accessToken, existingEtag, currentEtag) // TODO: Fix this
|
||||
dynamicSecretFunction := dynamicSecretTemplateFunction(accessToken, dynamicSecretLeaser, templateId)
|
||||
funcs := template.FuncMap{
|
||||
"secret": secretFunction,
|
||||
"dynamic_secret": dynamicSecretFunction,
|
||||
}
|
||||
|
||||
templateName := "literalTemplate"
|
||||
|
||||
tmpl, err := template.New(templateName).Funcs(funcs).Parse(templateString)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
if err := tmpl.Execute(&buf, data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &buf, nil
|
||||
}
|
||||
|
||||
|
||||
type AgentManager struct {
|
||||
accessToken string
|
||||
accessTokenTTL time.Duration
|
||||
@@ -820,6 +845,8 @@ func (tm *AgentManager) MonitorSecretChanges(secretTemplate Template, templateId
|
||||
|
||||
if secretTemplate.SourcePath != "" {
|
||||
processedTemplate, err = ProcessTemplate(templateId, secretTemplate.SourcePath, nil, token, existingEtag, ¤tEtag, tm.dynamicSecretLeases)
|
||||
} else if secretTemplate.TemplateContent != "" {
|
||||
processedTemplate, err = ProcessLiteralTemplate(templateId, secretTemplate.TemplateContent, nil, token, existingEtag, ¤tEtag, tm.dynamicSecretLeases)
|
||||
} else {
|
||||
processedTemplate, err = ProcessBase64Template(templateId, secretTemplate.Base64TemplateContent, nil, token, existingEtag, ¤tEtag, tm.dynamicSecretLeases)
|
||||
}
|
||||
|
@@ -19,10 +19,11 @@ Every new joiner has an onboarding buddy who should ideally be in the the same t
|
||||
1. Join the weekly all-hands meeting. It typically happens on Mondays at 8:30am PT.
|
||||
2. Ship something together on day one – even if tiny! It feels great to hit the ground running, with a development environment all ready to go.
|
||||
3. Check out the [Areas of Responsibility (AoR) Table](https://docs.google.com/spreadsheets/d/1RnXlGFg83Sgu0dh7ycuydsSobmFfI3A0XkGw7vrVxEI/edit?usp=sharing). This is helpful to know who you can ask about particular areas of Infisical. Feel free to add yourself to the areas you'd be most interested in diving into.
|
||||
4. Read the [Infisical Strategy Doc](https://docs.google.com/document/d/1oy_NP1Q_Zt1oqxLpyNkLIGmhAI3N28AmZq6dDIOONSQ/edit?usp=sharing).
|
||||
4. Read the [Infisical Strategy Doc](https://docs.google.com/document/d/1RaJd3RoS2QpWLFHlgfHaXnHqCCwRt6mCGZkbJ75J_D0/edit?usp=sharing).
|
||||
5. Update your LinkedIn profile with one of [Infisical's official banners](https://drive.google.com/drive/u/0/folders/1oSNWjbpRl9oNYwxM_98IqzKs9fAskrb2) (if you want to). You can also coordinate your social posts in the #marketing Slack channel, so that we can boost it from Infisical's official social media accounts.
|
||||
6. Over the first few weeks, feel free to schedule 1:1s with folks on the team to get to know them a bit better.
|
||||
7. Change your Slack username in the users channel to `[NAME] (Infisical)`.
|
||||
8. Go through the [technical overview](https://infisical.com/docs/internals/overview) of Infisical.
|
||||
9. Request a company credit card (Maidul will be able to help with that).
|
||||
|
||||
|
||||
|
@@ -1,9 +1,9 @@
|
||||
---
|
||||
title: "AWS Elasticahe"
|
||||
description: "Learn how to dynamically generate Redis Database user credentials."
|
||||
title: "AWS ElastiCache"
|
||||
description: "Learn how to dynamically generate AWS ElastiCache user credentials."
|
||||
---
|
||||
|
||||
The Infisical Redis dynamic secret allows you to generate Redis Database credentials on demand based on configured role.
|
||||
The Infisical AWS ElastiCache dynamic secret allows you to generate AWS ElastiCache credentials on demand based on a configured role.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
@@ -38,7 +38,7 @@ The Infisical Redis dynamic secret allows you to generate Redis Database credent
|
||||
|
||||
<Note>
|
||||
New leases may take up to a couple of minutes before ElastiCache has the chance to complete their configuration.
|
||||
It is recommended to use a retry strategy when establishing new Redis ElastiCache connections.
|
||||
It is recommended to use a retry strategy when establishing new ElastiCache connections.
|
||||
This may prevent errors when trying to use a password that isn't yet live on the targeted ElastiCache cluster.
|
||||
|
||||
While a lease is being created, you will be unable to create new leases for the same dynamic secret.
|
||||
@@ -51,7 +51,7 @@ The Infisical Redis dynamic secret allows you to generate Redis Database credent
|
||||
|
||||
|
||||
|
||||
## Set up Dynamic Secrets with Redis
|
||||
## Set up Dynamic Secrets with AWS ElastiCache
|
||||
|
||||
<Steps>
|
||||
<Step title="Open Secret Overview Dashboard">
|
||||
@@ -60,8 +60,8 @@ The Infisical Redis dynamic secret allows you to generate Redis Database credent
|
||||
<Step title="Click on the 'Add Dynamic Secret' button">
|
||||

|
||||
</Step>
|
||||
<Step title="Select 'Redis'">
|
||||

|
||||
<Step title="Select 'AWS ElastiCache'">
|
||||

|
||||
</Step>
|
||||
<Step title="Provide the inputs for dynamic secret parameters">
|
||||
<ParamField path="Secret Name" type="string" required>
|
||||
|
127
docs/documentation/platform/dynamic-secrets/elastic-search.mdx
Normal file
@@ -0,0 +1,127 @@
|
||||
---
|
||||
title: "Elastic Search"
|
||||
description: "Learn how to dynamically generate Elastic Search user credentials."
|
||||
---
|
||||
|
||||
The Infisical Elastic Search dynamic secret allows you to generate Elastic Search credentials on demand based on a configured role.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
|
||||
|
||||
1. Create a role with at least `manage_security` and `monitor` permissions.
|
||||
2. Assign the newly created role to your API key or user that you'll use later in the dynamic secret configuration.
|
||||
|
||||
<Note>
|
||||
For testing purposes, you can also use a highly privileged role like `superuser`, which will have full control over the cluster. This is not recommended in production environments, where the principle of least privilege should be followed.
|
||||
</Note>
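As a rough sketch of step 1 (assuming an Elasticsearch 8.x cluster with the standard security API enabled; the host, credentials, and role name below are placeholders), the prerequisite role could be created like this:

```typescript
// Create a role with the manage_security and monitor cluster privileges
// that will be used to provision dynamic users.
const response = await fetch("https://your-cluster-ip:9200/_security/role/infisical-dynamic-secrets", {
  method: "PUT",
  headers: {
    "Content-Type": "application/json",
    Authorization: `Basic ${Buffer.from("elastic:your-password").toString("base64")}`
  },
  body: JSON.stringify({ cluster: ["manage_security", "monitor"] })
});
if (!response.ok) throw new Error(`Failed to create role: ${response.status}`);
```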
|
||||
|
||||
## Set up Dynamic Secrets with Elastic Search
|
||||
|
||||
<Steps>
|
||||
<Step title="Open Secret Overview Dashboard">
|
||||
Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
|
||||
</Step>
|
||||
<Step title="Click on the 'Add Dynamic Secret' button">
|
||||

|
||||
</Step>
|
||||
<Step title="Select 'Elastic Search'">
|
||||

|
||||
</Step>
|
||||
<Step title="Provide the inputs for dynamic secret parameters">
|
||||
<ParamField path="Secret Name" type="string" required>
|
||||
Name by which you want the secret to be referenced
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Default TTL" type="string" required>
|
||||
Default time-to-live for a generated secret (it is possible to modify this value when a secret is generated)
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Max TTL" type="string" required>
|
||||
Maximum time-to-live for a generated secret.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Host" type="string" required>
|
||||
Your Elastic Search host. This is the endpoint that your instance runs on. _(Example: https://your-cluster-ip)_
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Port" type="string" required>
|
||||
The port that your Elastic Search instance is running on. _(Example: 9200)_
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Roles" type="string[]" required>
|
||||
The roles that will be assigned to the new user created when a lease is provisioned. This is a required field and defaults to `superuser`, which is highly privileged. It is recommended to create a new role with the least privileges required for the lease.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Authentication Method" type="API Key | Username/Password" required>
|
||||
Select the authentication method you want to use to connect to your Elastic Search instance.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Username" type="string" required>
|
||||
The username of the user that will be used to provision new dynamic secret leases. Only required if you selected the `Username/Password` authentication method.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Password" type="string" required>
|
||||
The password of the user that will be used to provision new dynamic secret leases. Only required if you selected the `Username/Password` authentication method.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="API Key ID" required>
|
||||
The ID of the API key that will be used to provision new dynamic secret leases. Only required if you selected the `API Key` authentication method.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="API Key" required>
|
||||
The API key that will be used to provision new dynamic secret leases. Only required if you selected the `API Key` authentication method.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="CA(SSL)" type="string">
|
||||
A CA certificate may be required if your database requires it for incoming connections. This is often the case when connecting to a managed service.
|
||||
</ParamField>
|
||||
|
||||

|
||||
|
||||
|
||||
</Step>
|
||||
<Step title="Click `Submit`">
|
||||
After submitting the form, you will see a dynamic secret created in the dashboard.
|
||||
|
||||
<Note>
|
||||
If this step fails, you may have to add the CA certificate.
|
||||
</Note>
|
||||
|
||||
</Step>
|
||||
<Step title="Generate dynamic secrets">
|
||||
Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
|
||||
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
|
||||
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
|
||||
|
||||

|
||||

|
||||
|
||||
When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
|
||||
|
||||

|
||||
|
||||
<Tip>
|
||||
Ensure that the TTL for the lease fall within the maximum TTL defined when configuring the dynamic secret.
|
||||
</Tip>
|
||||
|
||||
|
||||
Once you click the `Submit` button, a new secret lease will be generated and the credentials from it will be shown to you.
|
||||
|
||||

|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
## Audit or Revoke Leases
|
||||
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
|
||||
This will allow you to see the expiration time of the lease or delete a lease before its set time to live.
|
||||
|
||||

|
||||
|
||||
## Renew Leases
|
||||
To extend the life of a generated dynamic secret lease past its initial time to live, simply click **Renew** as illustrated below.
|
||||

|
||||
|
||||
<Warning>
|
||||
Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret
|
||||
</Warning>
|
114
docs/documentation/platform/dynamic-secrets/mongo-atlas.mdx
Normal file
@@ -0,0 +1,114 @@
|
||||
---
|
||||
title: "Mongo Atlas"
|
||||
description: "Learn how to dynamically generate Mongo Atlas Database user credentials."
|
||||
---
|
||||
|
||||
The Infisical Mongo Atlas dynamic secret allows you to generate Mongo Atlas Database credentials on demand based on a configured role.
|
||||
|
||||
## Prerequisite
|
||||
Create a project-scoped API Key with the required permissions in your Mongo Atlas project by following the [official doc](https://www.mongodb.com/docs/atlas/configure-api-access/#grant-programmatic-access-to-a-project).
|
||||
|
||||
<Info>
|
||||
The API Key must have permission to manage users in the project.
|
||||
</Info>
|
||||
|
||||
## Set up Dynamic Secrets with Mongo Atlas
|
||||
|
||||
<Steps>
|
||||
<Step title="Open Secret Overview Dashboard">
|
||||
Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
|
||||
</Step>
|
||||
<Step title="Click on the 'Add Dynamic Secret' button">
|
||||

|
||||
</Step>
|
||||
<Step title="Select Mongo Atlas">
|
||||

|
||||
</Step>
|
||||
<Step title="Provide the inputs for dynamic secret parameters">
|
||||
<ParamField path="Secret Name" type="string" required>
|
||||
Name by which you want the secret to be referenced
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Default TTL" type="string" required>
|
||||
Default time-to-live for a generated secret (it is possible to modify this value when a secret is generated)
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Max TTL" type="string" required>
|
||||
Maximum time-to-live for a generated secret
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Admin public key" type="string" required>
|
||||
The public key of your generated Atlas API Key. This acts as a username.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Admin private key" type="string" required>
|
||||
The private key of your generated Atlas API Key. This acts as a password.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Group ID" type="number" required>
|
||||
Unique 24-hexadecimal-digit string that identifies your project. This is the same as the project ID.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Roles" type="string" required>
|
||||
List that provides the pairings of one role with one applicable database.
|
||||
- **Database Name**: Database to which the user is granted access privileges.
|
||||
- **Collection**: Collection on which this role applies.
|
||||
- **Role Name**: Human-readable label that identifies a group of privileges assigned to a database user. This value can either be a built-in role or a custom role.
|
||||
- Enum: `atlasAdmin` `backup` `clusterMonitor` `dbAdmin` `dbAdminAnyDatabase` `enableSharding` `read` `readAnyDatabase` `readWrite` `readWriteAnyDatabase` `<a custom role name>`.
|
||||
</ParamField>
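For illustration, the role pairings described above map onto a payload shaped roughly like the one the Atlas `databaseUsers` API expects (field names follow the public Atlas API; treating this as exactly what Infisical sends is an assumption):

```typescript
// One entry per role pairing; scopes correspond to the optional access scope step below.
const databaseUserRequest = {
  databaseName: "admin", // authentication database for the new user
  username: "infisical-lease-user", // placeholder; generated per lease
  password: "generated-password", // placeholder; generated per lease
  roles: [{ databaseName: "myAppDb", collectionName: "orders", roleName: "readWrite" }],
  scopes: [{ name: "my-cluster", type: "CLUSTER" }]
};
```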
|
||||
|
||||

|
||||
|
||||
</Step>
|
||||
<Step title="(Optional) Modify Access Scope">
|
||||
List that contains clusters, MongoDB Atlas Data Lakes, and MongoDB Atlas Streams Instances that this database user can access. If omitted, MongoDB Cloud grants the database user access to all the clusters, MongoDB Atlas Data Lakes, and MongoDB Atlas Streams Instances in the project.
|
||||
|
||||

|
||||
- **Label**: Human-readable label that identifies the cluster or MongoDB Atlas Data Lake that this database user can access.
|
||||
- **Type**: Category of resource that this database user can access.
|
||||
</Step>
|
||||
<Step title="Click 'Submit'">
|
||||
After submitting the form, you will see a dynamic secret created in the dashboard.
|
||||
|
||||
<Note>
|
||||
If this step fails, you may have to add the CA certificate.
|
||||
</Note>
|
||||
|
||||

|
||||
</Step>
|
||||
<Step title="Generate dynamic secrets">
|
||||
Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
|
||||
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
|
||||
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
|
||||
|
||||

|
||||

|
||||
|
||||
When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
|
||||
|
||||

|
||||
|
||||
<Tip>
|
||||
Ensure that the TTL for the lease fall within the maximum TTL defined when configuring the dynamic secret.
|
||||
</Tip>
|
||||
|
||||
|
||||
Once you click the `Submit` button, a new secret lease will be generated and the credentials for it will be shown to you.
|
||||
|
||||

|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
## Audit or Revoke Leases
|
||||
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
|
||||
This will allow you to see the expiration time of the lease or delete a lease before its set time to live.
|
||||
|
||||

|
||||
|
||||
## Renew Leases
|
||||
To extend the life of a generated dynamic secret lease past its initial time to live, simply click **Renew** as illustrated below.
|
||||

|
||||
|
||||
<Warning>
|
||||
Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret
|
||||
</Warning>
|
@@ -21,7 +21,8 @@ These endpoints are exposed on port 8443 under the .well-known/est path e.g.
|
||||
|
||||
- You need to have an existing [CA hierarchy](/documentation/platform/pki/private-ca).
|
||||
- The client devices need to have a bootstrap/pre-installed certificate.
|
||||
- The client devices must trust the server certificates used by Infisical's EST server. If the devices are new or lack existing trust configurations, you need to manually establish trust for the appropriate certificates. When using Infisical Cloud, this means establishing trust for certificates issued by AWS.
|
||||
- The client devices must trust the server certificates used by Infisical's EST server. If the devices are new or lack existing trust configurations, you need to manually establish trust for the appropriate certificates.
|
||||
- For Infisical Cloud users, the devices must be configured to trust the [Amazon root CA certificates](https://www.amazontrust.com/repository).
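If your EST client happens to be Node-based, a minimal sketch of establishing that trust looks like the following (the CA bundle path and EST label are placeholders; dedicated EST tooling is more common in practice):

```typescript
import { readFileSync } from "fs";
import https from "https";

// Trust the Amazon root CAs (downloaded from https://www.amazontrust.com/repository)
// when talking to Infisical Cloud's EST endpoints.
const agent = new https.Agent({ ca: readFileSync("./amazon-root-cas.pem") });
https.get("https://app.infisical.com:8443/.well-known/est/<est-label>/cacerts", { agent }, (res) =>
  res.pipe(process.stdout)
);
```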
|
||||
|
||||
## Guide to configuring EST
|
||||
|
||||
@@ -42,7 +43,7 @@ These endpoints are exposed on port 8443 under the .well-known/est path e.g.
|
||||
4. Once the configuration of enrollment options is completed, a new **EST Label** field appears in the enrollment settings. This is the value to use as label in the URL when configuring the connection of EST clients to Infisical.
|
||||

|
||||
|
||||
For demonstration, the complete URL of the supported EST endpoints will look like the following:
|
||||
The complete URL of the supported EST endpoints will look like the following:
|
||||
|
||||
- https://app.infisical.com:8443/.well-known/est/f110f308-9888-40ab-b228-237b12de8b96/cacerts
|
||||
- https://app.infisical.com:8443/.well-known/est/f110f308-9888-40ab-b228-237b12de8b96/simpleenroll
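For illustration, a bare-bones `simpleenroll` call from Node could look like this (file paths, the EST label, and the CSR are placeholders; production clients typically rely on an EST library):

```typescript
import { readFileSync } from "fs";
import https from "https";

// RFC 7030 simpleenroll: POST a base64-encoded PKCS#10 CSR, authenticating
// with the device's bootstrap certificate over mutual TLS.
const req = https.request(
  "https://app.infisical.com:8443/.well-known/est/<est-label>/simpleenroll",
  {
    method: "POST",
    cert: readFileSync("./bootstrap-cert.pem"),
    key: readFileSync("./bootstrap-key.pem"),
    ca: readFileSync("./amazon-root-cas.pem"),
    headers: { "Content-Type": "application/pkcs10" }
  },
  (res) => res.pipe(process.stdout) // response carries the issued certificate as base64 PKCS#7
);
req.end(readFileSync("./device-csr.b64"));
```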
|
||||
|
Before Width: | Height: | Size: 691 KiB After Width: | Height: | Size: 700 KiB |
Before Width: | Height: | Size: 709 KiB After Width: | Height: | Size: 710 KiB |
Before Width: | Height: | Size: 715 KiB After Width: | Height: | Size: 696 KiB |
BIN
docs/images/platform/dynamic-secrets/advanced-option-atlas.png
Normal file
After Width: | Height: | Size: 18 KiB |
After Width: | Height: | Size: 50 KiB |
After Width: | Height: | Size: 173 KiB |
After Width: | Height: | Size: 76 KiB |
After Width: | Height: | Size: 151 KiB |
After Width: | Height: | Size: 149 KiB |
After Width: | Height: | Size: 188 KiB |
@@ -41,6 +41,13 @@ Prerequisites:
|
||||
</Tab>
|
||||
<Tab title="Organization">
|
||||

|
||||
|
||||
When using the organization scope, your secrets will be saved at the top level of your GitHub organization.
|
||||
|
||||
You can choose the visibility, which defines which repositories can access the secrets. The options are:
|
||||
- **All public repositories**: All public repositories in the organization can access the secrets.
|
||||
- **All private repositories**: All private repositories in the organization can access the secrets.
|
||||
- **Selected repositories**: Only the selected repositories can access the secrets. This gives more fine-grained control over which repositories can access the secrets. You can select _both_ private and public repositories with this option.
|
||||
</Tab>
|
||||
<Tab title="Repository Environment">
|
||||

|
||||
|
@@ -1,27 +1,30 @@
|
||||
---
|
||||
title: 'Amazon ECS'
|
||||
title: "Amazon ECS"
|
||||
description: "Learn how to deliver secrets to Amazon Elastic Container Service."
|
||||
---
|
||||
|
||||

|
||||
|
||||
This guide will go over the steps needed to access secrets stored in Infisical from Amazon Elastic Container Service (ECS).
|
||||
This guide will go over the steps needed to access secrets stored in Infisical from Amazon Elastic Container Service (ECS).
|
||||
|
||||
At a high level, the steps involve setting up an ECS task with a [Infisical Agent](/infisical-agent/overview) as a sidecar container. This sidecar container uses [Universal Auth](/documentation/platform/identities/universal-auth) to authenticate with Infisical to fetch secrets/access tokens.
|
||||
At a high level, the steps involve setting up an ECS task with an [Infisical Agent](/infisical-agent/overview) as a sidecar container. This sidecar container uses [AWS Auth](/documentation/platform/identities/aws-auth) to authenticate with Infisical to fetch secrets/access tokens.
|
||||
Once the secrets/access tokens are retrieved, they are then stored in a shared [Amazon Elastic File System](https://aws.amazon.com/efs/) (EFS) volume. This volume is then made accessible to your application and all of its replicas.
|
||||
|
||||
This guide primarily focuses on integrating Infisical Cloud with Amazon ECS on AWS Fargate and Amazon EFS.
|
||||
However, the principles and steps can be adapted for use with any instance of Infisical (on premise or cloud) and different ECS launch configurations.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
This guide requires the following prerequisites:
|
||||
- Infisical account
|
||||
|
||||
- Infisical account
|
||||
- Git installed
|
||||
- Terraform v1.0 or later installed
|
||||
- Access to AWS credentials
|
||||
- Understanding of [Infisical Agent](/infisical-agent/overview)
|
||||
|
||||
## What we will deploy
|
||||
|
||||
For this demonstration, we'll deploy the [File Browser](https://github.com/filebrowser/filebrowser) application on our ECS cluster.
|
||||
Although this guide focuses on File Browser, the principles outlined here can be applied to any application of your choice.
|
||||
|
||||
@@ -29,20 +32,20 @@ File Browser plays a key role in this context because it enables us to view all
|
||||
This feature is important for our demonstration, as it allows us to verify whether the Infisical agent is depositing the expected files into the designated file volume and if those files are accessible to the application.
|
||||
|
||||
<Warning>
|
||||
Volumes that contain sensitive secrets should not be publicly accessible. The use of File Browser here is solely for demonstration and verification purposes.
|
||||
Volumes that contain sensitive secrets should not be publicly accessible. The
|
||||
use of File Browser here is solely for demonstration and verification
|
||||
purposes.
|
||||
</Warning>
|
||||
|
||||
|
||||
## Configure Authentication with Infisical
|
||||
In order for the Infisical agent to fetch credentials from Infisical, we'll first need to authenticate with Infisical.
|
||||
While Infisical supports various authentication methods, this guide focuses on using Universal Auth to authenticate the agent with Infisical.
|
||||
|
||||
Follow the documentation to configure and generate a client id and client secret with Universal auth [here](/documentation/platform/identities/universal-auth).
|
||||
Make sure to save these credentials somewhere handy because you'll need them soon.
|
||||
In order for the Infisical agent to fetch credentials from Infisical, we'll first need to authenticate with Infisical. Follow the documentation to configure a machine identity with AWS Auth [here](/documentation/platform/identities/aws-auth).
|
||||
Take note of the Machine Identity ID, as you will need it in the subsequent steps.
|
||||
|
||||
## Clone guide assets repository
|
||||
|
||||
To help you quickly deploy the example application, please clone the guide assets from this [Github repository](https://github.com/Infisical/infisical-guides.git).
|
||||
This repository contains assets for all Infisical guides. The content for this guide can be found within a sub directory called `aws-ecs-with-agent`.
|
||||
This repository contains assets for all Infisical guides. The content for this guide can be found within a sub directory called `aws-ecs-with-agent`.
|
||||
The guide will assume that `aws-ecs-with-agent` is your working directory going forward.
|
||||
|
||||
## Deploy example application
|
||||
@@ -50,95 +53,84 @@ The guide will assume that `aws-ecs-with-agent` is your working directory going
|
||||
Before we can deploy our full application and its related infrastructure with Terraform, we'll need to first configure our Infisical agent.
|
||||
|
||||
### Agent configuration overview
|
||||
|
||||
The agent config file defines which authentication method will be used when connecting with Infisical, along with where the fetched secrets/access tokens should be saved.
|
||||
|
||||
Since the Infisical agent will be deployed as a sidecar, the agent configuration file and any secret template files will need to be encoded in base64.
|
||||
This encoding step is necessary as it allows these files to be added into our Terraform configuration file without needing to upload them first.
|
||||
|
||||
#### Secret template file
|
||||
The Infisical agent accepts one or more optional template files. If provided, the agent will fetch secrets using the set authentication method and format the fetched secrets according to the given template file.
|
||||
|
||||
For demonstration purposes, we will create the following secret template file.
|
||||
This template will transform our secrets from Infisical project with the ID `62fd92aa8b63973fee23dec7`, in the `dev` environment, and secrets located in the path `/`, into a `KEY=VALUE` format.
|
||||
|
||||
<Tip>
|
||||
Remember to update the project id, environment slug and secret path to one that exists within your Infisical project
|
||||
</Tip>
|
||||
|
||||
```secrets.template secrets.template
|
||||
{{- with secret "62fd92aa8b63973fee23dec7" "dev" "/" }}
|
||||
{{- range . }}
|
||||
{{ .Key }}={{ .Value }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
```
|
||||
|
||||
Next, we need to encode this template file in `base64` so it can be set in the agent configuration file.
|
||||
|
||||
```bash
|
||||
cat secrets.template | base64
|
||||
Cnt7LSB3aXRoIHNlY3JldCAiMWVkMjk2MWQtNDM5NS00MmNlLTlkNzQtYjk2ZGQwYmYzMDg0IiAiZGV2IiAiLyIgfX0Ke3stIHJhbmdlIC4gfX0Ke3sgLktleSB9fT17eyAuVmFsdWUgfX0Ke3stIGVuZCB9fQp7ey0gZW5kIH19
|
||||
```
|
||||
Since the Infisical agent will be deployed as a sidecar, the agent configuration file will need to be encoded in base64.
|
||||
This encoding step is necessary as it allows the agent configuration file to be added into our Terraform configuration without needing to upload it first.
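
If you would like to inspect the encoded value that Terraform's `base64encode(file(...))` call will later produce, you can generate the same single-line base64 string manually (the `-w 0` flag assumes GNU coreutils; plain `base64` on macOS already emits a single line):

```bash
# Produce the same unwrapped base64 string that base64encode(file("agent-config.yaml")) yields
base64 -w 0 agent-config.yaml
```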
|
||||
|
||||
#### Full agent configuration file
|
||||
This agent config file will connect with Infisical Cloud using Universal Auth and deposit access tokens at path `/infisical-agent/access-token` and render secrets to file `/infisical-agent/secrets`.
|
||||
|
||||
You'll notice that instead of passing the path to the secret template file as we normally would, we set the base64 encoded template from the previous step under `base64-template-content` property.
|
||||
Inside the `aws-ecs-with-agent` directory, you will find a sample `agent-config.yaml` file. This agent config file will connect with Infisical Cloud using AWS Auth and deposit access tokens at path `/infisical-agent/access-token` and render secrets to file `/infisical-agent/secrets`.
|
||||
|
||||
```yaml agent-config.yaml
|
||||
infisical:
|
||||
address: https://app.infisical.com
|
||||
exit-after-auth: true
|
||||
auth:
|
||||
type: universal-auth
|
||||
config:
|
||||
remove_client_secret_on_read: false
|
||||
type: aws-iam
|
||||
sinks:
|
||||
- type: file
|
||||
config:
|
||||
path: /infisical-agent/access-token
|
||||
templates:
|
||||
- base64-template-content: Cnt7LSB3aXRoIHNlY3JldCAiMWVkMjk2MWQtNDM5NS00MmNlLTlkNzQtYjk2ZGQwYmYzMDg0IiAiZGV2IiAiLyIgfX0Ke3stIHJhbmdlIC4gfX0Ke3sgLktleSB9fT17eyAuVmFsdWUgfX0Ke3stIGVuZCB9fQp7ey0gZW5kIH19
|
||||
- template-content: |
|
||||
{{- with secret "202f04d7-e4cb-43d4-a292-e893712d61fc" "dev" "/" }}
|
||||
{{- range . }}
|
||||
{{ .Key }}={{ .Value }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
destination-path: /infisical-agent/secrets
|
||||
```
|
||||
|
||||
Again, we'll need to encode the full configuration file in `base64` so it can be easily delivered via Terraform.
|
||||
#### Secret template
|
||||
|
||||
```bash
|
||||
cat agent-config.yaml | base64
|
||||
aW5maXNpY2FsOgogIGFkZHJlc3M6IGh0dHBzOi8vYXBwLmluZmlzaWNhbC5jb20KICBleGl0LWFmdGVyLWF1dGg6IHRydWUKYXV0aDoKICB0eXBlOiB1bml2ZXJzYWwtYXV0aAogIGNvbmZpZzoKICAgIHJlbW92ZV9jbGllbnRfc2VjcmV0X29uX3JlYWQ6IGZhbHNlCnNpbmtzOgogIC0gdHlwZTogZmlsZQogICAgY29uZmlnOgogICAgICBwYXRoOiAvaW5maXNpY2FsLWFnZW50L2FjY2Vzcy10b2tlbgp0ZW1wbGF0ZXM6CiAgLSBiYXNlNjQtdGVtcGxhdGUtY29udGVudDogQ250N0xTQjNhWFJvSUhObFkzSmxkQ0FpTVdWa01qazJNV1F0TkRNNU5TMDBNbU5sTFRsa056UXRZamsyWkdRd1ltWXpNRGcwSWlBaVpHVjJJaUFpTHlJZ2ZYMEtlM3N0SUhKaGJtZGxJQzRnZlgwS2Uzc2dMa3RsZVNCOWZUMTdleUF1Vm1Gc2RXVWdmWDBLZTNzdElHVnVaQ0I5ZlFwN2V5MGdaVzVrSUgxOQogICAgZGVzdGluYXRpb24tcGF0aDogL2luZmlzaWNhbC1hZ2VudC9zZWNyZXRzCg==
|
||||
```
|
||||
The Infisical agent accepts one or more optional templates. If provided, the agent will fetch secrets using the set authentication method and format the fetched secrets according to the given template.
|
||||
Typically, these templates are referenced from the agent configuration file via the `source-path` property, but for simplicity we define them inline.
|
||||
|
||||
## Add auth credentials & agent config
|
||||
With the base64 encoded agent config file and Universal Auth credentials in hand, it's time to assign them as values in our Terraform config file.
|
||||
In the agent configuration above, the template defined will transform the secrets from Infisical project with the ID `202f04d7-e4cb-43d4-a292-e893712d61fc`, in the `dev` environment, and secrets located in the path `/`, into a `KEY=VALUE` format.
|
||||
|
||||
To configure these values, navigate to the `ecs.tf` file in your preferred code editor and assign values to `auth_client_id`, `auth_client_secret`, and `agent_config`.
|
||||
<Tip>
|
||||
Remember to update the project id, environment slug and secret path to one
|
||||
that exists within your Infisical project
|
||||
</Tip>
|
||||
|
||||
## Configure app on terraform
|
||||
|
||||
Navigate to the `ecs.tf` file in your preferred code editor. In the `container_definitions` section, assign values to the `machine_identity_id` and `agent_config` properties.
|
||||
The `agent_config` property expects the base64-encoded agent configuration file. In order to get this, we use the `base64encode` and `file` functions of HCL.
|
||||
|
||||
```hcl ecs.tf
|
||||
...snip...
|
||||
data "template_file" "cb_app" {
|
||||
template = file("./templates/ecs/cb_app.json.tpl")
|
||||
|
||||
vars = {
|
||||
app_image = var.app_image
|
||||
sidecar_image = var.sidecar_image
|
||||
app_port = var.app_port
|
||||
fargate_cpu = var.fargate_cpu
|
||||
fargate_memory = var.fargate_memory
|
||||
aws_region = var.aws_region
|
||||
auth_client_id = "<paste-client-id-string>"
|
||||
auth_client_secret = "<paset-client-secret-string>"
|
||||
agent_config = "<paste-base64-encoded-agent-config-string>"
|
||||
resource "aws_ecs_task_definition" "app" {
|
||||
family = "cb-app-task"
|
||||
execution_role_arn = aws_iam_role.ecs_task_execution_role.arn
|
||||
task_role_arn = aws_iam_role.ecs_task_role.arn
|
||||
network_mode = "awsvpc"
|
||||
requires_compatibilities = ["FARGATE"]
|
||||
cpu = 4096
|
||||
memory = 8192
|
||||
container_definitions = templatefile("./templates/ecs/cb_app.json.tpl", {
|
||||
app_image = var.app_image
|
||||
sidecar_image = var.sidecar_image
|
||||
app_port = var.app_port
|
||||
fargate_cpu = var.fargate_cpu
|
||||
fargate_memory = var.fargate_memory
|
||||
aws_region = var.aws_region
|
||||
machine_identity_id = "5655f4f5-332b-45f9-af06-8f493edff36f"
|
||||
agent_config = base64encode(file("../agent-config.yaml"))
|
||||
})
|
||||
volume {
|
||||
name = "infisical-efs"
|
||||
efs_volume_configuration {
|
||||
file_system_id = aws_efs_file_system.infisical_efs.id
|
||||
root_directory = "/"
|
||||
}
|
||||
}
|
||||
}
|
||||
...snip...
|
||||
```
|
||||
|
||||
<Warning>
|
||||
To keep this guide simple, `auth_client_id`, `auth_client_secret` have been added directly into the ECS container definition.
|
||||
However, in production, you should securely fetch these values from AWS Secrets Manager or AWS Parameter store and feed them directly to agent sidecar.
|
||||
</Warning>
|
||||
|
||||
After these values have been set, they will be passed to the Infisical agent during startup through environment variables, as configured in the `infisical-sidecar` container below.
|
||||
|
||||
```terraform templates/ecs/cb_app.json.tpl
|
||||
@@ -169,12 +161,8 @@ After these values have been set, they will be passed to the Infisical agent dur
|
||||
},
|
||||
"environment": [
|
||||
{
|
||||
"name": "INFISICAL_UNIVERSAL_AUTH_CLIENT_ID",
|
||||
"value": "${auth_client_id}"
|
||||
},
|
||||
{
|
||||
"name": "INFISICAL_UNIVERSAL_CLIENT_SECRET",
|
||||
"value": "${auth_client_secret}"
|
||||
"name": "INFISICAL_MACHINE_IDENTITY_ID",
|
||||
"value": "${machine_identity_id}"
|
||||
},
|
||||
{
|
||||
"name": "INFISICAL_AGENT_CONFIG_BASE64",
|
||||
@@ -191,9 +179,9 @@ After these values have been set, they will be passed to the Infisical agent dur
|
||||
]
|
||||
```
|
||||
|
||||
In the above container definition, you'll notice that that the Infisical agent has a `mountPoints` defined.
|
||||
This mount point is referencing to the already configured EFS volume as shown below.
|
||||
`containerPath` is set to `/infisical-agent` because that is that the folder we have instructed the agent to deposit the credentials to.
|
||||
In the above container definition, you'll notice that the Infisical agent has a `mountPoints` entry defined.
|
||||
This mount point references the already configured EFS volume, as shown below.
|
||||
`containerPath` is set to `/infisical-agent` because that is the folder we have instructed the agent to deposit the credentials into.
|
||||
|
||||
```hcl terraform/efs.tf
|
||||
resource "aws_efs_file_system" "infisical_efs" {
|
||||
@@ -211,8 +199,9 @@ resource "aws_efs_mount_target" "mount" {
|
||||
```
|
||||
|
||||
## Configure AWS credentials
|
||||
|
||||
Because we'll be deploying the example file browser application to AWS via Terraform, you will need to obtain an AWS `Access Key` and `Secret Key`.
|
||||
Once you have generated these credentials, export them to your terminal.
|
||||
Once you have generated these credentials, export them to your terminal.
|
||||
|
||||
1. Export the AWS Access Key ID:
|
||||
|
||||
@@ -227,26 +216,31 @@ Once you have generated these credentials, export them to your terminal.
|
||||
```
|
||||
|
||||
## Deploy terraform configuration
|
||||
|
||||
With the agent's sidecar configuration complete, we can now deploy our changes to AWS via Terraform.
|
||||
|
||||
1. Change your directory to `terraform`
|
||||
```sh
|
||||
|
||||
```sh
|
||||
cd terraform
|
||||
```
|
||||
|
||||
2. Initialize Terraform
|
||||
|
||||
```
|
||||
$ terraform init
|
||||
$ terraform init
|
||||
```
|
||||
|
||||
3. Preview resources that will be created
|
||||
3. Preview resources that will be created
|
||||
|
||||
```
|
||||
$ terraform plan
|
||||
```
|
||||
|
||||
4. Trigger resource creation
|
||||
|
||||
```bash
|
||||
$ terraform apply
|
||||
$ terraform apply
|
||||
|
||||
Do you want to perform these actions?
|
||||
Terraform will perform the actions described above.
|
||||
@@ -264,24 +258,24 @@ Outputs:
|
||||
alb_hostname = "cb-load-balancer-1675475779.us-east-1.elb.amazonaws.com:8080"
|
||||
```
|
||||
|
||||
Once the resources have been successfully deployed, Terrafrom will output the host address where the file browser application will be accessible.
|
||||
It may take a few minutes for the application to become fully ready.
|
||||
|
||||
Once the resources have been successfully deployed, Terraform will output the host address where the file browser application will be accessible.
|
||||
It may take a few minutes for the application to become fully ready.
|
||||
|
||||
## Verify secrets/tokens in EFS volume
|
||||
|
||||
To verify that the agent is depositing access tokens and rendering secrets to the paths specified in the agent config, navigate to the web address from the previous step.
|
||||
Once you visit the address, you'll be prompted to log in. Enter the credentials shown below.
|
||||
|
||||

|
||||
|
||||
Since our EFS volume is mounted to the path of the file browser application, we should see the access token and rendered secret file we defined via the agent config file.
|
||||
Since our EFS volume is mounted to the path of the file browser application, we should see the access token and rendered secret file we defined via the agent config file.
|
||||
|
||||

|
||||
|
||||
As expected, two files are present: `access-token` and `secrets`.
|
||||
The `access-token` file should hold a valid `Bearer` token, which can be used to make HTTP requests to Infisical.
|
||||
As expected, two files are present: `access-token` and `secrets`.
|
||||
The `access-token` file should hold a valid `Bearer` token, which can be used to make HTTP requests to Infisical.
|
||||
The `secrets` file should contain secrets, formatted according to the specifications in our secret template file (presented in key=value format).
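
As a quick check beyond the file browser UI, an application (or a shell inside the task) could read the deposited token and use it as a `Bearer` token against the Infisical API. The route and query parameters below are illustrative placeholders, so match them to the API reference for your Infisical version:

```bash
# Read the access token deposited by the agent and call the API with it.
# Endpoint and query parameters are placeholders for illustration only.
TOKEN=$(cat /infisical-agent/access-token)
curl -s "https://app.infisical.com/api/v3/secrets/raw?workspaceId=<project-id>&environment=dev&secretPath=/" \
  -H "Authorization: Bearer ${TOKEN}"
```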
|
||||
|
||||

|
||||
|
||||

|
||||

|
||||
|
@@ -9,56 +9,60 @@ It eliminates the need to modify application logic by enabling clients to decide
|
||||

|
||||
|
||||
### Key features:
|
||||
|
||||
- Token renewal: Automatically authenticates with Infisical and deposits renewed access tokens at specified path for applications to consume
|
||||
- Templating: Renders secrets via user provided templates to desired formats for applications to consume
|
||||
|
||||
### Token renewal
|
||||
|
||||
The Infisical agent can help manage the life cycle of access tokens. The token renewal process is split into two main components: a `Method`, which is the authentication process suitable for your current setup, and `Sinks`, which are the places where the agent deposits the new access token whenever it receives updates.
|
||||
|
||||
When the Infisical Agent is started, it will attempt to obtain a valid access token using the authentication method you have configured. If the agent is unable to fetch a valid token, the agent will keep trying, increasing the time between each attempt.
|
||||
When the Infisical Agent is started, it will attempt to obtain a valid access token using the authentication method you have configured. If the agent is unable to fetch a valid token, the agent will keep trying, increasing the time between each attempt.
|
||||
|
||||
Once an access token is successfully fetched, the agent will make sure the access token stays valid, continuing to renew it before it expires.
|
||||
|
||||
Every time the agent successfully retrieves a new access token, it writes the new token to the Sinks you've configured.
|
||||
|
||||
<Info>
|
||||
Access tokens can be utilized with Infisical SDKs or directly in API requests to retrieve secrets from Infisical
|
||||
Access tokens can be utilized with Infisical SDKs or directly in API requests
|
||||
to retrieve secrets from Infisical
|
||||
</Info>
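
As a minimal sketch of how an application might pick up renewed tokens from a file sink, a simple polling loop could look like the following. The sink path is a placeholder and should match whatever you configure under `sinks[].config.path`; the reload action is likewise only illustrative.

```bash
# Poll the sink file and react whenever the agent deposits a renewed access token.
SINK="/path/to/access-token"   # placeholder; use your sinks[].config.path value
last=""
while true; do
  current=$(md5sum "$SINK" 2>/dev/null | awk '{print $1}')
  if [ -n "$current" ] && [ "$current" != "$last" ]; then
    last="$current"
    echo "access token updated; reload the application here (placeholder action)"
  fi
  sleep 30
done
```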
|
||||
|
||||
### Templating
|
||||
The Infisical agent can help deliver formatted secrets to your application in a variety of environments. To achieve this, the agent will retrieve secrets from Infisical, format them using a specified template, and then save these formatted secrets to a designated file path.
|
||||
|
||||
Templating process is done through the use of Go language's [text/template feature](https://pkg.go.dev/text/template). Multiple template definitions can be set in the agent configuration file to generate a variety of formatted secret files.
|
||||
The Infisical agent can help deliver formatted secrets to your application in a variety of environments. To achieve this, the agent will retrieve secrets from Infisical, format them using a specified template, and then save these formatted secrets to a designated file path.
|
||||
|
||||
When the agent is started and templates are defined in the agent configuration file, the agent will attempt to acquire a valid access token using the set authentication method outlined in the agent's configuration.
|
||||
The templating process is done through the use of the Go language's [text/template feature](https://pkg.go.dev/text/template). You can refer to the available secret template functions [here](#available-secret-template-functions). Multiple template definitions can be set in the agent configuration file to generate a variety of formatted secret files.
|
||||
|
||||
When the agent is started and templates are defined in the agent configuration file, the agent will attempt to acquire a valid access token using the set authentication method outlined in the agent's configuration.
|
||||
If this initial attempt is unsuccessful, the agent will momentarily pause before continuing to make more attempts.
|
||||
|
||||
Once the agent successfully obtains a valid access token, the agent proceeds to fetch the secrets from Infisical using it.
|
||||
Once the agent successfully obtains a valid access token, the agent proceeds to fetch the secrets from Infisical using it.
|
||||
It then formats these secrets using the user-provided templates and writes the formatted data to the configured file paths.
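
For example, with a template that emits `{{ .Key }}={{ .Value }}` lines (similar to the one in the quick start further down this page), the file written to the template's `destination-path` would simply contain one `KEY=VALUE` pair per line. The path and values below are placeholders:

```bash
cat /path/to/rendered-secrets-file
# Example output (placeholder values):
# DB_HOST=db.internal.example
# DB_PASSWORD=placeholder-value
```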
|
||||
|
||||
## Agent configuration file
|
||||
## Agent configuration file
|
||||
|
||||
To set up the authentication method for token renewal and to define secret templates, the Infisical agent requires a YAML configuration file containing properties defined below.
|
||||
To set up the authentication method for token renewal and to define secret templates, the Infisical agent requires a YAML configuration file containing properties defined below.
|
||||
While specifying an authentication method is mandatory to start the agent, configuring sinks and secret templates is optional.
|
||||
|
||||
| Field | Description |
|
||||
| ------------------------------------------------| ----------------------------- |
|
||||
| `infisical.address` | The URL of the Infisical service. Default: `"https://app.infisical.com"`. |
|
||||
| `auth.type` | The type of authentication method used. Available options: `universal-auth`, `kubernetes`, `azure`, `gcp-id-token`, `gcp-iam`, `aws-iam`|
|
||||
| `auth.config.identity-id` | The file path where the machine identity id is stored<br/><br/>This field is required when using any of the following auth types: `kubernetes`, `azure`, `gcp-id-token`, `gcp-iam`, or `aws-iam`. |
|
||||
| `auth.config.service-account-token` | Path to the Kubernetes service account token to use (optional)<br/><br/>Default: `/var/run/secrets/kubernetes.io/serviceaccount/token` |
|
||||
| `auth.config.service-account-key` | Path to your GCP service account key file. This field is required when using `gcp-iam` auth type.<br/><br/>Please note that the file should be in JSON format. |
|
||||
| `auth.config.client-id` | The file path where the universal-auth client id is stored. |
|
||||
| `auth.config.client-secret` | The file path where the universal-auth client secret is stored. |
|
||||
| `auth.config.remove_client_secret_on_read` | This will instruct the agent to remove the client secret from disk. |
|
||||
| `sinks[].type` | The type of sink in a list of sinks. Each item specifies a sink type. Currently, only `"file"` type is available. |
|
||||
| `sinks[].config.path` | The file path where the access token should be stored for each sink in the list. |
|
||||
| `templates[].source-path` | The path to the template file that should be used to render secrets. |
|
||||
| `templates[].destination-path` | The path where the rendered secrets from the source template will be saved to. |
|
||||
| `templates[].config.polling-interval` | How frequently to check for secret changes. Default: `5 minutes` (optional) |
|
||||
| `templates[].config.execute.command` | The command to execute when secret change is detected (optional) |
|
||||
| `templates[].config.execute.timeout` | How long in seconds to wait for command to execute before timing out (optional) |
|
||||
|
||||
| Field | Description |
|
||||
| ------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| `infisical.address` | The URL of the Infisical service. Default: `"https://app.infisical.com"`. |
|
||||
| `auth.type` | The type of authentication method used. Available options: `universal-auth`, `kubernetes`, `azure`, `gcp-id-token`, `gcp-iam`, `aws-iam` |
|
||||
| `auth.config.identity-id` | The file path where the machine identity id is stored<br/><br/>This field is required when using any of the following auth types: `kubernetes`, `azure`, `gcp-id-token`, `gcp-iam`, or `aws-iam`. |
|
||||
| `auth.config.service-account-token` | Path to the Kubernetes service account token to use (optional)<br/><br/>Default: `/var/run/secrets/kubernetes.io/serviceaccount/token` |
|
||||
| `auth.config.service-account-key` | Path to your GCP service account key file. This field is required when using `gcp-iam` auth type.<br/><br/>Please note that the file should be in JSON format. |
|
||||
| `auth.config.client-id` | The file path where the universal-auth client id is stored. |
|
||||
| `auth.config.client-secret` | The file path where the universal-auth client secret is stored. |
|
||||
| `auth.config.remove_client_secret_on_read` | This will instruct the agent to remove the client secret from disk. |
|
||||
| `sinks[].type` | The type of sink in a list of sinks. Each item specifies a sink type. Currently, only `"file"` type is available. |
|
||||
| `sinks[].config.path` | The file path where the access token should be stored for each sink in the list. |
|
||||
| `templates[].source-path` | The path to the template file that should be used to render secrets. |
|
||||
| `templates[].template-content` | The inline secret template to be used for rendering the secrets. |
|
||||
| `templates[].destination-path` | The path where the rendered secrets from the source template will be saved to. |
|
||||
| `templates[].config.polling-interval` | How frequently to check for secret changes. Default: `5 minutes` (optional) |
|
||||
| `templates[].config.execute.command` | The command to execute when secret change is detected (optional) |
|
||||
| `templates[].config.execute.timeout` | How long in seconds to wait for command to execute before timing out (optional) |
|
||||
|
||||
## Authentication
|
||||
|
||||
@@ -68,19 +72,20 @@ The Infisical agent supports multiple authentication methods. Below are the avai
|
||||
<Accordion title="Universal Auth">
|
||||
The Universal Auth method is a simple and secure way to authenticate with Infisical. It requires a client ID and a client secret to authenticate with Infisical.
|
||||
|
||||
<ParamField query="config" type="UniversalAuthConfig">
|
||||
<Expandable title="properties">
|
||||
<ParamField query="client-id" type="string" required>
|
||||
Path to the file containing the universal auth client ID.
|
||||
</ParamField>
|
||||
<ParamField query="client-secret" type="string" required>
|
||||
Path to the file containing the universal auth client secret.
|
||||
</ParamField>
|
||||
<ParamField query="remove_client_secret_on_read" type="boolean" optional>
|
||||
Instructs the agent to remove the client secret from disk after reading it.
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
<ParamField query="config" type="UniversalAuthConfig">
|
||||
<Expandable title="properties">
|
||||
<ParamField query="client-id" type="string" required>
|
||||
Path to the file containing the universal auth client ID.
|
||||
</ParamField>
|
||||
<ParamField query="client-secret" type="string" required>
|
||||
Path to the file containing the universal auth client secret.
|
||||
</ParamField>
|
||||
<ParamField query="remove_client_secret_on_read" type="boolean" optional>
|
||||
Instructs the agent to remove the client secret from disk after reading
|
||||
it.
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
<Steps>
|
||||
<Step title="Create a universal auth machine identity">
|
||||
@@ -98,21 +103,25 @@ The Infisical agent supports multiple authentication methods. Below are the avai
|
||||
remove_client_secret_on_read: false # Optional field, instructs the agent to remove the client secret from disk after reading it
|
||||
```
|
||||
</Step>
|
||||
|
||||
</Steps>
|
||||
</Accordion>
|
||||
<Accordion title="Native Kubernetes">
|
||||
The Native Kubernetes method is used to authenticate with Infisical when running in a Kubernetes environment. It requires a service account token to authenticate with Infisical.
|
||||
|
||||
<ParamField query="config" type="KubernetesAuthConfig">
|
||||
<Expandable title="properties">
|
||||
<ParamField query="identity-id" type="string" required>
|
||||
Path to the file containing the machine identity ID.
|
||||
</ParamField>
|
||||
<ParamField query="service-account-token" type="string" optional>
|
||||
Path to the Kubernetes service account token to use. Default: `/var/run/secrets/kubernetes.io/serviceaccount/token`.
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
{" "}
|
||||
|
||||
<ParamField query="config" type="KubernetesAuthConfig">
|
||||
<Expandable title="properties">
|
||||
<ParamField query="identity-id" type="string" required>
|
||||
Path to the file containing the machine identity ID.
|
||||
</ParamField>
|
||||
<ParamField query="service-account-token" type="string" optional>
|
||||
Path to the Kubernetes service account token to use. Default:
|
||||
`/var/run/secrets/kubernetes.io/serviceaccount/token`.
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
<Steps>
|
||||
<Step title="Create a Kubernetes machine identity">
|
||||
@@ -129,6 +138,7 @@ The Infisical agent supports multiple authentication methods. Below are the avai
|
||||
service-account-token: "/var/run/secrets/kubernetes.io/serviceaccount/token" # Optional field, custom path to the Kubernetes service account token to use
|
||||
```
|
||||
</Step>
|
||||
|
||||
</Steps>
|
||||
|
||||
</Accordion>
|
||||
@@ -186,6 +196,7 @@ The Infisical agent supports multiple authentication methods. Below are the avai
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
</Accordion>
|
||||
<Accordion title="GCP IAM">
|
||||
The GCP IAM method is used to authenticate with Infisical with a GCP service account key.
|
||||
@@ -217,6 +228,7 @@ The Infisical agent supports multiple authentication methods. Below are the avai
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
</Accordion>
|
||||
<Accordion title="Native AWS IAM">
|
||||
The AWS IAM method is used to authenticate with Infisical with an AWS IAM role while running in an AWS environment like EC2, Lambda, etc.
|
||||
@@ -244,10 +256,12 @@ The Infisical agent supports multiple authentication methods. Below are the avai
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
</Accordion>
|
||||
</AccordionGroup>
|
||||
|
||||
## Quick start Infisical Agent
|
||||
|
||||
To install the Infisical agent, you must first install the [Infisical CLI](../cli/overview) in the desired environment where you'd like the agent to run. This is because the Infisical agent is a sub-command of the Infisical CLI.
|
||||
|
||||
Once you have the CLI installed, you will need to provision programmatic access for the agent via [Universal Auth](/documentation/platform/identities/universal-auth). To obtain a **Client ID** and a **Client Secret**, follow the step by step guide outlined [here](/documentation/platform/identities/universal-auth).
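
Because the agent reads these Universal Auth credentials from files (see `auth.config.client-id` and `auth.config.client-secret` above), one way to stage them is to write each value into its own file. The paths below are only examples and should match whatever paths you reference in your agent configuration:

```bash
# Store the Universal Auth credentials where the agent config will look for them.
# Paths are illustrative; keep the files readable only by the agent's user.
sudo mkdir -p /etc/infisical
echo -n "<client-id>"     | sudo tee /etc/infisical/client-id     > /dev/null
echo -n "<client-secret>" | sudo tee /etc/infisical/client-secret > /dev/null
sudo chmod 600 /etc/infisical/client-id /etc/infisical/client-secret
```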
|
||||
@@ -277,8 +291,8 @@ templates:
|
||||
command: ./reload-app.sh
|
||||
```
|
||||
|
||||
The secret template below will be used to render the secrets with the key and the value separated by `=` sign. You'll notice that a custom function named `secret` is used to fetch the secrets.
|
||||
This function takes the following arguments: `secret "<project-id>" "<environment-slug>" "<secret-path>"`.
|
||||
The secret template below will be used to render the secrets with the key and the value separated by `=` sign. You'll notice that a custom function named `secret` is used to fetch the secrets.
|
||||
This function takes the following arguments: `secret "<project-id>" "<environment-slug>" "<secret-path>"`.
|
||||
|
||||
```text my-dot-ev-secret-template
|
||||
{{- with secret "6553ccb2b7da580d7f6e7260" "dev" "/" }}
|
||||
@@ -290,13 +304,12 @@ This function takes the following arguments: `secret "<project-id>" "<environmen
|
||||
|
||||
After defining the agent configuration file, run the command below pointing to the path where the agent configuration file is located.
|
||||
|
||||
|
||||
```bash
|
||||
```bash
|
||||
infisical agent --config example-agent-config-file.yaml
|
||||
```
|
||||
|
||||
|
||||
### Available secret template functions
|
||||
|
||||
<Accordion title="listSecrets">
|
||||
```bash
|
||||
listSecrets "<project-id>" "environment-slug" "<secret-path>"
|
||||
@@ -309,11 +322,12 @@ infisical agent --config example-agent-config-file.yaml
|
||||
{{- end }}
|
||||
```
|
||||
|
||||
**Function name**: listSecrets
|
||||
**Function name**: listSecrets
|
||||
|
||||
**Description**: This function can be used to render the full list of secrets within a given project, environment and secret path.
|
||||
**Description**: This function can be used to render the full list of secrets within a given project, environment and secret path.
|
||||
|
||||
**Returns**: A list of secret objects with the following keys `Key, WorkspaceId, Value, Type, ID, and Comment`
|
||||
|
||||
**Returns**: A list of secret objects with the following keys `Key, WorkspaceId, Value, Type, ID, and Comment`
|
||||
</Accordion>
|
||||
|
||||
<Accordion title="getSecretByName">
|
||||
@@ -321,18 +335,18 @@ infisical agent --config example-agent-config-file.yaml
|
||||
getSecretByName "<project-id>" "<environment-slug>" "<secret-path>" "<secret-name>"
|
||||
```
|
||||
|
||||
```bash example-template-usage
|
||||
{{ with getSecretByName "d821f21d-aa90-453b-8448-8c78c1160a0e" "dev" "/" "POSTHOG_HOST"}}
|
||||
{{ if .Value }}
|
||||
password = "{{ .Value }}"
|
||||
{{ end }}
|
||||
{{ end }}
|
||||
```
|
||||
```bash example-template-usage
|
||||
{{ with getSecretByName "d821f21d-aa90-453b-8448-8c78c1160a0e" "dev" "/" "POSTHOG_HOST"}}
|
||||
{{ if .Value }}
|
||||
password = "{{ .Value }}"
|
||||
{{ end }}
|
||||
{{ end }}
|
||||
```
|
||||
|
||||
**Function name**: getSecretByName
|
||||
**Function name**: getSecretByName
|
||||
|
||||
**Description**: This function can be used to render a single secret by its name.
|
||||
**Description**: This function can be used to render a single secret by its name.
|
||||
|
||||
**Returns**: A single secret object with the following keys `Key, WorkspaceId, Value, Type, ID, and Comment`
|
||||
**Returns**: A single secret object with the following keys `Key, WorkspaceId, Value, Type, ID, and Comment`
|
||||
|
||||
</Accordion>
|
||||
|
@@ -156,7 +156,9 @@
|
||||
"documentation/platform/dynamic-secrets/cassandra",
|
||||
"documentation/platform/dynamic-secrets/redis",
|
||||
"documentation/platform/dynamic-secrets/aws-elasticache",
|
||||
"documentation/platform/dynamic-secrets/aws-iam"
|
||||
"documentation/platform/dynamic-secrets/elastic-search",
|
||||
"documentation/platform/dynamic-secrets/aws-iam",
|
||||
"documentation/platform/dynamic-secrets/mongo-atlas"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
187
frontend/package-lock.json
generated
@@ -4,6 +4,7 @@
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "frontend",
|
||||
"dependencies": {
|
||||
"@casl/ability": "^6.5.0",
|
||||
"@casl/react": "^3.1.0",
|
||||
@@ -84,6 +85,7 @@
|
||||
"react-grid-layout": "^1.3.4",
|
||||
"react-hook-form": "^7.43.0",
|
||||
"react-i18next": "^12.2.2",
|
||||
"react-icons": "^5.3.0",
|
||||
"react-mailchimp-subscribe": "^2.1.3",
|
||||
"react-markdown": "^8.0.3",
|
||||
"react-redux": "^8.0.2",
|
||||
@@ -8609,26 +8611,6 @@
|
||||
"integrity": "sha512-AjwI4MvWx3HAOaZqYsjKWyEObT9lcVV0Y0V8nXo6cXzN8ZiMxVhf6F3d/UNvXVGKrEzL/Dluc5p+y9GkzlTWig==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/eslint": {
|
||||
"version": "8.56.2",
|
||||
"resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.56.2.tgz",
|
||||
"integrity": "sha512-uQDwm1wFHmbBbCZCqAlq6Do9LYwByNZHWzXppSnay9SuwJ+VRbjkbLABer54kcPnMSlG6Fdiy2yaFXm/z9Z5gw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@types/estree": "*",
|
||||
"@types/json-schema": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/eslint-scope": {
|
||||
"version": "3.7.7",
|
||||
"resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.7.tgz",
|
||||
"integrity": "sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@types/eslint": "*",
|
||||
"@types/estree": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/estree": {
|
||||
"version": "0.0.51",
|
||||
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.51.tgz",
|
||||
@@ -9298,9 +9280,9 @@
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@webassemblyjs/ast": {
|
||||
"version": "1.11.6",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.6.tgz",
|
||||
"integrity": "sha512-IN1xI7PwOvLPgjcf180gC1bqn3q/QaOCwYUahIOhbYUu8KA/3tw2RT/T0Gidi1l7Hhj5D/INhJxiICObqpMu4Q==",
|
||||
"version": "1.12.1",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.12.1.tgz",
|
||||
"integrity": "sha512-EKfMUOPRRUTy5UII4qJDGPpqfwjOmZ5jeGFwid9mnoqIFK+e0vqoi1qH56JpmZSzEL53jKnNzScdmftJyG5xWg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@webassemblyjs/helper-numbers": "1.11.6",
|
||||
@@ -9320,9 +9302,9 @@
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@webassemblyjs/helper-buffer": {
|
||||
"version": "1.11.6",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.6.tgz",
|
||||
"integrity": "sha512-z3nFzdcp1mb8nEOFFk8DrYLpHvhKC3grJD2ardfKOzmbmJvEf/tPIqCY+sNcwZIY8ZD7IkB2l7/pqhUhqm7hLA==",
|
||||
"version": "1.12.1",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.12.1.tgz",
|
||||
"integrity": "sha512-nzJwQw99DNDKr9BVCOZcLuJJUlqkJh+kVzVl6Fmq/tI5ZtEyWT1KZMyOXltXLZJmDtvLCDgwsyrkohEtopTXCw==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@webassemblyjs/helper-numbers": {
|
||||
@@ -9343,15 +9325,15 @@
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@webassemblyjs/helper-wasm-section": {
|
||||
"version": "1.11.6",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.6.tgz",
|
||||
"integrity": "sha512-LPpZbSOwTpEC2cgn4hTydySy1Ke+XEu+ETXuoyvuyezHO3Kjdu90KK95Sh9xTbmjrCsUwvWwCOQQNta37VrS9g==",
|
||||
"version": "1.12.1",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.12.1.tgz",
|
||||
"integrity": "sha512-Jif4vfB6FJlUlSbgEMHUyk1j234GTNG9dBJ4XJdOySoj518Xj0oGsNi59cUQF4RRMS9ouBUxDDdyBVfPTypa5g==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@webassemblyjs/ast": "1.11.6",
|
||||
"@webassemblyjs/helper-buffer": "1.11.6",
|
||||
"@webassemblyjs/ast": "1.12.1",
|
||||
"@webassemblyjs/helper-buffer": "1.12.1",
|
||||
"@webassemblyjs/helper-wasm-bytecode": "1.11.6",
|
||||
"@webassemblyjs/wasm-gen": "1.11.6"
|
||||
"@webassemblyjs/wasm-gen": "1.12.1"
|
||||
}
|
||||
},
|
||||
"node_modules/@webassemblyjs/ieee754": {
|
||||
@@ -9379,28 +9361,28 @@
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@webassemblyjs/wasm-edit": {
|
||||
"version": "1.11.6",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.6.tgz",
|
||||
"integrity": "sha512-Ybn2I6fnfIGuCR+Faaz7YcvtBKxvoLV3Lebn1tM4o/IAJzmi9AWYIPWpyBfU8cC+JxAO57bk4+zdsTjJR+VTOw==",
|
||||
"version": "1.12.1",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.12.1.tgz",
|
||||
"integrity": "sha512-1DuwbVvADvS5mGnXbE+c9NfA8QRcZ6iKquqjjmR10k6o+zzsRVesil54DKexiowcFCPdr/Q0qaMgB01+SQ1u6g==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@webassemblyjs/ast": "1.11.6",
|
||||
"@webassemblyjs/helper-buffer": "1.11.6",
|
||||
"@webassemblyjs/ast": "1.12.1",
|
||||
"@webassemblyjs/helper-buffer": "1.12.1",
|
||||
"@webassemblyjs/helper-wasm-bytecode": "1.11.6",
|
||||
"@webassemblyjs/helper-wasm-section": "1.11.6",
|
||||
"@webassemblyjs/wasm-gen": "1.11.6",
|
||||
"@webassemblyjs/wasm-opt": "1.11.6",
|
||||
"@webassemblyjs/wasm-parser": "1.11.6",
|
||||
"@webassemblyjs/wast-printer": "1.11.6"
|
||||
"@webassemblyjs/helper-wasm-section": "1.12.1",
|
||||
"@webassemblyjs/wasm-gen": "1.12.1",
|
||||
"@webassemblyjs/wasm-opt": "1.12.1",
|
||||
"@webassemblyjs/wasm-parser": "1.12.1",
|
||||
"@webassemblyjs/wast-printer": "1.12.1"
|
||||
}
|
||||
},
|
||||
"node_modules/@webassemblyjs/wasm-gen": {
|
||||
"version": "1.11.6",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.6.tgz",
|
||||
"integrity": "sha512-3XOqkZP/y6B4F0PBAXvI1/bky7GryoogUtfwExeP/v7Nzwo1QLcq5oQmpKlftZLbT+ERUOAZVQjuNVak6UXjPA==",
|
||||
"version": "1.12.1",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.12.1.tgz",
|
||||
"integrity": "sha512-TDq4Ojh9fcohAw6OIMXqiIcTq5KUXTGRkVxbSo1hQnSy6lAM5GSdfwWeSxpAo0YzgsgF182E/U0mDNhuA0tW7w==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@webassemblyjs/ast": "1.11.6",
|
||||
"@webassemblyjs/ast": "1.12.1",
|
||||
"@webassemblyjs/helper-wasm-bytecode": "1.11.6",
|
||||
"@webassemblyjs/ieee754": "1.11.6",
|
||||
"@webassemblyjs/leb128": "1.11.6",
|
||||
@@ -9408,24 +9390,24 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@webassemblyjs/wasm-opt": {
|
||||
"version": "1.11.6",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.6.tgz",
|
||||
"integrity": "sha512-cOrKuLRE7PCe6AsOVl7WasYf3wbSo4CeOk6PkrjS7g57MFfVUF9u6ysQBBODX0LdgSvQqRiGz3CXvIDKcPNy4g==",
|
||||
"version": "1.12.1",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.12.1.tgz",
|
||||
"integrity": "sha512-Jg99j/2gG2iaz3hijw857AVYekZe2SAskcqlWIZXjji5WStnOpVoat3gQfT/Q5tb2djnCjBtMocY/Su1GfxPBg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@webassemblyjs/ast": "1.11.6",
|
||||
"@webassemblyjs/helper-buffer": "1.11.6",
|
||||
"@webassemblyjs/wasm-gen": "1.11.6",
|
||||
"@webassemblyjs/wasm-parser": "1.11.6"
|
||||
"@webassemblyjs/ast": "1.12.1",
|
||||
"@webassemblyjs/helper-buffer": "1.12.1",
|
||||
"@webassemblyjs/wasm-gen": "1.12.1",
|
||||
"@webassemblyjs/wasm-parser": "1.12.1"
|
||||
}
|
||||
},
|
||||
"node_modules/@webassemblyjs/wasm-parser": {
|
||||
"version": "1.11.6",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.6.tgz",
|
||||
"integrity": "sha512-6ZwPeGzMJM3Dqp3hCsLgESxBGtT/OeCvCZ4TA1JUPYgmhAx38tTPR9JaKy0S5H3evQpO/h2uWs2j6Yc/fjkpTQ==",
|
||||
"version": "1.12.1",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.12.1.tgz",
|
||||
"integrity": "sha512-xikIi7c2FHXysxXe3COrVUPSheuBtpcfhbpFj4gmu7KRLYOzANztwUU0IbsqvMqzuNK2+glRGWCEqZo1WCLyAQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@webassemblyjs/ast": "1.11.6",
|
||||
"@webassemblyjs/ast": "1.12.1",
|
||||
"@webassemblyjs/helper-api-error": "1.11.6",
|
||||
"@webassemblyjs/helper-wasm-bytecode": "1.11.6",
|
||||
"@webassemblyjs/ieee754": "1.11.6",
|
||||
@@ -9434,12 +9416,12 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@webassemblyjs/wast-printer": {
|
||||
"version": "1.11.6",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.6.tgz",
|
||||
"integrity": "sha512-JM7AhRcE+yW2GWYaKeHL5vt4xqee5N2WcezptmgyhNS+ScggqcT1OtXykhAb13Sn5Yas0j2uv9tHgrjwvzAP4A==",
|
||||
"version": "1.12.1",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.12.1.tgz",
|
||||
"integrity": "sha512-+X4WAlOisVWQMikjbcvY2e0rwPsKQ9F688lksZhBcPycBBuii3O7m8FACbDMWDojpAqvjIncrG8J0XHKyQfVeA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@webassemblyjs/ast": "1.11.6",
|
||||
"@webassemblyjs/ast": "1.12.1",
|
||||
"@xtuc/long": "4.2.2"
|
||||
}
|
||||
},
|
||||
@@ -12933,9 +12915,9 @@
|
||||
"integrity": "sha512-JGmudTwg7yxMYvR/gWbalqqQiyu7WTFv2Xu3vw4cJHXPFxNgAk0oy8UHaer8nLF4lZJa+rNoj6GsrKIVJTV6Tw=="
|
||||
},
|
||||
"node_modules/elliptic": {
|
||||
"version": "6.5.4",
|
||||
"resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz",
|
||||
"integrity": "sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==",
|
||||
"version": "6.5.7",
|
||||
"resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.7.tgz",
|
||||
"integrity": "sha512-ESVCtTwiA+XhY3wyh24QqRGBoP3rEdDUl3EDUUo9tft074fi19IrdpH7hLCMMP3CIj7jb3W96rn8lt/BqIlt5Q==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"bn.js": "^4.11.9",
|
||||
@@ -12998,9 +12980,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/enhanced-resolve": {
|
||||
"version": "5.15.0",
|
||||
"resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz",
|
||||
"integrity": "sha512-LXYT42KJ7lpIKECr2mAXIaMldcNCh/7E0KBKOu4KSfkHmP+mZmSs+8V5gBAqisWBy0OO4W5Oyys0GO1Y8KtdKg==",
|
||||
"version": "5.17.1",
|
||||
"resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz",
|
||||
"integrity": "sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"graceful-fs": "^4.2.4",
|
||||
@@ -15867,11 +15849,11 @@
|
||||
}
|
||||
},
|
||||
"node_modules/infisical-node/node_modules/axios": {
|
||||
"version": "1.6.5",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.6.5.tgz",
|
||||
"integrity": "sha512-Ii012v05KEVuUoFWmMW/UQv9aRIc3ZwkWDcM+h5Il8izZCtRVpDUfwpoFf7eOtajT3QiGR4yDUx7lPqHJULgbg==",
|
||||
"version": "1.7.5",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.5.tgz",
|
||||
"integrity": "sha512-fZu86yCo+svH3uqJ/yTdQ0QHpQu5oL+/QE+QPSv6BZSkDAoky9vytxp7u5qk83OJFS3kEBcesWni9WTZAv3tSw==",
|
||||
"dependencies": {
|
||||
"follow-redirects": "^1.15.4",
|
||||
"follow-redirects": "^1.15.6",
|
||||
"form-data": "^4.0.0",
|
||||
"proxy-from-env": "^1.1.0"
|
||||
}
|
||||
@@ -18133,12 +18115,12 @@
|
||||
]
|
||||
},
|
||||
"node_modules/micromatch": {
|
||||
"version": "4.0.5",
|
||||
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz",
|
||||
"integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==",
|
||||
"version": "4.0.8",
|
||||
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
|
||||
"integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"braces": "^3.0.2",
|
||||
"braces": "^3.0.3",
|
||||
"picomatch": "^2.3.1"
|
||||
},
|
||||
"engines": {
|
||||
@@ -20968,6 +20950,14 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/react-icons": {
|
||||
"version": "5.3.0",
|
||||
"resolved": "https://registry.npmjs.org/react-icons/-/react-icons-5.3.0.tgz",
|
||||
"integrity": "sha512-DnUk8aFbTyQPSkCfF8dbX6kQjXA9DktMeJqfjrg6cK9vwQVMxmcA3BfP4QoiztVmEHtwlTgLFsPuH2NskKT6eg==",
|
||||
"peerDependencies": {
|
||||
"react": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/react-is": {
|
||||
"version": "18.2.0",
|
||||
"resolved": "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz",
|
||||
@@ -24479,9 +24469,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/watchpack": {
|
||||
"version": "2.4.0",
|
||||
"resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.0.tgz",
|
||||
"integrity": "sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg==",
|
||||
"version": "2.4.2",
|
||||
"resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.2.tgz",
|
||||
"integrity": "sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"glob-to-regexp": "^0.4.1",
|
||||
@@ -24506,34 +24496,33 @@
|
||||
"integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="
|
||||
},
|
||||
"node_modules/webpack": {
|
||||
"version": "5.89.0",
|
||||
"resolved": "https://registry.npmjs.org/webpack/-/webpack-5.89.0.tgz",
|
||||
"integrity": "sha512-qyfIC10pOr70V+jkmud8tMfajraGCZMBWJtrmuBymQKCrLTRejBI8STDp1MCyZu/QTdZSeacCQYpYNQVOzX5kw==",
|
||||
"version": "5.94.0",
|
||||
"resolved": "https://registry.npmjs.org/webpack/-/webpack-5.94.0.tgz",
|
||||
"integrity": "sha512-KcsGn50VT+06JH/iunZJedYGUJS5FGjow8wb9c0v5n1Om8O1g4L6LjtfxwlXIATopoQu+vOXXa7gYisWxCoPyg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@types/eslint-scope": "^3.7.3",
|
||||
"@types/estree": "^1.0.0",
|
||||
"@webassemblyjs/ast": "^1.11.5",
|
||||
"@webassemblyjs/wasm-edit": "^1.11.5",
|
||||
"@webassemblyjs/wasm-parser": "^1.11.5",
|
||||
"@types/estree": "^1.0.5",
|
||||
"@webassemblyjs/ast": "^1.12.1",
|
||||
"@webassemblyjs/wasm-edit": "^1.12.1",
|
||||
"@webassemblyjs/wasm-parser": "^1.12.1",
|
||||
"acorn": "^8.7.1",
|
||||
"acorn-import-assertions": "^1.9.0",
|
||||
"browserslist": "^4.14.5",
|
||||
"acorn-import-attributes": "^1.9.5",
|
||||
"browserslist": "^4.21.10",
|
||||
"chrome-trace-event": "^1.0.2",
|
||||
"enhanced-resolve": "^5.15.0",
|
||||
"enhanced-resolve": "^5.17.1",
|
||||
"es-module-lexer": "^1.2.1",
|
||||
"eslint-scope": "5.1.1",
|
||||
"events": "^3.2.0",
|
||||
"glob-to-regexp": "^0.4.1",
|
||||
"graceful-fs": "^4.2.9",
|
||||
"graceful-fs": "^4.2.11",
|
||||
"json-parse-even-better-errors": "^2.3.1",
|
||||
"loader-runner": "^4.2.0",
|
||||
"mime-types": "^2.1.27",
|
||||
"neo-async": "^2.6.2",
|
||||
"schema-utils": "^3.2.0",
|
||||
"tapable": "^2.1.1",
|
||||
"terser-webpack-plugin": "^5.3.7",
|
||||
"watchpack": "^2.4.0",
|
||||
"terser-webpack-plugin": "^5.3.10",
|
||||
"watchpack": "^2.4.1",
|
||||
"webpack-sources": "^3.2.3"
|
||||
},
|
||||
"bin": {
|
||||
@@ -24666,9 +24655,9 @@
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/webpack/node_modules/acorn": {
|
||||
"version": "8.11.3",
|
||||
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz",
|
||||
"integrity": "sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==",
|
||||
"version": "8.12.1",
|
||||
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz",
|
||||
"integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"acorn": "bin/acorn"
|
||||
@@ -24677,10 +24666,10 @@
|
||||
"node": ">=0.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/webpack/node_modules/acorn-import-assertions": {
|
||||
"version": "1.9.0",
|
||||
"resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz",
|
||||
"integrity": "sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA==",
|
||||
"node_modules/webpack/node_modules/acorn-import-attributes": {
|
||||
"version": "1.9.5",
|
||||
"resolved": "https://registry.npmjs.org/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz",
|
||||
"integrity": "sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ==",
|
||||
"dev": true,
|
||||
"peerDependencies": {
|
||||
"acorn": "^8"
|
||||
|
@@ -92,6 +92,7 @@
|
||||
"react-grid-layout": "^1.3.4",
|
||||
"react-hook-form": "^7.43.0",
|
||||
"react-i18next": "^12.2.2",
|
||||
"react-icons": "^5.3.0",
|
||||
"react-mailchimp-subscribe": "^2.1.3",
|
||||
"react-markdown": "^8.0.3",
|
||||
"react-redux": "^8.0.2",
|
||||
|
@@ -9,36 +9,42 @@ import { Tooltip } from "../Tooltip";
|
||||
export type FormLabelProps = {
|
||||
id?: string;
|
||||
isRequired?: boolean;
|
||||
isOptional?:boolean;
|
||||
isOptional?: boolean;
|
||||
label?: ReactNode;
|
||||
icon?: ReactNode;
|
||||
className?: string;
|
||||
tooltipText?: string;
|
||||
tooltipClassName?: string;
|
||||
tooltipText?: ReactNode;
|
||||
};
|
||||
|
||||
export const FormLabel = ({ id, label, isRequired, icon, className,isOptional, tooltipText }: FormLabelProps) => (
|
||||
export const FormLabel = ({
|
||||
id,
|
||||
label,
|
||||
isRequired,
|
||||
icon,
|
||||
className,
|
||||
isOptional,
|
||||
tooltipClassName,
|
||||
tooltipText
|
||||
}: FormLabelProps) => (
|
||||
<Label.Root
|
||||
className={twMerge(
|
||||
"mb-0.5 ml-1 flex items-center text-sm font-normal text-mineshaft-400",
|
||||
"mb-0.5 flex items-center text-sm font-normal text-mineshaft-400",
|
||||
className
|
||||
)}
|
||||
htmlFor={id}
|
||||
>
|
||||
{label}
|
||||
{isRequired && <span className="ml-1 text-red">*</span>}
|
||||
{isOptional && <span className="ml-1 text-gray-500 italic text-xs">- Optional</span>}
|
||||
{isOptional && <span className="ml-1 text-xs italic text-gray-500">- Optional</span>}
|
||||
{icon && !tooltipText && (
|
||||
<span className="ml-2 cursor-default text-mineshaft-300 hover:text-mineshaft-200">
|
||||
{icon}
|
||||
</span>
|
||||
)}
|
||||
{tooltipText && (
|
||||
<Tooltip content={tooltipText}>
|
||||
<FontAwesomeIcon
|
||||
icon={faQuestionCircle}
|
||||
size="1x"
|
||||
className="ml-2"
|
||||
/>
|
||||
<Tooltip content={tooltipText} className={tooltipClassName}>
|
||||
<FontAwesomeIcon icon={faQuestionCircle} size="1x" className="ml-2" />
|
||||
</Tooltip>
|
||||
)}
|
||||
</Label.Root>
|
||||
|
@@ -20,7 +20,7 @@ export const useCreateAccessApprovalPolicy = () => {
|
||||
environment,
|
||||
projectSlug,
|
||||
approvals,
|
||||
approverUserIds,
|
||||
approvers,
|
||||
name,
|
||||
secretPath,
|
||||
enforcementLevel
|
||||
@@ -29,7 +29,7 @@ export const useCreateAccessApprovalPolicy = () => {
|
||||
environment,
|
||||
projectSlug,
|
||||
approvals,
|
||||
approverUserIds,
|
||||
approvers,
|
||||
secretPath,
|
||||
name,
|
||||
enforcementLevel
|
||||
|
@@ -130,7 +130,7 @@ export type TCreateAccessPolicyDTO = {
|
||||
projectSlug: string;
|
||||
name?: string;
|
||||
environment: string;
|
||||
approverUserIds?: string[];
|
||||
approvers?: string[];
|
||||
approvals?: number;
|
||||
secretPath?: string;
|
||||
enforcementLevel?: EnforcementLevel;
|
||||
|
@@ -20,7 +20,9 @@ export enum DynamicSecretProviders {
  Cassandra = "cassandra",
  AwsIam = "aws-iam",
  Redis = "redis",
  AwsElastiCache = "aws-elasticache"
  AwsElastiCache = "aws-elasticache",
  MongoAtlas = "mongo-db-atlas",
  ElasticSearch = "elastic-search"
}

export enum SqlProviders {
@@ -97,6 +99,44 @@ export type TDynamicSecretProvider =
        revocationStatement: string;
        ca?: string | undefined;
      };
    }
  | {
      type: DynamicSecretProviders.MongoAtlas;
      inputs: {
        adminPublicKey: string;
        adminPrivateKey: string;
        groupId: string;
        roles: {
          databaseName: string;
          roleName: string;
          collectionName?: string;
        }[];
        scopes?: {
          name: string;
          type: string;
        }[];
      };
    }
  | {
      type: DynamicSecretProviders.ElasticSearch;
      inputs: {
        host: string;
        port: number;
        ca?: string | undefined;
        roles: string[];

        auth:
          | {
              type: "user";
              username: string;
              password: string;
            }
          | {
              type: "api-key";
              apiKey: string;
              apiKeyId: string;
            };
      };
    };

export type TCreateDynamicSecretDTO = {
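For reference, an input object satisfying the new `MongoAtlas` member of `TDynamicSecretProvider` could look like the sketch below; every credential and identifier is a placeholder, not a value from this PR:

```ts
// Hypothetical MongoAtlas dynamic secret provider payload matching the union member above.
const mongoAtlasProvider: TDynamicSecretProvider = {
  type: DynamicSecretProviders.MongoAtlas,
  inputs: {
    adminPublicKey: "<atlas-public-key>",
    adminPrivateKey: "<atlas-private-key>",
    groupId: "<atlas-project-id>",
    roles: [{ databaseName: "admin", roleName: "readWrite", collectionName: "logs" }],
    scopes: [{ name: "Cluster0", type: "CLUSTER" }] // optional scoping; values illustrative
  }
};
```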
@@ -71,6 +71,8 @@ export const useCreateIntegration = () => {
        key: string;
        value: string;
      }[];
      githubVisibility?: string;
      githubVisibilityRepoIds?: string[];
      kmsKeyId?: string;
      shouldDisableDelete?: boolean;
      shouldMaskSecrets?: boolean;
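The integration mutation now carries GitHub visibility hints in its metadata. A hedged fragment of that payload (the visibility value and repository IDs are assumptions for illustration; the diff does not show the accepted values):

```ts
// Hypothetical metadata fragment for a GitHub integration using the new optional fields.
const metadata = {
  githubVisibility: "selected", // assumed value; exact accepted strings are not shown in this diff
  githubVisibilityRepoIds: ["123456789", "987654321"],
  shouldMaskSecrets: true
};
```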
@@ -34,6 +34,9 @@ export type TIntegration = {
  syncMessage?: string;
  __v: number;
  metadata?: {
    githubVisibility?: string;
    githubVisibilityRepoIds?: string[];
    secretSuffix?: string;
    syncBehavior?: IntegrationSyncBehavior;
    mappingBehavior?: IntegrationMappingBehavior;
@@ -7,7 +7,11 @@ import { TSharedSecret, TViewSharedSecretResponse } from "./types";

export const secretSharingKeys = {
  allSharedSecrets: () => ["sharedSecrets"] as const,
  specificSharedSecrets: ({ offset, limit }: { offset: number; limit: number }) =>
    [...secretSharingKeys.allSharedSecrets(), { offset, limit }] as const
    [...secretSharingKeys.allSharedSecrets(), { offset, limit }] as const,
  getSecretById: (arg: { id: string; hashedHex: string; password?: string }) => [
    "shared-secret",
    arg
  ]
};

export const useGetSharedSecrets = ({
@@ -38,31 +42,28 @@ export const useGetSharedSecrets = ({

export const useGetActiveSharedSecretById = ({
  sharedSecretId,
  hashedHex
  hashedHex,
  password
}: {
  sharedSecretId: string;
  hashedHex: string;
  password?: string;
}) => {
  return useQuery<TViewSharedSecretResponse, [string]>({
    enabled: Boolean(sharedSecretId) && Boolean(hashedHex),
    queryFn: async () => {
      const params = new URLSearchParams({
        hashedHex
      });

      const { data } = await apiRequest.get<TViewSharedSecretResponse>(
  return useQuery<TViewSharedSecretResponse>(
    secretSharingKeys.getSecretById({ id: sharedSecretId, hashedHex, password }),
    async () => {
      const { data } = await apiRequest.post<TViewSharedSecretResponse>(
        `/api/v1/secret-sharing/public/${sharedSecretId}`,
        {
          params
          hashedHex,
          password
        }
      );
      return {
        encryptedValue: data.encryptedValue,
        iv: data.iv,
        tag: data.tag,
        accessType: data.accessType,
        orgName: data.orgName
      };

      return data;
    },
    {
      enabled: Boolean(sharedSecretId) && Boolean(hashedHex)
    }
  });
  );
};
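Because the hook now POSTs and keys the query on `{ id, hashedHex, password }`, supplying a password simply re-runs the query under a new key. A minimal consumer sketch (the router fields and state variable are placeholders):

```ts
// Hypothetical consumer of the updated useGetActiveSharedSecretById hook.
const { data, isLoading } = useGetActiveSharedSecretById({
  sharedSecretId: router.query.id as string,
  hashedHex: router.query.hash as string,
  password: submittedPassword // undefined until the user enters one
});
```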
@@ -15,6 +15,7 @@ export type TSharedSecret = {

export type TCreateSharedSecretRequest = {
  name?: string;
  password?: string;
  encryptedValue: string;
  hashedHex: string;
  iv: string;
@@ -25,11 +26,14 @@ export type TCreateSharedSecretRequest = {
};

export type TViewSharedSecretResponse = {
  encryptedValue: string;
  iv: string;
  tag: string;
  accessType: SecretSharingAccessType;
  orgName?: string;
  isPasswordProtected: boolean;
  secret: {
    encryptedValue: string;
    iv: string;
    tag: string;
    accessType: SecretSharingAccessType;
    orgName?: string;
  };
};

export type TDeleteSharedSecretRequest = {
@@ -40,3 +44,4 @@ export enum SecretSharingAccessType {
  Anyone = "anyone",
  Organization = "organization"
}
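A consumer of the reshaped response can branch on `isPasswordProtected` before touching the nested `secret` payload; a sketch under the assumption that the caller already has its own decryption helpers and a `password` state value:

```ts
// Hypothetical handling of the new TViewSharedSecretResponse shape.
if (data.isPasswordProtected && !password) {
  // prompt the user for a password, then refetch with it
} else if (data.secret) {
  const { encryptedValue, iv, tag } = data.secret;
  // decrypt using the key material from the URL fragment (omitted here)
}
```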
@@ -41,4 +41,5 @@ export type SubscriptionPlan = {
  caCrl: boolean;
  instanceUserManagement: boolean;
  externalKms: boolean;
  pkiEst: boolean;
};
@@ -69,7 +69,7 @@ export default function AWSParameterStoreAuthorizeIntegrationPage() {
      subTitle="After adding the details below, you will be prompted to set up an integration for a particular Infisical project and environment."
    >
      <div className="flex flex-row items-center">
      <div className="inline flex items-center">
      <div className="flex items-center">
        <Image
          src="/images/integrations/Amazon Web Services.png"
          height={35}