Mirror of https://github.com/Infisical/infisical.git (synced 2025-07-22 13:29:55 +00:00)
Compare commits
161 Commits
daniel/add ... fix-azure-
SHA1
dcd0234fb5
c1cb85b49f
ed71e651f6
1a11dd954b
5d3574d3f6
9ce6fd3f8e
a549c8b9e3
1bc1feb843
80ca115ccd
5a6bb90870
de7a693a6a
096417281e
763a96faf8
870eaf9301
10abf192a1
508f697bdd
8ea8a6f72e
54e6f4b607
ea3b3c5cec
a8fd83652d
45f3675337
87a9a87dcd
0b882ece8c
e005e94165
0e07eaaa01
e10e313af3
e6c0bbb25b
2b39d9e6c4
cf42279e5b
fbc4b47198
4baa6b1d3d
74ee77f41e
ee1b12173a
1bfbc7047c
a410d560a7
99e150cc1d
f6deb0969a
1163e41e64
a0f93f995e
50fcf97a36
8e68d21115
364302a691
c8dc29d59b
3707b75349
6112bc9356
6c3156273c
f09e18a706
5d9a43a3fd
12154c869f
8d66272ab2
0e44e630cb
49c4929c9c
da561e37c5
ebc584d36f
656d979d7d
a29fb613b9
5382f3de2d
b2b858f7e8
8f3d328b9a
b7d683ee1b
9bd6ec19c4
03fd0a1eb9
97023e7714
1d23ed0680
302e068c74
95b92caff3
5d894b6d43
dab3e2efad
5e0b78b104
04cbbccd25
7f48e9d62e
8a0018eff2
e6a920caa3
11411ca4eb
b7c79fa45b
18951b99de
bd05c440c3
9ca5013a59
b65b8bc362
f494c182ff
2fae822e1f
5df140cbd5
d93cbb023d
9056d1be0c
5f503949eb
91ebcca0fd
9cf917de07
0826b40e2a
911b62c63a
ce7bb82f02
7cd092c0cf
cbfb9af0b9
ef236106b4
773a338397
afb5820113
5acc0fc243
c56469ecdb
c59a53180c
f56d265e62
cc0ff98d4f
4a14c3efd2
b2d2297914
836bb6d835
177eb2afee
594df18611
3bcb8bf6fc
23c362f9cd
a74c37c18b
3ece81d663
f6d87ebf32
23483ab7e1
fe31d44d22
58bab4d163
8f48a64fd6
929dc059c3
45e471b16a
7c540b6be8
7dbe8dd3c9
0dec602729
66ded779fc
01d24291f2
55b36b033e
8f461bf50c
1847491cb3
541c7b63cd
7e5e177680
40f552e4f1
ecb54ee3b3
35a63b8cc6
2a4596d415
35e476d916
ad49e9eaf1
fed60f7c03
1bc0e3087a
80a4f838a1
3ddb4cd27a
a5555c3816
8479c406a5
8e0b4254b1
069651bdb4
9061ec2dff
b0a5023723
69fe5bf71d
f12d4d80c6
56f2a3afa4
406da1b5f0
da45e132a3
fb719a9383
3c64359597
e420973dd2
15cc157c5f
ad89ffe94d
4de1713a18
1917e0fdb7
4b07234997
6a402950c3
63333159ca
ce4ba24ef2
f606e31b98
ecdbb3eb53
0321ec32fb
@@ -6,9 +6,15 @@ permissions:
  contents: read

jobs:
  infisical-tests:
    name: Run tests before deployment
    # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
    uses: ./.github/workflows/run-backend-tests.yml

  infisical-image:
    name: Build backend image
    runs-on: ubuntu-latest
    needs: [infisical-tests]
    steps:
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
35 backend/e2e-test/routes/v1/secret-approval-policy.spec.ts Normal file
@@ -0,0 +1,35 @@
import { seedData1 } from "@app/db/seed-data";

const createPolicy = async (dto: { name: string; secretPath: string; approvers: string[]; approvals: number }) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/secret-approvals`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: {
      workspaceId: seedData1.project.id,
      environment: seedData1.environment.slug,
      name: dto.name,
      secretPath: dto.secretPath,
      approvers: dto.approvers,
      approvals: dto.approvals
    }
  });

  expect(res.statusCode).toBe(200);
  return res.json().approval;
};

describe("Secret approval policy router", async () => {
  test("Create policy", async () => {
    const policy = await createPolicy({
      secretPath: "/",
      approvals: 1,
      approvers: [seedData1.id],
      name: "test-policy"
    });

    expect(policy.name).toBe("test-policy");
  });
});
@@ -1,73 +1,61 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";

import { seedData1 } from "@app/db/seed-data";

const createSecretImport = async (importPath: string, importEnv: string) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/secret-imports`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: {
      workspaceId: seedData1.project.id,
      environment: seedData1.environment.slug,
      path: "/",
      import: {
        environment: importEnv,
        path: importPath
      }
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImport");
  return payload.secretImport;
};

const deleteSecretImport = async (id: string) => {
  const res = await testServer.inject({
    method: "DELETE",
    url: `/api/v1/secret-imports/${id}`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: {
      workspaceId: seedData1.project.id,
      environment: seedData1.environment.slug,
      path: "/"
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImport");
  return payload.secretImport;
};

describe("Secret Import Router", async () => {
  test.each([
    { importEnv: "prod", importPath: "/" }, // one in root
    { importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones
  ])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
    // check for default environments
    const payload = await createSecretImport(importPath, importEnv);
    const payload = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath,
      importEnv
    });
    expect(payload).toEqual(
      expect.objectContaining({
        id: expect.any(String),
        importPath: expect.any(String),
        importPath,
        importEnv: expect.objectContaining({
          name: expect.any(String),
          slug: expect.any(String),
          slug: importEnv,
          id: expect.any(String)
        })
      })
    );
    await deleteSecretImport(payload.id);

    await deleteSecretImport({
      id: payload.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
  });

  test("Get secret imports", async () => {
    const createdImport1 = await createSecretImport("/", "prod");
    const createdImport2 = await createSecretImport("/", "staging");
    const createdImport1 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: "/",
      importEnv: "prod"
    });
    const createdImport2 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: "/",
      importEnv: "staging"
    });
    const res = await testServer.inject({
      method: "GET",
      url: `/api/v1/secret-imports`,
@@ -89,25 +77,60 @@ describe("Secret Import Router", async () => {
      expect.arrayContaining([
        expect.objectContaining({
          id: expect.any(String),
          importPath: expect.any(String),
          importPath: "/",
          importEnv: expect.objectContaining({
            name: expect.any(String),
            slug: expect.any(String),
            slug: "prod",
            id: expect.any(String)
          })
        }),
        expect.objectContaining({
          id: expect.any(String),
          importPath: "/",
          importEnv: expect.objectContaining({
            name: expect.any(String),
            slug: "staging",
            id: expect.any(String)
          })
        })
      ])
    );
    await deleteSecretImport(createdImport1.id);
    await deleteSecretImport(createdImport2.id);
    await deleteSecretImport({
      id: createdImport1.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    await deleteSecretImport({
      id: createdImport2.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
  });

  test("Update secret import position", async () => {
    const prodImportDetails = { path: "/", envSlug: "prod" };
    const stagingImportDetails = { path: "/", envSlug: "staging" };

    const createdImport1 = await createSecretImport(prodImportDetails.path, prodImportDetails.envSlug);
    const createdImport2 = await createSecretImport(stagingImportDetails.path, stagingImportDetails.envSlug);
    const createdImport1 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: prodImportDetails.path,
      importEnv: prodImportDetails.envSlug
    });
    const createdImport2 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: stagingImportDetails.path,
      importEnv: stagingImportDetails.envSlug
    });

    const updateImportRes = await testServer.inject({
      method: "PATCH",
@@ -161,22 +184,55 @@ describe("Secret Import Router", async () => {
    expect(secretImportList.secretImports[1].id).toEqual(createdImport1.id);
    expect(secretImportList.secretImports[0].id).toEqual(createdImport2.id);

    await deleteSecretImport(createdImport1.id);
    await deleteSecretImport(createdImport2.id);
    await deleteSecretImport({
      id: createdImport1.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    await deleteSecretImport({
      id: createdImport2.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
  });

  test("Delete secret import position", async () => {
    const createdImport1 = await createSecretImport("/", "prod");
    const createdImport2 = await createSecretImport("/", "staging");
    const deletedImport = await deleteSecretImport(createdImport1.id);
    const createdImport1 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: "/",
      importEnv: "prod"
    });
    const createdImport2 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: "/",
      importEnv: "staging"
    });
    const deletedImport = await deleteSecretImport({
      id: createdImport1.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });

    // check for default environments
    expect(deletedImport).toEqual(
      expect.objectContaining({
        id: expect.any(String),
        importPath: expect.any(String),
        importPath: "/",
        importEnv: expect.objectContaining({
          name: expect.any(String),
          slug: expect.any(String),
          slug: "prod",
          id: expect.any(String)
        })
      })
@@ -201,6 +257,552 @@ describe("Secret Import Router", async () => {
    expect(secretImportList.secretImports.length).toEqual(1);
    expect(secretImportList.secretImports[0].position).toEqual(1);

    await deleteSecretImport(createdImport2.id);
    await deleteSecretImport({
      id: createdImport2.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
  });
});

// dev <- stage <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
  "Secret import waterfall pattern testing - %path",
  ({ path: testSuitePath }) => {
    beforeAll(async () => {
      let prodFolder: { id: string };
      let stagingFolder: { id: string };
      let devFolder: { id: string };

      if (testSuitePath !== "/") {
        prodFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "prod",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        stagingFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "staging",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        devFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: seedData1.environment.slug,
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });
      }

      const devImportFromStage = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "staging"
      });

      const stageImportFromProd = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "prod"
      });

      return async () => {
        await deleteSecretImport({
          id: stageImportFromProd.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: "staging",
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        await deleteSecretImport({
          id: devImportFromStage.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        if (prodFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: prodFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "prod"
          });
        }

        if (stagingFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: stagingFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "staging"
          });
        }

        if (devFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: devFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: seedData1.environment.slug
          });
        }
      };
    });

    test("Check one level imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY",
        value: "stage-value"
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });

      expect(secret.secretKey).toBe("STAGING_KEY");
      expect(secret.secretValue).toBe("stage-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "STAGING_KEY",
                secretValue: "stage-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });
    });

    test("Check two level imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY",
        value: "prod-value"
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });

      expect(secret.secretKey).toBe("PROD_KEY");
      expect(secret.secretValue).toBe("prod-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "PROD_KEY",
                secretValue: "prod-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });
    });
  }
);

// dev <- stage, dev <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
  "Secret import multiple destination to one source pattern testing - %path",
  ({ path: testSuitePath }) => {
    beforeAll(async () => {
      let prodFolder: { id: string };
      let stagingFolder: { id: string };
      let devFolder: { id: string };

      if (testSuitePath !== "/") {
        prodFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "prod",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        stagingFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "staging",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        devFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: seedData1.environment.slug,
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });
      }

      const devImportFromStage = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "staging"
      });

      const devImportFromProd = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "prod"
      });

      return async () => {
        await deleteSecretImport({
          id: devImportFromProd.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        await deleteSecretImport({
          id: devImportFromStage.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        if (prodFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: prodFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "prod"
          });
        }

        if (stagingFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: stagingFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "staging"
          });
        }

        if (devFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: devFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: seedData1.environment.slug
          });
        }
      };
    });

    test("Check imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY",
        value: "stage-value"
      });

      await createSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY",
        value: "prod-value"
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });

      expect(secret.secretKey).toBe("STAGING_KEY");
      expect(secret.secretValue).toBe("stage-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "STAGING_KEY",
                secretValue: "stage-value"
              })
            ])
          }),
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "PROD_KEY",
                secretValue: "prod-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });
      await deleteSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });
    });
  }
);

// dev -> stage, prod
describe.each([{ path: "/" }, { path: "/deep" }])(
  "Secret import one source to multiple destination pattern testing - %path",
  ({ path: testSuitePath }) => {
    beforeAll(async () => {
      let prodFolder: { id: string };
      let stagingFolder: { id: string };
      let devFolder: { id: string };

      if (testSuitePath !== "/") {
        prodFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "prod",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        stagingFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "staging",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        devFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: seedData1.environment.slug,
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });
      }

      const stageImportFromDev = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: seedData1.environment.slug
      });

      const prodImportFromDev = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: seedData1.environment.slug
      });

      return async () => {
        await deleteSecretImport({
          id: prodImportFromDev.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: "prod",
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        await deleteSecretImport({
          id: stageImportFromDev.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: "staging",
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        if (prodFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: prodFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "prod"
          });
        }

        if (stagingFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: stagingFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "staging"
          });
        }

        if (devFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: devFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: seedData1.environment.slug
          });
        }
      };
    });

    test("Check imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY",
        value: "stage-value"
      });

      await createSecretV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY",
        value: "prod-value"
      });

      const stagingSecret = await getSecretByNameV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });

      expect(stagingSecret.secretKey).toBe("STAGING_KEY");
      expect(stagingSecret.secretValue).toBe("stage-value");

      const prodSecret = await getSecretByNameV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });

      expect(prodSecret.secretKey).toBe("PROD_KEY");
      expect(prodSecret.secretValue).toBe("prod-value");

      await deleteSecretV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });
      await deleteSecretV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });
    });
  }
);
406 backend/e2e-test/routes/v1/secret-replication.spec.ts Normal file
@@ -0,0 +1,406 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";

import { seedData1 } from "@app/db/seed-data";

// dev <- stage <- prod
describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])(
  "Secret replication waterfall pattern testing - %secretPath",
  ({ secretPath: testSuitePath }) => {
    beforeAll(async () => {
      let prodFolder: { id: string };
      let stagingFolder: { id: string };
      let devFolder: { id: string };

      if (testSuitePath !== "/") {
        prodFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "prod",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        stagingFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "staging",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        devFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: seedData1.environment.slug,
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });
      }

      const devImportFromStage = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "staging",
        isReplication: true
      });

      const stageImportFromProd = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "prod",
        isReplication: true
      });

      return async () => {
        await deleteSecretImport({
          id: stageImportFromProd.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: "staging",
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        await deleteSecretImport({
          id: devImportFromStage.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        if (prodFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: prodFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "prod"
          });
        }

        if (stagingFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: stagingFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "staging"
          });
        }

        if (devFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: devFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: seedData1.environment.slug
          });
        }
      };
    });

    test("Check one level imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY",
        value: "stage-value"
      });

      // wait for 5 second for replication to finish
      await new Promise((resolve) => {
        setTimeout(resolve, 5000); // time to breathe for db
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });

      expect(secret.secretKey).toBe("STAGING_KEY");
      expect(secret.secretValue).toBe("stage-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });

      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "STAGING_KEY",
                secretValue: "stage-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });
    });

    test("Check two level imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY",
        value: "prod-value"
      });

      // wait for 5 second for replication to finish
      await new Promise((resolve) => {
        setTimeout(resolve, 5000); // time to breathe for db
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });

      expect(secret.secretKey).toBe("PROD_KEY");
      expect(secret.secretValue).toBe("prod-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "PROD_KEY",
                secretValue: "prod-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });
    });
  },
  { timeout: 30000 }
);

// dev <- stage, dev <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
  "Secret replication 1-N pattern testing - %path",
  ({ path: testSuitePath }) => {
    beforeAll(async () => {
      let prodFolder: { id: string };
      let stagingFolder: { id: string };
      let devFolder: { id: string };

      if (testSuitePath !== "/") {
        prodFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "prod",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        stagingFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "staging",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        devFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: seedData1.environment.slug,
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });
      }

      const devImportFromStage = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "staging",
        isReplication: true
      });

      const devImportFromProd = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "prod",
        isReplication: true
      });

      return async () => {
        await deleteSecretImport({
          id: devImportFromProd.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        await deleteSecretImport({
          id: devImportFromStage.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        if (prodFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: prodFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "prod"
          });
        }

        if (stagingFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: stagingFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "staging"
          });
        }

        if (devFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: devFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: seedData1.environment.slug
          });
        }
      };
    });

    test("Check imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY",
        value: "stage-value"
      });

      await createSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY",
        value: "prod-value"
      });

      // wait for 5 second for replication to finish
      await new Promise((resolve) => {
        setTimeout(resolve, 5000); // time to breathe for db
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });

      expect(secret.secretKey).toBe("STAGING_KEY");
      expect(secret.secretValue).toBe("stage-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "STAGING_KEY",
                secretValue: "stage-value"
              })
            ])
          }),
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "PROD_KEY",
                secretValue: "prod-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });
      await deleteSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });
    });
  },
  { timeout: 30000 }
);
330 backend/e2e-test/routes/v3/secret-reference.spec.ts Normal file
@@ -0,0 +1,330 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";

import { seedData1 } from "@app/db/seed-data";

describe("Secret expansion", () => {
  const projectId = seedData1.projectV3.id;

  beforeAll(async () => {
    const prodRootFolder = await createFolder({
      authToken: jwtAuthToken,
      environmentSlug: "prod",
      workspaceId: projectId,
      secretPath: "/",
      name: "deep"
    });

    await createFolder({
      authToken: jwtAuthToken,
      environmentSlug: "prod",
      workspaceId: projectId,
      secretPath: "/deep",
      name: "nested"
    });

    return async () => {
      await deleteFolder({
        authToken: jwtAuthToken,
        secretPath: "/",
        id: prodRootFolder.id,
        workspaceId: projectId,
        environmentSlug: "prod"
      });
    };
  });

  test("Local secret reference", async () => {
    const secrets = [
      {
        environmentSlug: seedData1.environment.slug,
        workspaceId: projectId,
        secretPath: "/",
        authToken: jwtAuthToken,
        key: "HELLO",
        value: "world"
      },
      {
        environmentSlug: seedData1.environment.slug,
        workspaceId: projectId,
        secretPath: "/",
        authToken: jwtAuthToken,
        key: "TEST",
        // eslint-disable-next-line
        value: "hello ${HELLO}"
      }
    ];

    await Promise.all(secrets.map((el) => createSecretV2(el)));

    const expandedSecret = await getSecretByNameV2({
      environmentSlug: seedData1.environment.slug,
      workspaceId: projectId,
      secretPath: "/",
      authToken: jwtAuthToken,
      key: "TEST"
    });
    expect(expandedSecret.secretValue).toBe("hello world");

    const listSecrets = await getSecretsV2({
      environmentSlug: seedData1.environment.slug,
      workspaceId: projectId,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    expect(listSecrets.secrets).toEqual(
      expect.arrayContaining([
        expect.objectContaining({
          secretKey: "TEST",
          secretValue: "hello world"
        })
      ])
    );

    await Promise.all(secrets.map((el) => deleteSecretV2(el)));
  });

  test("Cross environment secret reference", async () => {
    const secrets = [
      {
        environmentSlug: "prod",
        workspaceId: projectId,
        secretPath: "/deep",
        authToken: jwtAuthToken,
        key: "DEEP_KEY_1",
        value: "testing"
      },
      {
        environmentSlug: "prod",
        workspaceId: projectId,
        secretPath: "/deep/nested",
        authToken: jwtAuthToken,
        key: "NESTED_KEY_1",
        value: "reference"
      },
      {
        environmentSlug: "prod",
        workspaceId: projectId,
        secretPath: "/deep/nested",
        authToken: jwtAuthToken,
        key: "NESTED_KEY_2",
        // eslint-disable-next-line
        value: "secret ${NESTED_KEY_1}"
      },
      {
        environmentSlug: seedData1.environment.slug,
        workspaceId: projectId,
        secretPath: "/",
        authToken: jwtAuthToken,
        key: "KEY",
        // eslint-disable-next-line
        value: "hello ${prod.deep.DEEP_KEY_1} ${prod.deep.nested.NESTED_KEY_2}"
      }
    ];

    await Promise.all(secrets.map((el) => createSecretV2(el)));

    const expandedSecret = await getSecretByNameV2({
      environmentSlug: seedData1.environment.slug,
      workspaceId: projectId,
      secretPath: "/",
      authToken: jwtAuthToken,
      key: "KEY"
    });
    expect(expandedSecret.secretValue).toBe("hello testing secret reference");

    const listSecrets = await getSecretsV2({
      environmentSlug: seedData1.environment.slug,
      workspaceId: projectId,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    expect(listSecrets.secrets).toEqual(
      expect.arrayContaining([
        expect.objectContaining({
          secretKey: "KEY",
          secretValue: "hello testing secret reference"
        })
      ])
    );

    await Promise.all(secrets.map((el) => deleteSecretV2(el)));
  });

test("Non replicated secret import secret expansion on local reference and nested reference", async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
authToken: jwtAuthToken,
|
||||
key: "DEEP_KEY_1",
|
||||
value: "testing"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_1",
|
||||
value: "reference"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_2",
|
||||
// eslint-disable-next-line
|
||||
value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY",
|
||||
// eslint-disable-next-line
|
||||
value: "hello world"
|
||||
}
|
||||
];
|
||||
|
||||
await Promise.all(secrets.map((el) => createSecretV2(el)));
|
||||
const secretImportFromProdToDev = await createSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
importEnv: "prod",
|
||||
importPath: "/deep/nested"
|
||||
});
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretPath: "/deep/nested",
|
||||
environment: "prod",
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_1",
|
||||
secretValue: "reference"
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_2",
|
||||
secretValue: "secret reference testing"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
await deleteSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
authToken: jwtAuthToken,
|
||||
id: secretImportFromProdToDev.id,
|
||||
secretPath: "/"
|
||||
});
|
||||
});
|
||||
|
||||
test(
|
||||
"Replicated secret import secret expansion on local reference and nested reference",
|
||||
async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
authToken: jwtAuthToken,
|
||||
key: "DEEP_KEY_1",
|
||||
value: "testing"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_1",
|
||||
value: "reference"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_2",
|
||||
// eslint-disable-next-line
|
||||
value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY",
|
||||
// eslint-disable-next-line
|
||||
value: "hello world"
|
||||
}
|
||||
];
|
||||
|
||||
await Promise.all(secrets.map((el) => createSecretV2(el)));
|
||||
const secretImportFromProdToDev = await createSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
importEnv: "prod",
|
||||
importPath: "/deep/nested",
|
||||
isReplication: true
|
||||
});
|
||||
|
||||
// wait for 5 second for replication to finish
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 5000); // time to breathe for db
|
||||
});
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretPath: `/__reserve_replication_${secretImportFromProdToDev.id}`,
|
||||
environment: seedData1.environment.slug,
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_1",
|
||||
secretValue: "reference"
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_2",
|
||||
secretValue: "secret reference testing"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
await deleteSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
authToken: jwtAuthToken,
|
||||
id: secretImportFromProdToDev.id,
|
||||
secretPath: "/"
|
||||
});
|
||||
},
|
||||
{ timeout: 10000 }
|
||||
);
|
||||
});
|
@@ -8,6 +8,7 @@ type TRawSecret = {
  secretComment?: string;
  version: number;
};

const createSecret = async (dto: { path: string; key: string; value: string; comment: string; type?: SecretType }) => {
  const createSecretReqBody = {
    workspaceId: seedData1.projectV3.id,
73 backend/e2e-test/testUtils/folders.ts Normal file
@@ -0,0 +1,73 @@
type TFolder = {
  id: string;
  name: string;
};

export const createFolder = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  name: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/folders`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      name: dto.name,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folder as TFolder;
};

export const deleteFolder = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  id: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "DELETE",
    url: `/api/v1/folders/${dto.id}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folder as TFolder;
};

export const listFolders = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "GET",
    url: `/api/v1/folders`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folders as TFolder[];
};
93 backend/e2e-test/testUtils/secret-imports.ts Normal file
@@ -0,0 +1,93 @@
type TSecretImport = {
  id: string;
  importEnv: {
    name: string;
    slug: string;
    id: string;
  };
  importPath: string;
};

export const createSecretImport = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  isReplication?: boolean;
  secretPath: string;
  importPath: string;
  importEnv: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/secret-imports`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      isReplication: dto.isReplication,
      path: dto.secretPath,
      import: {
        environment: dto.importEnv,
        path: dto.importPath
      }
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImport");
  return payload.secretImport as TSecretImport;
};

export const deleteSecretImport = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
  id: string;
}) => {
  const res = await testServer.inject({
    method: "DELETE",
    url: `/api/v1/secret-imports/${dto.id}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImport");
  return payload.secretImport as TSecretImport;
};

export const listSecretImport = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "GET",
    url: `/api/v1/secret-imports`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    query: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImports");
  return payload.secretImports as TSecretImport[];
};
128 backend/e2e-test/testUtils/secrets.ts Normal file
@@ -0,0 +1,128 @@
import { SecretType } from "@app/db/schemas";
|
||||
|
||||
type TRawSecret = {
|
||||
secretKey: string;
|
||||
secretValue: string;
|
||||
secretComment?: string;
|
||||
version: number;
|
||||
};
|
||||
|
||||
export const createSecretV2 = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
secretPath: string;
|
||||
key: string;
|
||||
value: string;
|
||||
comment?: string;
|
||||
authToken: string;
|
||||
type?: SecretType;
|
||||
}) => {
|
||||
const createSecretReqBody = {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
type: dto.type || SecretType.Shared,
|
||||
secretPath: dto.secretPath,
|
||||
secretKey: dto.key,
|
||||
secretValue: dto.value,
|
||||
secretComment: dto.comment
|
||||
};
|
||||
const createSecRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v3/secrets/raw/${dto.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
body: createSecretReqBody
|
||||
});
|
||||
expect(createSecRes.statusCode).toBe(200);
|
||||
const createdSecretPayload = JSON.parse(createSecRes.payload);
|
||||
expect(createdSecretPayload).toHaveProperty("secret");
|
||||
return createdSecretPayload.secret as TRawSecret;
|
||||
};
|
||||
|
||||
export const deleteSecretV2 = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
secretPath: string;
|
||||
key: string;
|
||||
authToken: string;
|
||||
}) => {
|
||||
const deleteSecRes = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v3/secrets/raw/${dto.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
secretPath: dto.secretPath
|
||||
}
|
||||
});
|
||||
expect(deleteSecRes.statusCode).toBe(200);
|
||||
const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
|
||||
expect(updatedSecretPayload).toHaveProperty("secret");
|
||||
return updatedSecretPayload.secret as TRawSecret;
|
||||
};
|
||||
|
||||
export const getSecretByNameV2 = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
secretPath: string;
|
||||
key: string;
|
||||
authToken: string;
|
||||
}) => {
|
||||
const response = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v3/secrets/raw/${dto.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
query: {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
secretPath: dto.secretPath,
|
||||
expandSecretReferences: "true",
|
||||
include_imports: "true"
|
||||
}
|
||||
});
|
||||
expect(response.statusCode).toBe(200);
|
||||
const payload = JSON.parse(response.payload);
|
||||
expect(payload).toHaveProperty("secret");
|
||||
return payload.secret as TRawSecret;
|
||||
};
|
||||
|
||||
export const getSecretsV2 = async (dto: {
|
||||
workspaceId: string;
|
||||
environmentSlug: string;
|
||||
secretPath: string;
|
||||
authToken: string;
|
||||
}) => {
|
||||
const getSecretsResponse = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v3/secrets/raw`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.authToken}`
|
||||
},
|
||||
query: {
|
||||
workspaceId: dto.workspaceId,
|
||||
environment: dto.environmentSlug,
|
||||
secretPath: dto.secretPath,
|
||||
expandSecretReferences: "true",
|
||||
include_imports: "true"
|
||||
}
|
||||
});
|
||||
expect(getSecretsResponse.statusCode).toBe(200);
|
||||
const getSecretsPayload = JSON.parse(getSecretsResponse.payload);
|
||||
expect(getSecretsPayload).toHaveProperty("secrets");
|
||||
expect(getSecretsPayload).toHaveProperty("imports");
|
||||
return getSecretsPayload as {
|
||||
secrets: TRawSecret[];
|
||||
imports: {
|
||||
secretPath: string;
|
||||
environment: string;
|
||||
folderId: string;
|
||||
secrets: TRawSecret[];
|
||||
}[];
|
||||
};
|
||||
};
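And the matching round trip with these helpers (hypothetical spec; ids and token are placeholders from the seeded test data):

import { expect, test } from "vitest";
import { createSecretV2, deleteSecretV2, getSecretByNameV2 } from "./testUtils/secrets";

test("raw secret round trip", async () => {
  await createSecretV2({
    workspaceId: "<workspace-id>",
    environmentSlug: "dev",
    secretPath: "/",
    key: "DB_URL",
    value: "postgres://placeholder",
    authToken: "<jwt>"
  });

  const secret = await getSecretByNameV2({
    workspaceId: "<workspace-id>",
    environmentSlug: "dev",
    secretPath: "/",
    key: "DB_URL",
    authToken: "<jwt>"
  });
  expect(secret.secretValue).toBe("postgres://placeholder");

  await deleteSecretV2({
    workspaceId: "<workspace-id>",
    environmentSlug: "dev",
    secretPath: "/",
    key: "DB_URL",
    authToken: "<jwt>"
  });
});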
@@ -11,10 +11,11 @@ import { initLogger } from "@app/lib/logger";
 import { main } from "@app/server/app";
 import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";

-import { mockQueue } from "./mocks/queue";
 import { mockSmtpServer } from "./mocks/smtp";
-import { mockKeyStore } from "./mocks/keystore";
 import { initDbConnection } from "@app/db";
+import { queueServiceFactory } from "@app/queue";
+import { keyStoreFactory } from "@app/keystore/keystore";
+import { Redis } from "ioredis";

 dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
 export default {
@@ -28,19 +29,31 @@ export default {
       dbRootCert: cfg.DB_ROOT_CERT
     });

+    const redis = new Redis(cfg.REDIS_URL);
+    await redis.flushdb("SYNC");
+
     try {
+      await db.migrate.rollback(
+        {
+          directory: path.join(__dirname, "../src/db/migrations"),
+          extension: "ts",
+          tableName: "infisical_migrations"
+        },
+        true
+      );
       await db.migrate.latest({
         directory: path.join(__dirname, "../src/db/migrations"),
         extension: "ts",
         tableName: "infisical_migrations"
       });

       await db.seed.run({
         directory: path.join(__dirname, "../src/db/seeds"),
         extension: "ts"
       });
       const smtp = mockSmtpServer();
-      const queue = mockQueue();
-      const keyStore = mockKeyStore();
+      const queue = queueServiceFactory(cfg.REDIS_URL);
+      const keyStore = keyStoreFactory(cfg.REDIS_URL);
       const server = await main({ db, smtp, logger, queue, keyStore });
       // @ts-expect-error type
       globalThis.testServer = server;
@@ -58,10 +71,12 @@ export default {
         { expiresIn: cfg.JWT_AUTH_LIFETIME }
       );
     } catch (error) {
       // eslint-disable-next-line
       console.log("[TEST] Error setting up environment", error);
+      await db.destroy();
       throw error;
     }

     // custom setup
     return {
       async teardown() {
@@ -80,6 +95,9 @@ export default {
         },
         true
       );
+
+      await redis.flushdb("ASYNC");
+      redis.disconnect();
       await db.destroy();
     }
   };
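For orientation: the hunks above are from the suite's custom vitest environment; its setup() now boots the server against a real Redis-backed queue and keystore instead of mocks, and the returned teardown() rolls everything back. Because the Fastify instance is stashed on globalThis, the test utils earlier in this diff can call testServer.inject without importing anything. An ambient declaration along these lines would make that typecheck (an assumption; the project may declare it elsewhere):

import { FastifyInstance } from "fastify";

declare global {
  // eslint-disable-next-line no-var -- ambient globals must be declared with `var`
  var testServer: FastifyInstance; // set via `globalThis.testServer = server` above
}

export {};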
617  backend/package-lock.json  generated
File diff suppressed because it is too large
@@ -50,6 +50,7 @@
     "migration:down": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
     "migration:list": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
     "migration:latest": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
+    "migration:status": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
     "migration:rollback": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
     "seed:new": "tsx ./scripts/create-seed-file.ts",
     "seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
@@ -102,7 +103,6 @@
     "tsup": "^8.0.1",
     "tsx": "^4.4.0",
     "typescript": "^5.3.2",
     "vite-tsconfig-paths": "^4.2.2",
     "vitest": "^1.2.2"
   },
   "dependencies": {
@@ -112,6 +112,7 @@
     "@aws-sdk/client-secrets-manager": "^3.504.0",
     "@aws-sdk/client-sts": "^3.600.0",
     "@casl/ability": "^6.5.0",
+    "@elastic/elasticsearch": "^8.15.0",
     "@fastify/cookie": "^9.3.1",
     "@fastify/cors": "^8.5.0",
     "@fastify/etag": "^5.1.0",
@@ -157,6 +158,7 @@
     "ldapjs": "^3.0.7",
     "libsodium-wrappers": "^0.7.13",
     "lodash.isequal": "^4.5.0",
+    "mongodb": "^6.8.1",
     "ms": "^2.1.3",
     "mysql2": "^3.9.8",
     "nanoid": "^3.3.4",
@@ -176,6 +178,8 @@
     "posthog-node": "^3.6.2",
     "probot": "^13.0.0",
     "safe-regex": "^2.1.1",
     "scim-patch": "^0.8.3",
     "scim2-parse-filter": "^0.2.10",
+    "smee-client": "^2.0.0",
+    "tedious": "^18.2.1",
     "tweetnacl": "^1.0.3",
@@ -115,7 +115,14 @@ export async function down(knex: Knex): Promise<void> {
       // eslint-disable-next-line
       // @ts-ignore because generate schema happens after this
       approverId: knex(TableName.ProjectMembership)
-        .select("id")
+        .join(
+          TableName.SecretApprovalPolicy,
+          `${TableName.SecretApprovalPolicy}.id`,
+          `${TableName.SecretApprovalPolicyApprover}.policyId`
+        )
+        .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
+        .select(knex.ref("id").withSchema(TableName.ProjectMembership))
+        .where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
         .where("userId", knex.raw("??", [`${TableName.SecretApprovalPolicyApprover}.approverUserId`]))
     });
     await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
@@ -147,13 +154,27 @@ export async function down(knex: Knex): Promise<void> {
       // eslint-disable-next-line
       // @ts-ignore because generate schema happens after this
       committerId: knex(TableName.ProjectMembership)
-        .select("id")
-        .where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`])),
+        .join(
+          TableName.SecretApprovalPolicy,
+          `${TableName.SecretApprovalPolicy}.id`,
+          `${TableName.SecretApprovalRequest}.policyId`
+        )
+        .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
+        .where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
+        .where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`]))
+        .select(knex.ref("id").withSchema(TableName.ProjectMembership)),
       // eslint-disable-next-line
       // @ts-ignore because generate schema happens after this
       statusChangeBy: knex(TableName.ProjectMembership)
-        .select("id")
-        .where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.statusChangedByUserId`]))
+        .join(
+          TableName.SecretApprovalPolicy,
+          `${TableName.SecretApprovalPolicy}.id`,
+          `${TableName.SecretApprovalRequest}.policyId`
+        )
+        .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
+        .where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
+        .where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.statusChangedByUserId`]))
+        .select(knex.ref("id").withSchema(TableName.ProjectMembership))
     });

     await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
@@ -177,8 +198,20 @@ export async function down(knex: Knex): Promise<void> {
       // eslint-disable-next-line
       // @ts-ignore because generate schema happens after this
       member: knex(TableName.ProjectMembership)
-        .select("id")
+        .join(
+          TableName.SecretApprovalRequest,
+          `${TableName.SecretApprovalRequest}.id`,
+          `${TableName.SecretApprovalRequestReviewer}.requestId`
+        )
+        .join(
+          TableName.SecretApprovalPolicy,
+          `${TableName.SecretApprovalPolicy}.id`,
+          `${TableName.SecretApprovalRequest}.policyId`
+        )
+        .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
+        .where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
+        .where("userId", knex.raw("??", [`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`]))
+        .select(knex.ref("id").withSchema(TableName.ProjectMembership))
     });
     await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => {
       tb.uuid("member").notNullable().alter();
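A side note on the knex idiom these subqueries lean on: knex.raw("??", [value]) binds value as an escaped identifier rather than a literal, which is what lets each subquery compare a project-membership column against a column of the outer row being backfilled. A minimal standalone sketch (hypothetical table and column names):

import knex from "knex";

const db = knex({ client: "pg" });

// compiles to: select "pm"."id" from "project_membership" as "pm"
//              where "pm"."userId" = "outer_table"."userId"
const correlated = db("project_membership as pm")
  .select(db.ref("id").withSchema("pm"))
  // "??" interpolates an identifier, so this compares two columns instead of
  // comparing a column to the string "outer_table.userId"
  .where("pm.userId", db.raw("??", ["outer_table.userId"]));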
@@ -0,0 +1,25 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    const doesPasswordExist = await knex.schema.hasColumn(TableName.SecretSharing, "password");
    if (!doesPasswordExist) {
      await knex.schema.alterTable(TableName.SecretSharing, (t) => {
        t.string("password").nullable();
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    const doesPasswordExist = await knex.schema.hasColumn(TableName.SecretSharing, "password");
    if (doesPasswordExist) {
      await knex.schema.alterTable(TableName.SecretSharing, (t) => {
        t.dropColumn("password");
      });
    }
  }
}
@@ -0,0 +1,25 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
    const hasRequireTemplateForIssuanceColumn = await knex.schema.hasColumn(
      TableName.CertificateAuthority,
      "requireTemplateForIssuance"
    );
    if (!hasRequireTemplateForIssuanceColumn) {
      await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
        t.boolean("requireTemplateForIssuance").notNullable().defaultTo(false);
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
    await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
      t.dropColumn("requireTemplateForIssuance");
    });
  }
}
@@ -28,7 +28,8 @@ export const CertificateAuthoritiesSchema = z.object({
   keyAlgorithm: z.string(),
   notBefore: z.date().nullable().optional(),
   notAfter: z.date().nullable().optional(),
-  activeCaCertId: z.string().uuid().nullable().optional()
+  activeCaCertId: z.string().uuid().nullable().optional(),
+  requireTemplateForIssuance: z.boolean().default(false)
 });

 export type TCertificateAuthorities = z.infer<typeof CertificateAuthoritiesSchema>;
@@ -21,7 +21,8 @@ export const SecretSharingSchema = z.object({
   expiresAfterViews: z.number().nullable().optional(),
   accessType: z.string().default("anyone"),
   name: z.string().nullable().optional(),
-  lastViewedAt: z.date().nullable().optional()
+  lastViewedAt: z.date().nullable().optional(),
+  password: z.string().nullable().optional()
 });

 export type TSecretSharing = z.infer<typeof SecretSharingSchema>;
@@ -17,11 +17,11 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
         name: z.string().optional(),
         secretPath: z.string().trim().default("/"),
         environment: z.string(),
-        approverUserIds: z.string().array().min(1),
+        approvers: z.string().array().min(1),
         approvals: z.number().min(1).default(1),
         enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
       })
-      .refine((data) => data.approvals <= data.approverUserIds.length, {
+      .refine((data) => data.approvals <= data.approvers.length, {
         path: ["approvals"],
         message: "The number of approvals should be lower than the number of approvers."
       }),
@@ -127,11 +127,11 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
           .trim()
           .optional()
           .transform((val) => (val === "" ? "/" : val)),
-        approverUserIds: z.string().array().min(1),
+        approvers: z.string().array().min(1),
         approvals: z.number().min(1).default(1),
         enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
       })
-      .refine((data) => data.approvals <= data.approverUserIds.length, {
+      .refine((data) => data.approvals <= data.approvers.length, {
         path: ["approvals"],
         message: "The number of approvals should be lower than the number of approvers."
       }),
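Note that this rename is client-visible: create/update policy requests now send approvers where they previously sent approverUserIds. A hypothetical body that satisfies the new create schema (ids and slugs are placeholders; enforcementLevel falls back to its default):

const createPolicyBody = {
  name: "prod-change-policy",
  projectSlug: "my-project", // assumed to be part of this route's body, matching the service below
  environment: "prod",
  secretPath: "/",
  approvers: ["<user-id-1>", "<user-id-2>"],
  approvals: 2 // must not exceed approvers.length, per the .refine() above
};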
@@ -1,6 +1,6 @@
 /* eslint-disable @typescript-eslint/no-unsafe-return */
 /* eslint-disable @typescript-eslint/no-unsafe-assignment */
-// TODO(akhilmhdh): Fix this when licence service gets it type
+// TODO(akhilmhdh): Fix this when license service gets it type
 import { z } from "zod";

 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
@@ -118,7 +118,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
           cb(null, { isUserCompleted, providerAuthToken });
         } catch (error) {
           logger.error(error);
-          cb(null, {});
+          cb(error as Error);
         }
       },
       () => {}
@@ -5,22 +5,47 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { AuthMode } from "@app/services/auth/auth-type";

-export const registerScimRouter = async (server: FastifyZodProvider) => {
-  server.addContentTypeParser("application/scim+json", { parseAs: "string" }, (_, body, done) => {
-    try {
-      const strBody = body instanceof Buffer ? body.toString() : body;
-      if (!strBody) {
-        done(null, undefined);
-        return;
-      }
-      const json: unknown = JSON.parse(strBody);
-      done(null, json);
-    } catch (err) {
-      const error = err as Error;
-      done(error, undefined);
-    }
-  });
+const ScimUserSchema = z.object({
+  schemas: z.array(z.string()),
+  id: z.string().trim(),
+  userName: z.string().trim(),
+  name: z
+    .object({
+      familyName: z.string().trim().optional(),
+      givenName: z.string().trim().optional()
+    })
+    .optional(),
+  emails: z
+    .array(
+      z.object({
+        primary: z.boolean(),
+        value: z.string().email(),
+        type: z.string().trim()
+      })
+    )
+    .optional(),
+  displayName: z.string().trim(),
+  active: z.boolean()
+});
+
+const ScimGroupSchema = z.object({
+  schemas: z.array(z.string()),
+  id: z.string().trim(),
+  displayName: z.string().trim(),
+  members: z
+    .array(
+      z.object({
+        value: z.string(),
+        display: z.string().optional()
+      })
+    )
+    .optional(),
+  meta: z.object({
+    resourceType: z.string().trim()
+  })
+});
+
+export const registerScimRouter = async (server: FastifyZodProvider) => {
   server.route({
     url: "/scim-tokens",
     method: "POST",
@@ -127,25 +152,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
       }),
       response: {
         200: z.object({
-          Resources: z.array(
-            z.object({
-              id: z.string().trim(),
-              userName: z.string().trim(),
-              name: z.object({
-                familyName: z.string().trim(),
-                givenName: z.string().trim()
-              }),
-              emails: z.array(
-                z.object({
-                  primary: z.boolean(),
-                  value: z.string(),
-                  type: z.string().trim()
-                })
-              ),
-              displayName: z.string().trim(),
-              active: z.boolean()
-            })
-          ),
+          Resources: z.array(ScimUserSchema),
           itemsPerPage: z.number(),
           schemas: z.array(z.string()),
           startIndex: z.number(),
@@ -173,30 +180,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
         orgMembershipId: z.string().trim()
       }),
       response: {
-        201: z.object({
-          schemas: z.array(z.string()),
-          id: z.string().trim(),
-          userName: z.string().trim(),
-          name: z.object({
-            familyName: z.string().trim(),
-            givenName: z.string().trim()
-          }),
-          emails: z.array(
-            z.object({
-              primary: z.boolean(),
-              value: z.string(),
-              type: z.string().trim()
-            })
-          ),
-          displayName: z.string().trim(),
-          active: z.boolean(),
-          groups: z.array(
-            z.object({
-              value: z.string().trim(),
-              display: z.string().trim()
-            })
-          )
-        })
+        200: ScimUserSchema
       }
     },
     onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
@@ -216,10 +200,12 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
       body: z.object({
         schemas: z.array(z.string()),
         userName: z.string().trim(),
-        name: z.object({
-          familyName: z.string().trim(),
-          givenName: z.string().trim()
-        }),
+        name: z
+          .object({
+            familyName: z.string().trim().optional(),
+            givenName: z.string().trim().optional()
+          })
+          .optional(),
         emails: z
           .array(
             z.object({
@@ -229,28 +215,10 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
             })
           )
           .optional(),
-        // displayName: z.string().trim(),
-        active: z.boolean()
+        active: z.boolean().default(true)
       }),
       response: {
-        200: z.object({
-          schemas: z.array(z.string()),
-          id: z.string().trim(),
-          userName: z.string().trim(),
-          name: z.object({
-            familyName: z.string().trim(),
-            givenName: z.string().trim()
-          }),
-          emails: z.array(
-            z.object({
-              primary: z.boolean(),
-              value: z.string().email(),
-              type: z.string().trim()
-            })
-          ),
-          displayName: z.string().trim(),
-          active: z.boolean()
-        })
+        200: ScimUserSchema
       }
     },
     onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
@@ -260,8 +228,8 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
       const user = await req.server.services.scim.createScimUser({
         externalId: req.body.userName,
         email: primaryEmail,
-        firstName: req.body.name.givenName,
-        lastName: req.body.name.familyName,
+        firstName: req.body?.name?.givenName,
+        lastName: req.body?.name?.familyName,
         orgId: req.permission.orgId
       });

@@ -291,6 +259,116 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
     }
   });

+  server.route({
+    url: "/Users/:orgMembershipId",
+    method: "PUT",
+    schema: {
+      params: z.object({
+        orgMembershipId: z.string().trim()
+      }),
+      body: z.object({
+        schemas: z.array(z.string()),
+        id: z.string().trim(),
+        userName: z.string().trim(),
+        name: z
+          .object({
+            familyName: z.string().trim().optional(),
+            givenName: z.string().trim().optional()
+          })
+          .optional(),
+        displayName: z.string().trim(),
+        emails: z
+          .array(
+            z.object({
+              primary: z.boolean(),
+              value: z.string().email(),
+              type: z.string().trim()
+            })
+          )
+          .optional(),
+        active: z.boolean()
+      }),
+      response: {
+        200: z.object({
+          schemas: z.array(z.string()),
+          id: z.string().trim(),
+          userName: z.string().trim(),
+          name: z.object({
+            familyName: z.string().trim(),
+            givenName: z.string().trim()
+          }),
+          emails: z.array(
+            z.object({
+              primary: z.boolean(),
+              value: z.string().email(),
+              type: z.string().trim()
+            })
+          ),
+          displayName: z.string().trim(),
+          active: z.boolean()
+        })
+      }
+    },
+    onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
+    handler: async (req) => {
+      const primaryEmail = req.body.emails?.find((email) => email.primary)?.value;
+      const user = await req.server.services.scim.replaceScimUser({
+        orgMembershipId: req.params.orgMembershipId,
+        orgId: req.permission.orgId,
+        firstName: req.body?.name?.givenName,
+        lastName: req.body?.name?.familyName,
+        active: req.body?.active,
+        email: primaryEmail,
+        externalId: req.body.userName
+      });
+      return user;
+    }
+  });
+
+  server.route({
+    url: "/Users/:orgMembershipId",
+    method: "PATCH",
+    schema: {
+      params: z.object({
+        orgMembershipId: z.string().trim()
+      }),
+      body: z.object({
+        schemas: z.array(z.string()),
+        Operations: z.array(
+          z.union([
+            z.object({
+              op: z.union([z.literal("remove"), z.literal("Remove")]),
+              path: z.string().trim(),
+              value: z
+                .object({
+                  value: z.string()
+                })
+                .array()
+                .optional()
+            }),
+            z.object({
+              op: z.union([z.literal("add"), z.literal("Add"), z.literal("replace"), z.literal("Replace")]),
+              path: z.string().trim().optional(),
+              value: z.any().optional()
+            })
+          ])
+        )
+      }),
+      response: {
+        200: ScimUserSchema
+      }
+    },
+    onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
+    handler: async (req) => {
+      const user = await req.server.services.scim.updateScimUser({
+        orgMembershipId: req.params.orgMembershipId,
+        orgId: req.permission.orgId,
+        operations: req.body.Operations
+      });
+
+      return user;
+    }
+  });
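For reference, this is the shape of SCIM PATCH payload the new /Users/:orgMembershipId route accepts — a deactivation as an identity provider would send it (illustrative values; the route prefix depends on where this router is registered):

const scimPatchBody = {
  schemas: ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
  Operations: [
    {
      op: "Replace", // the union above accepts add/replace in either casing
      path: "active",
      value: false
    }
  ]
};
// PATCH <scim-base>/Users/<orgMembershipId> with Authorization: Bearer <SCIM token>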
   server.route({
     url: "/Groups",
     method: "POST",
@@ -305,25 +383,10 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
             display: z.string()
           })
         )
-        .optional() // okta-specific
+        .optional()
       }),
       response: {
-        200: z.object({
-          schemas: z.array(z.string()),
-          id: z.string().trim(),
-          displayName: z.string().trim(),
-          members: z
-            .array(
-              z.object({
-                value: z.string(),
-                display: z.string()
-              })
-            )
-            .optional(),
-          meta: z.object({
-            resourceType: z.string().trim()
-          })
-        })
+        200: ScimGroupSchema
       }
     },
     onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
@@ -344,26 +407,12 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
       querystring: z.object({
         startIndex: z.coerce.number().default(1),
         count: z.coerce.number().default(20),
-        filter: z.string().trim().optional()
+        filter: z.string().trim().optional(),
+        excludedAttributes: z.string().trim().optional()
       }),
       response: {
         200: z.object({
-          Resources: z.array(
-            z.object({
-              schemas: z.array(z.string()),
-              id: z.string().trim(),
-              displayName: z.string().trim(),
-              members: z.array(
-                z.object({
-                  value: z.string(),
-                  display: z.string()
-                })
-              ),
-              meta: z.object({
-                resourceType: z.string().trim()
-              })
-            })
-          ),
+          Resources: z.array(ScimGroupSchema),
           itemsPerPage: z.number(),
           schemas: z.array(z.string()),
           startIndex: z.number(),
@@ -377,7 +426,8 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
         orgId: req.permission.orgId,
         startIndex: req.query.startIndex,
         filter: req.query.filter,
-        limit: req.query.count
+        limit: req.query.count,
+        isMembersExcluded: req.query.excludedAttributes === "members"
       });

       return groups;
@@ -392,20 +442,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
         groupId: z.string().trim()
       }),
       response: {
-        200: z.object({
-          schemas: z.array(z.string()),
-          id: z.string().trim(),
-          displayName: z.string().trim(),
-          members: z.array(
-            z.object({
-              value: z.string(),
-              display: z.string()
-            })
-          ),
-          meta: z.object({
-            resourceType: z.string().trim()
-          })
-        })
+        200: ScimGroupSchema
       }
     },
     onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
@@ -414,6 +451,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
         groupId: req.params.groupId,
         orgId: req.permission.orgId
       });
+
       return group;
     }
   });
@@ -437,25 +475,12 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
         )
       }),
       response: {
-        200: z.object({
-          schemas: z.array(z.string()),
-          id: z.string().trim(),
-          displayName: z.string().trim(),
-          members: z.array(
-            z.object({
-              value: z.string(),
-              display: z.string()
-            })
-          ),
-          meta: z.object({
-            resourceType: z.string().trim()
-          })
-        })
+        200: ScimGroupSchema
       }
     },
     onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
     handler: async (req) => {
-      const group = await req.server.services.scim.updateScimGroupNamePut({
+      const group = await req.server.services.scim.replaceScimGroup({
         groupId: req.params.groupId,
         orgId: req.permission.orgId,
         ...req.body
@@ -476,55 +501,35 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
         schemas: z.array(z.string()),
         Operations: z.array(
           z.union([
-            z.object({
-              op: z.union([z.literal("replace"), z.literal("Replace")]),
-              value: z.object({
-                id: z.string().trim(),
-                displayName: z.string().trim()
-              })
-            }),
             z.object({
               op: z.union([z.literal("remove"), z.literal("Remove")]),
-              path: z.string().trim()
+              path: z.string().trim(),
+              value: z
+                .object({
+                  value: z.string()
+                })
+                .array()
+                .optional()
             }),
             z.object({
-              op: z.union([z.literal("add"), z.literal("Add")]),
-              path: z.string().trim(),
-              value: z.array(
-                z.object({
-                  value: z.string().trim(),
-                  display: z.string().trim().optional()
-                })
-              )
+              op: z.union([z.literal("add"), z.literal("Add"), z.literal("replace"), z.literal("Replace")]),
+              path: z.string().trim().optional(),
+              value: z.any()
             })
           ])
         )
       }),
       response: {
-        200: z.object({
-          schemas: z.array(z.string()),
-          id: z.string().trim(),
-          displayName: z.string().trim(),
-          members: z.array(
-            z.object({
-              value: z.string(),
-              display: z.string()
-            })
-          ),
-          meta: z.object({
-            resourceType: z.string().trim()
-          })
-        })
+        200: ScimGroupSchema
      }
     },
     onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
     handler: async (req) => {
-      const group = await req.server.services.scim.updateScimGroupNamePatch({
+      const group = await req.server.services.scim.updateScimGroup({
         groupId: req.params.groupId,
         orgId: req.permission.orgId,
         operations: req.body.Operations
       });

       return group;
     }
   });
@@ -550,60 +555,4 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
       return group;
     }
   });
-
-  server.route({
-    url: "/Users/:orgMembershipId",
-    method: "PUT",
-    schema: {
-      params: z.object({
-        orgMembershipId: z.string().trim()
-      }),
-      body: z.object({
-        schemas: z.array(z.string()),
-        id: z.string().trim(),
-        userName: z.string().trim(),
-        name: z.object({
-          familyName: z.string().trim(),
-          givenName: z.string().trim()
-        }),
-        displayName: z.string().trim(),
-        active: z.boolean()
-      }),
-      response: {
-        200: z.object({
-          schemas: z.array(z.string()),
-          id: z.string().trim(),
-          userName: z.string().trim(),
-          name: z.object({
-            familyName: z.string().trim(),
-            givenName: z.string().trim()
-          }),
-          emails: z.array(
-            z.object({
-              primary: z.boolean(),
-              value: z.string().email(),
-              type: z.string().trim()
-            })
-          ),
-          displayName: z.string().trim(),
-          active: z.boolean(),
-          groups: z.array(
-            z.object({
-              value: z.string().trim(),
-              display: z.string().trim()
-            })
-          )
-        })
-      }
-    },
-    onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
-    handler: async (req) => {
-      const user = await req.server.services.scim.replaceScimUser({
-        orgMembershipId: req.params.orgMembershipId,
-        orgId: req.permission.orgId,
-        active: req.body.active
-      });
-      return user;
-    }
-  });
 };
@@ -44,7 +44,7 @@ export const accessApprovalPolicyServiceFactory = ({
     secretPath,
     actorAuthMethod,
     approvals,
-    approverUserIds,
+    approvers,
     projectSlug,
     environment,
     enforcementLevel
@@ -52,7 +52,7 @@ export const accessApprovalPolicyServiceFactory = ({
     const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
     if (!project) throw new BadRequestError({ message: "Project not found" });

-    if (approvals > approverUserIds.length)
+    if (approvals > approvers.length)
       throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });

     const { permission } = await permissionService.getProjectPermission(
@@ -76,7 +76,7 @@ export const accessApprovalPolicyServiceFactory = ({
       secretPath,
       actorAuthMethod,
       permissionService,
-      userIds: approverUserIds
+      userIds: approvers
     });

     const accessApproval = await accessApprovalPolicyDAL.transaction(async (tx) => {
@@ -91,7 +91,7 @@ export const accessApprovalPolicyServiceFactory = ({
         tx
       );
       await accessApprovalPolicyApproverDAL.insertMany(
-        approverUserIds.map((userId) => ({
+        approvers.map((userId) => ({
           approverUserId: userId,
           policyId: doc.id
         })),
@@ -128,7 +128,7 @@ export const accessApprovalPolicyServiceFactory = ({

   const updateAccessApprovalPolicy = async ({
     policyId,
-    approverUserIds,
+    approvers,
     secretPath,
     name,
     actorId,
@@ -161,7 +161,7 @@ export const accessApprovalPolicyServiceFactory = ({
       },
       tx
     );
-    if (approverUserIds) {
+    if (approvers) {
       await verifyApprovers({
         projectId: accessApprovalPolicy.projectId,
         orgId: actorOrgId,
@@ -169,12 +169,12 @@ export const accessApprovalPolicyServiceFactory = ({
         secretPath: doc.secretPath!,
         actorAuthMethod,
         permissionService,
-        userIds: approverUserIds
+        userIds: approvers
       });

       await accessApprovalPolicyApproverDAL.delete({ policyId: doc.id }, tx);
       await accessApprovalPolicyApproverDAL.insertMany(
-        approverUserIds.map((userId) => ({
+        approvers.map((userId) => ({
          approverUserId: userId,
           policyId: doc.id
         })),
@@ -17,7 +17,7 @@ export type TCreateAccessApprovalPolicy = {
   approvals: number;
   secretPath: string;
   environment: string;
-  approverUserIds: string[];
+  approvers: string[];
   projectSlug: string;
   name: string;
   enforcementLevel: EnforcementLevel;
@@ -26,7 +26,7 @@ export type TCreateAccessApprovalPolicy = {
 export type TUpdateAccessApprovalPolicy = {
   policyId: string;
   approvals?: number;
-  approverUserIds?: string[];
+  approvers?: string[];
   secretPath?: string;
   name?: string;
   enforcementLevel?: EnforcementLevel;
@@ -2,10 +2,11 @@ import { ForbiddenError } from "@casl/ability";
 import { RawAxiosRequestHeaders } from "axios";

 import { SecretKeyEncoding } from "@app/db/schemas";
+import { getConfig } from "@app/lib/config/env";
 import { request } from "@app/lib/config/request";
 import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
 import { BadRequestError } from "@app/lib/errors";
-import { validateLocalIps } from "@app/lib/validator";
+import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";

 import { AUDIT_LOG_STREAM_TIMEOUT } from "../audit-log/audit-log-queue";
 import { TLicenseServiceFactory } from "../license/license-service";
@@ -44,6 +45,7 @@ export const auditLogStreamServiceFactory = ({
   }: TCreateAuditLogStreamDTO) => {
     if (!actorOrgId) throw new BadRequestError({ message: "Missing org id from token" });

+    const appCfg = getConfig();
     const plan = await licenseService.getPlan(actorOrgId);
     if (!plan.auditLogStreams)
       throw new BadRequestError({
@@ -59,7 +61,9 @@ export const auditLogStreamServiceFactory = ({
     );
     ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Settings);

-    validateLocalIps(url);
+    if (appCfg.isCloud) {
+      blockLocalAndPrivateIpAddresses(url);
+    }

     const totalStreams = await auditLogStreamDAL.find({ orgId: actorOrgId });
     if (totalStreams.length >= plan.auditLogStreamLimit) {
@@ -131,7 +135,8 @@ export const auditLogStreamServiceFactory = ({
     const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
     ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings);

-    if (url) validateLocalIps(url);
+    const appCfg = getConfig();
+    if (url && appCfg.isCloud) blockLocalAndPrivateIpAddresses(url);

     // testing connection first
     const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
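Two things changed here: the helper name, and the fact that the guard now only runs on Infisical Cloud, so self-hosted deployments can stream audit logs to endpoints on their own private networks. As a rough illustration of what a local/private-address block does (an assumption for exposition; the real logic lives in @app/lib/validator):

// illustrative only: reject loopback/private hosts in a user-supplied stream URL
const assertPubliclyRoutable = (rawUrl: string) => {
  const { hostname } = new URL(rawUrl);
  const isLocalOrPrivate =
    hostname === "localhost" ||
    hostname === "host.docker.internal" ||
    /^127\./.test(hostname) ||
    /^10\./.test(hostname) ||
    /^192\.168\./.test(hostname) ||
    /^172\.(1[6-9]|2\d|3[01])\./.test(hostname);
  if (isLocalOrPrivate) throw new Error("Local and private IP addresses are not allowed");
};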
@@ -5,6 +5,7 @@ import { TableName } from "@app/db/schemas";
 import { DatabaseError } from "@app/lib/errors";
 import { ormify, stripUndefinedInWhere } from "@app/lib/knex";
 import { logger } from "@app/lib/logger";
+import { QueueName } from "@app/queue";

 export type TAuditLogDALFactory = ReturnType<typeof auditLogDALFactory>;

@@ -62,7 +63,9 @@ export const auditLogDALFactory = (db: TDbClient) => {
     const today = new Date();
     let deletedAuditLogIds: { id: string }[] = [];
     let numberOfRetryOnFailure = 0;
+    let isRetrying = false;

+    logger.info(`${QueueName.DailyResourceCleanUp}: audit log started`);
     do {
       try {
         const findExpiredLogSubQuery = (tx || db)(TableName.AuditLog)
@@ -84,7 +87,9 @@ export const auditLogDALFactory = (db: TDbClient) => {
           setTimeout(resolve, 10); // time to breathe for db
         });
       }
-    } while (deletedAuditLogIds.length > 0 || numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
+      isRetrying = numberOfRetryOnFailure > 0;
+    } while (deletedAuditLogIds.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
+    logger.info(`${QueueName.DailyResourceCleanUp}: audit log completed`);
   };

   return { ...auditLogOrm, pruneAuditLog, find };
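The while-condition rewrite is the substantive fix: before it, numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE held true on the happy path too (the counter never moves past 0), so that clause alone could keep the prune loop alive. Distilled control flow of the corrected version (names mirror the DAL; the delete is stubbed):

const MAX_RETRY_ON_FAILURE = 3; // assumed constant from the DAL

const pruneSketch = async (deleteBatch: () => Promise<string[]>) => {
  let deletedIds: string[] = [];
  let numberOfRetryOnFailure = 0;
  let isRetrying = false;

  do {
    try {
      deletedIds = await deleteBatch(); // returns [] once nothing is left
    } catch {
      numberOfRetryOnFailure += 1; // only failures bump the counter
    }
    isRetrying = numberOfRetryOnFailure > 0;
    // loop while work remains, or while retrying after a failure (bounded)
  } while (deletedIds.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
};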
@@ -140,6 +140,7 @@ export enum EventType {
   GET_CA_CRLS = "get-certificate-authority-crls",
   ISSUE_CERT = "issue-cert",
   SIGN_CERT = "sign-cert",
+  GET_CA_CERTIFICATE_TEMPLATES = "get-ca-certificate-templates",
   GET_CERT = "get-cert",
   DELETE_CERT = "delete-cert",
   REVOKE_CERT = "revoke-cert",
@@ -1192,6 +1193,14 @@ interface SignCert {
   };
 }

+interface GetCaCertificateTemplates {
+  type: EventType.GET_CA_CERTIFICATE_TEMPLATES;
+  metadata: {
+    caId: string;
+    dn: string;
+  };
+}
+
 interface GetCert {
   type: EventType.GET_CERT;
   metadata: {
@@ -1547,6 +1556,7 @@ export type Event =
   | GetCaCrls
   | IssueCert
   | SignCert
+  | GetCaCertificateTemplates
   | GetCert
   | DeleteCert
   | RevokeCert
126  backend/src/ee/services/dynamic-secret/providers/elastic-search.ts  Normal file
@@ -0,0 +1,126 @@
import { Client as ElasticSearchClient } from "@elastic/elasticsearch";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models";

const generatePassword = () => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
  return customAlphabet(charset, 64)();
};

const generateUsername = () => {
  return alphaNumericNanoId(32);
};

export const ElasticSearchProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const appCfg = getConfig();
    const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not

    const providerInputs = await DynamicSecretElasticSearchSchema.parseAsync(inputs);
    if (
      isCloud &&
      // localhost
      // internal ips
      (providerInputs.host === "host.docker.internal" ||
        providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
        providerInputs.host.match(/^192\.168\.\d+\.\d+/))
    ) {
      throw new BadRequestError({ message: "Invalid db host" });
    }
    if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
      throw new BadRequestError({ message: "Invalid db host" });
    }

    return providerInputs;
  };

  const getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
    const connection = new ElasticSearchClient({
      node: {
        url: new URL(`${providerInputs.host}:${providerInputs.port}`),
        ...(providerInputs.ca && {
          ssl: {
            rejectUnauthorized: false,
            ca: providerInputs.ca
          }
        })
      },
      auth: {
        ...(providerInputs.auth.type === ElasticSearchAuthTypes.ApiKey
          ? {
              apiKey: {
                api_key: providerInputs.auth.apiKey,
                id: providerInputs.auth.apiKeyId
              }
            }
          : {
              username: providerInputs.auth.username,
              password: providerInputs.auth.password
            })
      }
    });

    return connection;
  };

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const connection = await getClient(providerInputs);

    const infoResponse = await connection
      .info()
      .then(() => true)
      .catch(() => false);

    return infoResponse;
  };

  const create = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const connection = await getClient(providerInputs);

    const username = generateUsername();
    const password = generatePassword();

    await connection.security.putUser({
      username,
      password,
      full_name: "Managed by Infisical.com",
      roles: providerInputs.roles
    });

    await connection.close();
    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
  };

  const revoke = async (inputs: unknown, entityId: string) => {
    const providerInputs = await validateProviderInputs(inputs);
    const connection = await getClient(providerInputs);

    await connection.security.deleteUser({
      username: entityId
    });

    await connection.close();
    return { entityId };
  };

  const renew = async (inputs: unknown, entityId: string) => {
    // Do nothing
    return { entityId };
  };

  return {
    validateProviderInputs,
    validateConnection,
    create,
    revoke,
    renew
  };
};
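How a provider like this is exercised by the dynamic-secret machinery, sketched as a standalone lifecycle (the inputs object is illustrative; real values come from the stored provider configuration):

import { ElasticSearchProvider } from "./elastic-search";
import { ElasticSearchAuthTypes } from "./models";

const demo = async () => {
  const provider = ElasticSearchProvider();
  const inputs = {
    host: "https://es.example.com", // assumed publicly reachable; private hosts are rejected on cloud
    port: 9200,
    roles: ["superuser"],
    auth: { type: ElasticSearchAuthTypes.User, username: "elastic", password: "<password>" }
  };

  // validateConnection pings the cluster; create mints a short-lived ES user
  if (await provider.validateConnection(inputs)) {
    const lease = await provider.create(inputs);
    console.log(lease.data.DB_USERNAME); // hand these credentials to the requester
    await provider.revoke(inputs, lease.entityId); // delete the user when the lease ends
  }
};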
@@ -1,7 +1,11 @@
 import { AwsElastiCacheDatabaseProvider } from "./aws-elasticache";
 import { AwsIamProvider } from "./aws-iam";
 import { CassandraProvider } from "./cassandra";
+import { ElasticSearchProvider } from "./elastic-search";
 import { DynamicSecretProviders } from "./models";
+import { MongoAtlasProvider } from "./mongo-atlas";
+import { MongoDBProvider } from "./mongo-db";
+import { RabbitMqProvider } from "./rabbit-mq";
 import { RedisDatabaseProvider } from "./redis";
 import { SqlDatabaseProvider } from "./sql-database";

@@ -10,5 +14,9 @@ export const buildDynamicSecretProviders = () => ({
   [DynamicSecretProviders.Cassandra]: CassandraProvider(),
   [DynamicSecretProviders.AwsIam]: AwsIamProvider(),
   [DynamicSecretProviders.Redis]: RedisDatabaseProvider(),
-  [DynamicSecretProviders.AwsElastiCache]: AwsElastiCacheDatabaseProvider()
+  [DynamicSecretProviders.AwsElastiCache]: AwsElastiCacheDatabaseProvider(),
+  [DynamicSecretProviders.MongoAtlas]: MongoAtlasProvider(),
+  [DynamicSecretProviders.MongoDB]: MongoDBProvider(),
+  [DynamicSecretProviders.ElasticSearch]: ElasticSearchProvider(),
+  [DynamicSecretProviders.RabbitMq]: RabbitMqProvider()
 });
@@ -7,12 +7,16 @@ export enum SqlProviders {
   MsSQL = "mssql"
 }

+export enum ElasticSearchAuthTypes {
+  User = "user",
+  ApiKey = "api-key"
+}
+
 export const DynamicSecretRedisDBSchema = z.object({
   host: z.string().trim().toLowerCase(),
   port: z.number(),
   username: z.string().trim(), // this is often "default".
   password: z.string().trim().optional(),

   creationStatement: z.string().trim(),
   revocationStatement: z.string().trim(),
   renewStatement: z.string().trim().optional(),
@@ -30,6 +34,48 @@ export const DynamicSecretAwsElastiCacheSchema = z.object({
   ca: z.string().optional()
 });

+export const DynamicSecretElasticSearchSchema = z.object({
+  host: z.string().trim().min(1),
+  port: z.number(),
+  roles: z.array(z.string().trim().min(1)).min(1),
+
+  // two auth types "user, apikey"
+  auth: z.discriminatedUnion("type", [
+    z.object({
+      type: z.literal(ElasticSearchAuthTypes.User),
+      username: z.string().trim(),
+      password: z.string().trim()
+    }),
+    z.object({
+      type: z.literal(ElasticSearchAuthTypes.ApiKey),
+      apiKey: z.string().trim(),
+      apiKeyId: z.string().trim()
+    })
+  ]),
+
+  ca: z.string().optional()
+});
+
+export const DynamicSecretRabbitMqSchema = z.object({
+  host: z.string().trim().min(1),
+  port: z.number(),
+  tags: z.array(z.string().trim()).default([]),
+
+  username: z.string().trim().min(1),
+  password: z.string().trim().min(1),
+
+  ca: z.string().optional(),
+
+  virtualHost: z.object({
+    name: z.string().trim().min(1),
+    permissions: z.object({
+      read: z.string().trim().min(1),
+      write: z.string().trim().min(1),
+      configure: z.string().trim().min(1)
+    })
+  })
+});
+
 export const DynamicSecretSqlDBSchema = z.object({
   client: z.nativeEnum(SqlProviders),
   host: z.string().trim().toLowerCase(),
@@ -67,12 +113,69 @@ export const DynamicSecretAwsIamSchema = z.object({
   policyArns: z.string().trim().optional()
 });

+export const DynamicSecretMongoAtlasSchema = z.object({
+  adminPublicKey: z.string().trim().min(1).describe("Admin user public api key"),
+  adminPrivateKey: z.string().trim().min(1).describe("Admin user private api key"),
+  groupId: z
+    .string()
+    .trim()
+    .min(1)
+    .describe("Unique 24-hexadecimal digit string that identifies your project. This is same as project id"),
+  roles: z
+    .object({
+      collectionName: z.string().optional().describe("Collection on which this role applies."),
+      databaseName: z.string().min(1).describe("Database to which the user is granted access privileges."),
+      roleName: z
+        .string()
+        .min(1)
+        .describe(
+          ' Enum: "atlasAdmin" "backup" "clusterMonitor" "dbAdmin" "dbAdminAnyDatabase" "enableSharding" "read" "readAnyDatabase" "readWrite" "readWriteAnyDatabase" "<a custom role name>".Human-readable label that identifies a group of privileges assigned to a database user. This value can either be a built-in role or a custom role.'
+        )
+    })
+    .array()
+    .min(1),
+  scopes: z
+    .object({
+      name: z
+        .string()
+        .min(1)
+        .describe(
+          "Human-readable label that identifies the cluster or MongoDB Atlas Data Lake that this database user can access."
+        ),
+      type: z
+        .string()
+        .min(1)
+        .describe("Category of resource that this database user can access. Enum: CLUSTER, DATA_LAKE, STREAM")
+    })
+    .array()
+});
+
+export const DynamicSecretMongoDBSchema = z.object({
+  host: z.string().min(1).trim().toLowerCase(),
+  port: z.number().optional(),
+  username: z.string().min(1).trim(),
+  password: z.string().min(1).trim(),
+  database: z.string().min(1).trim(),
+  ca: z.string().min(1).optional(),
+  roles: z
+    .string()
+    .array()
+    .min(1)
+    .describe(
+      'Enum: "atlasAdmin" "backup" "clusterMonitor" "dbAdmin" "dbAdminAnyDatabase" "enableSharding" "read" "readAnyDatabase" "readWrite" "readWriteAnyDatabase" "<a custom role name>".Human-readable label that identifies a group of privileges assigned to a database user. This value can either be a built-in role or a custom role.'
+    )
+});
+
 export enum DynamicSecretProviders {
   SqlDatabase = "sql-database",
   Cassandra = "cassandra",
   AwsIam = "aws-iam",
   Redis = "redis",
-  AwsElastiCache = "aws-elasticache"
+  AwsElastiCache = "aws-elasticache",
+  MongoAtlas = "mongo-db-atlas",
+  ElasticSearch = "elastic-search",
+  MongoDB = "mongo-db",
+  RabbitMq = "rabbit-mq"
 }

 export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
@@ -80,7 +183,11 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
   z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.Redis), inputs: DynamicSecretRedisDBSchema }),
-  z.object({ type: z.literal(DynamicSecretProviders.AwsElastiCache), inputs: DynamicSecretAwsElastiCacheSchema })
+  z.object({ type: z.literal(DynamicSecretProviders.AwsElastiCache), inputs: DynamicSecretAwsElastiCacheSchema }),
+  z.object({ type: z.literal(DynamicSecretProviders.MongoAtlas), inputs: DynamicSecretMongoAtlasSchema }),
+  z.object({ type: z.literal(DynamicSecretProviders.ElasticSearch), inputs: DynamicSecretElasticSearchSchema }),
+  z.object({ type: z.literal(DynamicSecretProviders.MongoDB), inputs: DynamicSecretMongoDBSchema }),
+  z.object({ type: z.literal(DynamicSecretProviders.RabbitMq), inputs: DynamicSecretRabbitMqSchema })
 ]);

 export type TDynamicProviderFns = {
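Since DynamicSecretProviderSchema is a zod discriminated union on "type", a stored provider config can be validated and narrowed in one step. A small sketch (the config literal is made up):

import { DynamicSecretProviders, DynamicSecretProviderSchema } from "./models";

const raw: unknown = {
  type: "mongo-db",
  inputs: {
    host: "db.example.com",
    username: "root",
    password: "<password>",
    database: "admin",
    roles: ["readWrite"]
  }
};

const parsed = DynamicSecretProviderSchema.parse(raw);
if (parsed.type === DynamicSecretProviders.MongoDB) {
  // parsed.inputs is now typed as the MongoDB input shape, not the whole union
  console.log(parsed.inputs.host);
}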
146  backend/src/ee/services/dynamic-secret/providers/mongo-atlas.ts  Normal file
@@ -0,0 +1,146 @@
import axios, { AxiosError } from "axios";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { createDigestAuthRequestInterceptor } from "@app/lib/axios/digest-auth";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { DynamicSecretMongoAtlasSchema, TDynamicProviderFns } from "./models";

const generatePassword = (size = 48) => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
  return customAlphabet(charset, 48)(size);
};

const generateUsername = () => {
  return alphaNumericNanoId(32);
};

export const MongoAtlasProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretMongoAtlasSchema.parseAsync(inputs);
    return providerInputs;
  };

  const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
    const client = axios.create({
      baseURL: "https://cloud.mongodb.com/api/atlas",
      headers: {
        Accept: "application/vnd.atlas.2023-02-01+json",
        "Content-Type": "application/json"
      }
    });
    const digestAuth = createDigestAuthRequestInterceptor(
      client,
      providerInputs.adminPublicKey,
      providerInputs.adminPrivateKey
    );
    return digestAuth;
  };

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);

    const isConnected = await client({
      method: "GET",
      url: `v2/groups/${providerInputs.groupId}/databaseUsers`,
      params: { itemsPerPage: 1 }
    })
      .then(() => true)
      .catch((error) => {
        if ((error as AxiosError).response) {
          throw new Error(JSON.stringify((error as AxiosError).response?.data));
        }
        throw error;
      });
    return isConnected;
  };

  const create = async (inputs: unknown, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);

    const username = generateUsername();
    const password = generatePassword();
    const expiration = new Date(expireAt).toISOString();
    await client({
      method: "POST",
      url: `/v2/groups/${providerInputs.groupId}/databaseUsers`,
      data: {
        roles: providerInputs.roles,
        scopes: providerInputs.scopes,
        deleteAfterDate: expiration,
        username,
        password,
        databaseName: "admin",
        groupId: providerInputs.groupId
      }
    }).catch((error) => {
      if ((error as AxiosError).response) {
        throw new Error(JSON.stringify((error as AxiosError).response?.data));
      }
      throw error;
    });
    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
  };

  const revoke = async (inputs: unknown, entityId: string) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);

    const username = entityId;
    const isExisting = await client({
      method: "GET",
      url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`
    }).catch((err) => {
      if ((err as AxiosError).response?.status === 404) return false;
      throw err;
    });
    if (isExisting) {
      await client({
        method: "DELETE",
        url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`
      }).catch((error) => {
        if ((error as AxiosError).response) {
          throw new Error(JSON.stringify((error as AxiosError).response?.data));
        }
        throw error;
      });
    }

    return { entityId: username };
  };

  const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);

    const username = entityId;
    const expiration = new Date(expireAt).toISOString();

    await client({
      method: "PATCH",
      url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`,
      data: {
        deleteAfterDate: expiration,
        databaseName: "admin",
        groupId: providerInputs.groupId
      }
    }).catch((error) => {
      if ((error as AxiosError).response) {
        throw new Error(JSON.stringify((error as AxiosError).response?.data));
      }
      throw error;
    });
    return { entityId: username };
  };

  return {
    validateProviderInputs,
    validateConnection,
    create,
    revoke,
    renew
  };
};
116
backend/src/ee/services/dynamic-secret/providers/mongo-db.ts
Normal file
116
backend/src/ee/services/dynamic-secret/providers/mongo-db.ts
Normal file
@ -0,0 +1,116 @@
import { MongoClient } from "mongodb";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { DynamicSecretMongoDBSchema, TDynamicProviderFns } from "./models";

const generatePassword = (size = 48) => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
  return customAlphabet(charset, 48)(size);
};

const generateUsername = () => {
  return alphaNumericNanoId(32);
};

export const MongoDBProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const appCfg = getConfig();
    const providerInputs = await DynamicSecretMongoDBSchema.parseAsync(inputs);
    if (
      appCfg.isCloud &&
      // localhost
      // internal ips
      (providerInputs.host === "host.docker.internal" ||
        providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
        providerInputs.host.match(/^192\.168\.\d+\.\d+/))
    )
      throw new BadRequestError({ message: "Invalid db host" });

    if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
      throw new BadRequestError({ message: "Invalid db host" });
    }

    return providerInputs;
  };

  const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
    const isSrv = !providerInputs.port;
    const uri = isSrv
      ? `mongodb+srv://${providerInputs.host}`
      : `mongodb://${providerInputs.host}:${providerInputs.port}`;

    const client = new MongoClient(uri, {
      auth: {
        username: providerInputs.username,
        password: providerInputs.password
      },
      directConnection: !isSrv,
      ca: providerInputs.ca
    });
    return client;
  };

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);

    const isConnected = await client
      .db(providerInputs.database)
      .command({ ping: 1 })
      .then(() => true);

    await client.close();
    return isConnected;
  };

  const create = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);

    const username = generateUsername();
    const password = generatePassword();

    const db = client.db(providerInputs.database);

    await db.command({
      createUser: username,
      pwd: password,
      roles: providerInputs.roles
    });
    await client.close();

    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
  };

  const revoke = async (inputs: unknown, entityId: string) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);

    const username = entityId;

    const db = client.db(providerInputs.database);
    await db.command({
      dropUser: username
    });
    await client.close();

    return { entityId: username };
  };

  const renew = async (_inputs: unknown, entityId: string) => {
    return { entityId };
  };

  return {
    validateProviderInputs,
    validateConnection,
    create,
    revoke,
    renew
  };
};
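One design note worth surfacing: getClient treats a missing port as the signal to build a mongodb+srv:// seed-list URI and disables directConnection accordingly. A hedged usage sketch (values invented; the roles format must satisfy whatever DynamicSecretMongoDBSchema accepts):

// Sketch only, not the canonical caller.
const mongo = MongoDBProvider();
const inputs = {
  host: "cluster0.abc123.mongodb.net", // no port -> mongodb+srv:// and directConnection: false
  username: "rootUser",
  password: "rootPass",
  database: "admin",
  roles: ["readWrite"] // assumption: string role names are accepted by createUser
};
if (await mongo.validateConnection(inputs)) {
  const lease = await mongo.create(inputs); // runs db.command({ createUser, pwd, roles })
  await mongo.revoke(inputs, lease.entityId); // db.command({ dropUser })
}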
172 backend/src/ee/services/dynamic-secret/providers/rabbit-mq.ts (Normal file)
@@ -0,0 +1,172 @@
import axios, { Axios } from "axios";
import https from "https";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { DynamicSecretRabbitMqSchema, TDynamicProviderFns } from "./models";

const generatePassword = () => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
  return customAlphabet(charset, 64)();
};

const generateUsername = () => {
  return alphaNumericNanoId(32);
};

type TCreateRabbitMQUser = {
  axiosInstance: Axios;
  createUser: {
    username: string;
    password: string;
    tags: string[];
  };
  virtualHost: {
    name: string;
    permissions: {
      read: string;
      write: string;
      configure: string;
    };
  };
};

type TDeleteRabbitMqUser = {
  axiosInstance: Axios;
  usernameToDelete: string;
};

async function createRabbitMqUser({ axiosInstance, createUser, virtualHost }: TCreateRabbitMQUser): Promise<void> {
  try {
    // Create user
    const userUrl = `/users/${createUser.username}`;
    const userData = {
      password: createUser.password,
      tags: createUser.tags.join(",")
    };

    await axiosInstance.put(userUrl, userData);

    // Set permissions for the virtual host
    if (virtualHost) {
      const permissionData = {
        configure: virtualHost.permissions.configure,
        write: virtualHost.permissions.write,
        read: virtualHost.permissions.read
      };

      await axiosInstance.put(
        `/permissions/${encodeURIComponent(virtualHost.name)}/${createUser.username}`,
        permissionData
      );
    }
  } catch (error) {
    logger.error(error, "Error creating RabbitMQ user");
    throw error;
  }
}

async function deleteRabbitMqUser({ axiosInstance, usernameToDelete }: TDeleteRabbitMqUser) {
  await axiosInstance.delete(`users/${usernameToDelete}`);
  return { username: usernameToDelete };
}

export const RabbitMqProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const appCfg = getConfig();
    const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not

    const providerInputs = await DynamicSecretRabbitMqSchema.parseAsync(inputs);
    if (
      isCloud &&
      // localhost
      // internal ips
      (providerInputs.host === "host.docker.internal" ||
        providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
        providerInputs.host.match(/^192\.168\.\d+\.\d+/))
    ) {
      throw new BadRequestError({ message: "Invalid db host" });
    }
    if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
      throw new BadRequestError({ message: "Invalid db host" });
    }

    return providerInputs;
  };

  const getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
    const axiosInstance = axios.create({
      baseURL: `${removeTrailingSlash(providerInputs.host)}:${providerInputs.port}/api`,
      auth: {
        username: providerInputs.username,
        password: providerInputs.password
      },
      headers: {
        "Content-Type": "application/json"
      },

      ...(providerInputs.ca && {
        httpsAgent: new https.Agent({ ca: providerInputs.ca, rejectUnauthorized: false })
      })
    });

    return axiosInstance;
  };

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const connection = await getClient(providerInputs);

    const infoResponse = await connection.get("/whoami").then(() => true);

    return infoResponse;
  };

  const create = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const connection = await getClient(providerInputs);

    const username = generateUsername();
    const password = generatePassword();

    await createRabbitMqUser({
      axiosInstance: connection,
      virtualHost: providerInputs.virtualHost,
      createUser: {
        password,
        username,
        tags: [...(providerInputs.tags ?? []), "infisical-user"]
      }
    });

    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
  };

  const revoke = async (inputs: unknown, entityId: string) => {
    const providerInputs = await validateProviderInputs(inputs);
    const connection = await getClient(providerInputs);

    await deleteRabbitMqUser({ axiosInstance: connection, usernameToDelete: entityId });

    return { entityId };
  };

  const renew = async (inputs: unknown, entityId: string) => {
    // Do nothing
    return { entityId };
  };

  return {
    validateProviderInputs,
    validateConnection,
    create,
    revoke,
    renew
  };
};
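The RabbitMQ provider provisions users over the management HTTP API (PUT /users/:name, then PUT /permissions/:vhost/:user), so revocation only needs the generated username. A hedged sketch with invented values:

// Sketch; field names follow DynamicSecretRabbitMqSchema, values are made up.
const rabbit = RabbitMqProvider();
const inputs = {
  host: "https://rabbit.internal.example.com",
  port: 15672, // management API listener
  username: "provisioner",
  password: "provisioner-pass",
  virtualHost: { name: "/", permissions: { read: ".*", write: ".*", configure: ".*" } },
  tags: ["monitoring"] // merged with the fixed "infisical-user" tag at create time
};
const lease = await rabbit.create(inputs);
await rabbit.revoke(inputs, lease.entityId);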
@@ -1,4 +1,3 @@
/* eslint-disable no-console */
import handlebars from "handlebars";
import { Redis } from "ioredis";
import { customAlphabet } from "nanoid";

@@ -26,8 +26,10 @@ export const getDefaultOnPremFeatures = () => {
    status: null,
    trial_end: null,
    has_used_trial: true,
    secretApproval: false,
    secretApproval: true,
    secretRotation: true,
    caCrl: false
  };
};

export const setupLicenseRequestWithStore = () => {};
@@ -49,15 +49,15 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
  pkiEst: false
});

export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {
export const setupLicenseRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {
  let token: string;
  const licenceReq = axios.create({
  const licenseReq = axios.create({
    baseURL,
    timeout: 35 * 1000
    // signal: AbortSignal.timeout(60 * 1000)
  });

  const refreshLicence = async () => {
  const refreshLicense = async () => {
    const appCfg = getConfig();
    const {
      data: { token: authToken }
@@ -75,7 +75,7 @@ export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string
    return token;
  };

  licenceReq.interceptors.request.use(
  licenseReq.interceptors.request.use(
    (config) => {
      if (token && config.headers) {
        // eslint-disable-next-line no-param-reassign
@@ -86,7 +86,7 @@ export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string
    (err) => Promise.reject(err)
  );

  licenceReq.interceptors.response.use(
  licenseReq.interceptors.response.use(
    (response) => response,
    async (err) => {
      const originalRequest = (err as AxiosError).config;
@@ -97,15 +97,15 @@ export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string
      (originalRequest as any)._retry = true; // injected

      // refresh
      await refreshLicence();
      await refreshLicense();

      licenceReq.defaults.headers.common.Authorization = `Bearer ${token}`;
      return licenceReq(originalRequest!);
      licenseReq.defaults.headers.common.Authorization = `Bearer ${token}`;
      return licenseReq(originalRequest!);
    }

    return Promise.reject(err);
  }
  );

  return { request: licenceReq, refreshLicence };
  return { request: licenseReq, refreshLicense };
};
@@ -16,8 +16,8 @@ import { TOrgDALFactory } from "@app/services/org/org-dal";

import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { getDefaultOnPremFeatures, setupLicenceRequestWithStore } from "./licence-fns";
import { TLicenseDALFactory } from "./license-dal";
import { getDefaultOnPremFeatures, setupLicenseRequestWithStore } from "./license-fns";
import {
  InstanceType,
  TAddOrgPmtMethodDTO,
@@ -64,13 +64,13 @@ export const licenseServiceFactory = ({
  let onPremFeatures: TFeatureSet = getDefaultOnPremFeatures();

  const appCfg = getConfig();
  const licenseServerCloudApi = setupLicenceRequestWithStore(
  const licenseServerCloudApi = setupLicenseRequestWithStore(
    appCfg.LICENSE_SERVER_URL || "",
    LICENSE_SERVER_CLOUD_LOGIN,
    appCfg.LICENSE_SERVER_KEY || ""
  );

  const licenseServerOnPremApi = setupLicenceRequestWithStore(
  const licenseServerOnPremApi = setupLicenseRequestWithStore(
    appCfg.LICENSE_SERVER_URL || "",
    LICENSE_SERVER_ON_PREM_LOGIN,
    appCfg.LICENSE_KEY || ""
@@ -79,7 +79,7 @@ export const licenseServiceFactory = ({
  const init = async () => {
    try {
      if (appCfg.LICENSE_SERVER_KEY) {
        const token = await licenseServerCloudApi.refreshLicence();
        const token = await licenseServerCloudApi.refreshLicense();
        if (token) instanceType = InstanceType.Cloud;
        logger.info(`Instance type: ${InstanceType.Cloud}`);
        isValidLicense = true;
@@ -87,7 +87,7 @@ export const licenseServiceFactory = ({
      }

      if (appCfg.LICENSE_KEY) {
        const token = await licenseServerOnPremApi.refreshLicence();
        const token = await licenseServerOnPremApi.refreshLicense();
        if (token) {
          const {
            data: { currentPlan }
@@ -44,19 +44,18 @@ export const buildScimUser = ({
  email,
  firstName,
  lastName,
  groups = [],
  active
  active,
  createdAt,
  updatedAt
}: {
  orgMembershipId: string;
  username: string;
  email?: string | null;
  firstName: string | null | undefined;
  lastName: string | null | undefined;
  groups?: {
    value: string;
    display: string;
  }[];
  active: boolean;
  createdAt: Date;
  updatedAt: Date;
}): TScimUser => {
  const scimUser = {
    schemas: ["urn:ietf:params:scim:schemas:core:2.0:User"],
@@ -78,10 +77,10 @@
        ]
      : [],
    active,
    groups,
    meta: {
      resourceType: "User",
      location: null
      created: createdAt,
      lastModified: updatedAt
    }
  };

@@ -109,14 +108,18 @@ export const buildScimGroupList = ({

export const buildScimGroup = ({
  groupId,
  name,
  members
  members,
  updatedAt,
  createdAt
}: {
  groupId: string;
  name: string;
  members: {
    value: string;
    display: string;
    display?: string;
  }[];
  createdAt: Date;
  updatedAt: Date;
}): TScimGroup => {
  const scimGroup = {
    schemas: ["urn:ietf:params:scim:schemas:core:2.0:Group"],
@@ -125,7 +128,8 @@ export const buildScimGroup = ({
    members,
    meta: {
      resourceType: "Group",
      location: null
      created: createdAt,
      lastModified: updatedAt
    }
  };
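For orientation, buildScimGroup now emits RFC 7643 meta timestamps instead of a null location. A hedged sketch of the resulting shape (ids invented; the id/displayName wiring is inferred from the TScimGroup type further down, not shown in this hunk):

const group = buildScimGroup({
  groupId: "9f3b1c2e-...",
  name: "engineering",
  members: [{ value: "om-123", display: "Ada Lovelace" }],
  createdAt: new Date("2024-07-01T00:00:00Z"),
  updatedAt: new Date("2024-07-02T00:00:00Z")
});
// group.meta => { resourceType: "Group", created: <2024-07-01>, lastModified: <2024-07-02> }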
@@ -1,6 +1,7 @@
import { ForbiddenError } from "@casl/ability";
import slugify from "@sindresorhus/slugify";
import jwt from "jsonwebtoken";
import { scimPatch } from "scim-patch";

import { OrgMembershipRole, OrgMembershipStatus, TableName, TOrgMemberships, TUsers } from "@app/db/schemas";
import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
@@ -9,7 +10,6 @@ import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-grou
import { TScimDALFactory } from "@app/ee/services/scim/scim-dal";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ScimRequestError, UnauthorizedError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TOrgPermission } from "@app/lib/types";
import { AuthTokenType } from "@app/services/auth/auth-type";
@@ -32,14 +32,7 @@ import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { TProjectUserAdditionalPrivilegeDALFactory } from "../project-user-additional-privilege/project-user-additional-privilege-dal";
import {
  buildScimGroup,
  buildScimGroupList,
  buildScimUser,
  buildScimUserList,
  extractScimValueFromPath,
  parseScimFilter
} from "./scim-fns";
import { buildScimGroup, buildScimGroupList, buildScimUser, buildScimUserList, parseScimFilter } from "./scim-fns";
import {
  TCreateScimGroupDTO,
  TCreateScimTokenDTO,
@@ -64,12 +57,18 @@ type TScimServiceFactoryDep = {
  scimDAL: Pick<TScimDALFactory, "create" | "find" | "findById" | "deleteById">;
  userDAL: Pick<
    TUserDALFactory,
    "find" | "findOne" | "create" | "transaction" | "findUserEncKeyByUserIdsBatch" | "findById"
    "find" | "findOne" | "create" | "transaction" | "findUserEncKeyByUserIdsBatch" | "findById" | "updateById"
  >;
  userAliasDAL: Pick<TUserAliasDALFactory, "findOne" | "create" | "delete">;
  userAliasDAL: Pick<TUserAliasDALFactory, "findOne" | "create" | "delete" | "update">;
  orgDAL: Pick<
    TOrgDALFactory,
    "createMembership" | "findById" | "findMembership" | "deleteMembershipById" | "transaction" | "updateMembershipById"
    | "createMembership"
    | "findById"
    | "findMembership"
    | "findMembershipWithScimFilter"
    | "deleteMembershipById"
    | "transaction"
    | "updateMembershipById"
  >;
  orgMembershipDAL: Pick<TOrgMembershipDALFactory, "find" | "findOne" | "create" | "updateById" | "findById">;
  projectDAL: Pick<TProjectDALFactory, "find" | "findProjectGhostUser">;
@@ -193,7 +192,12 @@ export const scimServiceFactory = ({
  };

  // SCIM server endpoints
  const listScimUsers = async ({ startIndex, limit, filter, orgId }: TListScimUsersDTO): Promise<TListScimUsers> => {
  const listScimUsers = async ({
    startIndex = 0,
    limit = 100,
    filter,
    orgId
  }: TListScimUsersDTO): Promise<TListScimUsers> => {
    const org = await orgDAL.findById(orgId);

    if (!org.scimEnabled)
@@ -207,23 +211,20 @@ export const scimServiceFactory = ({
      ...(limit && { limit })
    };

    const users = await orgDAL.findMembership(
      {
        [`${TableName.OrgMembership}.orgId` as "id"]: orgId,
        ...parseScimFilter(filter)
      },
      findOpts
    );
    const users = await orgDAL.findMembershipWithScimFilter(orgId, filter, findOpts);

    const scimUsers = users.map(({ id, externalId, username, firstName, lastName, email, isActive }) =>
      buildScimUser({
        orgMembershipId: id ?? "",
        username: externalId ?? username,
        firstName: firstName ?? "",
        lastName: lastName ?? "",
        email,
        active: isActive
      })
    const scimUsers = users.map(
      ({ id, externalId, username, firstName, lastName, email, isActive, createdAt, updatedAt }) =>
        buildScimUser({
          orgMembershipId: id ?? "",
          username: externalId ?? username,
          firstName: firstName ?? "",
          lastName: lastName ?? "",
          email,
          active: isActive,
          createdAt,
          updatedAt
        })
    );

    return buildScimUserList({
@@ -258,11 +259,6 @@ export const scimServiceFactory = ({
        status: 403
      });

    const groupMembershipsInOrg = await userGroupMembershipDAL.findGroupMembershipsByUserIdInOrg(
      membership.userId,
      orgId
    );

    return buildScimUser({
      orgMembershipId: membership.id,
      username: membership.externalId ?? membership.username,
@@ -270,10 +266,8 @@ export const scimServiceFactory = ({
      firstName: membership.firstName,
      lastName: membership.lastName,
      active: membership.isActive,
      groups: groupMembershipsInOrg.map((group) => ({
        value: group.groupId,
        display: group.groupName
      }))
      createdAt: membership.createdAt,
      updatedAt: membership.updatedAt
    });
  };

@@ -322,7 +316,7 @@ export const scimServiceFactory = ({
          userId: userAlias.userId,
          inviteEmail: email,
          orgId,
          role: OrgMembershipRole.Member,
          role: OrgMembershipRole.NoAccess,
          status: user.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited, // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
          isActive: true
        },
@@ -349,7 +343,11 @@ export const scimServiceFactory = ({
    }

    if (!user) {
      const uniqueUsername = await normalizeUsername(`${firstName}-${lastName}`, userDAL);
      const uniqueUsername = await normalizeUsername(
        // external id is username
        `${firstName}-${lastName}`,
        userDAL
      );
      user = await userDAL.create(
        {
          username: serverCfg.trustSamlEmails ? email : uniqueUsername,
@@ -430,10 +428,13 @@ export const scimServiceFactory = ({
      firstName: createdUser.firstName,
      lastName: createdUser.lastName,
      email: createdUser.email ?? "",
      active: createdOrgMembership.isActive
      active: createdOrgMembership.isActive,
      createdAt: createdOrgMembership.createdAt,
      updatedAt: createdOrgMembership.updatedAt
    });
  };

  // partial
  const updateScimUser = async ({ orgMembershipId, orgId, operations }: TUpdateScimUserDTO) => {
    const [membership] = await orgDAL
      .findMembership({
@@ -459,37 +460,52 @@ export const scimServiceFactory = ({
        status: 403
      });

    let active = true;

    operations.forEach((operation) => {
      if (operation.op.toLowerCase() === "replace") {
        if (operation.path === "active" && operation.value === "False") {
          // azure scim op format
          active = false;
        } else if (typeof operation.value === "object" && operation.value.active === false) {
          // okta scim op format
          active = false;
        }
      }
    });

    if (!active) {
      await orgMembershipDAL.updateById(membership.id, {
        isActive: false
      });
    }

    return buildScimUser({
    const scimUser = buildScimUser({
      orgMembershipId: membership.id,
      username: membership.externalId ?? membership.username,
      email: membership.email,
      firstName: membership.firstName,
      lastName: membership.lastName,
      active
      firstName: membership.firstName,
      active: membership.isActive,
      username: membership.externalId ?? membership.username,
      createdAt: membership.createdAt,
      updatedAt: membership.updatedAt
    });
    scimPatch(scimUser, operations);

    const serverCfg = await getServerCfg();
    await userDAL.transaction(async (tx) => {
      await orgMembershipDAL.updateById(
        membership.id,
        {
          isActive: scimUser.active
        },
        tx
      );
      const hasEmailChanged = scimUser.emails[0].value !== membership.email;
      await userDAL.updateById(
        membership.userId,
        {
          firstName: scimUser.name.givenName,
          email: scimUser.emails[0].value,
          lastName: scimUser.name.familyName,
          isEmailVerified: hasEmailChanged ? serverCfg.trustSamlEmails : true
        },
        tx
      );
    });

    return scimUser;
  };
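updateScimUser above now defers operation semantics to the scim-patch package: it builds the current SCIM representation, lets scimPatch mutate it in place, and persists the delta in one transaction. A hedged example of a PATCH body it should absorb (shape per RFC 7644 PatchOp; values invented, and the provider-specific quirks the old hand-rolled loop handled are expected to be normalized by the library):

// Illustrative only.
const operations = [
  { op: "replace", path: "active", value: false },
  { op: "replace", path: "name.givenName", value: "Grace" }
];
// scimPatch(scimUser, operations) mutates scimUser; the transaction then
// writes scimUser.active, scimUser.name.*, and scimUser.emails[0].value back.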
  const replaceScimUser = async ({ orgMembershipId, active, orgId }: TReplaceScimUserDTO) => {
  const replaceScimUser = async ({
    orgMembershipId,
    active,
    orgId,
    lastName,
    firstName,
    email,
    externalId
  }: TReplaceScimUserDTO) => {
    const [membership] = await orgDAL
      .findMembership({
        [`${TableName.OrgMembership}.id` as "id"]: orgMembershipId,
@@ -514,26 +530,47 @@ export const scimServiceFactory = ({
        status: 403
      });

    await orgMembershipDAL.updateById(membership.id, {
      isActive: active
    const serverCfg = await getServerCfg();
    await userDAL.transaction(async (tx) => {
      await userAliasDAL.update(
        {
          orgId,
          aliasType: UserAliasType.SAML,
          userId: membership.userId
        },
        {
          externalId
        },
        tx
      );
      await orgMembershipDAL.updateById(
        membership.id,
        {
          isActive: active
        },
        tx
      );
      await userDAL.updateById(
        membership.userId,
        {
          firstName,
          email,
          lastName,
          isEmailVerified: serverCfg.trustSamlEmails
        },
        tx
      );
    });

    const groupMembershipsInOrg = await userGroupMembershipDAL.findGroupMembershipsByUserIdInOrg(
      membership.userId,
      orgId
    );

    return buildScimUser({
      orgMembershipId: membership.id,
      username: membership.externalId ?? membership.username,
      username: externalId,
      email: membership.email,
      firstName: membership.firstName,
      lastName: membership.lastName,
      active,
      groups: groupMembershipsInOrg.map((group) => ({
        value: group.groupId,
        display: group.groupName
      }))
      createdAt: membership.createdAt,
      updatedAt: membership.updatedAt
    });
  };

@@ -570,7 +607,7 @@ export const scimServiceFactory = ({
    return {}; // intentionally return empty object upon success
  };

  const listScimGroups = async ({ orgId, startIndex, limit, filter }: TListScimGroupsDTO) => {
  const listScimGroups = async ({ orgId, startIndex, limit, filter, isMembersExcluded }: TListScimGroupsDTO) => {
    const plan = await licenseService.getPlan(orgId);
    if (!plan.groups)
      throw new BadRequestError({
@@ -603,6 +640,21 @@ export const scimServiceFactory = ({
    );

    const scimGroups: TScimGroup[] = [];
    if (isMembersExcluded) {
      return buildScimGroupList({
        scimGroups: groups.map((group) =>
          buildScimGroup({
            groupId: group.id,
            name: group.name,
            members: [],
            createdAt: group.createdAt,
            updatedAt: group.updatedAt
          })
        ),
        startIndex,
        limit
      });
    }

    for await (const group of groups) {
      const members = await userGroupMembershipDAL.findGroupMembershipsByGroupIdInOrg(group.id, orgId);
@@ -612,7 +664,9 @@ export const scimServiceFactory = ({
        members: members.map((member) => ({
          value: member.orgMembershipId,
          display: `${member.firstName ?? ""} ${member.lastName ?? ""}`
        }))
        })),
        createdAt: group.createdAt,
        updatedAt: group.updatedAt
      });
      scimGroups.push(scimGroup);
    }
@@ -696,7 +750,9 @@ export const scimServiceFactory = ({
      members: orgMemberships.map(({ id, firstName, lastName }) => ({
        value: id,
        display: `${firstName} ${lastName}`
      }))
      })),
      createdAt: newGroup.group.createdAt,
      updatedAt: newGroup.group.updatedAt
    });
  };

@@ -739,31 +795,17 @@ export const scimServiceFactory = ({
      members: orgMemberships.map(({ id, firstName, lastName }) => ({
        value: id,
        display: `${firstName} ${lastName}`
      }))
      })),
      createdAt: group.createdAt,
      updatedAt: group.updatedAt
    });
  };

  const updateScimGroupNamePut = async ({ groupId, orgId, displayName, members }: TUpdateScimGroupNamePutDTO) => {
    const plan = await licenseService.getPlan(orgId);
    if (!plan.groups)
      throw new BadRequestError({
        message: "Failed to update SCIM group due to plan restriction. Upgrade plan to update SCIM group."
      });

    const org = await orgDAL.findById(orgId);
    if (!org) {
      throw new ScimRequestError({
        detail: "Organization Not Found",
        status: 404
      });
    }

    if (!org.scimEnabled)
      throw new ScimRequestError({
        detail: "SCIM is disabled for the organization",
        status: 403
      });

  const $replaceGroupDAL = async (
    groupId: string,
    orgId: string,
    { displayName, members = [] }: { displayName: string; members: { value: string }[] }
  ) => {
    const updatedGroup = await groupDAL.transaction(async (tx) => {
      const [group] = await groupDAL.update(
        {
@@ -782,74 +824,96 @@ export const scimServiceFactory = ({
        });
      }

      if (members) {
        const orgMemberships = await orgMembershipDAL.find({
          $in: {
            id: members.map((member) => member.value)
          }
      const orgMemberships = members.length
        ? await orgMembershipDAL.find({
            $in: {
              id: members.map((member) => member.value)
            }
          })
        : [];

      const membersIdsSet = new Set(orgMemberships.map((orgMembership) => orgMembership.userId));
      const userGroupMembers = await userGroupMembershipDAL.find({
        groupId: group.id
      });
      const directMemberUserIds = userGroupMembers.filter((el) => !el.isPending).map((membership) => membership.userId);

      const pendingGroupAdditionsUserIds = userGroupMembers
        .filter((el) => el.isPending)
        .map((pendingGroupAddition) => pendingGroupAddition.userId);

      const allMembersUserIds = directMemberUserIds.concat(pendingGroupAdditionsUserIds);
      const allMembersUserIdsSet = new Set(allMembersUserIds);

      const toAddUserIds = orgMemberships.filter((member) => !allMembersUserIdsSet.has(member.userId as string));
      const toRemoveUserIds = allMembersUserIds.filter((userId) => !membersIdsSet.has(userId));

      if (toAddUserIds.length) {
        await addUsersToGroupByUserIds({
          group,
          userIds: toAddUserIds.map((member) => member.userId as string),
          userDAL,
          userGroupMembershipDAL,
          orgDAL,
          groupProjectDAL,
          projectKeyDAL,
          projectDAL,
          projectBotDAL,
          tx
        });
      }

      const membersIdsSet = new Set(orgMemberships.map((orgMembership) => orgMembership.userId));

      const directMemberUserIds = (
        await userGroupMembershipDAL.find({
          groupId: group.id,
          isPending: false
        })
      ).map((membership) => membership.userId);

      const pendingGroupAdditionsUserIds = (
        await userGroupMembershipDAL.find({
          groupId: group.id,
          isPending: true
        })
      ).map((pendingGroupAddition) => pendingGroupAddition.userId);

      const allMembersUserIds = directMemberUserIds.concat(pendingGroupAdditionsUserIds);
      const allMembersUserIdsSet = new Set(allMembersUserIds);

      const toAddUserIds = orgMemberships.filter((member) => !allMembersUserIdsSet.has(member.userId as string));
      const toRemoveUserIds = allMembersUserIds.filter((userId) => !membersIdsSet.has(userId));

      if (toAddUserIds.length) {
        await addUsersToGroupByUserIds({
          group,
          userIds: toAddUserIds.map((member) => member.userId as string),
          userDAL,
          userGroupMembershipDAL,
          orgDAL,
          groupProjectDAL,
          projectKeyDAL,
          projectDAL,
          projectBotDAL,
          tx
        });
      }

      if (toRemoveUserIds.length) {
        await removeUsersFromGroupByUserIds({
          group,
          userIds: toRemoveUserIds,
          userDAL,
          userGroupMembershipDAL,
          groupProjectDAL,
          projectKeyDAL,
          tx
        });
      }
      if (toRemoveUserIds.length) {
        await removeUsersFromGroupByUserIds({
          group,
          userIds: toRemoveUserIds,
          userDAL,
          userGroupMembershipDAL,
          groupProjectDAL,
          projectKeyDAL,
          tx
        });
      }

      return group;
    });

    return updatedGroup;
  };

  const replaceScimGroup = async ({ groupId, orgId, displayName, members }: TUpdateScimGroupNamePutDTO) => {
    const plan = await licenseService.getPlan(orgId);
    if (!plan.groups)
      throw new BadRequestError({
        message: "Failed to update SCIM group due to plan restriction. Upgrade plan to update SCIM group."
      });

    const org = await orgDAL.findById(orgId);
    if (!org) {
      throw new ScimRequestError({
        detail: "Organization Not Found",
        status: 404
      });
    }

    if (!org.scimEnabled)
      throw new ScimRequestError({
        detail: "SCIM is disabled for the organization",
        status: 403
      });

    const updatedGroup = await $replaceGroupDAL(groupId, orgId, { displayName, members });

    return buildScimGroup({
      groupId: updatedGroup.id,
      name: updatedGroup.name,
      members
      members,
      updatedAt: updatedGroup.updatedAt,
      createdAt: updatedGroup.createdAt
    });
  };

  const updateScimGroupNamePatch = async ({ groupId, orgId, operations }: TUpdateScimGroupNamePatchDTO) => {
  const updateScimGroup = async ({ groupId, orgId, operations }: TUpdateScimGroupNamePatchDTO) => {
    const plan = await licenseService.getPlan(orgId);
    if (!plan.groups)
      throw new BadRequestError({
@@ -871,7 +935,7 @@ export const scimServiceFactory = ({
        status: 403
      });

    let group = await groupDAL.findOne({
    const group = await groupDAL.findOne({
      id: groupId,
      orgId
    });
@@ -883,64 +947,28 @@
      });
    }

    for await (const operation of operations) {
      if (operation.op === "replace" || operation.op === "Replace") {
        group = await groupDAL.updateById(group.id, {
          name: operation.value.displayName
        });
      } else if (operation.op === "add" || operation.op === "Add") {
        try {
          const orgMemberships = await orgMembershipDAL.find({
            $in: {
              id: operation.value.map((member) => member.value)
            }
          });

          await addUsersToGroupByUserIds({
            group,
            userIds: orgMemberships.map((membership) => membership.userId as string),
            userDAL,
            userGroupMembershipDAL,
            orgDAL,
            groupProjectDAL,
            projectKeyDAL,
            projectDAL,
            projectBotDAL
          });
        } catch {
          logger.info("Repeat SCIM user-group add operation");
        }
      } else if (operation.op === "remove" || operation.op === "Remove") {
        const orgMembershipId = extractScimValueFromPath(operation.path);
        if (!orgMembershipId) throw new ScimRequestError({ detail: "Invalid path value", status: 400 });
        const orgMembership = await orgMembershipDAL.findById(orgMembershipId);
        if (!orgMembership) throw new ScimRequestError({ detail: "Org Membership Not Found", status: 400 });
        await removeUsersFromGroupByUserIds({
          group,
          userIds: [orgMembership.userId as string],
          userDAL,
          userGroupMembershipDAL,
          groupProjectDAL,
          projectKeyDAL
        });
      } else {
        throw new ScimRequestError({
          detail: "Invalid Operation",
          status: 400
        });
      }
    }

    const members = await userGroupMembershipDAL.findGroupMembershipsByGroupIdInOrg(group.id, orgId);

    return buildScimGroup({
    const scimGroup = buildScimGroup({
      groupId: group.id,
      name: group.name,
      members: members.map((member) => ({
        value: member.orgMembershipId
      })),
      createdAt: group.createdAt,
      updatedAt: group.updatedAt
    });
    scimPatch(scimGroup, operations);
    // remove members is a weird case not following scim convention
    await $replaceGroupDAL(groupId, orgId, { displayName: scimGroup.displayName, members: scimGroup.members });

    const updatedScimMembers = await userGroupMembershipDAL.findGroupMembershipsByGroupIdInOrg(group.id, orgId);
    return {
      ...scimGroup,
      members: updatedScimMembers.map((member) => ({
        value: member.orgMembershipId,
        display: `${member.firstName ?? ""} ${member.lastName ?? ""}`
      }))
    });
    };
  };

  const deleteScimGroup = async ({ groupId, orgId }: TDeleteScimGroupDTO) => {
@@ -1016,8 +1044,8 @@
    createScimGroup,
    getScimGroup,
    deleteScimGroup,
    updateScimGroupNamePut,
    updateScimGroupNamePatch,
    replaceScimGroup,
    updateScimGroup,
    fnValidateScimToken
  };
};
@@ -1,3 +1,5 @@
import { ScimPatchOperation } from "scim-patch";

import { TOrgPermission } from "@app/lib/types";

export type TCreateScimTokenDTO = {
@@ -34,29 +36,25 @@ export type TGetScimUserDTO = {
export type TCreateScimUserDTO = {
  externalId: string;
  email?: string;
  firstName: string;
  lastName: string;
  firstName?: string;
  lastName?: string;
  orgId: string;
};

export type TUpdateScimUserDTO = {
  orgMembershipId: string;
  orgId: string;
  operations: {
    op: string;
    path?: string;
    value?:
      | string
      | {
          active: boolean;
        };
  }[];
  operations: ScimPatchOperation[];
};

export type TReplaceScimUserDTO = {
  orgMembershipId: string;
  active: boolean;
  orgId: string;
  email?: string;
  firstName?: string;
  lastName?: string;
  externalId: string;
};

export type TDeleteScimUserDTO = {
@@ -69,6 +67,7 @@ export type TListScimGroupsDTO = {
  filter?: string;
  limit: number;
  orgId: string;
  isMembersExcluded?: boolean;
};

export type TListScimGroups = {
@@ -107,31 +106,7 @@ export type TUpdateScimGroupNamePutDTO = {
export type TUpdateScimGroupNamePatchDTO = {
  groupId: string;
  orgId: string;
  operations: (TRemoveOp | TReplaceOp | TAddOp)[];
};

// akhilmhdh: I know, this is done due to lack of time. Need to change later to support as normalized rather than like this
// Forgive akhil blame tony
type TReplaceOp = {
  op: "replace" | "Replace";
  value: {
    id: string;
    displayName: string;
  };
};

type TRemoveOp = {
  op: "remove" | "Remove";
  path: string;
};

type TAddOp = {
  op: "add" | "Add";
  path: string;
  value: {
    value: string;
    display?: string;
  }[];
  operations: ScimPatchOperation[];
};

export type TDeleteScimGroupDTO = {
@@ -160,13 +135,10 @@ export type TScimUser = {
    type: string;
  }[];
  active: boolean;
  groups: {
    value: string;
    display: string;
  }[];
  meta: {
    resourceType: string;
    location: null;
    created: Date;
    lastModified: Date;
  };
};

@@ -176,10 +148,11 @@ export type TScimGroup = {
  displayName: string;
  members: {
    value: string;
    display: string;
    display?: string;
  }[];
  meta: {
    resourceType: string;
    location: null;
    created: Date;
    lastModified: Date;
  };
};
@@ -16,6 +16,7 @@ import {
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";

export type TSnapshotDALFactory = ReturnType<typeof snapshotDALFactory>;

@@ -599,6 +600,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
  const pruneExcessSnapshots = async () => {
    const PRUNE_FOLDER_BATCH_SIZE = 10000;

    logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret snapshots started`);
    try {
      let uuidOffset = "00000000-0000-0000-0000-000000000000";
      // cleanup snapshots from current folders
@@ -714,6 +716,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
    } catch (error) {
      throw new DatabaseError({ error, name: "SnapshotPrune" });
    }
    logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret snapshots completed`);
  };

  // special query for migration for secret v2
@@ -964,6 +964,10 @@ export const INTEGRATION = {
  shouldAutoRedeploy: "Used by Render to trigger auto deploy.",
  secretGCPLabel: "The label for GCP secrets.",
  secretAWSTag: "The tags for AWS secrets.",
  githubVisibility:
    "Define where the secrets from the Github Integration should be visible. Option 'selected' lets you directly define which repositories to sync secrets to.",
  githubVisibilityRepoIds:
    "The repository IDs to sync secrets to when using the Github Integration. Only applicable when using Organization scope, and visibility is set to 'selected'",
  kmsKeyId: "The ID of the encryption key from AWS KMS.",
  shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store.",
  shouldMaskSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Masked'.",
@@ -1033,14 +1037,18 @@ export const CERTIFICATE_AUTHORITIES = {
    maxPathLength:
      "The maximum number of intermediate CAs that may follow this CA in the certificate / CA chain. A maxPathLength of -1 implies no path limit on the chain.",
    keyAlgorithm:
      "The type of public key algorithm and size, in bits, of the key pair for the CA; when you create an intermediate CA, you must use a key algorithm supported by the parent CA."
      "The type of public key algorithm and size, in bits, of the key pair for the CA; when you create an intermediate CA, you must use a key algorithm supported by the parent CA.",
    requireTemplateForIssuance:
      "Whether or not certificates for this CA can only be issued through certificate templates."
  },
  GET: {
    caId: "The ID of the CA to get"
  },
  UPDATE: {
    caId: "The ID of the CA to update",
    status: "The status of the CA to update to. This can be one of active or disabled"
    status: "The status of the CA to update to. This can be one of active or disabled",
    requireTemplateForIssuance:
      "Whether or not certificates for this CA can only be issued through certificate templates."
  },
  DELETE: {
    caId: "The ID of the CA to delete"
57 backend/src/lib/axios/digest-auth.ts (Normal file)
@@ -0,0 +1,57 @@
import crypto from "node:crypto";

import { AxiosError, AxiosInstance, AxiosRequestConfig } from "axios";

export const createDigestAuthRequestInterceptor = (
  axiosInstance: AxiosInstance,
  username: string,
  password: string
) => {
  let nc = 0;

  return async (opts: AxiosRequestConfig) => {
    try {
      return await axiosInstance.request(opts);
    } catch (err) {
      const error = err as AxiosError;
      const authHeader = (error?.response?.headers?.["www-authenticate"] as string) || "";

      if (error?.response?.status !== 401 || !authHeader?.includes("nonce")) {
        return Promise.reject(error.message);
      }

      if (!error.config) {
        return Promise.reject(error);
      }

      const authDetails = authHeader.split(",").map((el) => el.split("="));
      nc += 1;
      const nonceCount = nc.toString(16).padStart(8, "0");
      const cnonce = crypto.randomBytes(24).toString("hex");
      const realm = authDetails.find((el) => el[0].toLowerCase().indexOf("realm") > -1)?.[1].replace(/"/g, "");
      const nonce = authDetails.find((el) => el[0].toLowerCase().indexOf("nonce") > -1)?.[1].replace(/"/g, "");
      const ha1 = crypto.createHash("md5").update(`${username}:${realm}:${password}`).digest("hex");
      const path = opts.url;

      const ha2 = crypto
        .createHash("md5")
        .update(`${opts.method ?? "GET"}:${path}`)
        .digest("hex");

      const response = crypto
        .createHash("md5")
        .update(`${ha1}:${nonce}:${nonceCount}:${cnonce}:auth:${ha2}`)
        .digest("hex");
      const authorization = `Digest username="${username}",realm="${realm}",nonce="${nonce}",uri="${path}",qop="auth",algorithm="MD5",response="${response}",nc="${nonceCount}",cnonce="${cnonce}"`;

      if (opts.headers) {
        // eslint-disable-next-line
        opts.headers.authorization = authorization;
      } else {
        // eslint-disable-next-line
        opts.headers = { authorization };
      }
      return axiosInstance.request(opts);
    }
  };
};
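This helper implements HTTP Digest authentication (MD5 with qop=auth) as a retry wrapper rather than a true axios interceptor: the first request is allowed to fail with a 401, and the WWW-Authenticate challenge is answered on a second attempt. A hedged wiring sketch, with the base URL and key names purely illustrative:

import axios from "axios";

// Sketch: wrap a bare instance; callers use the returned function, not instance.request.
const instance = axios.create({ baseURL: "https://cloud.mongodb.com/api/atlas" }); // illustrative
const request = createDigestAuthRequestInterceptor(instance, "publicKey", "privateKey");

// First call receives 401 + challenge; the wrapper computes the Digest
// response and retries with the Authorization header set.
const res = await request({ method: "GET", url: "/v2/groups" });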
121 backend/src/lib/knex/scim.ts (Normal file)
@@ -0,0 +1,121 @@
import { Knex } from "knex";
import { Compare, Filter, parse } from "scim2-parse-filter";

const appendParentToGroupingOperator = (parentPath: string, filter: Filter) => {
  if (filter.op !== "[]" && filter.op !== "and" && filter.op !== "or" && filter.op !== "not") {
    return { ...filter, attrPath: `${parentPath}.${(filter as Compare).attrPath}` };
  }
  return filter;
};

export const generateKnexQueryFromScim = (
  rootQuery: Knex.QueryBuilder,
  rootScimFilter: string,
  getAttributeField: (attr: string) => string | null
) => {
  const scimRootFilterAst = parse(rootScimFilter);
  const stack = [
    {
      scimFilterAst: scimRootFilterAst,
      query: rootQuery
    }
  ];

  while (stack.length) {
    const { scimFilterAst, query } = stack.pop()!;
    switch (scimFilterAst.op) {
      case "eq": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.where(attrPath, scimFilterAst.compValue);
        break;
      }
      case "pr": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.whereNotNull(attrPath);
        break;
      }
      case "gt": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.where(attrPath, ">", scimFilterAst.compValue);
        break;
      }
      case "ge": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.where(attrPath, ">=", scimFilterAst.compValue);
        break;
      }
      case "lt": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.where(attrPath, "<", scimFilterAst.compValue);
        break;
      }
      case "le": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.where(attrPath, "<=", scimFilterAst.compValue);
        break;
      }
      case "sw": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.whereILike(attrPath, `${scimFilterAst.compValue}%`);
        break;
      }
      case "ew": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.whereILike(attrPath, `%${scimFilterAst.compValue}`);
        break;
      }
      case "co": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.whereILike(attrPath, `%${scimFilterAst.compValue}%`);
        break;
      }
      case "ne": {
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.whereNot(attrPath, "=", scimFilterAst.compValue);
        break;
      }
      case "and": {
        void query.andWhere((subQueryBuilder) => {
          scimFilterAst.filters.forEach((el) => {
            stack.push({
              query: subQueryBuilder,
              scimFilterAst: el
            });
          });
        });
        break;
      }
      case "or": {
        void query.orWhere((subQueryBuilder) => {
          scimFilterAst.filters.forEach((el) => {
            stack.push({
              query: subQueryBuilder,
              scimFilterAst: el
            });
          });
        });
        break;
      }
      case "not": {
        void query.whereNot((subQueryBuilder) => {
          stack.push({
            query: subQueryBuilder,
            scimFilterAst: scimFilterAst.filter
          });
        });
        break;
      }
      case "[]": {
        void query.whereNot((subQueryBuilder) => {
          stack.push({
            query: subQueryBuilder,
            scimFilterAst: appendParentToGroupingOperator(scimFilterAst.attrPath, scimFilterAst.valFilter)
          });
        });
        break;
      }
      default:
        break;
    }
  }
};
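The translator walks the parsed SCIM filter AST with an explicit stack and mutates the Knex builder in place; unknown attributes are simply skipped because getAttributeField returns null for them. A hedged caller sketch (table and column names invented):

// Sketch: translate `userName eq "jdoe" and active eq true` into WHERE clauses.
const query = db("org_memberships"); // assumption: `db` is a configured knex instance
generateKnexQueryFromScim(query, 'userName eq "jdoe" and active eq true', (attr) => {
  switch (attr) {
    case "userName":
      return "users.username"; // invented column mapping
    case "active":
      return "org_memberships.isActive"; // invented column mapping
    default:
      return null; // unmapped attributes are ignored, per the switch above
  }
});
const rows = await query;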
@@ -1,2 +1,2 @@
export { isDisposableEmail } from "./validate-email";
export { validateLocalIps } from "./validate-url";
export { blockLocalAndPrivateIpAddresses } from "./validate-url";
@@ -1,7 +1,7 @@
import { getConfig } from "../config/env";
import { BadRequestError } from "../errors";

export const validateLocalIps = (url: string) => {
export const blockLocalAndPrivateIpAddresses = (url: string) => {
  const validUrl = new URL(url);
  const appCfg = getConfig();
  // on cloud local ips are not allowed
@@ -10,7 +10,7 @@ import fastifyFormBody from "@fastify/formbody";
import helmet from "@fastify/helmet";
import type { FastifyRateLimitOptions } from "@fastify/rate-limit";
import ratelimiter from "@fastify/rate-limit";
import fasitfy from "fastify";
import fastify from "fastify";
import { Knex } from "knex";
import { Logger } from "pino";

@@ -39,7 +39,7 @@ type TMain = {
// Run the server!
export const main = async ({ db, smtp, logger, queue, keyStore }: TMain) => {
  const appCfg = getConfig();
  const server = fasitfy({
  const server = fastify({
    logger: appCfg.NODE_ENV === "test" ? false : logger,
    trustProxy: true,
    connectionTimeout: 30 * 1000,
@@ -49,6 +49,21 @@ export const main = async ({ db, smtp, logger, queue, keyStore }: TMain) => {
  server.setValidatorCompiler(validatorCompiler);
  server.setSerializerCompiler(serializerCompiler);

  server.addContentTypeParser("application/scim+json", { parseAs: "string" }, (_, body, done) => {
    try {
      const strBody = body instanceof Buffer ? body.toString() : body;
      if (!strBody) {
        done(null, undefined);
        return;
      }
      const json: unknown = JSON.parse(strBody);
      done(null, json);
    } catch (err) {
      const error = err as Error;
      done(error, undefined);
    }
  });

  try {
    await server.register<FastifyCookieOptions>(cookie, {
      secret: appCfg.COOKIE_SECRET_SIGN_KEY
@@ -468,7 +468,7 @@ export const registerRoutes = async (
    projectMembershipDAL
  });

  const loginService = authLoginServiceFactory({ userDAL, smtpService, tokenService, orgDAL, tokenDAL: authTokenDAL });
  const loginService = authLoginServiceFactory({ userDAL, smtpService, tokenService, orgDAL });
  const passwordService = authPaswordServiceFactory({
    tokenService,
    smtpService,
@@ -1,7 +1,7 @@
import ms from "ms";
import { z } from "zod";

import { CertificateAuthoritiesSchema } from "@app/db/schemas";
import { CertificateAuthoritiesSchema, CertificateTemplatesSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
@@ -42,7 +42,11 @@ export const registerCaRouter = async (server: FastifyZodProvider) => {
          keyAlgorithm: z
            .nativeEnum(CertKeyAlgorithm)
            .default(CertKeyAlgorithm.RSA_2048)
            .describe(CERTIFICATE_AUTHORITIES.CREATE.keyAlgorithm)
            .describe(CERTIFICATE_AUTHORITIES.CREATE.keyAlgorithm),
          requireTemplateForIssuance: z
            .boolean()
            .default(false)
            .describe(CERTIFICATE_AUTHORITIES.CREATE.requireTemplateForIssuance)
        })
        .refine(
          (data) => {
@@ -148,7 +152,11 @@ export const registerCaRouter = async (server: FastifyZodProvider) => {
        caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.UPDATE.caId)
      }),
      body: z.object({
        status: z.enum([CaStatus.ACTIVE, CaStatus.DISABLED]).optional().describe(CERTIFICATE_AUTHORITIES.UPDATE.status)
        status: z.enum([CaStatus.ACTIVE, CaStatus.DISABLED]).optional().describe(CERTIFICATE_AUTHORITIES.UPDATE.status),
        requireTemplateForIssuance: z
          .boolean()
          .optional()
          .describe(CERTIFICATE_AUTHORITIES.CREATE.requireTemplateForIssuance)
      }),
      response: {
        200: z.object({
@@ -700,6 +708,51 @@ export const registerCaRouter = async (server: FastifyZodProvider) => {
    }
  });

  server.route({
    method: "GET",
    url: "/:caId/certificate-templates",
    config: {
      rateLimit: readLimit
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    schema: {
      description: "Get list of certificate templates for the CA",
      params: z.object({
        caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.caId)
      }),
      response: {
        200: z.object({
          certificateTemplates: CertificateTemplatesSchema.array()
        })
      }
    },
    handler: async (req) => {
      const { certificateTemplates, ca } = await server.services.certificateAuthority.getCaCertificateTemplates({
        caId: req.params.caId,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: ca.projectId,
        event: {
          type: EventType.GET_CA_CERTIFICATE_TEMPLATES,
          metadata: {
            caId: ca.id,
            dn: ca.dn
          }
        }
      });

      return {
        certificateTemplates
      };
    }
  });

  server.route({
    method: "GET",
    url: "/:caId/crls",
@@ -48,7 +48,7 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
  });

  server.route({
    method: "GET",
    method: "POST",
    url: "/public/:id",
    config: {
      rateLimit: publicEndpointLimit
@@ -57,38 +57,37 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
      params: z.object({
        id: z.string().uuid()
      }),
      querystring: z.object({
        hashedHex: z.string().min(1)
      body: z.object({
        hashedHex: z.string().min(1),
        password: z.string().optional()
      }),
      response: {
        200: SecretSharingSchema.pick({
          encryptedValue: true,
          iv: true,
          tag: true,
          expiresAt: true,
          expiresAfterViews: true,
          accessType: true
        }).extend({
          orgName: z.string().optional()
        200: z.object({
          isPasswordProtected: z.boolean(),
          secret: SecretSharingSchema.pick({
            encryptedValue: true,
            iv: true,
            tag: true,
            expiresAt: true,
            expiresAfterViews: true,
            accessType: true
          })
            .extend({
              orgName: z.string().optional()
            })
            .optional()
        })
      }
    },
    handler: async (req) => {
      const sharedSecret = await req.server.services.secretSharing.getActiveSharedSecretById({
      const sharedSecret = await req.server.services.secretSharing.getSharedSecretById({
        sharedSecretId: req.params.id,
        hashedHex: req.query.hashedHex,
        hashedHex: req.body.hashedHex,
        password: req.body.password,
        orgId: req.permission?.orgId
      });
      if (!sharedSecret) return undefined;
      return {
        encryptedValue: sharedSecret.encryptedValue,
        iv: sharedSecret.iv,
        tag: sharedSecret.tag,
        expiresAt: sharedSecret.expiresAt,
        expiresAfterViews: sharedSecret.expiresAfterViews,
        accessType: sharedSecret.accessType,
        orgName: sharedSecret.orgName
      };

      return sharedSecret;
    }
  });

@@ -101,6 +100,7 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
    schema: {
      body: z.object({
        encryptedValue: z.string(),
        password: z.string().optional(),
        hashedHex: z.string(),
        iv: z.string(),
        tag: z.string(),
@@ -131,6 +131,7 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
    schema: {
      body: z.object({
        name: z.string().max(50).optional(),
        password: z.string().optional(),
        encryptedValue: z.string(),
        hashedHex: z.string(),
        iv: z.string(),
|
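Since the public read route now uses POST with hashedHex and an optional password in the body, a consumer has to handle the password-gate response. A rough sketch, with the route prefix assumed for illustration:

declare const BASE_URL: string; // assumed deployment URL

// Sketch of consuming the reworked POST /public/:id route.
const revealSharedSecret = async (id: string, hashedHex: string, password?: string) => {
  const res = await fetch(`${BASE_URL}/api/v1/secret-sharing/public/${id}`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ hashedHex, password })
  });
  const data = (await res.json()) as { isPasswordProtected: boolean; secret?: Record<string, unknown> };
  if (data.isPasswordProtected && !data.secret) {
    // password not yet supplied: prompt the user and call again (a wrong password yields a 401)
    return null;
  }
  return data.secret; // encryptedValue, iv, tag, expiresAt, expiresAfterViews, accessType, orgName?
};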
@ -57,7 +57,7 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
cb(null, { isUserCompleted, providerAuthToken });
} catch (error) {
logger.error(error);
cb(null, false);
cb(error as Error, false);
}
}
)
@ -91,7 +91,7 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
return cb(null, { isUserCompleted, providerAuthToken });
} catch (error) {
logger.error(error);
cb(null, false);
cb(error as Error, false);
}
}
)
@ -126,7 +126,7 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
return cb(null, { isUserCompleted, providerAuthToken });
} catch (error) {
logger.error(error);
cb(null, false);
cb(error as Error, false);
}
}
)
@ -42,7 +42,8 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
},
schema: {
body: z.object({
organizationId: z.string().trim()
organizationId: z.string().trim(),
userAgent: z.enum(["cli"]).optional()
}),
response: {
200: z.object({
@ -53,7 +54,7 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
handler: async (req, res) => {
const cfg = getConfig();
const tokens = await server.services.login.selectOrganization({
userAgent: req.headers["user-agent"],
userAgent: req.body.userAgent ?? req.headers["user-agent"],
authJwtToken: req.headers.authorization,
organizationId: req.body.organizationId,
ipAddress: req.realIp
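The new optional userAgent field appears to let the CLI flow mark the resulting session as a CLI session even when the request rides through a browser; the server falls back to the HTTP User-Agent header when it is omitted. An illustrative request body (the organizationId is a placeholder):

// Hypothetical body for the select-organization login route.
const selectOrgBody = {
  organizationId: "64aa0000-0000-0000-0000-000000000091", // placeholder id
  userAgent: "cli" as const // overrides req.headers["user-agent"] server-side
};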
@ -12,7 +12,6 @@ import { BadRequestError, DatabaseError, UnauthorizedError } from "@app/lib/erro
import { logger } from "@app/lib/logger";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";

import { TTokenDALFactory } from "../auth-token/auth-token-dal";
import { TAuthTokenServiceFactory } from "../auth-token/auth-token-service";
import { TokenType } from "../auth-token/auth-token-types";
import { TOrgDALFactory } from "../org/org-dal";
@ -34,7 +33,6 @@ type TAuthLoginServiceFactoryDep = {
orgDAL: TOrgDALFactory;
tokenService: TAuthTokenServiceFactory;
smtpService: TSmtpService;
tokenDAL: TTokenDALFactory;
};

export type TAuthLoginFactory = ReturnType<typeof authLoginServiceFactory>;
@ -42,8 +40,7 @@ export const authLoginServiceFactory = ({
userDAL,
tokenService,
smtpService,
orgDAL,
tokenDAL
orgDAL
}: TAuthLoginServiceFactoryDep) => {
/*
* Private
@ -376,8 +373,6 @@ export const authLoginServiceFactory = ({
});
}

await tokenDAL.incrementTokenSessionVersion(user.id, decodedToken.tokenVersionId);

const tokens = await generateUserTokens({
authMethod: decodedToken.authMethod,
user,
@ -41,6 +41,7 @@ import {
TCreateCaDTO,
TDeleteCaDTO,
TGetCaCertDTO,
TGetCaCertificateTemplatesDTO,
TGetCaCertsDTO,
TGetCaCsrDTO,
TGetCaDTO,
@ -64,7 +65,7 @@ type TCertificateAuthorityServiceFactoryDep = {
>;
certificateAuthoritySecretDAL: Pick<TCertificateAuthoritySecretDALFactory, "create" | "findOne">;
certificateAuthorityCrlDAL: Pick<TCertificateAuthorityCrlDALFactory, "create" | "findOne" | "update">;
certificateTemplateDAL: Pick<TCertificateTemplateDALFactory, "getById">;
certificateTemplateDAL: Pick<TCertificateTemplateDALFactory, "getById" | "find">;
certificateAuthorityQueue: TCertificateAuthorityQueueFactory; // TODO: Pick
certificateDAL: Pick<TCertificateDALFactory, "transaction" | "create" | "find">;
certificateBodyDAL: Pick<TCertificateBodyDALFactory, "create">;
@ -108,6 +109,7 @@ export const certificateAuthorityServiceFactory = ({
notAfter,
maxPathLength,
keyAlgorithm,
requireTemplateForIssuance,
actorId,
actorAuthMethod,
actor,
@ -170,7 +172,8 @@ export const certificateAuthorityServiceFactory = ({
notBefore: notBeforeDate,
notAfter: notAfterDate,
serialNumber
})
}),
requireTemplateForIssuance
},
tx
);
@ -213,7 +216,6 @@ export const certificateAuthorityServiceFactory = ({
keys,
extensions: [
new x509.BasicConstraintsExtension(true, maxPathLength === -1 ? undefined : maxPathLength, true),
new x509.ExtendedKeyUsageExtension(["1.2.3.4.5.6.7", "2.3.4.5.6.7.8"], true),
// eslint-disable-next-line no-bitwise
new x509.KeyUsagesExtension(x509.KeyUsageFlags.keyCertSign | x509.KeyUsageFlags.cRLSign, true),
await x509.SubjectKeyIdentifierExtension.create(keys.publicKey)
@ -303,7 +305,15 @@ export const certificateAuthorityServiceFactory = ({
* Update CA with id [caId].
* Note: Used to enable/disable CA
*/
const updateCaById = async ({ caId, status, actorId, actorAuthMethod, actor, actorOrgId }: TUpdateCaDTO) => {
const updateCaById = async ({
caId,
status,
requireTemplateForIssuance,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TUpdateCaDTO) => {
const ca = await certificateAuthorityDAL.findById(caId);
if (!ca) throw new BadRequestError({ message: "CA not found" });

@ -320,7 +330,7 @@ export const certificateAuthorityServiceFactory = ({
ProjectPermissionSub.CertificateAuthorities
);

const updatedCa = await certificateAuthorityDAL.updateById(caId, { status });
const updatedCa = await certificateAuthorityDAL.updateById(caId, { status, requireTemplateForIssuance });

return updatedCa;
};
@ -496,7 +506,6 @@ export const certificateAuthorityServiceFactory = ({
ca.maxPathLength === -1 || !ca.maxPathLength ? undefined : ca.maxPathLength,
true
),
new x509.ExtendedKeyUsageExtension(["1.2.3.4.5.6.7", "2.3.4.5.6.7.8"], true),
// eslint-disable-next-line no-bitwise
new x509.KeyUsagesExtension(x509.KeyUsageFlags.keyCertSign | x509.KeyUsageFlags.cRLSign, true),
await x509.SubjectKeyIdentifierExtension.create(caPublicKey)
@ -1079,6 +1088,9 @@ export const certificateAuthorityServiceFactory = ({

if (ca.status === CaStatus.DISABLED) throw new BadRequestError({ message: "CA is disabled" });
if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" });
if (ca.requireTemplateForIssuance && !certificateTemplate) {
throw new BadRequestError({ message: "Certificate template is required for issuance" });
}
const caCert = await certificateAuthorityCertDAL.findById(ca.activeCaCertId);

if (ca.notAfter && new Date() > new Date(ca.notAfter)) {
@ -1349,6 +1361,9 @@ export const certificateAuthorityServiceFactory = ({

if (ca.status === CaStatus.DISABLED) throw new BadRequestError({ message: "CA is disabled" });
if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" });
if (ca.requireTemplateForIssuance && !certificateTemplate) {
throw new BadRequestError({ message: "Certificate template is required for issuance" });
}

const caCert = await certificateAuthorityCertDAL.findById(ca.activeCaCertId);

@ -1570,6 +1585,40 @@ export const certificateAuthorityServiceFactory = ({
};
};

/**
* Return list of certificate templates for CA with id [caId].
*/
const getCaCertificateTemplates = async ({
caId,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TGetCaCertificateTemplatesDTO) => {
const ca = await certificateAuthorityDAL.findById(caId);
if (!ca) throw new BadRequestError({ message: "CA not found" });

const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
ca.projectId,
actorAuthMethod,
actorOrgId
);

ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
ProjectPermissionSub.CertificateTemplates
);

const certificateTemplates = await certificateTemplateDAL.find({ caId });

return {
certificateTemplates,
ca
};
};

return {
createCa,
getCaById,
@ -1582,6 +1631,7 @@ export const certificateAuthorityServiceFactory = ({
signIntermediate,
importCertToCa,
issueCertFromCa,
signCertFromCa
signCertFromCa,
getCaCertificateTemplates
};
};
@ -38,6 +38,7 @@ export type TCreateCaDTO = {
notAfter?: string;
maxPathLength: number;
keyAlgorithm: CertKeyAlgorithm;
requireTemplateForIssuance: boolean;
} & Omit<TProjectPermission, "projectId">;

export type TGetCaDTO = {
@ -47,6 +48,7 @@ export type TGetCaDTO = {
export type TUpdateCaDTO = {
caId: string;
status?: CaStatus;
requireTemplateForIssuance?: boolean;
} & Omit<TProjectPermission, "projectId">;

export type TDeleteCaDTO = {
@ -125,6 +127,10 @@ export type TSignCertFromCaDTO =
notAfter?: string;
} & Omit<TProjectPermission, "projectId">);

export type TGetCaCertificateTemplatesDTO = {
caId: string;
} & Omit<TProjectPermission, "projectId">;

export type TDNParts = {
commonName?: string;
organization?: string;
@ -7,7 +7,7 @@ const isValidDate = (dateString: string) => {

export const validateCaDateField = z.string().trim().refine(isValidDate, { message: "Invalid date format" });

export const hostnameRegex = /^(?!:\/\/)([a-zA-Z0-9-_]{1,63}\.?)+(?!:\/\/)([a-zA-Z]{2,63})$/;
export const hostnameRegex = /^(?!:\/\/)(\*\.)?([a-zA-Z0-9-_]{1,63}\.?)+(?!:\/\/)([a-zA-Z]{2,63})$/;
export const validateAltNamesField = z
.string()
.trim()
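The updated hostnameRegex adds an optional leading `*.` group, so wildcard SANs now pass validation. A quick sanity check, assuming the regex is exactly as exported above:

const hostnameRegex = /^(?!:\/\/)(\*\.)?([a-zA-Z0-9-_]{1,63}\.?)+(?!:\/\/)([a-zA-Z]{2,63})$/;

console.log(hostnameRegex.test("api.example.com")); // true
console.log(hostnameRegex.test("*.example.com")); // true (newly accepted wildcard)
console.log(hostnameRegex.test("https://example.com")); // false (scheme is rejected)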
@ -4,6 +4,8 @@ import { TDbClient } from "@app/db";
import { IdentityAuthMethod, TableName, TIdentityAccessTokens } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";

export type TIdentityAccessTokenDALFactory = ReturnType<typeof identityAccessTokenDALFactory>;

@ -95,6 +97,7 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
};

const removeExpiredTokens = async (tx?: Knex) => {
logger.info(`${QueueName.DailyResourceCleanUp}: remove expired access token started`);
try {
const docs = (tx || db)(TableName.IdentityAccessToken)
.where({
@ -131,7 +134,8 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
});
})
.delete();
return await docs;
await docs;
logger.info(`${QueueName.DailyResourceCleanUp}: remove expired access token completed`);
} catch (error) {
throw new DatabaseError({ error, name: "IdentityAccessTokenPrune" });
}
@ -0,0 +1,4 @@
import picomatch from "picomatch";

export const doesFieldValueMatchOidcPolicy = (fieldValue: string, policyValue: string) =>
policyValue === fieldValue || picomatch.isMatch(fieldValue, policyValue);
@ -28,6 +28,7 @@ import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identit
import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identity-access-token-types";
import { TOrgBotDALFactory } from "../org/org-bot-dal";
import { TIdentityOidcAuthDALFactory } from "./identity-oidc-auth-dal";
import { doesFieldValueMatchOidcPolicy } from "./identity-oidc-auth-fns";
import {
TAttachOidcAuthDTO,
TGetOidcAuthDTO,
@ -123,7 +124,7 @@ export const identityOidcAuthServiceFactory = ({
}) as Record<string, string>;

if (identityOidcAuth.boundSubject) {
if (tokenData.sub !== identityOidcAuth.boundSubject) {
if (!doesFieldValueMatchOidcPolicy(tokenData.sub, identityOidcAuth.boundSubject)) {
throw new ForbiddenRequestError({
message: "Access denied: OIDC subject not allowed."
});
@ -131,7 +132,11 @@ export const identityOidcAuthServiceFactory = ({
}

if (identityOidcAuth.boundAudiences) {
if (!identityOidcAuth.boundAudiences.split(", ").includes(tokenData.aud)) {
if (
!identityOidcAuth.boundAudiences
.split(", ")
.some((policyValue) => doesFieldValueMatchOidcPolicy(tokenData.aud, policyValue))
) {
throw new ForbiddenRequestError({
message: "Access denied: OIDC audience not allowed."
});
@ -142,7 +147,9 @@ export const identityOidcAuthServiceFactory = ({
Object.keys(identityOidcAuth.boundClaims).forEach((claimKey) => {
const claimValue = (identityOidcAuth.boundClaims as Record<string, string>)[claimKey];
// handle both single and multi-valued claims
if (!claimValue.split(", ").some((claimEntry) => tokenData[claimKey] === claimEntry)) {
if (
!claimValue.split(", ").some((claimEntry) => doesFieldValueMatchOidcPolicy(tokenData[claimKey], claimEntry))
) {
throw new ForbiddenRequestError({
message: "Access denied: OIDC claim not allowed."
});
@ -5,6 +5,7 @@ import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";

export type TIdentityUaClientSecretDALFactory = ReturnType<typeof identityUaClientSecretDALFactory>;

@ -30,7 +31,9 @@ export const identityUaClientSecretDALFactory = (db: TDbClient) => {

let deletedClientSecret: { id: string }[] = [];
let numberOfRetryOnFailure = 0;
let isRetrying = false;

logger.info(`${QueueName.DailyResourceCleanUp}: remove expired universal auth client secret started`);
do {
try {
const findExpiredClientSecretQuery = (tx || db)(TableName.IdentityUaClientSecret)
@ -39,7 +42,7 @@ export const identityUaClientSecretDALFactory = (db: TDbClient) => {
})
.orWhere((qb) => {
void qb
.where("clientSecretNumUses", ">", 0)
.where("clientSecretNumUsesLimit", ">", 0)
.andWhere(
"clientSecretNumUses",
">=",
@ -71,7 +74,9 @@ export const identityUaClientSecretDALFactory = (db: TDbClient) => {
setTimeout(resolve, 10); // time to breathe for db
});
}
} while (deletedClientSecret.length > 0 || numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
isRetrying = numberOfRetryOnFailure > 0;
} while (deletedClientSecret.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
logger.info(`${QueueName.DailyResourceCleanUp}: remove expired universal auth client secret completed`);
};

return { ...uaClientSecretOrm, incrementUsage, removeExpiredClientSecrets };
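The reworked loop condition only keeps looping when the previous iteration actually failed; before the change, `numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE` kept the loop alive even after a clean, empty batch. A stripped-down sketch of the corrected control flow — deleteNextBatch and MAX_RETRY_ON_FAILURE are stand-ins, not the module's real exports:

declare const deleteNextBatch: () => Promise<unknown[]>; // stand-in for the knex delete query
const MAX_RETRY_ON_FAILURE = 3; // assumed cap

const removeExpiredWithRetry = async () => {
  let deleted: unknown[] = [];
  let failures = 0;
  let isRetrying = false;
  do {
    try {
      deleted = await deleteNextBatch();
    } catch {
      deleted = []; // nothing confirmed deleted on a failed batch
      failures += 1;
    }
    isRetrying = failures > 0;
    // keep draining while rows are being deleted; retry only after a failure, up to the cap
  } while (deleted.length > 0 || (isRetrying && failures < MAX_RETRY_ON_FAILURE));
};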
@ -460,16 +460,21 @@ const getAppsFlyio = async ({ accessToken }: { accessToken: string }) => {
*/
const getAppsCircleCI = async ({ accessToken }: { accessToken: string }) => {
const res = (
await request.get<{ reponame: string }[]>(`${IntegrationUrls.CIRCLECI_API_URL}/v1.1/projects`, {
headers: {
"Circle-Token": accessToken,
"Accept-Encoding": "application/json"
await request.get<{ reponame: string; username: string; vcs_url: string }[]>(
`${IntegrationUrls.CIRCLECI_API_URL}/v1.1/projects`,
{
headers: {
"Circle-Token": accessToken,
"Accept-Encoding": "application/json"
}
}
})
)
).data;

const apps = res?.map((a) => ({
name: a?.reponame
const apps = res.map((a) => ({
owner: a.username, // username maps to unique organization name in CircleCI
name: a.reponame, // reponame maps to project name within an organization in CircleCI
appId: a.vcs_url.split("/").pop() // vcs_url maps to the project id in CircleCI
}));

return apps;
@ -30,6 +30,7 @@ const getIntegrationSecretsV2 = async (
environment: string;
folderId: string;
depth: number;
secretPath: string;
decryptor: (value: Buffer | null | undefined) => string;
},
secretV2BridgeDAL: Pick<TSecretV2BridgeDALFactory, "find" | "findByFolderId">,
@ -306,6 +307,7 @@ export const deleteIntegrationSecrets = async ({
? await getIntegrationSecretsV2(
{
environment: integration.environment.id,
secretPath: integration.secretPath,
projectId: integration.projectId,
folderId: folder.id,
depth: 1,
@ -567,8 +567,8 @@ const syncSecretsAWSParameterStore = async ({
});
ssm.config.update(config);

const metadata = z.record(z.any()).parse(integration.metadata || {});
const awsParameterStoreSecretsObj: Record<string, AWS.SSM.Parameter> = {};
const metadata = IntegrationMetadataSchema.parse(integration.metadata);
const awsParameterStoreSecretsObj: Record<string, AWS.SSM.Parameter & { KeyId?: string }> = {};
logger.info(
`getIntegrationSecrets: integration sync triggered for ssm with [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [shouldDisableDelete=${metadata.shouldDisableDelete}]`
);
@ -598,18 +598,57 @@ const syncSecretsAWSParameterStore = async ({
nextToken = parameters.NextToken;
}

logger.info(
`getIntegrationSecrets: all fetched keys from AWS SSM [projectId=${projectId}] [environment=${
integration.environment.slug
}] [secretPath=${integration.secretPath}] [awsParameterStoreSecretsObj=${Object.keys(
awsParameterStoreSecretsObj
).join(",")}]`
);
logger.info(
`getIntegrationSecrets: all secrets from Infisical to send to AWS SSM [projectId=${projectId}] [environment=${
integration.environment.slug
}] [secretPath=${integration.secretPath}] [secrets=${Object.keys(secrets).join(",")}]`
);
let areParametersKmsKeysFetched = false;

if (metadata.kmsKeyId) {
// we put this inside a try catch so that existing integrations without the ssm:DescribeParameters
// AWS permission will not break
try {
let hasNextDescribePage = true;
let describeNextToken: string | undefined;

while (hasNextDescribePage) {
const parameters = await ssm
.describeParameters({
MaxResults: 10,
NextToken: describeNextToken,
ParameterFilters: [
{
Key: "Path",
Option: "OneLevel",
Values: [integration.path as string]
}
]
})
.promise();

if (parameters.Parameters) {
parameters.Parameters.forEach((parameter) => {
if (parameter.Name) {
const secKey = parameter.Name.substring((integration.path as string).length);
awsParameterStoreSecretsObj[secKey].KeyId = parameter.KeyId;
}
});
}
areParametersKmsKeysFetched = true;
hasNextDescribePage = Boolean(parameters.NextToken);
describeNextToken = parameters.NextToken;
}
} catch (error) {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
if ((error as any).code === "AccessDeniedException") {
logger.error(
`AWS Parameter Store Error [integration=${integration.id}]: double check AWS account permissions (refer to the Infisical docs)`
);
}

response = {
isSynced: false,
syncMessage: (error as AWSError)?.message || "Error syncing with AWS Parameter Store"
};
}
}

// Identify secrets to create
// don't use Promise.all() and promise map here
// it will cause rate limit
@ -620,7 +659,7 @@ const syncSecretsAWSParameterStore = async ({
// -> create secret
if (secrets[key].value) {
logger.info(
`getIntegrationSecrets: create secret in AWS SSM for [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [key=${key}]`
`getIntegrationSecrets: create secret in AWS SSM for [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}]`
);
await ssm
.putParameter({
@ -648,7 +687,7 @@ const syncSecretsAWSParameterStore = async ({
} catch (err) {
logger.error(
err,
`getIntegrationSecrets: create secret in AWS SSM for failed [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [key=${key}]`
`getIntegrationSecrets: create secret in AWS SSM for failed [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}]`
);
// eslint-disable-next-line @typescript-eslint/no-explicit-any
if ((err as any).code === "AccessDeniedException") {
@ -667,16 +706,23 @@ const syncSecretsAWSParameterStore = async ({
// case: secret exists in AWS parameter store
} else {
logger.info(
`getIntegrationSecrets: update secret in AWS SSM for [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [key=${key}]`
`getIntegrationSecrets: update secret in AWS SSM for [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}]`
);
// -> update secret
if (awsParameterStoreSecretsObj[key].Value !== secrets[key].value) {

const shouldUpdateKms =
areParametersKmsKeysFetched &&
Boolean(metadata.kmsKeyId) &&
awsParameterStoreSecretsObj[key].KeyId !== metadata.kmsKeyId;

// we ensure that the KMS key configured in the integration is applied for ALL parameters on AWS
if (shouldUpdateKms || awsParameterStoreSecretsObj[key].Value !== secrets[key].value) {
await ssm
.putParameter({
Name: `${integration.path}${key}`,
Type: "SecureString",
Value: secrets[key].value,
Overwrite: true
Overwrite: true,
...(metadata.kmsKeyId && { KeyId: metadata.kmsKeyId })
})
.promise();
}
@ -698,7 +744,7 @@ const syncSecretsAWSParameterStore = async ({
} catch (err) {
logger.error(
err,
`getIntegrationSecrets: update secret in AWS SSM for failed [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [key=${key}]`
`getIntegrationSecrets: update secret in AWS SSM for failed [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}]`
);
// eslint-disable-next-line @typescript-eslint/no-explicit-any
if ((err as any).code === "AccessDeniedException") {
@ -728,11 +774,11 @@ const syncSecretsAWSParameterStore = async ({
for (const key in awsParameterStoreSecretsObj) {
if (Object.hasOwn(awsParameterStoreSecretsObj, key)) {
logger.info(
`getIntegrationSecrets: inside of shouldDisableDelete AWS SSM [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [key=${key}] [step=2]`
`getIntegrationSecrets: inside of shouldDisableDelete AWS SSM [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [step=2]`
);
if (!(key in secrets)) {
logger.info(
`getIntegrationSecrets: inside of shouldDisableDelete AWS SSM [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [key=${key}] [step=3]`
`getIntegrationSecrets: inside of shouldDisableDelete AWS SSM [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [step=3]`
);
// case:
// -> delete secret
@ -742,7 +788,7 @@ const syncSecretsAWSParameterStore = async ({
})
.promise();
logger.info(
`getIntegrationSecrets: inside of shouldDisableDelete AWS SSM [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [key=${key}] [step=4]`
`getIntegrationSecrets: inside of shouldDisableDelete AWS SSM [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [step=4]`
);
}
await new Promise((resolve) => {
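The key behavioral change above: a parameter is rewritten not only when its value drifts but also when its KMS key no longer matches the integration's configured kmsKeyId, and the KMS check only applies when DescribeParameters succeeded. The decision, distilled into a sketch with simplified types (not the integration's exported API):

// Distilled update predicate for one SSM parameter.
type ExistingParam = { Value?: string; KeyId?: string };

const needsUpdate = (
  existing: ExistingParam,
  desiredValue: string,
  kmsKeyId: string | undefined,
  kmsKeysFetched: boolean
): boolean => {
  // only trust KeyId comparisons when ssm:DescribeParameters actually ran
  const shouldUpdateKms = kmsKeysFetched && Boolean(kmsKeyId) && existing.KeyId !== kmsKeyId;
  return shouldUpdateKms || existing.Value !== desiredValue;
};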
@ -1625,7 +1671,11 @@ const syncSecretsGitHub = async ({
await octokit.request("PUT /orgs/{org}/actions/secrets/{secret_name}", {
org: integration.owner as string,
secret_name: key,
visibility: "all",
visibility: metadata.githubVisibility ?? "all",
...(metadata.githubVisibility === "selected" && {
// we need to map the githubVisibilityRepoIds to numbers
selected_repository_ids: metadata.githubVisibilityRepoIds?.map(Number) ?? []
}),
encrypted_value: encryptedSecret,
key_id: repoPublicKey.key_id
});
@ -1925,22 +1975,62 @@ const syncSecretsCircleCI = async ({
secrets: Record<string, { value: string; comment?: string }>;
accessToken: string;
}) => {
const circleciOrganizationDetail = (
await request.get(`${IntegrationUrls.CIRCLECI_API_URL}/v2/me/collaborations`, {
const getProjectSlug = async () => {
const requestConfig = {
headers: {
"Circle-Token": accessToken,
"Accept-Encoding": "application/json"
}
})
).data[0];
};

const { slug } = circleciOrganizationDetail;
try {
const projectDetails = (
await request.get<{ slug: string }>(
`${IntegrationUrls.CIRCLECI_API_URL}/v2/project/${integration.appId}`,
requestConfig
)
).data;

return projectDetails.slug;
} catch (err) {
if (err instanceof AxiosError) {
if (err.response?.data?.message !== "Not Found") {
throw new Error("Failed to get project slug from CircleCI during first attempt.");
}
}
}

// For backwards compatibility: old CircleCI integrations don't keep track of the organization name, so we can't filter by organization
try {
const circleCiOrganization = (
await request.get<{ slug: string; name: string }[]>(
`${IntegrationUrls.CIRCLECI_API_URL}/v2/me/collaborations`,
requestConfig
)
).data;

// Case 1: This is a new integration where the organization name is stored under `integration.owner`
if (integration.owner) {
const org = circleCiOrganization.find((o) => o.name === integration.owner);
if (org) {
return `${org.slug}/${integration.app}`;
}
}

// Case 2: This is an old integration where the organization name is not stored, so we have to assume the first organization is the correct one
return `${circleCiOrganization[0].slug}/${integration.app}`;
} catch (err) {
throw new Error("Failed to get project slug from CircleCI during second attempt.");
}
};

const projectSlug = await getProjectSlug();

// sync secrets to CircleCI
await Promise.all(
Object.keys(secrets).map(async (key) =>
request.post(
`${IntegrationUrls.CIRCLECI_API_URL}/v2/project/${slug}/${integration.app}/envvar`,
`${IntegrationUrls.CIRCLECI_API_URL}/v2/project/${projectSlug}/envvar`,
{
name: key,
value: secrets[key].value
@ -1958,7 +2048,7 @@ const syncSecretsCircleCI = async ({
// get secrets from CircleCI
const getSecretsRes = (
await request.get<{ items: { name: string }[] }>(
`${IntegrationUrls.CIRCLECI_API_URL}/v2/project/${slug}/${integration.app}/envvar`,
`${IntegrationUrls.CIRCLECI_API_URL}/v2/project/${projectSlug}/envvar`,
{
headers: {
"Circle-Token": accessToken,
@ -1972,15 +2062,12 @@ const syncSecretsCircleCI = async ({
await Promise.all(
getSecretsRes.map(async (sec) => {
if (!(sec.name in secrets)) {
return request.delete(
`${IntegrationUrls.CIRCLECI_API_URL}/v2/project/${slug}/${integration.app}/envvar/${sec.name}`,
{
headers: {
"Circle-Token": accessToken,
"Content-Type": "application/json"
}
return request.delete(`${IntegrationUrls.CIRCLECI_API_URL}/v2/project/${projectSlug}/envvar/${sec.name}`, {
headers: {
"Circle-Token": accessToken,
"Content-Type": "application/json"
}
);
});
}
})
);
@ -5,14 +5,18 @@ import { INTEGRATION } from "@app/lib/api-docs";
import { IntegrationMappingBehavior } from "../integration-auth/integration-list";

export const IntegrationMetadataSchema = z.object({
initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir),

secretPrefix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretPrefix),
secretSuffix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretSuffix),
initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir),

mappingBehavior: z
.nativeEnum(IntegrationMappingBehavior)
.optional()
.describe(INTEGRATION.CREATE.metadata.mappingBehavior),

shouldAutoRedeploy: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldAutoRedeploy),

secretGCPLabel: z
.object({
labelName: z.string(),
@ -20,6 +24,7 @@ export const IntegrationMetadataSchema = z.object({
})
.optional()
.describe(INTEGRATION.CREATE.metadata.secretGCPLabel),

secretAWSTag: z
.array(
z.object({
@ -29,7 +34,15 @@ export const IntegrationMetadataSchema = z.object({
)
.optional()
.describe(INTEGRATION.CREATE.metadata.secretAWSTag),

githubVisibility: z
.union([z.literal("selected"), z.literal("private"), z.literal("all")])
.optional()
.describe(INTEGRATION.CREATE.metadata.githubVisibility),
githubVisibilityRepoIds: z.array(z.string()).optional().describe(INTEGRATION.CREATE.metadata.githubVisibilityRepoIds),

kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId),

shouldDisableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete),
shouldEnableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldEnableDelete),
shouldMaskSecrets: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldMaskSecrets),
@ -27,6 +27,10 @@ export type TCreateIntegrationDTO = {
key: string;
value: string;
}[];

githubVisibility?: string;
githubVisibilityRepoIds?: string[];

kmsKeyId?: string;
shouldDisableDelete?: boolean;
shouldMaskSecrets?: boolean;
@ -12,6 +12,7 @@ import {
|
||||
} from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import { buildFindFilter, ormify, selectAllTableCols, TFindFilter, TFindOpt, withTransaction } from "@app/lib/knex";
|
||||
import { generateKnexQueryFromScim } from "@app/lib/knex/scim";
|
||||
|
||||
export type TOrgDALFactory = ReturnType<typeof orgDALFactory>;
|
||||
|
||||
@ -280,6 +281,67 @@ export const orgDALFactory = (db: TDbClient) => {
|
||||
.select(
|
||||
selectAllTableCols(TableName.OrgMembership),
|
||||
db.ref("email").withSchema(TableName.Users),
|
||||
db.ref("isEmailVerified").withSchema(TableName.Users),
|
||||
db.ref("username").withSchema(TableName.Users),
|
||||
db.ref("firstName").withSchema(TableName.Users),
|
||||
db.ref("lastName").withSchema(TableName.Users),
|
||||
db.ref("scimEnabled").withSchema(TableName.Organization),
|
||||
db.ref("externalId").withSchema(TableName.UserAliases)
|
||||
)
|
||||
.where({ isGhost: false });
|
||||
|
||||
if (limit) void query.limit(limit);
|
||||
if (offset) void query.offset(offset);
|
||||
if (sort) {
|
||||
void query.orderBy(sort.map(([column, order, nulls]) => ({ column: column as string, order, nulls })));
|
||||
}
|
||||
const res = await query;
|
||||
return res;
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find one" });
|
||||
}
|
||||
};
|
||||
|
||||
const findMembershipWithScimFilter = async (
|
||||
orgId: string,
|
||||
scimFilter: string | undefined,
|
||||
{ offset, limit, sort, tx }: TFindOpt<TOrgMemberships> = {}
|
||||
) => {
|
||||
try {
|
||||
const query = (tx || db.replicaNode())(TableName.OrgMembership)
|
||||
// eslint-disable-next-line
|
||||
.where(`${TableName.OrgMembership}.orgId`, orgId)
|
||||
.where((qb) => {
|
||||
if (scimFilter) {
|
||||
void generateKnexQueryFromScim(qb, scimFilter, (attrPath) => {
|
||||
switch (attrPath) {
|
||||
case "active":
|
||||
return `${TableName.OrgMembership}.isActive`;
|
||||
case "userName":
|
||||
return `${TableName.UserAliases}.externalId`;
|
||||
case "name.givenName":
|
||||
return `${TableName.Users}.firstName`;
|
||||
case "name.familyName":
|
||||
return `${TableName.Users}.lastName`;
|
||||
case "email.value":
|
||||
return `${TableName.Users}.email`;
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
})
|
||||
.join(TableName.Users, `${TableName.Users}.id`, `${TableName.OrgMembership}.userId`)
|
||||
.join(TableName.Organization, `${TableName.Organization}.id`, `${TableName.OrgMembership}.orgId`)
|
||||
.leftJoin(TableName.UserAliases, function joinUserAlias() {
|
||||
this.on(`${TableName.UserAliases}.userId`, "=", `${TableName.OrgMembership}.userId`)
|
||||
.andOn(`${TableName.UserAliases}.orgId`, "=", `${TableName.OrgMembership}.orgId`)
|
||||
.andOn(`${TableName.UserAliases}.aliasType`, "=", (tx || db).raw("?", ["saml"]));
|
||||
})
|
||||
.select(
|
||||
selectAllTableCols(TableName.OrgMembership),
|
||||
db.ref("email").withSchema(TableName.Users),
|
||||
db.ref("isEmailVerified").withSchema(TableName.Users),
|
||||
db.ref("username").withSchema(TableName.Users),
|
||||
db.ref("firstName").withSchema(TableName.Users),
|
||||
db.ref("lastName").withSchema(TableName.Users),
|
||||
@ -314,6 +376,7 @@ export const orgDALFactory = (db: TDbClient) => {
|
||||
updateById,
|
||||
deleteById,
|
||||
findMembership,
|
||||
findMembershipWithScimFilter,
|
||||
createMembership,
|
||||
updateMembershipById,
|
||||
deleteMembershipById,
|
||||
|
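generateKnexQueryFromScim receives a resolver that maps SCIM attribute paths onto concrete columns; unknown attributes resolve to null so they can be rejected. The mapping above, restated as a plain lookup for clarity — the snake_case table names are illustrative, not the schema's real identifiers:

// The attrPath resolver as a plain map (illustrative only; the real code switches on TableName refs).
const scimAttrToColumn: Record<string, string | null> = {
  active: "org_memberships.isActive",
  userName: "user_aliases.externalId",
  "name.givenName": "users.firstName",
  "name.familyName": "users.lastName",
  "email.value": "users.email"
};

// e.g. the SCIM filter `userName eq "jane@acme.com"` ends up as a WHERE clause
// on the externalId column once resolved through this mapping.
const column = scimAttrToColumn.userName ?? null;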
@ -4,6 +4,8 @@ import { TDbClient } from "@app/db";
import { TableName, TSecretFolderVersions } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";

export type TSecretFolderVersionDALFactory = ReturnType<typeof secretFolderVersionDALFactory>;

@ -65,6 +67,7 @@ export const secretFolderVersionDALFactory = (db: TDbClient) => {
};

const pruneExcessVersions = async () => {
logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret folder versions started`);
try {
await db(TableName.SecretFolderVersion)
.with("folder_cte", (qb) => {
@ -89,6 +92,7 @@ export const secretFolderVersionDALFactory = (db: TDbClient) => {
name: "Secret Folder Version Prune"
});
}
logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret folder versions completed`);
};

return { ...secretFolderVerOrm, findLatestFolderVersions, findLatestVersionByFolderId, pruneExcessVersions };
@ -158,9 +158,12 @@ export const fnSecretsV2FromImports = async ({
depth?: number;
cyclicDetector?: Set<string>;
decryptor: (value?: Buffer | null) => string;
expandSecretReferences?: (
secrets: Record<string, { value?: string; comment?: string; skipMultilineEncoding?: boolean | null }>
) => Promise<Record<string, { value?: string; comment?: string; skipMultilineEncoding?: boolean | null }>>;
expandSecretReferences?: (inputSecret: {
value?: string;
skipMultilineEncoding?: boolean | null;
secretPath: string;
environment: string;
}) => Promise<string | undefined>;
}) => {
// avoid going more than a depth
if (depth >= LEVEL_BREAK) return [];
@ -244,26 +247,21 @@ export const fnSecretsV2FromImports = async ({
});

if (expandSecretReferences) {
await Promise.all(
processedImports.map(async (processedImport) => {
const secretsGroupByKey = processedImport.secrets.reduce(
(acc, item) => {
acc[item.secretKey] = {
value: item.secretValue,
comment: item.secretComment,
skipMultilineEncoding: item.skipMultilineEncoding
};
return acc;
},
{} as Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean | null }>
);
// eslint-disable-next-line
await expandSecretReferences(secretsGroupByKey);
processedImport.secrets.forEach((decryptedSecret) => {
// eslint-disable-next-line no-param-reassign
decryptedSecret.secretValue = secretsGroupByKey[decryptedSecret.secretKey].value;
});
})
await Promise.allSettled(
processedImports.map((processedImport) =>
Promise.allSettled(
processedImport.secrets.map(async (decryptedSecret, index) => {
const expandedSecretValue = await expandSecretReferences({
value: decryptedSecret.secretValue,
secretPath: processedImport.secretPath,
environment: processedImport.environment,
skipMultilineEncoding: decryptedSecret.skipMultilineEncoding
});
// eslint-disable-next-line no-param-reassign
processedImport.secrets[index].secretValue = expandedSecretValue || "";
})
)
)
);
}

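The import path now expands each secret individually through the new single-secret expandSecretReferences signature, and swaps Promise.all for Promise.allSettled so one failed reference no longer rejects the whole batch. The difference in a self-contained demo:

// Promise.all would reject on the second promise; allSettled reports per-item outcomes instead.
const allSettledDemo = async () => {
  const results = await Promise.allSettled([
    Promise.resolve("ok"),
    Promise.reject(new Error("dangling secret reference"))
  ]);
  // -> [{ status: "fulfilled", value: "ok" }, { status: "rejected", reason: Error }]
  return results;
};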
@ -4,6 +4,8 @@ import { TDbClient } from "@app/db";
import { TableName, TSecretSharing } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";

export type TSecretSharingDALFactory = ReturnType<typeof secretSharingDALFactory>;

@ -30,6 +32,7 @@ export const secretSharingDALFactory = (db: TDbClient) => {
};

const pruneExpiredSharedSecrets = async (tx?: Knex) => {
logger.info(`${QueueName.DailyResourceCleanUp}: pruning expired shared secret started`);
try {
const today = new Date();
const docs = await (tx || db)(TableName.SecretSharing)
@ -40,6 +43,7 @@ export const secretSharingDALFactory = (db: TDbClient) => {
tag: "",
iv: ""
});
logger.info(`${QueueName.DailyResourceCleanUp}: pruning expired shared secret completed`);
return docs;
} catch (error) {
throw new DatabaseError({ error, name: "pruneExpiredSharedSecrets" });
@ -1,3 +1,6 @@
import bcrypt from "bcrypt";

import { TSecretSharing } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { SecretSharingAccessType } from "@app/lib/types";
@ -36,6 +39,7 @@ export const secretSharingServiceFactory = ({
iv,
tag,
name,
password,
accessType,
expiresAt,
expiresAfterViews
@ -60,8 +64,10 @@ export const secretSharingServiceFactory = ({
throw new BadRequestError({ message: "Shared secret value too long" });
}

const hashedPassword = password ? await bcrypt.hash(password, 10) : null;
const newSharedSecret = await secretSharingDAL.create({
name,
password: hashedPassword,
encryptedValue,
hashedHex,
iv,
@ -77,6 +83,7 @@ export const secretSharingServiceFactory = ({
};

const createPublicSharedSecret = async ({
password,
encryptedValue,
hashedHex,
iv,
@ -102,7 +109,9 @@ export const secretSharingServiceFactory = ({
throw new BadRequestError({ message: "Shared secret value too long" });
}

const hashedPassword = password ? await bcrypt.hash(password, 10) : null;
const newSharedSecret = await secretSharingDAL.create({
password: hashedPassword,
encryptedValue,
hashedHex,
iv,
@ -111,6 +120,7 @@ export const secretSharingServiceFactory = ({
expiresAfterViews,
accessType
});

return { id: newSharedSecret.id };
};

@ -152,7 +162,21 @@ export const secretSharingServiceFactory = ({
};
};

const getActiveSharedSecretById = async ({ sharedSecretId, hashedHex, orgId }: TGetActiveSharedSecretByIdDTO) => {
const $decrementSecretViewCount = async (sharedSecret: TSecretSharing, sharedSecretId: string) => {
const { expiresAfterViews } = sharedSecret;

if (expiresAfterViews) {
// decrement view count if view count expiry set
await secretSharingDAL.updateById(sharedSecretId, { $decr: { expiresAfterViews: 1 } });
}

await secretSharingDAL.updateById(sharedSecretId, {
lastViewedAt: new Date()
});
};

/** Gets a passwordless secret. Validates all secrets requested (must be fresh). */
const getSharedSecretById = async ({ sharedSecretId, hashedHex, orgId, password }: TGetActiveSharedSecretByIdDTO) => {
const sharedSecret = await secretSharingDAL.findOne({
id: sharedSecretId,
hashedHex
@ -169,6 +193,8 @@ export const secretSharingServiceFactory = ({
if (accessType === SecretSharingAccessType.Organization && orgId !== sharedSecret.orgId)
throw new UnauthorizedError();

// all secrets pass through here, meaning we check if it's expired first and then check if it needs verification
// or can be safely sent to the client.
if (expiresAt !== null && expiresAt < new Date()) {
// check lifetime expiry
await secretSharingDAL.softDeleteById(sharedSecretId);
@ -185,21 +211,29 @@ export const secretSharingServiceFactory = ({
});
}

if (expiresAfterViews) {
// decrement view count if view count expiry set
await secretSharingDAL.updateById(sharedSecretId, { $decr: { expiresAfterViews: 1 } });
const isPasswordProtected = Boolean(sharedSecret.password);
const hasProvidedPassword = Boolean(password);
if (isPasswordProtected) {
if (hasProvidedPassword) {
const isMatch = await bcrypt.compare(password as string, sharedSecret.password as string);
if (!isMatch) throw new UnauthorizedError({ message: "Invalid credentials" });
} else {
return { isPasswordProtected };
}
}

await secretSharingDAL.updateById(sharedSecretId, {
lastViewedAt: new Date()
});
// decrement only once we are sure the user will view the secret.
await $decrementSecretViewCount(sharedSecret, sharedSecretId);

return {
...sharedSecret,
orgName:
sharedSecret.accessType === SecretSharingAccessType.Organization && orgId === sharedSecret.orgId
? orgName
: undefined
isPasswordProtected,
secret: {
...sharedSecret,
orgName:
sharedSecret.accessType === SecretSharingAccessType.Organization && orgId === sharedSecret.orgId
? orgName
: undefined
}
};
};

@ -216,6 +250,6 @@ export const secretSharingServiceFactory = ({
createPublicSharedSecret,
getSharedSecrets,
deleteSharedSecretById,
getActiveSharedSecretById
getSharedSecretById
};
};
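The password gate hinges on bcrypt's salted hash: create time stores bcrypt.hash(password, 10) and the read path replays bcrypt.compare. A self-contained sketch of that round trip (the cost factor 10 matches the value used above; the password is a dummy):

import bcrypt from "bcrypt";

const passwordGateRoundTrip = async () => {
  // create time: only the salted hash is persisted
  const stored = await bcrypt.hash("hunter2", 10);

  // read time: compare the caller-supplied password against the stored hash
  const ok = await bcrypt.compare("hunter2", stored); // true
  const bad = await bcrypt.compare("wrong", stored); // false
  return { ok, bad };
};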
@ -15,6 +15,7 @@ export type TSharedSecretPermission = {
orgId: string;
accessType?: SecretSharingAccessType;
name?: string;
password?: string;
};

export type TCreatePublicSharedSecretDTO = {
@ -24,6 +25,7 @@ export type TCreatePublicSharedSecretDTO = {
tag: string;
expiresAt: string;
expiresAfterViews?: number;
password?: string;
accessType: SecretSharingAccessType;
};

@ -31,6 +33,11 @@ export type TGetActiveSharedSecretByIdDTO = {
sharedSecretId: string;
hashedHex: string;
orgId?: string;
password?: string;
};

export type TValidateActiveSharedSecretDTO = TGetActiveSharedSecretByIdDTO & {
password: string;
};

export type TCreateSharedSecretDTO = TSharedSecretPermission & TCreatePublicSharedSecretDTO;
@ -377,150 +377,120 @@ type TInterpolateSecretArg = {
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath">;
};

const MAX_SECRET_REFERENCE_DEPTH = 10;
export const expandSecretReferencesFactory = ({
projectId,
decryptSecretValue: decryptSecret,
secretDAL,
folderDAL
}: TInterpolateSecretArg) => {
const fetchSecretFactory = () => {
const secretCache: Record<string, Record<string, string>> = {};
const secretCache: Record<string, Record<string, string>> = {};
const getCacheUniqueKey = (environment: string, secretPath: string) => `${environment}-${secretPath}`;

return async (secRefEnv: string, secRefPath: string[], secRefKey: string) => {
const referredSecretPathURL = path.join("/", ...secRefPath);
const uniqueKey = `${secRefEnv}-${referredSecretPathURL}`;
const fetchSecret = async (environment: string, secretPath: string, secretKey: string) => {
const cacheKey = getCacheUniqueKey(environment, secretPath);

if (secretCache?.[uniqueKey]) {
return secretCache[uniqueKey][secRefKey];
}
if (secretCache?.[cacheKey]) {
return secretCache[cacheKey][secretKey] || "";
}

const folder = await folderDAL.findBySecretPath(projectId, secRefEnv, referredSecretPathURL);
if (!folder) return "";
const secrets = await secretDAL.findByFolderId(folder.id);
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
if (!folder) return "";
const secrets = await secretDAL.findByFolderId(folder.id);

const decryptedSecret = secrets.reduce<Record<string, string>>((prev, secret) => {
// eslint-disable-next-line
prev[secret.key] = decryptSecret(secret.encryptedValue) || "";
return prev;
}, {});
const decryptedSecret = secrets.reduce<Record<string, string>>((prev, secret) => {
// eslint-disable-next-line no-param-reassign
prev[secret.key] = decryptSecret(secret.encryptedValue) || "";
return prev;
}, {});

secretCache[uniqueKey] = decryptedSecret;
secretCache[cacheKey] = decryptedSecret;

return secretCache[uniqueKey][secRefKey];
};
return secretCache[cacheKey][secretKey] || "";
};

const recursivelyExpandSecret = async (
expandedSec: Record<string, string | undefined>,
interpolatedSec: Record<string, string | undefined>,
fetchSecret: (env: string, secPath: string[], secKey: string) => Promise<string>,
recursionChainBreaker: Record<string, boolean>,
key: string
): Promise<string | undefined> => {
if (expandedSec?.[key] !== undefined) {
return expandedSec[key];
}
if (recursionChainBreaker?.[key]) {
return "";
}
// eslint-disable-next-line
recursionChainBreaker[key] = true;
const recursivelyExpandSecret = async (dto: { value?: string; secretPath: string; environment: string }) => {
if (!dto.value) return "";

let interpolatedValue = interpolatedSec[key];
if (!interpolatedValue) {
// eslint-disable-next-line no-console
console.error(`Couldn't find referenced value - ${key}`);
return "";
}
const stack = [{ ...dto, depth: 0 }];
let expandedValue = dto.value;

const refs = interpolatedValue.match(INTERPOLATION_SYNTAX_REG);
if (refs) {
for (const interpolationSyntax of refs) {
const interpolationKey = interpolationSyntax.slice(2, interpolationSyntax.length - 1);
const entities = interpolationKey.trim().split(".");
while (stack.length) {
const { value, secretPath, environment, depth } = stack.pop()!;
// eslint-disable-next-line no-continue
if (depth > MAX_SECRET_REFERENCE_DEPTH) continue;
const refs = value?.match(INTERPOLATION_SYNTAX_REG);

if (entities.length === 1) {
// eslint-disable-next-line
const val = await recursivelyExpandSecret(
expandedSec,
interpolatedSec,
fetchSecret,
recursionChainBreaker,
interpolationKey
);
if (val) {
interpolatedValue = interpolatedValue.replaceAll(interpolationSyntax, val);
}
// eslint-disable-next-line
continue;
}
if (refs) {
for (const interpolationSyntax of refs) {
const interpolationKey = interpolationSyntax.slice(2, interpolationSyntax.length - 1);
const entities = interpolationKey.trim().split(".");

if (entities.length > 1) {
const secRefEnv = entities[0];
const secRefPath = entities.slice(1, entities.length - 1);
const secRefKey = entities[entities.length - 1];
// eslint-disable-next-line no-continue
if (!entities.length) continue;

// eslint-disable-next-line
const val = await fetchSecret(secRefEnv, secRefPath, secRefKey);
if (val) {
interpolatedValue = interpolatedValue.replaceAll(interpolationSyntax, val);
if (entities.length === 1) {
const [secretKey] = entities;

// eslint-disable-next-line no-continue,no-await-in-loop
const referedValue = await fetchSecret(environment, secretPath, secretKey);
const cacheKey = getCacheUniqueKey(environment, secretPath);
secretCache[cacheKey][secretKey] = referedValue;
if (INTERPOLATION_SYNTAX_REG.test(referedValue)) {
stack.push({
value: referedValue,
secretPath,
environment,
depth: depth + 1
});
}
if (referedValue) {
expandedValue = expandedValue.replaceAll(interpolationSyntax, referedValue);
}
} else {
const secretReferenceEnvironment = entities[0];
const secretReferencePath = path.join("/", ...entities.slice(1, entities.length - 1));
const secretReferenceKey = entities[entities.length - 1];

// eslint-disable-next-line no-await-in-loop
const referedValue = await fetchSecret(secretReferenceEnvironment, secretReferencePath, secretReferenceKey);
const cacheKey = getCacheUniqueKey(secretReferenceEnvironment, secretReferencePath);
secretCache[cacheKey][secretReferenceKey] = referedValue;
if (INTERPOLATION_SYNTAX_REG.test(referedValue)) {
stack.push({
value: referedValue,
secretPath: secretReferencePath,
environment: secretReferenceEnvironment,
depth: depth + 1
});
}

if (referedValue) {
expandedValue = expandedValue.replaceAll(interpolationSyntax, referedValue);
}
}
}
}
}

// eslint-disable-next-line
expandedSec[key] = interpolatedValue;
return interpolatedValue;
return expandedValue;
};

const fetchSecret = fetchSecretFactory();
const expandSecrets = async (
inputSecrets: Record<string, { value?: string; comment?: string; skipMultilineEncoding?: boolean | null }>
) => {
const expandedSecrets: Record<string, string | undefined> = {};
const toBeExpandedSecrets: Record<string, string | undefined> = {};
const expandSecret = async (inputSecret: {
value?: string;
skipMultilineEncoding?: boolean | null;
secretPath: string;
environment: string;
}) => {
if (!inputSecret.value) return inputSecret.value;

Object.keys(inputSecrets).forEach((key) => {
if (inputSecrets[key].value?.match(INTERPOLATION_SYNTAX_REG)) {
toBeExpandedSecrets[key] = inputSecrets[key].value;
} else {
expandedSecrets[key] = inputSecrets[key].value;
}
});
const shouldExpand = Boolean(inputSecret.value?.match(INTERPOLATION_SYNTAX_REG));
if (!shouldExpand) return inputSecret.value;

for (const key of Object.keys(inputSecrets)) {
if (expandedSecrets?.[key]) {
// should not do multi line encoding if user has set it to skip
// eslint-disable-next-line
inputSecrets[key].value = inputSecrets[key].skipMultilineEncoding
? formatMultiValueEnv(expandedSecrets[key])
: expandedSecrets[key];
// eslint-disable-next-line
continue;
}

// this is to avoid a recursion loop, so the reference graph must be a directed acyclic graph rather than cyclic
// if the same key is encountered twice while expanding, it would loop forever
const recursionChainBreaker: Record<string, boolean> = {};
// eslint-disable-next-line
const expandedVal = await recursivelyExpandSecret(
expandedSecrets,
toBeExpandedSecrets,
fetchSecret,
recursionChainBreaker,
key
);

// eslint-disable-next-line
inputSecrets[key].value = inputSecrets[key].skipMultilineEncoding
? formatMultiValueEnv(expandedVal)
: expandedVal;
}

return inputSecrets;
const expandedSecretValue = await recursivelyExpandSecret(inputSecret);
return inputSecret.skipMultilineEncoding ? formatMultiValueEnv(expandedSecretValue) : expandedSecretValue;
};
return expandSecrets;
return expandSecret;
};

export const reshapeBridgeSecret = (
@@ -521,27 +521,22 @@ export const secretV2BridgeServiceFactory = ({

if (shouldExpandSecretReferences) {
const secretsGroupByPath = groupBy(filteredSecrets, (i) => i.secretPath);
for (const secretPathKey in secretsGroupByPath) {
if (Object.hasOwn(secretsGroupByPath, secretPathKey)) {
const secretsGroupByKey = secretsGroupByPath[secretPathKey].reduce(
(acc, item) => {
acc[item.secretKey] = {
value: item.secretValue,
comment: item.secretComment,
skipMultilineEncoding: item.skipMultilineEncoding
};
return acc;
},
{} as Record<string, { value?: string; comment?: string; skipMultilineEncoding?: boolean | null }>
);
// eslint-disable-next-line
await expandSecretReferences(secretsGroupByKey);
secretsGroupByPath[secretPathKey].forEach((decryptedSecret) => {
// eslint-disable-next-line no-param-reassign
decryptedSecret.secretValue = secretsGroupByKey[decryptedSecret.secretKey].value || "";
});
}
}
await Promise.allSettled(
Object.keys(secretsGroupByPath).map((groupedPath) =>
Promise.allSettled(
secretsGroupByPath[groupedPath].map(async (decryptedSecret, index) => {
const expandedSecretValue = await expandSecretReferences({
value: decryptedSecret.secretValue,
secretPath: groupedPath,
environment,
skipMultilineEncoding: decryptedSecret.skipMultilineEncoding
});
// eslint-disable-next-line no-param-reassign
secretsGroupByPath[groupedPath][index].secretValue = expandedSecretValue || "";
})
)
)
);
}

if (!includeImports) {
@@ -693,12 +688,14 @@ export const secretV2BridgeServiceFactory = ({
? secretManagerDecryptor({ cipherTextBlob: secret.encryptedValue }).toString()
: "";
if (shouldExpandSecretReferences && secretValue) {
const secretReferenceExpandedRecord = {
[secret.key]: { value: secretValue }
};
// eslint-disable-next-line
await expandSecretReferences(secretReferenceExpandedRecord);
secretValue = secretReferenceExpandedRecord[secret.key].value;
const expandedSecretValue = await expandSecretReferences({
environment,
secretPath: path,
value: secretValue,
skipMultilineEncoding: secret.skipMultilineEncoding
});
secretValue = expandedSecretValue || "";
}

return reshapeBridgeSecret(projectId, environment, path, {

@@ -4,6 +4,8 @@ import { TDbClient } from "@app/db";
import { TableName, TSecretVersionsV2, TSecretVersionsV2Update } from "@app/db/schemas";
import { BadRequestError, DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";

export type TSecretVersionV2DALFactory = ReturnType<typeof secretVersionV2BridgeDALFactory>;

@@ -87,6 +89,7 @@ export const secretVersionV2BridgeDALFactory = (db: TDbClient) => {
};

const pruneExcessVersions = async () => {
logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret version v2 started`);
try {
await db(TableName.SecretVersionV2)
.with("version_cte", (qb) => {
@@ -112,6 +115,7 @@ export const secretVersionV2BridgeDALFactory = (db: TDbClient) => {
name: "Secret Version Prune"
});
}
logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret version v2 completed`);
};

return {

@@ -196,6 +196,13 @@ export const recursivelyGetSecretPaths = ({
return getPaths;
};

// converts multi-line values into quoted single-line values with \n escapes
const formatMultiValueEnv = (val?: string) => {
if (!val) return "";
if (!val.match("\n")) return val;
return `"${val.replace(/\n/g, "\\n")}"`;
};

type TInterpolateSecretArg = {
projectId: string;
secretEncKey: string;
@@ -203,162 +210,128 @@ type TInterpolateSecretArg = {
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath">;
};

const MAX_SECRET_REFERENCE_DEPTH = 5;
const INTERPOLATION_SYNTAX_REG = /\${([^}]+)}/g;
export const interpolateSecrets = ({ projectId, secretEncKey, secretDAL, folderDAL }: TInterpolateSecretArg) => {
const fetchSecretsCrossEnv = () => {
const fetchCache: Record<string, Record<string, string>> = {};
const secretCache: Record<string, Record<string, string>> = {};
const getCacheUniqueKey = (environment: string, secretPath: string) => `${environment}-${secretPath}`;

return async (secRefEnv: string, secRefPath: string[], secRefKey: string) => {
const secRefPathUrl = path.join("/", ...secRefPath);
const uniqKey = `${secRefEnv}-${secRefPathUrl}`;
const fetchSecret = async (environment: string, secretPath: string, secretKey: string) => {
const cacheKey = getCacheUniqueKey(environment, secretPath);
const uniqKey = `${environment}-${cacheKey}`;

if (fetchCache?.[uniqKey]) {
return fetchCache[uniqKey][secRefKey];
}
if (secretCache?.[uniqKey]) {
return secretCache[uniqKey][secretKey] || "";
}

const folder = await folderDAL.findBySecretPath(projectId, secRefEnv, secRefPathUrl);
if (!folder) return "";
const secrets = await secretDAL.findByFolderId(folder.id);
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
if (!folder) return "";
const secrets = await secretDAL.findByFolderId(folder.id);

const decryptedSec = secrets.reduce<Record<string, string>>((prev, secret) => {
const secretKey = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretKeyCiphertext,
iv: secret.secretKeyIV,
tag: secret.secretKeyTag,
key: secretEncKey
});
const secretValue = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretValueCiphertext,
iv: secret.secretValueIV,
tag: secret.secretValueTag,
key: secretEncKey
});
const decryptedSec = secrets.reduce<Record<string, string>>((prev, secret) => {
const decryptedSecretKey = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretKeyCiphertext,
iv: secret.secretKeyIV,
tag: secret.secretKeyTag,
key: secretEncKey
});
const decryptedSecretValue = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretValueCiphertext,
iv: secret.secretValueIV,
tag: secret.secretValueTag,
key: secretEncKey
});

// eslint-disable-next-line
prev[secretKey] = secretValue;
return prev;
}, {});
// eslint-disable-next-line
prev[decryptedSecretKey] = decryptedSecretValue;
return prev;
}, {});

fetchCache[uniqKey] = decryptedSec;
secretCache[uniqKey] = decryptedSec;

return fetchCache[uniqKey][secRefKey];
};
return secretCache[uniqKey][secretKey] || "";
};

const recursivelyExpandSecret = async (
expandedSec: Record<string, string>,
interpolatedSec: Record<string, string>,
fetchCrossEnv: (env: string, secPath: string[], secKey: string) => Promise<string>,
recursionChainBreaker: Record<string, boolean>,
key: string
) => {
if (expandedSec?.[key] !== undefined) {
return expandedSec[key];
}
if (recursionChainBreaker?.[key]) {
return "";
}
// eslint-disable-next-line
recursionChainBreaker[key] = true;
const recursivelyExpandSecret = async ({
value,
secretPath,
environment,
depth = 0
}: {
value?: string;
secretPath: string;
environment: string;
depth?: number;
}) => {
if (!value) return "";
if (depth > MAX_SECRET_REFERENCE_DEPTH) return "";

let interpolatedValue = interpolatedSec[key];
if (!interpolatedValue) {
// eslint-disable-next-line no-console
console.error(`Couldn't find referenced value - ${key}`);
return "";
}

const refs = interpolatedValue.match(INTERPOLATION_SYNTAX_REG);
const refs = value.match(INTERPOLATION_SYNTAX_REG);
let expandedValue = value;
if (refs) {
for (const interpolationSyntax of refs) {
const interpolationKey = interpolationSyntax.slice(2, interpolationSyntax.length - 1);
const entities = interpolationKey.trim().split(".");

if (entities.length === 1) {
const val = await recursivelyExpandSecret(
expandedSec,
interpolatedSec,
fetchCrossEnv,
recursionChainBreaker,
interpolationKey
);
if (val) {
interpolatedValue = interpolatedValue.replaceAll(interpolationSyntax, val);
}
const [secretKey] = entities;
// eslint-disable-next-line
continue;
let referenceValue = await fetchSecret(environment, secretPath, secretKey);
if (INTERPOLATION_SYNTAX_REG.test(referenceValue)) {
// eslint-disable-next-line
referenceValue = await recursivelyExpandSecret({
environment,
secretPath,
value: referenceValue,
depth: depth + 1
});
}
const cacheKey = getCacheUniqueKey(environment, secretPath);
secretCache[cacheKey][secretKey] = referenceValue;
expandedValue = expandedValue.replaceAll(interpolationSyntax, referenceValue);
}

if (entities.length > 1) {
const secRefEnv = entities[0];
const secRefPath = entities.slice(1, entities.length - 1);
const secRefKey = entities[entities.length - 1];
const secretReferenceEnvironment = entities[0];
const secretReferencePath = path.join("/", ...entities.slice(1, entities.length - 1));
const secretReferenceKey = entities[entities.length - 1];

const val = await fetchCrossEnv(secRefEnv, secRefPath, secRefKey);
if (val) {
interpolatedValue = interpolatedValue.replaceAll(interpolationSyntax, val);
// eslint-disable-next-line
let referenceValue = await fetchSecret(secretReferenceEnvironment, secretReferencePath, secretReferenceKey);
if (INTERPOLATION_SYNTAX_REG.test(referenceValue)) {
// eslint-disable-next-line
referenceValue = await recursivelyExpandSecret({
environment: secretReferenceEnvironment,
secretPath: secretReferencePath,
value: referenceValue,
depth: depth + 1
});
}
const cacheKey = getCacheUniqueKey(secretReferenceEnvironment, secretReferencePath);
secretCache[cacheKey][secretReferenceKey] = referenceValue;
expandedValue = expandedValue.replaceAll(interpolationSyntax, referenceValue);
}
}
}

// eslint-disable-next-line
expandedSec[key] = interpolatedValue;
return interpolatedValue;
return expandedValue;
};

// converts multi-line values into quoted single-line values with \n escapes
const formatMultiValueEnv = (val?: string) => {
if (!val) return "";
if (!val.match("\n")) return val;
return `"${val.replace(/\n/g, "\\n")}"`;
const expandSecret = async (inputSecret: {
value?: string;
skipMultilineEncoding?: boolean | null;
secretPath: string;
environment: string;
}) => {
if (!inputSecret.value) return inputSecret.value;

const shouldExpand = Boolean(inputSecret.value?.match(INTERPOLATION_SYNTAX_REG));
if (!shouldExpand) return inputSecret.value;

const expandedSecretValue = await recursivelyExpandSecret(inputSecret);
return inputSecret.skipMultilineEncoding ? formatMultiValueEnv(expandedSecretValue) : expandedSecretValue;
};

const expandSecrets = async (
secrets: Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean | null }>
) => {
const expandedSec: Record<string, string> = {};
const interpolatedSec: Record<string, string> = {};

const crossSecEnvFetch = fetchSecretsCrossEnv();

Object.keys(secrets).forEach((key) => {
if (secrets[key].value.match(INTERPOLATION_SYNTAX_REG)) {
interpolatedSec[key] = secrets[key].value;
} else {
expandedSec[key] = secrets[key].value;
}
});

for (const key of Object.keys(secrets)) {
if (expandedSec?.[key]) {
// should not do multi line encoding if user has set it to skip
// eslint-disable-next-line
secrets[key].value = secrets[key].skipMultilineEncoding
? formatMultiValueEnv(expandedSec[key])
: expandedSec[key];
// eslint-disable-next-line
continue;
}

// this is to avoid a recursion loop; the reference graph must be a directed acyclic graph rather than cyclic
// if the same key is visited twice while building an expansion, the chain is cyclic and would loop forever
const recursionChainBreaker: Record<string, boolean> = {};
const expandedVal = await recursivelyExpandSecret(
expandedSec,
interpolatedSec,
crossSecEnvFetch,
recursionChainBreaker,
key
);

// eslint-disable-next-line
secrets[key].value = secrets[key].skipMultilineEncoding ? formatMultiValueEnv(expandedVal) : expandedVal;
}

return secrets;
};
return expandSecrets;
return expandSecret;
};

export const decryptSecretRaw = (

@@ -258,6 +258,7 @@ export const secretQueueFactory = ({
const getIntegrationSecretsV2 = async (dto: {
projectId: string;
environment: string;
secretPath: string;
folderId: string;
depth: number;
decryptor: (value: Buffer | null | undefined) => string;
@@ -269,30 +270,36 @@ export const secretQueueFactory = ({
);
return content;
}

// process secrets in current folder
const secrets = await secretV2BridgeDAL.findByFolderId(dto.folderId);
secrets.forEach((secret) => {
const secretKey = secret.key;
const secretValue = dto.decryptor(secret.encryptedValue);
content[secretKey] = { value: secretValue };

if (secret.encryptedComment) {
const commentValue = dto.decryptor(secret.encryptedComment);
content[secretKey].comment = commentValue;
}

content[secretKey].skipMultilineEncoding = Boolean(secret.skipMultilineEncoding);
});

const expandSecretReferences = expandSecretReferencesFactory({
decryptSecretValue: dto.decryptor,
secretDAL: secretV2BridgeDAL,
folderDAL,
projectId: dto.projectId
});
// process secrets in current folder
const secrets = await secretV2BridgeDAL.findByFolderId(dto.folderId);

await Promise.allSettled(
secrets.map(async (secret) => {
const secretKey = secret.key;
const secretValue = dto.decryptor(secret.encryptedValue);
const expandedSecretValue = await expandSecretReferences({
environment: dto.environment,
secretPath: dto.secretPath,
skipMultilineEncoding: secret.skipMultilineEncoding,
value: secretValue
});
content[secretKey] = { value: expandedSecretValue || "" };

if (secret.encryptedComment) {
const commentValue = dto.decryptor(secret.encryptedComment);
content[secretKey].comment = commentValue;
}

content[secretKey].skipMultilineEncoding = Boolean(secret.skipMultilineEncoding);
})
);

await expandSecretReferences(content);
// check if current folder has any imports from other folders
const secretImports = await secretImportDAL.find({ folderId: dto.folderId, isReplication: false });

@@ -329,6 +336,7 @@ export const secretQueueFactory = ({
const getIntegrationSecrets = async (dto: {
projectId: string;
environment: string;
secretPath: string;
folderId: string;
key: string;
depth: number;
@@ -341,46 +349,52 @@ export const secretQueueFactory = ({
return content;
}

// process secrets in current folder
const secrets = await secretDAL.findByFolderId(dto.folderId);
secrets.forEach((secret) => {
const secretKey = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretKeyCiphertext,
iv: secret.secretKeyIV,
tag: secret.secretKeyTag,
key: dto.key
});

const secretValue = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretValueCiphertext,
iv: secret.secretValueIV,
tag: secret.secretValueTag,
key: dto.key
});

content[secretKey] = { value: secretValue };

if (secret.secretCommentCiphertext && secret.secretCommentIV && secret.secretCommentTag) {
const commentValue = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretCommentCiphertext,
iv: secret.secretCommentIV,
tag: secret.secretCommentTag,
key: dto.key
});
content[secretKey].comment = commentValue;
}

content[secretKey].skipMultilineEncoding = Boolean(secret.skipMultilineEncoding);
});

const expandSecrets = interpolateSecrets({
const expandSecretReferences = interpolateSecrets({
projectId: dto.projectId,
secretEncKey: dto.key,
folderDAL,
secretDAL
});

await expandSecrets(content);
// process secrets in current folder
const secrets = await secretDAL.findByFolderId(dto.folderId);
await Promise.allSettled(
secrets.map(async (secret) => {
const secretKey = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretKeyCiphertext,
iv: secret.secretKeyIV,
tag: secret.secretKeyTag,
key: dto.key
});

const secretValue = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretValueCiphertext,
iv: secret.secretValueIV,
tag: secret.secretValueTag,
key: dto.key
});
const expandedSecretValue = await expandSecretReferences({
environment: dto.environment,
secretPath: dto.secretPath,
skipMultilineEncoding: secret.skipMultilineEncoding,
value: secretValue
});

content[secretKey] = { value: expandedSecretValue || "" };

if (secret.secretCommentCiphertext && secret.secretCommentIV && secret.secretCommentTag) {
const commentValue = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretCommentCiphertext,
iv: secret.secretCommentIV,
tag: secret.secretCommentTag,
key: dto.key
});
content[secretKey].comment = commentValue;
}

content[secretKey].skipMultilineEncoding = Boolean(secret.skipMultilineEncoding);
})
);

// check if current folder has any imports from other folders
const secretImport = await secretImportDAL.find({ folderId: dto.folderId, isReplication: false });
@@ -404,7 +418,8 @@ export const secretQueueFactory = ({
projectId: dto.projectId,
folderId: folder.id,
key: dto.key,
depth: dto.depth + 1
depth: dto.depth + 1,
secretPath: dto.secretPath
});

// add the imported secrets to the current folder secrets
@@ -686,6 +701,7 @@ export const secretQueueFactory = ({
projectId,
folderId: folder.id,
depth: 1,
secretPath,
decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : "")
})
: await getIntegrationSecrets({
@@ -693,7 +709,8 @@ export const secretQueueFactory = ({
projectId,
folderId: folder.id,
key: botKey as string,
depth: 1
depth: 1,
secretPath
});

for (const integration of toBeSyncedIntegrations) {

@@ -482,7 +482,7 @@ export const secretServiceFactory = ({
projectId,
environmentSlug: folder.environment.slug
});
// TODO(akhilmhdh-pg): licence check, posthog service and snapshot
// TODO(akhilmhdh-pg): license check, posthog service and snapshot
return { ...deletedSecret[0], _id: deletedSecret[0].id, workspace: projectId, environment, secretPath: path };
};

@@ -1047,74 +1047,47 @@ export const secretServiceFactory = ({
};
});

const expandSecret = interpolateSecrets({
folderDAL,
projectId,
secretDAL,
secretEncKey: botKey
});

if (expandSecretReferences) {
const expandSecrets = interpolateSecrets({
folderDAL,
projectId,
secretDAL,
secretEncKey: botKey
});

const batchSecretsExpand = async (
secretBatch: {
secretKey: string;
secretValue: string;
secretComment?: string;
secretPath: string;
skipMultilineEncoding: boolean | null | undefined;
}[]
) => {
// Group secrets by secretPath
const secretsByPath: Record<
string,
{
secretKey: string;
secretValue: string;
secretComment?: string;
skipMultilineEncoding: boolean | null | undefined;
}[]
> = {};

secretBatch.forEach((secret) => {
if (!secretsByPath[secret.secretPath]) {
secretsByPath[secret.secretPath] = [];
}
secretsByPath[secret.secretPath].push(secret);
});

// Expand secrets for each group
for (const secPath in secretsByPath) {
if (!Object.hasOwn(secretsByPath, path)) {
// eslint-disable-next-line no-continue
continue;
}

const secretRecord: Record<
string,
{ value: string; comment?: string; skipMultilineEncoding: boolean | null | undefined }
> = {};
secretsByPath[secPath].forEach((decryptedSecret) => {
secretRecord[decryptedSecret.secretKey] = {
value: decryptedSecret.secretValue,
comment: decryptedSecret.secretComment,
skipMultilineEncoding: decryptedSecret.skipMultilineEncoding
};
});

await expandSecrets(secretRecord);

secretsByPath[secPath].forEach((decryptedSecret) => {
// eslint-disable-next-line no-param-reassign
decryptedSecret.secretValue = secretRecord[decryptedSecret.secretKey].value;
});
}
};

// expand secrets
await batchSecretsExpand(filteredSecrets);

// expand imports by batch
await Promise.all(processedImports.map((processedImport) => batchSecretsExpand(processedImport.secrets)));
const secretsGroupByPath = groupBy(filteredSecrets, (i) => i.secretPath);
await Promise.allSettled(
Object.keys(secretsGroupByPath).map((groupedPath) =>
Promise.allSettled(
secretsGroupByPath[groupedPath].map(async (decryptedSecret, index) => {
const expandedSecretValue = await expandSecret({
value: decryptedSecret.secretValue,
secretPath: groupedPath,
environment,
skipMultilineEncoding: decryptedSecret.skipMultilineEncoding
});
// eslint-disable-next-line no-param-reassign
secretsGroupByPath[groupedPath][index].secretValue = expandedSecretValue || "";
})
)
)
);
await Promise.allSettled(
processedImports.map((processedImport) =>
Promise.allSettled(
processedImport.secrets.map(async (decryptedSecret, index) => {
const expandedSecretValue = await expandSecret({
value: decryptedSecret.secretValue,
secretPath: path,
environment,
skipMultilineEncoding: decryptedSecret.skipMultilineEncoding
});
// eslint-disable-next-line no-param-reassign
processedImport.secrets[index].secretValue = expandedSecretValue || "";
})
)
)
);
}

return {
@@ -1177,40 +1150,19 @@ export const secretServiceFactory = ({
const decryptedSecret = decryptSecretRaw(encryptedSecret, botKey);

if (expandSecretReferences) {
const expandSecrets = interpolateSecrets({
const expandSecret = interpolateSecrets({
folderDAL,
projectId,
secretDAL,
secretEncKey: botKey
});

const expandSingleSecret = async (secret: {
secretKey: string;
secretValue: string;
secretComment?: string;
secretPath: string;
skipMultilineEncoding: boolean | null | undefined;
}) => {
const secretRecord: Record<
string,
{ value: string; comment?: string; skipMultilineEncoding: boolean | null | undefined }
> = {
[secret.secretKey]: {
value: secret.secretValue,
comment: secret.secretComment,
skipMultilineEncoding: secret.skipMultilineEncoding
}
};

await expandSecrets(secretRecord);

// Update the secret with the expanded value
// eslint-disable-next-line no-param-reassign
secret.secretValue = secretRecord[secret.secretKey].value;
};

// Expand the secret
await expandSingleSecret(decryptedSecret);
const expandedSecretValue = await expandSecret({
environment,
secretPath: path,
value: decryptedSecret.secretValue,
skipMultilineEncoding: decryptedSecret.skipMultilineEncoding
});
decryptedSecret.secretValue = expandedSecretValue || "";
}

return decryptedSecret;

@@ -4,6 +4,8 @@ import { TDbClient } from "@app/db";
import { TableName, TSecretVersions, TSecretVersionsUpdate } from "@app/db/schemas";
import { BadRequestError, DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";

export type TSecretVersionDALFactory = ReturnType<typeof secretVersionDALFactory>;

@@ -112,6 +114,7 @@ export const secretVersionDALFactory = (db: TDbClient) => {
};

const pruneExcessVersions = async () => {
logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret version v1 started`);
try {
await db(TableName.SecretVersion)
.with("version_cte", (qb) => {
@@ -137,6 +140,7 @@ export const secretVersionDALFactory = (db: TDbClient) => {
name: "Secret Version Prune"
});
}
logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret version v1 completed`);
};

return {

@@ -1,4 +1,4 @@
import tsconfigPaths from "vite-tsconfig-paths"; // only if you are using custom tsconfig paths
import path from "path";
import { defineConfig } from "vitest/config";

export default defineConfig({
@@ -15,7 +15,14 @@ export default defineConfig({
useAtomics: true,
isolate: false
}
},
alias: {
"./license-fns": path.resolve(__dirname, "./src/ee/services/license/__mocks__/license-fns")
}
},
plugins: [tsconfigPaths()] // only if you are using custom tsconfig paths,
resolve: {
alias: {
"@app": path.resolve(__dirname, "./src")
}
}
});

@@ -4,22 +4,27 @@ Copyright (c) 2023 Infisical Inc.
package cmd

import (
"errors"
"fmt"
"os"
"os/exec"
"os/signal"
"runtime"
"strings"
"sync"
"syscall"
"time"

"github.com/Infisical/infisical-merge/packages/models"
"github.com/Infisical/infisical-merge/packages/util"
"github.com/fatih/color"
"github.com/posthog/posthog-go"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
)

var ErrManualSignalInterrupt = errors.New("signal: interrupt")
var watcherWaitGroup = new(sync.WaitGroup)

// runCmd represents the run command
var runCmd = &cobra.Command{
Example: `
@@ -77,11 +82,35 @@ var runCmd = &cobra.Command{
util.HandleError(err, "Unable to parse flag")
}

command, err := cmd.Flags().GetString("command")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}

if err != nil {
util.HandleError(err, "Unable to parse flag")
}

secretOverriding, err := cmd.Flags().GetBool("secret-overriding")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}

watchMode, err := cmd.Flags().GetBool("watch")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}

watchModeInterval, err := cmd.Flags().GetInt("watch-interval")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}

// If the --watch flag has been set, the --watch-interval flag should also be set
if watchMode && watchModeInterval < 5 {
util.HandleError(fmt.Errorf("watch interval must be at least 5 seconds, you passed %d seconds", watchModeInterval))
}

shouldExpandSecrets, err := cmd.Flags().GetBool("expand")
if err != nil {
util.HandleError(err, "Unable to parse flag")
@@ -116,108 +145,50 @@ var runCmd = &cobra.Command{
Recursive: recursive,
}

if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
request.UniversalAuthAccessToken = token.Token
}

secrets, err := util.GetAllEnvironmentVariables(request, projectConfigDir)

injectableEnvironment, err := fetchAndFormatSecretsForShell(request, projectConfigDir, secretOverriding, shouldExpandSecrets, token)
if err != nil {
util.HandleError(err, "Could not fetch secrets", "If you are using a service token to fetch secrets, please ensure it is valid")
}

if secretOverriding {
secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_PERSONAL)
log.Debug().Msgf("injecting the following environment variables into shell: %v", injectableEnvironment.Variables)

if watchMode {
executeCommandWithWatchMode(command, args, watchModeInterval, request, projectConfigDir, shouldExpandSecrets, secretOverriding, token)
} else {
secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_SHARED)
}
if cmd.Flags().Changed("command") {
command := cmd.Flag("command").Value.String()
err = executeMultipleCommandWithEnvs(command, injectableEnvironment.SecretsCount, injectableEnvironment.Variables)
if err != nil {
fmt.Println(err)
os.Exit(1)
}

if shouldExpandSecrets {

authParams := models.ExpandSecretsAuthentication{}

if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
authParams.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
authParams.UniversalAuthAccessToken = token.Token
}

secrets = util.ExpandSecrets(secrets, authParams, projectConfigDir)
}

secretsByKey := getSecretsByKeys(secrets)
environmentVariables := make(map[string]string)

// add all existing environment vars
for _, s := range os.Environ() {
kv := strings.SplitN(s, "=", 2)
key := kv[0]
value := kv[1]
environmentVariables[key] = value
}

// check to see if there are any reserved key words in secrets to inject
filterReservedEnvVars(secretsByKey)

// now add infisical secrets
for k, v := range secretsByKey {
environmentVariables[k] = v.Value
}

// turn it back into a list of envs
var env []string
for key, value := range environmentVariables {
s := key + "=" + value
env = append(env, s)
}

log.Debug().Msgf("injecting the following environment variables into shell: %v", env)

Telemetry.CaptureEvent("cli-command:run",
posthog.NewProperties().
Set("secretsCount", len(secrets)).
Set("environment", environmentName).
Set("isUsingServiceToken", token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER).
Set("isUsingUniversalAuthToken", token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER).
Set("single-command", strings.Join(args, " ")).
Set("multi-command", cmd.Flag("command").Value.String()).
Set("version", util.CLI_VERSION))

if cmd.Flags().Changed("command") {
command := cmd.Flag("command").Value.String()

err = executeMultipleCommandWithEnvs(command, len(secretsByKey), env)
if err != nil {
fmt.Println(err)
os.Exit(1)
}

} else {
err = executeSingleCommandWithEnvs(args, len(secretsByKey), env)
if err != nil {
fmt.Println(err)
os.Exit(1)
} else {
err = executeSingleCommandWithEnvs(args, injectableEnvironment.SecretsCount, injectableEnvironment.Variables)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
}
}

},
}

var (
reservedEnvVars = []string{
"HOME", "PATH", "PS1", "PS2",
"PWD", "EDITOR", "XAUTHORITY", "USER",
"TERM", "TERMINFO", "SHELL", "MAIL",
}

reservedEnvVarPrefixes = []string{
"XDG_",
"LC_",
}
)

func filterReservedEnvVars(env map[string]models.SingleEnvironmentVariable) {
var (
reservedEnvVars = []string{
"HOME", "PATH", "PS1", "PS2",
"PWD", "EDITOR", "XAUTHORITY", "USER",
"TERM", "TERMINFO", "SHELL", "MAIL",
}

reservedEnvVarPrefixes = []string{
"XDG_",
"LC_",
}
)

for _, reservedEnvName := range reservedEnvVars {
if _, ok := env[reservedEnvName]; ok {
delete(env, reservedEnvName)
@@ -237,13 +208,15 @@ func filterReservedEnvVars(env map[string]models.SingleEnvironmentVariable) {

func init() {
rootCmd.AddCommand(runCmd)
runCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
runCmd.Flags().String("token", "", "fetch secrets using service token or machine identity access token")
runCmd.Flags().String("projectId", "", "manually set the project ID to fetch secrets from when using machine identity based auth")
runCmd.Flags().StringP("env", "e", "dev", "Set the environment (dev, prod, etc.) from which your secrets should be pulled from")
runCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
runCmd.Flags().Bool("include-imports", true, "Import linked secrets ")
runCmd.Flags().Bool("recursive", false, "Fetch secrets from all sub-folders")
runCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
runCmd.Flags().StringP("env", "e", "dev", "set the environment (dev, prod, etc.) from which your secrets should be pulled from")
runCmd.Flags().Bool("expand", true, "parse shell parameter expansions in your secrets")
runCmd.Flags().Bool("include-imports", true, "import linked secrets ")
runCmd.Flags().Bool("recursive", false, "fetch secrets from all sub-folders")
runCmd.Flags().Bool("secret-overriding", true, "prioritizes personal secrets, if any, with the same name over shared secrets")
runCmd.Flags().Bool("watch", false, "enable reload of application when secrets change")
runCmd.Flags().Int("watch-interval", 10, "interval in seconds to check for secret changes")
runCmd.Flags().StringP("command", "c", "", "chained commands to execute (e.g. \"npm install && npm run dev; echo ...\")")
runCmd.Flags().StringP("tags", "t", "", "filter secrets by tag slugs ")
runCmd.Flags().String("path", "/", "get secrets within a folder path")
@@ -263,7 +236,7 @@ func executeSingleCommandWithEnvs(args []string, secretsCount int, env []string)
cmd.Stderr = os.Stderr
cmd.Env = env

return execCmd(cmd)
return execBasicCmd(cmd)
}

func executeMultipleCommandWithEnvs(fullCommand string, secretsCount int, env []string) error {
@@ -286,11 +259,10 @@ func executeMultipleCommandWithEnvs(fullCommand string, secretsCount int, env []
log.Info().Msgf(color.GreenString("Injecting %v Infisical secrets into your application process", secretsCount))
log.Debug().Msgf("executing command: %s %s %s \n", shell[0], shell[1], fullCommand)

return execCmd(cmd)
return execBasicCmd(cmd)
}

// Credit: inspired by AWS Vault
func execCmd(cmd *exec.Cmd) error {
func execBasicCmd(cmd *exec.Cmd) error {
sigChannel := make(chan os.Signal, 1)
signal.Notify(sigChannel)

@@ -314,3 +286,217 @@ func execCmd(cmd *exec.Cmd) error {
os.Exit(waitStatus.ExitStatus())
return nil
}

func waitForExitCommand(cmd *exec.Cmd) (int, error) {
if err := cmd.Wait(); err != nil {
// ignore errors
cmd.Process.Signal(os.Kill) // #nosec G104

if exitError, ok := err.(*exec.ExitError); ok {
return exitError.ExitCode(), exitError
}

return 2, err
}

waitStatus, ok := cmd.ProcessState.Sys().(syscall.WaitStatus)
if !ok {
return 2, fmt.Errorf("unexpected ProcessState type, expected syscall.WaitStatus, got %T", waitStatus)
}
return waitStatus.ExitStatus(), nil
}

func executeCommandWithWatchMode(commandFlag string, args []string, watchModeInterval int, request models.GetAllSecretsParameters, projectConfigDir string, expandSecrets bool, secretOverriding bool, token *models.TokenDetails) {

var cmd *exec.Cmd
var err error
var lastSecretsFetch time.Time
var lastUpdateEvent time.Time
var watchMutex sync.Mutex
var processMutex sync.Mutex
var beingTerminated = false
var currentETag string

if err != nil {
util.HandleError(err, "Failed to fetch secrets")
}

runCommandWithWatcher := func(environmentVariables models.InjectableEnvironmentResult) {
currentETag = environmentVariables.ETag
secretsFetchedAt := time.Now()
if secretsFetchedAt.After(lastSecretsFetch) {
lastSecretsFetch = secretsFetchedAt
}

shouldRestartProcess := cmd != nil
// terminate the old process before starting a new one
if shouldRestartProcess {
log.Info().Msg(color.HiMagentaString("[HOT RELOAD] Environment changes detected. Reloading process..."))
beingTerminated = true

log.Debug().Msgf(color.HiMagentaString("[HOT RELOAD] Sending SIGTERM to PID %d", cmd.Process.Pid))
if e := cmd.Process.Signal(syscall.SIGTERM); e != nil {
log.Error().Err(e).Msg(color.HiMagentaString("[HOT RELOAD] Failed to send SIGTERM"))
}
// wait up to 10 sec for the process to exit
for i := 0; i < 10; i++ {
if !util.IsProcessRunning(cmd.Process) {
// process has been killed so we break out
break
}
if i == 5 {
log.Debug().Msg(color.HiMagentaString("[HOT RELOAD] Still waiting for process exit status"))
}
time.Sleep(time.Second)
}

// SIGTERM may not work on Windows so we try SIGKILL
if util.IsProcessRunning(cmd.Process) {
log.Debug().Msg(color.HiMagentaString("[HOT RELOAD] Process still hasn't fully exited, attempting SIGKILL"))
if e := cmd.Process.Kill(); e != nil {
log.Error().Err(e).Msg(color.HiMagentaString("[HOT RELOAD] Failed to send SIGKILL"))
}
}

cmd = nil
} else {
// If `cmd` is nil, we know this is the first time we are starting the process
log.Info().Msg(color.HiMagentaString("[HOT RELOAD] Watching for secret changes..."))
}

processMutex.Lock()

if lastUpdateEvent.After(secretsFetchedAt) {
processMutex.Unlock()
return
}

beingTerminated = false
watcherWaitGroup.Add(1)

// start the process
log.Info().Msgf(color.GreenString("Injecting %v Infisical secrets into your application process", environmentVariables.SecretsCount))

cmd, err = util.RunCommand(commandFlag, args, environmentVariables.Variables, false)
if err != nil {
defer watcherWaitGroup.Done()
util.HandleError(err)
}

go func() {
defer processMutex.Unlock()
defer watcherWaitGroup.Done()

exitCode, err := waitForExitCommand(cmd)

// ignore errors if we are being terminated
if !beingTerminated {
if err != nil {
if strings.HasPrefix(err.Error(), "exec") || strings.HasPrefix(err.Error(), "fork/exec") {
log.Error().Err(err).Msg("Failed to execute command")
}
if err.Error() != ErrManualSignalInterrupt.Error() {
log.Error().Err(err).Msg("Process exited with error")
}
}

os.Exit(exitCode)
}
}()
}

recheckSecretsChannel := make(chan bool, 1)
recheckSecretsChannel <- true

// a simple goroutine that triggers the recheckSecretsChan every watch interval (defaults to 10 seconds)
go func() {
for {
time.Sleep(time.Duration(watchModeInterval) * time.Second)
recheckSecretsChannel <- true
}
}()

for {
<-recheckSecretsChannel
watchMutex.Lock()

newEnvironmentVariables, err := fetchAndFormatSecretsForShell(request, projectConfigDir, secretOverriding, expandSecrets, token)
if err != nil {
log.Error().Err(err).Msg("[HOT RELOAD] Failed to fetch secrets")
continue
}

if newEnvironmentVariables.ETag != currentETag {
runCommandWithWatcher(newEnvironmentVariables)
} else {
log.Debug().Msg("[HOT RELOAD] No changes detected in secrets, not reloading process")
}

watchMutex.Unlock()

}
}

func fetchAndFormatSecretsForShell(request models.GetAllSecretsParameters, projectConfigDir string, secretOverriding bool, shouldExpandSecrets bool, token *models.TokenDetails) (models.InjectableEnvironmentResult, error) {

if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
request.UniversalAuthAccessToken = token.Token
}

secrets, err := util.GetAllEnvironmentVariables(request, projectConfigDir)

if err != nil {
return models.InjectableEnvironmentResult{}, err
}

if secretOverriding {
secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_PERSONAL)
} else {
secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_SHARED)
}

if shouldExpandSecrets {

authParams := models.ExpandSecretsAuthentication{}

if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
authParams.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
authParams.UniversalAuthAccessToken = token.Token
}

secrets = util.ExpandSecrets(secrets, authParams, projectConfigDir)
}

secretsByKey := getSecretsByKeys(secrets)
environmentVariables := make(map[string]string)

// add all existing environment vars
for _, s := range os.Environ() {
kv := strings.SplitN(s, "=", 2)
key := kv[0]
value := kv[1]
environmentVariables[key] = value
}

// check to see if there are any reserved key words in secrets to inject
filterReservedEnvVars(secretsByKey)

// now add infisical secrets
for k, v := range secretsByKey {
environmentVariables[k] = v.Value
}

env := make([]string, 0, len(environmentVariables))
for key, value := range environmentVariables {
env = append(env, key+"="+value)
}

return models.InjectableEnvironmentResult{
Variables: env,
ETag: util.GenerateETagFromSecrets(secrets),
SecretsCount: len(secretsByKey),
}, nil
}

@@ -104,6 +104,12 @@ type GetAllSecretsParameters struct {
Recursive bool
}

type InjectableEnvironmentResult struct {
Variables []string
ETag string
SecretsCount int
}

type GetAllFoldersParameters struct {
WorkspaceId string
Environment string

cli/packages/util/exec.go
@@ -0,0 +1,92 @@
package util

import (
"fmt"
"os"
"os/exec"
"os/signal"
"runtime"
"syscall"
)

func RunCommand(singleCommand string, args []string, env []string, waitForExit bool) (*exec.Cmd, error) {
var c *exec.Cmd
var err error

if singleCommand != "" {
c, err = RunCommandFromString(singleCommand, env, waitForExit)
} else {
c, err = RunCommandFromArgs(args, env, waitForExit)
}

return c, err
}

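// IsProcessRunning reports whether the process is still alive by sending it signal 0.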
func IsProcessRunning(p *os.Process) bool {
err := p.Signal(syscall.Signal(0))
return err == nil
}

// For "infisical run -- COMMAND"
func RunCommandFromArgs(args []string, env []string, waitForExit bool) (*exec.Cmd, error) {
cmd := exec.Command(args[0], args[1:]...)
cmd.Stdin = os.Stdin
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
cmd.Env = env

err := execCommand(cmd, waitForExit)

return cmd, err
}

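// execCommand starts cmd, forwards any signal the CLI receives to the child process,
// and, if waitForExit is set, blocks until the child exits and mirrors its exit status.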
func execCommand(cmd *exec.Cmd, waitForExit bool) error {
sigChannel := make(chan os.Signal, 1)
signal.Notify(sigChannel)

if err := cmd.Start(); err != nil {
return err
}

go func() {
for {
sig := <-sigChannel
_ = cmd.Process.Signal(sig) // process all sigs
}
}()

if !waitForExit {
return nil
}

if err := cmd.Wait(); err != nil {
_ = cmd.Process.Signal(os.Kill)
return fmt.Errorf("failed to wait for command termination: %v", err)
}

waitStatus := cmd.ProcessState.Sys().(syscall.WaitStatus)
os.Exit(waitStatus.ExitStatus())
return nil
}

// For "infisical run --command=COMMAND"
func RunCommandFromString(command string, env []string, waitForExit bool) (*exec.Cmd, error) {
shell := [2]string{"sh", "-c"}
if runtime.GOOS == "windows" {
shell = [2]string{"cmd", "/C"}
} else {
currentShell := os.Getenv("SHELL")
if currentShell != "" {
shell[0] = currentShell
}
}

cmd := exec.Command(shell[0], shell[1], command) // #nosec G204 nosemgrep: semgrep_configs.prohibit-exec-command
cmd.Env = env
cmd.Stdin = os.Stdin
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr

err := execCommand(cmd, waitForExit)
return cmd, err
}

@@ -4,6 +4,7 @@ import (
"bytes"
"crypto/sha256"
"encoding/base64"
"encoding/hex"
"fmt"
"math/rand"
"os"
@@ -298,3 +299,16 @@ func GenerateRandomString(length int) string {
}
return string(b)
}

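// GenerateETagFromSecrets hashes the sorted secret keys and values into a quoted
// SHA-256 digest, giving watch mode a cheap way to detect changes.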
func GenerateETagFromSecrets(secrets []models.SingleEnvironmentVariable) string {
sortedSecrets := SortSecretsByKeys(secrets)
content := []byte{}

for _, secret := range sortedSecrets {
content = append(content, []byte(secret.Key)...)
content = append(content, []byte(secret.Value)...)
}

hash := sha256.Sum256(content)
return fmt.Sprintf(`"%s"`, hex.EncodeToString(hash[:]))
}

@@ -47,20 +47,20 @@ $ infisical run -- npm run dev
Used to fetch secrets via a [machine identity](/documentation/platform/identities/machine-identities) as opposed to logged-in credentials. Simply export this variable in the terminal before running this command.

```bash
# Example
export INFISICAL_TOKEN=$(infisical login --method=universal-auth --client-id=<identity-client-id> --client-secret=<identity-client-secret> --silent --plain) # --plain flag will output only the token, so it can be fed to an environment variable. --silent will disable any update messages.
# Example
export INFISICAL_TOKEN=$(infisical login --method=universal-auth --client-id=<identity-client-id> --client-secret=<identity-client-secret> --silent --plain) # --plain flag will output only the token, so it can be fed to an environment variable. --silent will disable any update messages.
```

<Info>
Alternatively, you may use service tokens.

Please note, however, that service tokens are being deprecated in favor of [machine identities](/documentation/platform/identities/machine-identities). They will be removed in the future in accordance with the deprecation notice and timeline stated [here](https://infisical.com/blog/deprecating-api-keys).

```bash
# Example
export INFISICAL_TOKEN=<service-token>
# Example
export INFISICAL_TOKEN=<service-token>
```

</Info>
</Info>
</Accordion>

<Accordion title="INFISICAL_DISABLE_UPDATE_CHECK">
@@ -69,22 +69,30 @@ $ infisical run -- npm run dev
To use, simply export this variable in the terminal before running this command.

```bash
# Example
export INFISICAL_DISABLE_UPDATE_CHECK=true
# Example
export INFISICAL_DISABLE_UPDATE_CHECK=true
```

</Accordion>

### Flags

<Accordion title="--project-config-dir">
<Accordion title="--watch">
By passing the `watch` flag, you are telling the CLI to watch for changes that happen in your Infisical project.
If secret changes happen, the command you provided will automatically be restarted with the new environment variables attached.

```bash
# Example
infisical run --watch -- printenv
```
</Accordion>

<Accordion title="--project-config-dir">
Explicitly set the directory where the .infisical.json resides. This is useful for some monorepo setups.

```bash
# Example
infisical run --project-config-dir=/some-dir -- printenv
# Example
infisical run --project-config-dir=/some-dir -- printenv
```

</Accordion>

<Accordion title="--command">
@@ -172,3 +180,19 @@ $ infisical run -- npm run dev
</Accordion>

</Accordion>


## Automatically reload command when secrets change

To automatically reload your command when secrets change, use the `--watch` flag.

```bash
infisical run --watch -- npm run dev
```

This will watch for changes in your secrets and automatically restart your command with the new secrets.
When your command restarts, it will have the new environment variables injected into it.
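
The polling cadence can be tuned with the `--watch-interval` flag, which defaults to 10 seconds and must be at least 5:

```bash
# Example: check for secret changes every 15 seconds
infisical run --watch --watch-interval=15 -- npm run dev
```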

<Note>
Please note that this feature is intended for development purposes. Automatically reloading your application when remote secret changes are made is generally not recommended in production environments.
</Note>
@@ -0,0 +1,61 @@
---
title: "Stream to Non-HTTP providers"
description: "How to stream Infisical Audit Logs to Non-HTTP log providers"
---

<Info>
Audit log streams are a paid feature.

If you're using Infisical Cloud, then it is available under the **Enterprise Tier**. If you're self-hosting Infisical,
then you should contact team@infisical.com to purchase an enterprise license to use it.
</Info>

This guide will demonstrate how you can send Infisical Audit log streams to storage solutions that do not support direct HTTP-based ingestion, such as AWS S3.
To achieve this, you will use a log collector like Fluent Bit to capture and forward logs from Infisical to non-HTTP storage options.
In this pattern, Fluent Bit acts as an intermediary, accepting HTTP log streams from Infisical and transforming them into a format that can be sent to your desired storage provider.

## Overview

Log collectors are tools used to collect, analyze, transform, and send logs to storage.
For the purposes of this guide, we will use [Fluent Bit](https://fluentbit.io) as our log collector and send logs from Infisical to AWS S3.
However, this is just an example and you can use any log collector of your choice.

## Deploy Fluent Bit

You can deploy Fluent Bit in one of two ways:
1. As a sidecar to your self-hosted Infisical instance
2. As a standalone service in any deployment/compute service (e.g., AWS EC2, ECS, or GCP Compute Engine); see the Docker sketch below
To view all deployment methods, visit the [Fluent Bit Getting Started guide](https://docs.fluentbit.io/manual/installation/getting-started-with-fluent-bit).

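For a standalone setup, one option is to run Fluent Bit's official Docker image with your configuration mounted. The image tag and mount path below follow Fluent Bit's documented defaults and are illustrative, not taken from this repository:

```bash
# Run Fluent Bit with a custom config, exposing the HTTP listener port used in the config below
docker run -p 8888:8888 \
  -v $(pwd)/fluent-bit.conf:/fluent-bit/etc/fluent-bit.conf \
  fluent/fluent-bit:latest
```
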
## Configure Fluent Bit

To set up Fluent Bit, you'll need to provide a configuration file that establishes an HTTP listener and configures an output to send JSON data to your chosen storage solution.

The following Fluent Bit configuration sets up an HTTP listener on port `8888` and sends logs to AWS S3:

```ini
[SERVICE]
Flush 1
Log_Level info
Daemon off

[INPUT]
Name http
Listen 0.0.0.0
Port 8888

[OUTPUT]
Name s3
Match *
bucket my-bucket
region us-west-2
total_file_size 50M
use_put_object Off
compression gzip
s3_key_format /$TAG/%Y/%m/%d/%H_%M_%S.gz
```

### Connecting Infisical Audit Log Stream

Once Fluent Bit is set up and configured, you can point the Infisical [audit log stream](/documentation/platform/audit-log-streams/audit-log-streams) to Fluent Bit's HTTP listener, which will then forward the logs to your chosen provider.
Using this pattern, you are able to send Infisical Audit logs to various providers that do not support HTTP-based log ingestion by default.
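
To verify the listener before connecting Infisical, you can post a sample JSON payload to it. The host and port below assume the configuration above, and the request path is illustrative:

```bash
curl -X POST -H "Content-Type: application/json" \
  -d '{"event": "sanity-check"}' \
  http://localhost:8888/infisical.audit
```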
@@ -1,5 +1,5 @@
---
title: "Audit Logs"
title: "Overview"
description: "Track every event and action performed within Infisical projects."
---

@ -1,9 +1,9 @@
|
||||
---
|
||||
title: "AWS Elasticahe"
|
||||
description: "Learn how to dynamically generate Redis Database user credentials."
|
||||
title: "AWS ElastiCache"
|
||||
description: "Learn how to dynamically generate AWS ElastiCache user credentials."
|
||||
---
|
||||
|
||||
The Infisical Redis dynamic secret allows you to generate Redis Database credentials on demand based on configured role.
|
||||
The Infisical AWS ElastiCache dynamic secret allows you to generate AWS ElastiCache credentials on demand based on configured role.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
@ -38,7 +38,7 @@ The Infisical Redis dynamic secret allows you to generate Redis Database credent
|
||||
|
||||
<Note>
|
||||
New leases may take up to a couple of minutes before ElastiCache has had a chance to complete their configuration.
|
||||
It is recommended to use a retry strategy when establishing new Redis ElastiCache connections.
|
||||
It is recommended to use a retry strategy when establishing new ElastiCache connections.
|
||||
This may prevent errors when trying to use a password that isn't yet live on the targeted ElastiCache cluster.
|
||||
|
||||
While a lease is being created, you will be unable to create new leases for the same dynamic secret.
|
||||
@ -51,17 +51,17 @@ The Infisical Redis dynamic secret allows you to generate Redis Database credent
|
||||
|
||||
|
||||
|
||||
## Set up Dynamic Secrets with Redis
|
||||
## Set up Dynamic Secrets with AWS ElastiCache
|
||||
|
||||
<Steps>
|
||||
<Step title="Open Secret Overview Dashboard">
|
||||
Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
|
||||
</Step>
|
||||
<Step title="Click on the 'Add Dynamic Secret' button">
|
||||

|
||||

|
||||
</Step>
|
||||
<Step title="Select 'Redis'">
|
||||

|
||||
<Step title="Select 'AWS ElastiCache'">
|
||||

|
||||
</Step>
|
||||
<Step title="Provide the inputs for dynamic secret parameters">
|
||||
<ParamField path="Secret Name" type="string" required>
|
||||
@ -116,7 +116,7 @@ The Infisical Redis dynamic secret allows you to generate Redis Database credent
|
||||
|
||||
When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
|
||||
|
||||

|
||||

|
||||
|
||||
<Tip>
|
||||
Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret.
|
||||
@ -125,7 +125,7 @@ The Infisical Redis dynamic secret allows you to generate Redis Database credent
|
||||
|
||||
Once you click the `Submit` button, a new secret lease will be generated and the credentials from it will be shown to you.
|
||||
|
||||

|
||||

|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
@ -133,11 +133,11 @@ The Infisical Redis dynamic secret allows you to generate Redis Database credent
|
||||
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
|
||||
This will allow you to see the expiration time of the lease or delete a lease before its set time to live.
|
||||
|
||||

|
||||

|
||||
|
||||
## Renew Leases
|
||||
To extend the life of a generated dynamic secret lease past its initial time to live, simply click on **Renew** as illustrated below.
|
||||

|
||||

|
||||
|
||||
<Warning>
|
||||
Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret.
|
||||
|
127
docs/documentation/platform/dynamic-secrets/elastic-search.mdx
Normal file
@ -0,0 +1,127 @@
|
||||
---
|
||||
title: "Elasticsearch"
|
||||
description: "Learn how to dynamically generate Elasticsearch user credentials."
|
||||
---
|
||||
|
||||
The Infisical Elasticsearch dynamic secret allows you to generate Elasticsearch credentials on demand based on the configured role.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
|
||||
|
||||
1. Create a role with at least `manage_security` and `monitor` permissions.
|
||||
2. Assign the newly created role to the API key or user that you'll use later in the dynamic secret configuration (see the sketch below).
|
||||
|
||||
<Note>
|
||||
For testing purposes, you can also use a highly privileged role like `superuser`, which will have full control over the cluster. This is not recommended in production environments; follow the principle of least privilege instead.
|
||||
</Note>
|
||||
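As a minimal sketch of the prerequisites above, the role and user could be created through Elasticsearch's security APIs; the `infisical-provisioner` role name and `infisical` username are illustrative placeholders:

```bash
# Create a role with the cluster privileges required to manage users (example name)
curl -X PUT "https://your-cluster-ip:9200/_security/role/infisical-provisioner" \
  -u elastic:your-password \
  -H "Content-Type: application/json" \
  -d '{"cluster": ["manage_security", "monitor"]}'

# Create (or update) the user Infisical will authenticate as and assign it the role
curl -X PUT "https://your-cluster-ip:9200/_security/user/infisical" \
  -u elastic:your-password \
  -H "Content-Type: application/json" \
  -d '{"password": "a-strong-password", "roles": ["infisical-provisioner"]}'
```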
|
||||
## Set up Dynamic Secrets with Elasticsearch
|
||||
|
||||
<Steps>
|
||||
<Step title="Open Secret Overview Dashboard">
|
||||
Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
|
||||
</Step>
|
||||
<Step title="Click on the 'Add Dynamic Secret' button">
|
||||

|
||||
</Step>
|
||||
<Step title="Select 'Elasticsearch'">
|
||||

|
||||
</Step>
|
||||
<Step title="Provide the inputs for dynamic secret parameters">
|
||||
<ParamField path="Secret Name" type="string" required>
|
||||
Name by which you want the secret to be referenced
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Default TTL" type="string" required>
|
||||
Default time-to-live for a generated secret (it is possible to modify this value when a secret is generated)
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Max TTL" type="string" required>
|
||||
Maximum time-to-live for a generated secret.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Host" type="string" required>
|
||||
Your Elasticsearch host. This is the endpoint that your instance runs on. _(Example: https://your-cluster-ip)_
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Port" type="string" required>
|
||||
The port that your Elasticsearch instance is running on. _(Example: 9200)_
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Roles" type="string[]" required>
|
||||
The roles that will be assigned to the new user created when a lease is provisioned. This defaults to `superuser`, which is highly privileged. It is recommended to create a new role with the least privileges required for the lease.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Authentication Method" type="API Key | Username/Password" required>
|
||||
Select the authentication method you want to use to connect to your Elasticsearch instance.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Username" type="string" required>
|
||||
The username of the user that will be used to provision new dynamic secret leases. Only required if you selected the `Username/Password` authentication method.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Password" type="string" required>
|
||||
The password of the user that will be used to provision new dynamic secret leases. Only required if you selected the `Username/Password` authentication method.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="API Key ID" required>
|
||||
The ID of the API key that will be used to provision new dynamic secret leases. Only required if you selected the `API Key` authentication method.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="API Key" required>
|
||||
The API key that will be used to provision new dynamic secret leases. Only required if you selected the `API Key` authentication method.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="CA(SSL)" type="string">
|
||||
A CA may be required if your DB requires it for incoming connections. This is often the case when connecting to a managed service.
|
||||
</ParamField>
|
||||
|
||||

|
||||
|
||||
|
||||
</Step>
|
||||
<Step title="Click `Submit`">
|
||||
After submitting the form, you will see a dynamic secret created in the dashboard.
|
||||
|
||||
<Note>
|
||||
If this step fails, you may have to add the CA certificate.
|
||||
</Note>
|
||||
|
||||
</Step>
|
||||
<Step title="Generate dynamic secrets">
|
||||
Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
|
||||
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
|
||||
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
|
||||
|
||||

|
||||

|
||||
|
||||
When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
|
||||
|
||||

|
||||
|
||||
<Tip>
|
||||
Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret.
|
||||
</Tip>
|
||||
|
||||
|
||||
Once you click the `Submit` button, a new secret lease will be generated and the credentials from it will be shown to you.
|
||||
|
||||

|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
## Audit or Revoke Leases
|
||||
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
|
||||
This will allow you to see the expiration time of the lease or delete a lease before its set time to live.
|
||||
|
||||

|
||||
|
||||
## Renew Leases
|
||||
To extend the life of a generated dynamic secret lease past its initial time to live, simply click on **Renew** as illustrated below.
|
||||

|
||||
|
||||
<Warning>
|
||||
Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret.
|
||||
</Warning>
|
114
docs/documentation/platform/dynamic-secrets/mongo-atlas.mdx
Normal file
@ -0,0 +1,114 @@
|
||||
---
|
||||
title: "Mongo Atlas"
|
||||
description: "Learn how to dynamically generate Mongo Atlas Database user credentials."
|
||||
---
|
||||
|
||||
The Infisical Mongo Atlas dynamic secret allows you to generate Mongo Atlas database credentials on demand based on the configured role.
|
||||
|
||||
## Prerequisite
|
||||
Create a project-scoped API Key with the required permissions in your Mongo Atlas project by following the [official doc](https://www.mongodb.com/docs/atlas/configure-api-access/#grant-programmatic-access-to-a-project).
|
||||
|
||||
<Info>
|
||||
The API Key must have permission to manage users in the project.
|
||||
</Info>
|
||||
|
||||
## Set up Dynamic Secrets with Mongo Atlas
|
||||
|
||||
<Steps>
|
||||
<Step title="Open Secret Overview Dashboard">
|
||||
Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
|
||||
</Step>
|
||||
<Step title="Click on the 'Add Dynamic Secret' button">
|
||||

|
||||
</Step>
|
||||
<Step title="Select Mongo Atlas">
|
||||

|
||||
</Step>
|
||||
<Step title="Provide the inputs for dynamic secret parameters">
|
||||
<ParamField path="Secret Name" type="string" required>
|
||||
Name by which you want the secret to be referenced
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Default TTL" type="string" required>
|
||||
Default time-to-live for a generated secret (it is possible to modify this value when a secret is generated)
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Max TTL" type="string" required>
|
||||
Maximum time-to-live for a generated secret
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Admin public key" type="string" required>
|
||||
The public key of your generated Atlas API Key. This acts as a username.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Admin private key" type="string" required>
|
||||
The private key of your generated Atlas API Key. This acts as a password.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Group ID" type="number" required>
|
||||
Unique 24-hexadecimal digit string that identifies your project. This is the same as the project ID.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Roles" type="string" required>
|
||||
List that provides the pairings of one role with one applicable database.
|
||||
- **Database Name**: Database to which the user is granted access privileges.
|
||||
- **Collection**: Collection on which this role applies.
|
||||
- **Role Name**: Human-readable label that identifies a group of privileges assigned to a database user. This value can either be a built-in role or a custom role.
|
||||
- Enum: `atlasAdmin` `backup` `clusterMonitor` `dbAdmin` `dbAdminAnyDatabase` `enableSharding` `read` `readAnyDatabase` `readWrite` `readWriteAnyDatabase` `<a custom role name>`.
|
||||
</ParamField>
|
||||
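For reference, a role pairing configured above maps onto the `roles` array of the Atlas Admin API's database user resource. The following sketch (placeholders throughout, using the public/private key pair as digest-auth credentials) shows the shape Atlas ultimately expects:

```bash
# Sketch only: create a database user directly via the Atlas Admin API to see
# how databaseName/collectionName/roleName pairings are expressed.
curl --user "<public-key>:<private-key>" --digest \
  -X POST "https://cloud.mongodb.com/api/atlas/v1.0/groups/<group-id>/databaseUsers" \
  -H "Content-Type: application/json" \
  -d '{
    "databaseName": "admin",
    "username": "example-user",
    "password": "example-password",
    "roles": [
      { "databaseName": "mydb", "collectionName": "mycollection", "roleName": "readWrite" }
    ]
  }'
```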
|
||||

|
||||
|
||||
</Step>
|
||||
<Step title="(Optional) Modify Access Scope">
|
||||
List that contains clusters, MongoDB Atlas Data Lakes, and MongoDB Atlas Streams Instances that this database user can access. If omitted, MongoDB Cloud grants the database user access to all the clusters, MongoDB Atlas Data Lakes, and MongoDB Atlas Streams Instances in the project.
|
||||
|
||||

|
||||
- **Label**: Human-readable label that identifies the cluster or MongoDB Atlas Data Lake that this database user can access.
|
||||
- **Type**: Category of resource that this database user can access.
|
||||
</Step>
|
||||
<Step title="Click 'Submit'">
|
||||
After submitting the form, you will see a dynamic secret created in the dashboard.
|
||||
|
||||
<Note>
|
||||
If this step fails, you may have to add the CA certificate.
|
||||
</Note>
|
||||
|
||||

|
||||
</Step>
|
||||
<Step title="Generate dynamic secrets">
|
||||
Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
|
||||
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
|
||||
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
|
||||
|
||||

|
||||

|
||||
|
||||
When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
|
||||
|
||||

|
||||
|
||||
<Tip>
|
||||
Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret.
|
||||
</Tip>
|
||||
|
||||
|
||||
Once you click the `Submit` button, a new secret lease will be generated and the credentials for it will be shown to you.
|
||||
|
||||

|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
## Audit or Revoke Leases
|
||||
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
|
||||
This will allow you to see the expiration time of the lease or delete a lease before its set time to live.
|
||||
|
||||

|
||||
|
||||
## Renew Leases
|
||||
To extend the life of a generated dynamic secret lease past its initial time to live, simply click on **Renew** as illustrated below.
|
||||

|
||||
|
||||
<Warning>
|
||||
Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret.
|
||||
</Warning>
|
116
docs/documentation/platform/dynamic-secrets/mongo-db.mdx
Normal file
@ -0,0 +1,116 @@
|
||||
---
|
||||
title: "Mongo DB"
|
||||
description: "Learn how to dynamically generate Mongo DB Database user credentials."
|
||||
---
|
||||
|
||||
The Infisical Mongo DB dynamic secret allows you to generate Mongo DB database credentials on demand based on the configured role.
|
||||
|
||||
|
||||
<Info>
|
||||
If you're using Mongo Atlas, please use the [Atlas Dynamic Secret](./mongo-atlas) instead, as MongoDB commands are not supported by Atlas.
|
||||
</Info>
|
||||
|
||||
## Prerequisite
|
||||
Create a user with the required permissions in your MongoDB instance. This user will be used to create new accounts on-demand.
|
||||
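As a minimal sketch of this prerequisite, such a user could be created with `mongosh`; the `infisical-admin` name and the `userAdminAnyDatabase` role are illustrative, so scope them down to your security requirements:

```bash
# Create an admin user that Infisical can use to provision ephemeral users on demand.
mongosh "mongodb://localhost:27017" --eval '
  db.getSiblingDB("admin").createUser({
    user: "infisical-admin",
    pwd: "a-strong-password",
    roles: [ { role: "userAdminAnyDatabase", db: "admin" } ]
  })'
```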
|
||||
## Set up Dynamic Secrets with Mongo DB
|
||||
|
||||
<Steps>
|
||||
<Step title="Open Secret Overview Dashboard">
|
||||
Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
|
||||
</Step>
|
||||
<Step title="Click on the 'Add Dynamic Secret' button">
|
||||

|
||||
</Step>
|
||||
<Step title="Select Mongo DB">
|
||||

|
||||
</Step>
|
||||
<Step title="Provide the inputs for dynamic secret parameters">
|
||||
<ParamField path="Secret Name" type="string" required>
|
||||
Name by which you want the secret to be referenced
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Default TTL" type="string" required>
|
||||
Default time-to-live for a generated secret (it is possible to modify this value when a secret is generated)
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Max TTL" type="string" required>
|
||||
Maximum time-to-live for a generated secret
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Host" type="string" required>
|
||||
Database host URL.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Port" type="number">
|
||||
Database port number. If your MongoDB deployment is a cluster, you can omit this.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="User" type="string" required>
|
||||
Username of the admin user that will be used to create dynamic secrets
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Password" type="string" required>
|
||||
Password of the admin user that will be used to create dynamic secrets
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Database Name" type="string" required>
|
||||
Name of the database for which you want to create dynamic secrets
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Roles" type="list" required>
|
||||
Human-readable label that identifies a group of privileges assigned to a database user. This value can either be a built-in role or a custom role.
|
||||
- Enum: `atlasAdmin` `backup` `clusterMonitor` `dbAdmin` `dbAdminAnyDatabase` `enableSharding` `read` `readAnyDatabase` `readWrite` `readWriteAnyDatabase` `<a custom role name>`.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="CA(SSL)" type="string">
|
||||
A CA may be required if your DB requires it for incoming connections.
|
||||
</ParamField>
|
||||
|
||||

|
||||
</Step>
|
||||
|
||||
<Step title="Click `Submit`">
|
||||
After submitting the form, you will see a dynamic secret created in the dashboard.
|
||||
|
||||
<Note>
|
||||
If this step fails, you may have to add the CA certificate.
|
||||
</Note>
|
||||
|
||||
</Step>
|
||||
<Step title="Generate dynamic secrets">
|
||||
Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
|
||||
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
|
||||
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
|
||||
|
||||

|
||||

|
||||
|
||||
When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
|
||||
|
||||

|
||||
|
||||
<Tip>
|
||||
Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret.
|
||||
</Tip>
|
||||
|
||||
|
||||
Once you click the `Submit` button, a new secret lease will be generated and the credentials from it will be shown to you.
|
||||
|
||||

|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
## Audit or Revoke Leases
|
||||
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
|
||||
This will allow you to see the expiration time of the lease or delete a lease before its set time to live.
|
||||
|
||||

|
||||
|
||||
## Renew Leases
|
||||
To extend the life of a generated dynamic secret lease past its initial time to live, simply click on **Renew** as illustrated below.
|
||||

|
||||
|
||||
<Warning>
|
||||
Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret.
|
||||
</Warning>
|
116
docs/documentation/platform/dynamic-secrets/rabbit-mq.mdx
Normal file
@ -0,0 +1,116 @@
|
||||
---
|
||||
title: "RabbitMQ"
|
||||
description: "Learn how to dynamically generate RabbitMQ user credentials."
|
||||
---
|
||||
|
||||
The Infisical RabbitMQ dynamic secret allows you to generate RabbitMQ credentials on demand based on the configured role.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
1. Ensure that the `management` plugin is enabled on your RabbitMQ instance. This is required for the dynamic secret to work.
|
||||
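If the plugin is not already enabled, it can typically be turned on with the `rabbitmq-plugins` CLI on the broker host and verified against the management HTTP API (the default `guest:guest` credentials below are placeholders):

```bash
# Enable the management plugin, which serves the HTTP API on port 15672 by default
rabbitmq-plugins enable rabbitmq_management

# Verify the management API is reachable
curl -u guest:guest http://localhost:15672/api/overview
```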
|
||||
|
||||
## Set up Dynamic Secrets with RabbitMQ
|
||||
|
||||
<Steps>
|
||||
<Step title="Open Secret Overview Dashboard">
|
||||
Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
|
||||
</Step>
|
||||
<Step title="Click on the 'Add Dynamic Secret' button">
|
||||

|
||||
</Step>
|
||||
<Step title="Select 'RabbitMQ'">
|
||||

|
||||
</Step>
|
||||
<Step title="Provide the inputs for dynamic secret parameters">
|
||||
<ParamField path="Secret Name" type="string" required>
|
||||
Name by which you want the secret to be referenced
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Default TTL" type="string" required>
|
||||
Default time-to-live for a generated secret (it is possible to modify this value when a secret is generated)
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Max TTL" type="string" required>
|
||||
Maximum time-to-live for a generated secret.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Host" type="string" required>
|
||||
Your RabbitMQ host. This must be in HTTP format. _(Example: http://your-cluster-ip)_
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Port" type="string" required>
|
||||
The port that the RabbitMQ management plugin is listening on. This is `15672` by default.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Virtual host name" type="string" required>
|
||||
The name of the virtual host that the user will be assigned to. This defaults to `/`.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Virtual host permissions (Read/Write/Configure)" type="string" required>
|
||||
The permissions that the user will have on the virtual host. This defaults to `.*`.
|
||||
|
||||
The three permission fields each take a regular expression _(regex)_ that should match the resource names for which the user is granted read / write / configure permissions.
|
||||
</ParamField>
|
||||
|
||||
|
||||
<ParamField path="Username" type="string" required>
|
||||
The username of the user that will be used to provision new dynamic secret leases.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Password" type="string" required>
|
||||
The password of the user that will be used to provision new dynamic secret leases.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="CA(SSL)" type="string">
|
||||
A CA may be required if your DB requires it for incoming connections. This is often the case when connecting to a managed service.
|
||||
</ParamField>
|
||||
|
||||

|
||||
|
||||
|
||||
</Step>
|
||||
<Step title="Click `Submit`">
|
||||
After submitting the form, you will see a dynamic secret created in the dashboard.
|
||||
|
||||
<Note>
|
||||
If this step fails, you may have to add the CA certificate.
|
||||
</Note>
|
||||
|
||||
</Step>
|
||||
<Step title="Generate dynamic secrets">
|
||||
Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
|
||||
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
|
||||
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
|
||||
|
||||

|
||||

|
||||
|
||||
When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
|
||||
|
||||

|
||||
|
||||
<Tip>
|
||||
Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret.
|
||||
</Tip>
|
||||
|
||||
|
||||
Once you click the `Submit` button, a new secret lease will be generated and the credentials from it will be shown to you.
|
||||
|
||||

|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
## Audit or Revoke Leases
|
||||
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
|
||||
This will allow you to see the expiration time of the lease or delete a lease before its set time to live.
|
||||
|
||||

|
||||
|
||||
## Renew Leases
|
||||
To extend the life of a generated dynamic secret lease past its initial time to live, simply click on **Renew** as illustrated below.
|
||||

|
||||
|
||||
<Warning>
|
||||
Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret.
|
||||
</Warning>
|
@ -16,7 +16,7 @@ Create a user with the required permission in your Redis instance. This user wil
|
||||
Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
|
||||
</Step>
|
||||
<Step title="Click on the 'Add Dynamic Secret' button">
|
||||

|
||||

|
||||
</Step>
|
||||
<Step title="Select 'Redis'">
|
||||

|
||||
@ -78,7 +78,7 @@ Create a user with the required permission in your Redis instance. This user wil
|
||||
|
||||
When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
|
||||
|
||||

|
||||

|
||||
|
||||
<Tip>
|
||||
Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret.
|
||||
@ -87,7 +87,7 @@ Create a user with the required permission in your Redis instance. This user wil
|
||||
|
||||
Once you click the `Submit` button, a new secret lease will be generated and the credentials from it will be shown to you.
|
||||
|
||||

|
||||

|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
@ -95,11 +95,11 @@ Create a user with the required permission in your Redis instance. This user wil
|
||||
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
|
||||
This will allow you to see the expiration time of the lease or delete a lease before its set time to live.
|
||||
|
||||

|
||||

|
||||
|
||||
## Renew Leases
|
||||
To extend the life of a generated dynamic secret lease past its initial time to live, simply click on **Renew** as illustrated below.
|
||||

|
||||

|
||||
|
||||
<Warning>
|
||||
Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret.
|
||||
|
@ -93,7 +93,12 @@ In the following steps, we explore how to create and use identities to access th
|
||||
- Access Token Max TTL (default is `2592000` equivalent to 30 days): The maximum lifetime for an access token in seconds. This value will be referenced at renewal time.
|
||||
- Access Token Max Number of Uses (default is `0`): The maximum number of times that an access token can be used; a value of `0` implies infinite number of uses.
|
||||
- Access Token Trusted IPs: The IPs or CIDR ranges that access tokens can be used from. By default, each token is given the `0.0.0.0/0` range, allowing usage from any network address.
|
||||
<Info>
|
||||
The `subject`, `audiences`, and `claims` fields support glob pattern matching; however, we highly recommend using hardcoded values whenever possible.
|
||||
</Info>
|
||||
|
||||
</Step>
|
||||
|
||||
<Step title="Adding an identity to a project">
|
||||
To enable the identity to access project-level resources such as secrets within a specific project, you should add it to that project.
|
||||
|
||||
|
@ -92,8 +92,8 @@ In the following steps, we explore how to create and use identities to access th
|
||||
- Access Token Max TTL (default is `2592000` equivalent to 30 days): The maximum lifetime for an access token in seconds. This value will be referenced at renewal time.
|
||||
- Access Token Max Number of Uses (default is `0`): The maximum number of times that an access token can be used; a value of `0` implies infinite number of uses.
|
||||
- Access Token Trusted IPs: The IPs or CIDR ranges that access tokens can be used from. By default, each token is given the `0.0.0.0/0` range, allowing usage from any network address.
|
||||
|
||||
<Tip>If you are unsure about what to configure for the subject, audience, and claims fields you can use [github/actions-oidc-debugger](https://github.com/github/actions-oidc-debugger) to get the appropriate values. Alternatively, you can fetch the JWT from the workflow and inspect the fields manually.</Tip>
|
||||
<Info>The `subject`, `audiences`, and `claims` fields support glob pattern matching; however, we highly recommend using hardcoded values whenever possible.</Info>
|
||||
</Step>
|
||||
<Step title="Adding an identity to a project">
|
||||
To enable the identity to access project-level resources such as secrets within a specific project, you should add it to that project.
|
||||
|
@ -1,111 +0,0 @@
|
||||
---
|
||||
title: "Certificate Templates"
|
||||
sidebarTitle: "Certificate Templates"
|
||||
description: "Learn how to use certificate templates to enforce policies."
|
||||
---
|
||||
|
||||
## Concept
|
||||
|
||||
In order to ensure your certificates follow certain policies, you can use certificate templates during the issuance and signing flows.
|
||||
|
||||
A certificate template is linked to a certificate authority. It contains custom policies for certificate fields, allowing you to define rules based on your security policies.
|
||||
|
||||
## Workflow
|
||||
|
||||
The typical workflow for using certificate templates consists of the following steps:
|
||||
|
||||
1. Creating a certificate template attached to an existing CA along with defining custom rules for certificate fields.
|
||||
2. Selecting the certificate template during the creation of new certificates.
|
||||
|
||||
<Note>
|
||||
Note that this workflow can be executed via the Infisical UI or programmatically, such
|
||||
as via the API.
|
||||
</Note>
|
||||
|
||||
## Guide to using Certificate Templates
|
||||
|
||||
In the following steps, we explore how to issue an X.509 certificate using a certificate template.
|
||||
|
||||
<Tabs>
|
||||
<Tab title="Infisical UI">
|
||||
|
||||
<Steps>
|
||||
<Step title="Creating the certificate template">
|
||||
To create a certificate template, head to your Project > Internal PKI > Certificate Templates and press **Create Certificate Template**.
|
||||
|
||||

|
||||
|
||||
Here, set the **Issuing CA** to the CA you want to issue certificates under when the certificate template is used.
|
||||
|
||||

|
||||
|
||||
Here's some guidance on each field:
|
||||
- Template Name: A descriptive name for the certificate template.
|
||||
- Issuing CA: The Certificate Authority (CA) that will issue certificates based on this template.
|
||||
- Certificate Collection: The collection where certificates issued with this template will be added.
|
||||
- Common Name (CN): The regular expression used to validate the common name in certificate requests.
|
||||
- Alternative Names (SANs): The regular expression used to validate subject alternative names in certificate requests.
|
||||
- TTL: The maximum Time-to-Live (TTL) for certificates issued using this template.
|
||||
|
||||
</Step>
|
||||
<Step title="Using the certificate template">
|
||||
Once you have created the certificate template from step 1, you can select it when issuing certificates.
|
||||
|
||||

|
||||
</Step>
|
||||
</Steps>
|
||||
</Tab>
|
||||
<Tab title="API">
|
||||
<Steps>
|
||||
<Step title="Creating the certificate template">
|
||||
To create a certificate template, make an API request to the [Create Certificate Template](/api-reference/endpoints/certificate-templates/create) API endpoint.
|
||||
|
||||
### Sample request
|
||||
|
||||
```bash Request
|
||||
curl --request POST \
|
||||
--url https://app.infisical.com/api/v1/pki/certificate-templates \
|
||||
--header 'Content-Type: application/json' \
|
||||
--data '{
|
||||
"caId": "<string>",
|
||||
"pkiCollectionId": "<string>",
|
||||
"name": "<string>",
|
||||
"commonName": "<string>",
|
||||
"subjectAlternativeName": "<string>",
|
||||
"ttl": "<string>"
|
||||
}'
|
||||
```
|
||||
|
||||
### Sample response
|
||||
|
||||
```bash Response
|
||||
{
|
||||
"id": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
|
||||
"caId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
|
||||
"name": "certificate-template-1",
|
||||
"commonName": "<string>",
|
||||
...
|
||||
}
|
||||
```
|
||||
</Step>
|
||||
<Step title="Using the certificate template">
|
||||
To use the certificate template, attach the certificate template ID when invoking the API endpoint for [issuing](/api-reference/endpoints/certificates/issue-certificate) or [signing](/api-reference/endpoints/certificates/sign-certificate) new certificates.
|
||||
|
||||
### Sample request
|
||||
|
||||
```bash Request
|
||||
curl --request POST \
|
||||
--url https://app.infisical.com/api/v1/pki/certificates/issue-certificate \
|
||||
--header 'Content-Type: application/json' \
|
||||
--data '{
|
||||
"certificateTemplateId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
|
||||
"friendlyName": "my-new-certificate",
|
||||
"commonName": "CERT",
|
||||
...
|
||||
}'
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
</Tab>
|
||||
</Tabs>
|
@ -25,7 +25,7 @@ graph TD
|
||||
|
||||
The typical workflow for managing certificates consists of the following steps:
|
||||
|
||||
1. Issuing a certificate under an intermediate CA with details like name and validity period.
|
||||
1. Issuing a certificate under an intermediate CA with details like name and validity period. As part of certificate issuance, you can either issue a certificate directly from a CA or do it via a certificate template.
|
||||
2. Managing certificate lifecycle events such as certificate renewal and revocation. As part of the certificate revocation flow,
|
||||
you can also query for a Certificate Revocation List [CRL](https://en.wikipedia.org/wiki/Certificate_revocation_list), a time-stamped, signed
|
||||
data structure issued by a CA containing a list of revoked certificates to check if a certificate has been revoked.
|
||||
@ -43,28 +43,51 @@ In the following steps, we explore how to issue a X.509 certificate under a CA.
|
||||
<Tab title="Infisical UI">
|
||||
|
||||
<Steps>
|
||||
<Step title="Creating a certificate template">
|
||||
A certificate template is a set of policies for certificates issued under that template; each template is bound to a specific CA and can also be bound to a certificate collection for alerting such that any certificate issued under the template is automatically added to the collection.
|
||||
|
||||
With certificate templates, you can specify, for example, that issued certificates must have a common name (CN) adhering to a specific format like `.*.acme.com` or perhaps that the max TTL cannot be more than 1 year.
|
||||
|
||||
Head to your Project > Certificate Authorities > Your Issuing CA and create a certificate template.
|
||||
|
||||

|
||||
|
||||
Here's some guidance on each field:
|
||||
|
||||
- Template Name: A name for the certificate template.
|
||||
- Issuing CA: The Certificate Authority (CA) that will issue certificates based on this template.
|
||||
- Certificate Collection (Optional): The certificate collection that certificates should be added to when issued under the template.
|
||||
- Common Name (CN): A regular expression used to validate the common name in certificate requests.
|
||||
- Alternative Names (SANs): A regular expression used to validate subject alternative names in certificate requests.
|
||||
- TTL: The maximum Time-to-Live (TTL) for certificates issued using this template.
|
||||
</Step>
|
||||
<Step title="Creating a certificate">
|
||||
To create a certificate, head to your Project > Internal PKI > Certificates and press **Create Certificate**.
|
||||
To create a certificate, head to your Project > Internal PKI > Certificates and press **Issue** under the Certificates section.
|
||||
|
||||

|
||||

|
||||
|
||||
Here, set the **CA** to the CA you want to issue the certificate under and fill out details for the certificate.
|
||||
Here, set the **Certificate Template** to the template from step 1 and fill out the rest of the details for the certificate to be issued.
|
||||
|
||||

|
||||

|
||||
|
||||
Here's some guidance on each field:
|
||||
|
||||
- Issuing CA: The CA under which to issue the certificate.
|
||||
- Friendly Name: A friendly name for the certificate; this is only for display and defaults to the common name of the certificate if left empty.
|
||||
- Common Name (CN): The (common) name for the certificate like `service.acme.com`.
|
||||
- Alternative Names (SANs): A comma-delimited list of Subject Alternative Names (SANs) for the certificate; these can be host names or email addresses like `app1.acme.com, app2.acme.com`.
|
||||
- TTL: The lifetime of the certificate in seconds.
|
||||
|
||||
|
||||
<Note>
|
||||
Note that Infisical PKI supports issuing certificates without certificate templates as well. If this is desired, then you can set the **Certificate Template** field to **None**
|
||||
and specify the **Issuing CA** and optional **Certificate Collection** fields; the rest of the fields for the issued certificate remain the same.
|
||||
|
||||
That said, we recommend using certificate templates to enforce policies and attach expiration monitoring on issued certificates.
|
||||
</Note>
|
||||
</Step>
|
||||
<Step title="Copying the certificate details">
|
||||
Once you have created the certificate from step 1, you'll be presented with the certificate details including the **Certificate Body**, **Certificate Chain**, and **Private Key**.
|
||||
|
||||

|
||||

|
||||
|
||||
<Note>
|
||||
Make sure to download and store the **Private Key** in a secure location as it will only be displayed once at the time of certificate issuance.
|
||||
@ -74,16 +97,54 @@ In the following steps, we explore how to issue a X.509 certificate under a CA.
|
||||
</Steps>
|
||||
</Tab>
|
||||
<Tab title="API">
|
||||
To create a certificate, make an API request to the [Issue Certificate](/api-reference/endpoints/certificates/issue-cert) API endpoint,
|
||||
|
||||
<Steps>
|
||||
<Step title="Creating a certificate template">
|
||||
A certificate template is a set of policies for certificates issued under that template; each template is bound to a specific CA and can also be bound to a certificate collection for alerting such that any certificate issued under the template is automatically added to the collection.
|
||||
|
||||
With certificate templates, you can specify, for example, that issued certificates must have a common name (CN) adhering to a specific format like .*.acme.com or perhaps that the max TTL cannot be more than 1 year.
|
||||
|
||||
To create a certificate template, make an API request to the [Create Certificate Template](/api-reference/endpoints/certificate-templates/create) API endpoint, specifying the issuing CA.
|
||||
|
||||
### Sample request
|
||||
|
||||
```bash Request
|
||||
curl --location --request POST 'https://app.infisical.com/api/v1/pki/certificate-templates' \
|
||||
--header 'Content-Type: application/json' \
|
||||
--data-raw '{
|
||||
"caId": "<ca-id>",
|
||||
"name": "My Certificate Template",
|
||||
"commonName": ".*.acme.com",
|
||||
"subjectAlternativeName": ".*.acme.com",
|
||||
"ttl": "1y",
|
||||
}'
|
||||
```
|
||||
|
||||
### Sample response
|
||||
|
||||
```bash Response
|
||||
{
|
||||
id: "...",
|
||||
caId: "...",
|
||||
name: "...",
|
||||
commonName: "...",
|
||||
subjectAlternativeName: "...",
|
||||
ttl: "...",
|
||||
}
|
||||
```
|
||||
</Step>
|
||||
<Step title="Creating a certificate">
|
||||
To create a certificate under the certificate template, make an API request to the [Issue Certificate](/api-reference/endpoints/certificates/issue-cert) API endpoint,
|
||||
specifying the issuing CA.
|
||||
|
||||
### Sample request
|
||||
|
||||
```bash Request
|
||||
curl --location --request POST 'https://app.infisical.com/api/v1/pki/ca/<ca-id>/issue-certificate' \
|
||||
curl --location --request POST 'https://app.infisical.com/api/v1/pki/certificates/issue-certificate' \
|
||||
--header 'Content-Type: application/json' \
|
||||
--data-raw '{
|
||||
"commonName": "My Certificate",
|
||||
"certificateTemplateId": "<certificate-template-id>",
|
||||
"commonName": "service.acme.com",
|
||||
"ttl": "1y",
|
||||
}'
|
||||
```
|
||||
@ -100,18 +161,26 @@ In the following steps, we explore how to issue a X.509 certificate under a CA.
|
||||
}
|
||||
```
|
||||
|
||||
<Note>
|
||||
Note that Infisical PKI supports issuing certificates without certificate templates as well. If this is desired, then you can set the **Certificate Template** field to **None**
|
||||
and specify the **Issuing CA** and optional **Certificate Collection** fields; the rest of the fields for the issued certificate remain the same.
|
||||
|
||||
That said, we recommend using certificate templates to enforce policies and attach expiration monitoring on issued certificates.
|
||||
</Note>
|
||||
|
||||
<Note>
|
||||
Make sure to store the `privateKey` as it is only returned once here at the time of certificate issuance. The `certificate` and `certificateChain` will remain accessible and can be retrieved at any time.
|
||||
</Note>
|
||||
|
||||
If you have an external private key, you can also create a certificate by making an API request containing a pem-encoded CSR (Certificate Signing Request) to the [Sign Certificate](/api-reference/endpoints/certificates/sign-cert) API endpoint, specifying the issuing CA.
|
||||
If you have an external private key, you can also create a certificate by making an API request containing a pem-encoded CSR (Certificate Signing Request) to the [Sign Certificate](/api-reference/endpoints/certificates/sign-certificate) API endpoint, specifying the issuing CA.
|
||||
|
||||
### Sample request
|
||||
|
||||
```bash Request
|
||||
curl --location --request POST 'https://app.infisical.com/api/v1/pki/ca/<ca-id>/sign-certificate' \
|
||||
curl --location --request POST 'https://app.infisical.com/api/v1/pki/certificates/sign-certificate' \
|
||||
--header 'Content-Type: application/json' \
|
||||
--data-raw '{
|
||||
"certificateTemplateId": "<certificate-template-id>",
|
||||
"csr": "...",
|
||||
"ttl": "1y",
|
||||
}'
|
||||
@ -128,7 +197,8 @@ In the following steps, we explore how to issue a X.509 certificate under a CA.
|
||||
serialNumber: "..."
|
||||
}
|
||||
```
|
||||
|
||||
</Step>
|
||||
</Steps>
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
|
@ -26,7 +26,7 @@ These endpoints are exposed on port 8443 under the .well-known/est path e.g.
|
||||
|
||||
## Guide to configuring EST
|
||||
|
||||
1. Set up a certificate template with your selected issuing CA. This template will define the policies and parameters for certificates issued through EST. For detailed instructions on configuring a certificate template, refer to the certificate templates [documentation](/documentation/platform/pki/certificate-templates).
|
||||
1. Set up a certificate template with your selected issuing CA. This template will define the policies and parameters for certificates issued through EST. For detailed instructions on configuring a certificate template, refer to the certificate templates [documentation](/documentation/platform/pki/certificates#guide-to-issuing-certificates).
|
||||
|
||||
2. Proceed to the certificate template's enrollment settings
|
||||

|
||||
|
250
docs/documentation/platform/pki/pki-issuer.mdx
Normal file
@ -0,0 +1,250 @@
|
||||
---
|
||||
title: "Kubernetes Issuer"
|
||||
sidebarTitle: "Certificates for Kubernetes"
|
||||
description: "Learn how to automatically provision and manage TLS certificates for in Kubernetes using Infisical PKI"
|
||||
---
|
||||
|
||||
## Concept
|
||||
|
||||
The Infisical PKI Issuer is an installable Kubernetes [cert-manager](https://cert-manager.io/) controller that uses Infisical PKI to sign certificate requests. The issuer is perfect for getting X.509 certificates for ingresses and other Kubernetes resources and is capable of automatically renewing certificates as needed.
|
||||
|
||||
As part of the workflow, you install `cert-manager`, the Infisical PKI Issuer, and configure resources to represent the connection details to your Infisical PKI and the certificates you wish to issue. Each issued certificate and corresponding private key is made available in a Kubernetes secret.
|
||||
|
||||
We recommend reading the [cert-manager documentation](https://cert-manager.io/docs/) for a fuller understanding of all the moving parts.
|
||||
|
||||
## Workflow
|
||||
|
||||
A typical workflow for using the Infisical PKI Issuer to issue certificates for your Kubernetes resources consists of the following steps:
|
||||
|
||||
1. Creating a machine identity in Infisical.
|
||||
2. Creating a Kubernetes secret to store the credentials of the machine identity.
|
||||
3. Installing `cert-manager` into your Kubernetes cluster.
|
||||
4. Installing the Infisical PKI Issuer controller into your Kubernetes cluster.
|
||||
5. Creating an `Issuer` or `ClusterIssuer` resource in your Kubernetes cluster to represent the Infisical PKI issuer you wish to use.
|
||||
6. Creating a `Certificate` resource in your Kubernetes cluster to represent a certificate you wish to issue. As part of this step, you specify the Kubernetes `Secret` to create and store the issued certificate and private key.
|
||||
7. Consuming the issued certificate across your Kubernetes resources from the specified Kubernetes `Secret`.
|
||||
|
||||
## Guide
|
||||
|
||||
In the following steps, we explore how to install the Infisical PKI Issuer using [kubectl](https://github.com/kubernetes/kubectl) and use it to obtain certificates for your Kubernetes resources.
|
||||
|
||||
<Steps>
|
||||
<Step title="Create an identity in Infisical">
|
||||
|
||||
Follow the instructions [here](/documentation/platform/identities/universal-auth) to configure a [machine identity](/documentation/platform/identities/machine-identities) in Infisical with Universal Auth.
|
||||
|
||||
By the end of this step, you should have a **Client ID** and **Client Secret** on hand as part of the Universal Auth configuration for the Infisical PKI Issuer to authenticate with Infisical; this will be useful in steps 4 and 5.
|
||||
|
||||
<Note>
|
||||
Currently, the Infisical PKI Issuer only supports authenticating with Infisical via the [Universal Auth](/documentation/platform/identities/universal-auth) authentication method.
|
||||
|
||||
We're planning to add support for [Kubernetes Auth](/documentation/platform/identities/kubernetes-auth) in the near future.
|
||||
</Note>
|
||||
</Step>
|
||||
<Step title="Install cert-manager">
|
||||
Install `cert-manager` into your Kubernetes cluster by following the instructions [here](https://cert-manager.io/docs/installation/) or by running the following command:
|
||||
|
||||
```bash
|
||||
kubectl apply -f https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml
|
||||
```
|
||||
</Step>
|
||||
<Step title="Install the Issuer Controller">
|
||||
Install the Infisical PKI Issuer controller into your Kubernetes cluster by running the following command:
|
||||
|
||||
```bash
|
||||
kubectl apply -f https://raw.githubusercontent.com/Infisical/infisical-issuer/main/build/install.yaml
|
||||
```
|
||||
</Step>
|
||||
<Step title="Create Kubernetes Secret for Infisical PKI Issuer">
|
||||
Start by creating a Kubernetes `Secret` containing the **Client Secret** from step 1. As mentioned previously, this will be used by the Infisical PKI issuer to authenticate with Infisical.
|
||||
|
||||
<Tabs>
|
||||
<Tab title="kubectl command">
|
||||
```bash
|
||||
kubectl create secret generic issuer-infisical-client-secret \
|
||||
--namespace <namespace_you_want_to_issue_certificates_in> \
|
||||
--from-literal=clientSecret=<client_secret>
|
||||
```
|
||||
</Tab>
|
||||
<Tab title="Configuration file">
|
||||
```yaml secret-issuer.yaml
|
||||
apiVersion: v1
|
||||
kind: Secret
|
||||
metadata:
|
||||
name: issuer-infisical-client-secret
|
||||
namespace: <namespace_you_want_to_issue_certificates_in>
|
||||
stringData: # accepts the plaintext value; `data` would require base64 encoding
|
||||
clientSecret: <client_secret>
|
||||
```
|
||||
|
||||
```bash
|
||||
kubectl apply -f secret-issuer.yaml
|
||||
```
|
||||
</Tab>
|
||||
</Tabs>
|
||||
</Step>
|
||||
<Step title="Create Infisical PKI Issuer">
|
||||
Next, create the Infisical PKI Issuer by filling out `url`, `clientId`, either `caId` or `certificateTemplateId`, and applying the following configuration file for the `Issuer` resource.
|
||||
This configuration file specifies the connection details to your Infisical PKI CA to be used for issuing certificates.
|
||||
|
||||
```yaml infisical-issuer.yaml
|
||||
apiVersion: infisical-issuer.infisical.com/v1alpha1
|
||||
kind: Issuer
|
||||
metadata:
|
||||
name: issuer-infisical
|
||||
namespace: <namespace_you_want_to_issue_certificates_in>
|
||||
spec:
|
||||
url: "https://app.infisical.com" # the URL of your Infisical instance
|
||||
caId: <ca_id> # the ID of the CA you want to use to issue certificates
|
||||
certificateTemplateId: <certificate_template_id> # the ID of the certificate template you want to use to issue certificates against
|
||||
authentication:
|
||||
universalAuth:
|
||||
clientId: <client_id> # the Client ID from step 1
|
||||
secretRef: # reference to the Secret created in step 4
|
||||
name: "issuer-infisical-client-secret"
|
||||
key: "clientSecret"
|
||||
```
|
||||
|
||||
```bash
|
||||
kubectl apply -f infisical-issuer.yaml
|
||||
```
|
||||
|
||||
<Warning>
|
||||
The Infisical PKI Issuer supports issuing certificates against a specific CA or a specific certificate template.
|
||||
|
||||
For this reason, you should only fill in the `caId` or the `certificateTemplateId` field but not both.
|
||||
|
||||
We recommend using the `certificateTemplateId` field to issue certificates against a specific [certificate template](/documentation/platform/pki/certificate-templates)
|
||||
since templates let you enforce constraints on issued certificates and may have alerting policies bound to them.
|
||||
</Warning>
|
||||
|
||||
You can check that the issuer was created successfully by running the following command:
|
||||
|
||||
```bash
|
||||
kubectl get issuers.infisical-issuer.infisical.com -n <namespace_of_issuer> -o wide
|
||||
```
|
||||
|
||||
```bash
|
||||
NAME AGE
|
||||
issuer-infisical 21h
|
||||
```
|
||||
|
||||
<Note>
|
||||
An `Issuer` is a namespaced resource, and it is not possible to issue certificates from an `Issuer` in a different namespace.
|
||||
This means you will need to create an `Issuer` in each namespace you wish to obtain `Certificates` in.
|
||||
|
||||
If you want to create a single `Issuer` that can be consumed in multiple namespaces, you should consider creating a `ClusterIssuer` resource. This is almost identical to the `Issuer` resource; however, it is non-namespaced, so it can be used to issue `Certificates` across all namespaces.
|
||||
|
||||
You can read more about the `Issuer` and `ClusterIssuer` resources [here](https://cert-manager.io/docs/configuration/).
|
||||
</Note>
|
||||
</Step>
|
||||
<Step title="Create Certificate">
|
||||
|
||||
Finally, create a `Certificate` by applying the following configuration file.
|
||||
This configuration file specifies the details of the (end-entity/leaf) certificate to be issued.
|
||||
|
||||
```yaml certificate-issuer.yaml
|
||||
apiVersion: cert-manager.io/v1
|
||||
kind: Certificate
|
||||
metadata:
|
||||
name: certificate-by-issuer
|
||||
namespace: <namespace_you_want_to_issue_certificates_in>
|
||||
spec:
|
||||
commonName: certificate-by-issuer.example.com # the common name for the certificate
|
||||
secretName: certificate-by-issuer # the name of the Kubernetes Secret to create and store the certificate and private key in
|
||||
issuerRef:
|
||||
name: issuer-infisical
|
||||
group: infisical-issuer.infisical.com
|
||||
kind: Issuer
|
||||
privateKey: # the algorithm and key size to use
|
||||
algorithm: ECDSA
|
||||
size: 256
|
||||
duration: 48h # the ttl for the certificate
|
||||
renewBefore: 12h # the time before the certificate expiry that the certificate should be automatically renewed
|
||||
```
|
||||
|
||||
The above sample configuration file specifies a certificate to be issued with the common name `certificate-by-issuer.example.com` and ECDSA private key using the P-256 curve, valid for 48 hours; the certificate will be automatically renewed by `cert-manager` 12 hours before expiry.
|
||||
The certificate is issued by the issuer `issuer-infisical` created in the previous step and the resulting certificate and private key will be stored in a secret named `certificate-by-issuer`.
|
||||
|
||||
Note that the full list of the fields supported on the `Certificate` resource can be found in the API reference documentation [here](https://cert-manager.io/docs/reference/api-docs/#cert-manager.io/v1.CertificateSpec).
|
||||
|
||||
You can check that the certificate was created successfully by running the following command:
|
||||
|
||||
```bash
|
||||
kubectl get certificates -n <namespace_of_your_certificate> -o wide
|
||||
```
|
||||
|
||||
```bash
|
||||
NAME READY SECRET ISSUER STATUS AGE
|
||||
certificate-by-issuer True certificate-by-issuer issuer-infisical Certificate is up to date and has not expired 20h
|
||||
```
|
||||
</Step>
|
||||
<Step title="Use Certificate in Kubernetes Secret">
|
||||
Since the actual certificate and private key are stored in a Kubernetes secret, we can check that the secret was created successfully by running the following command:
|
||||
|
||||
```bash
|
||||
kubectl get secret certificate-by-issuer -n <namespace_of_your_certificate>
|
||||
```
|
||||
|
||||
```bash
|
||||
NAME TYPE DATA AGE
|
||||
certificate-by-issuer kubernetes.io/tls 2 26h
|
||||
```
|
||||
|
||||
We can `describe` the secret to get more information about it:
|
||||
|
||||
```bash
|
||||
kubectl describe secret certificate-by-issuer -n default
|
||||
```
|
||||
|
||||
```bash
|
||||
Name: certificate-by-issuer
|
||||
Namespace: default
|
||||
Labels: controller.cert-manager.io/fao=true
|
||||
Annotations: cert-manager.io/alt-names:
|
||||
cert-manager.io/certificate-name: certificate-by-issuer
|
||||
cert-manager.io/common-name: certificate-by-issuer.example.com
|
||||
cert-manager.io/ip-sans:
|
||||
cert-manager.io/issuer-group: infisical-issuer.infisical.com
|
||||
cert-manager.io/issuer-kind: Issuer
|
||||
cert-manager.io/issuer-name: issuer-infisical
|
||||
cert-manager.io/uri-sans:
|
||||
|
||||
Type: kubernetes.io/tls
|
||||
|
||||
Data
|
||||
====
|
||||
ca.crt: 1306 bytes
|
||||
tls.crt: 2380 bytes
|
||||
tls.key: 227 bytes
|
||||
```
|
||||
|
||||
Here, `ca.crt` is the Root CA certificate, `tls.crt` is the requested certificate followed by the certificate chain, and `tls.key` is the private key for the certificate.
|
||||
|
||||
We can decode the certificate and print it out using `openssl`:
|
||||
|
||||
```bash
|
||||
kubectl get secret certificate-by-issuer -n default -o jsonpath='{.data.tls\.crt}' | base64 --decode | openssl x509 -text -noout
|
||||
```
|
||||
|
||||
In any case, the certificate is ready to be used as a Kubernetes Secret by your Kubernetes resources, as sketched below.
|
||||
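For example, a minimal sketch of an `Ingress` that terminates TLS with this secret might look as follows; the `example-app` service and hostname are hypothetical placeholders:

```bash
# Apply a sketch Ingress that serves TLS using the cert-manager-issued secret.
kubectl apply -f - <<EOF
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: example-app
  namespace: default
spec:
  tls:
    - hosts:
        - certificate-by-issuer.example.com
      secretName: certificate-by-issuer # the Secret created by cert-manager above
  rules:
    - host: certificate-by-issuer.example.com
      http:
        paths:
          - path: /
            pathType: Prefix
            backend:
              service:
                name: example-app # hypothetical backend service
                port:
                  number: 80
EOF
```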
</Step>
|
||||
</Steps>
|
||||
|
||||
## FAQ
|
||||
|
||||
<AccordionGroup>
|
||||
<Accordion title="What fields can be configured on the Certificate resource?">
|
||||
The full list of the fields supported on the `Certificate` resource can be found in the API reference documentation [here](https://cert-manager.io/docs/reference/api-docs/#cert-manager.io/v1.CertificateSpec).
|
||||
|
||||
<Note>
|
||||
Currently, not all fields are supported by the Infisical PKI Issuer.
|
||||
</Note>
|
||||
</Accordion>
|
||||
<Accordion title="Can certificates be renewed automatically?">
|
||||
Yes. `cert-manager` will automatically renew certificates according to the `renewBefore` threshold of expiry as
|
||||
specified in the corresponding `Certificate` resource.
|
||||
|
||||
You can read more about the `renewBefore` field [here](https://cert-manager.io/docs/reference/api-docs/#cert-manager.io/v1.CertificateSpec).
|
||||
</Accordion>
|
||||
</AccordionGroup>
|
@ -66,6 +66,7 @@ consisting of an (optional) root CA and an intermediate CA.
|
||||
- State or Province Name: The state or province.
|
||||
- Locality Name: The city or locality.
|
||||
- Common Name: The name of the CA.
|
||||
- Require Template for Certificate Issuance: Whether or not certificates for this CA can only be issued through certificate templates (recommended).
|
||||
|
||||
<Note>
|
||||
The Organization, Country, State or Province Name, Locality Name, and Common Name make up the **Distinguished Name (DN)** or **subject** of the CA.
|
||||
|
@ -49,8 +49,8 @@ description: "Learn how to configure Microsoft Entra ID for Infisical SSO."
|
||||
Back in the **Set up Single Sign-On with SAML** screen, select **Edit** in the **Attributes & Claims** section and configure the following map:
|
||||
|
||||
- `email -> user.userprincipalname`
|
||||
- `firstName -> user.firstName`
|
||||
- `lastName -> user.lastName`
|
||||
- `firstName -> user.givenname`
|
||||
- `lastName -> user.surname`
|
||||
|
||||

|
||||
|
||||
@ -62,7 +62,7 @@ description: "Learn how to configure Microsoft Entra ID for Infisical SSO."
|
||||
|
||||

|
||||
</Step>
|
||||
<Step title="Retrieve Identity Provider (IdP) Information from Okta">
|
||||
<Step title="Retrieve Identity Provider (IdP) Information from Azure">
|
||||
In the **Set up Single Sign-On with SAML** screen, copy the **Login URL** and **SAML Certificate** to use when finishing configuring Azure SAML in Infisical.
|
||||
|
||||

|
||||
@ -115,4 +115,4 @@ description: "Learn how to configure Microsoft Entra ID for Infisical SSO."
|
||||
|
||||
- `AUTH_SECRET`: A secret key used for signing and verifying JWT. This can be a random 32-byte base64 string generated with `openssl rand -base64 32`.
|
||||
- `SITE_URL`: The URL of your self-hosted instance of Infisical - should be an absolute URL including the protocol (e.g. https://app.infisical.com)
|
||||
</Note>
|
||||
</Note>
|
||||
|