mirror of https://github.com/Infisical/infisical.git
synced 2025-07-13 09:35:39 +00:00

Compare commits: databricks ... doc/add-ca (40 commits)

SHA1: 0d4d73b61d, 198b607e2e, 1f5a73047d, 0366df6e19, c77e0c0666, 8e70731c4c, 21c6700db2, 619062033b, 36973b1b5c, 1ca578ee03, 8a7f7ac9fd, 049fd8e769, 2c825616a6, febbd4ade5, 874dc01692, b44b8bf647, 258e561b84, 5802638fc4, e2e7004583, 7826324435, 2bfc1caec5, 4b9e3e44e2, b2a680ebd7, b269bb81fe, 5ca7ff4f2d, ec12d57862, 2d16f5f258, 93912da528, ffc5e61faa, 70e68f4441, a004934a28, 0811192eed, 1e09487572, 86202caa95, 285fca4ded, da35ec90bc, ecf2cb6e51, 1e5a9a6020, 00e69e6632, cedb22a39a
@@ -535,6 +535,107 @@ describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }])
      );
  });

  test.each(secretTestCases)("Bulk upsert secrets in path $path", async ({ secret, path }) => {
    const updateSharedSecRes = await testServer.inject({
      method: "PATCH",
      url: `/api/v3/secrets/batch/raw`,
      headers: {
        authorization: `Bearer ${authToken}`
      },
      body: {
        workspaceId: seedData1.projectV3.id,
        environment: seedData1.environment.slug,
        secretPath: path,
        mode: "upsert",
        secrets: Array.from(Array(5)).map((_e, i) => ({
          secretKey: `BULK-${secret.key}-${i + 1}`,
          secretValue: "update-value",
          secretComment: secret.comment
        }))
      }
    });
    expect(updateSharedSecRes.statusCode).toBe(200);
    const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
    expect(updateSharedSecPayload).toHaveProperty("secrets");

    // bulk ones should exist
    const secrets = await getSecrets(seedData1.environment.slug, path);
    expect(secrets).toEqual(
      expect.arrayContaining(
        Array.from(Array(5)).map((_e, i) =>
          expect.objectContaining({
            secretKey: `BULK-${secret.key}-${i + 1}`,
            secretValue: "update-value",
            type: SecretType.Shared
          })
        )
      )
    );
    await Promise.all(
      Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
    );
  });

  test("Bulk upsert secrets in path multiple paths", async () => {
    const firstBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
      secretKey: `BULK-KEY-${secretTestCases[0].secret.key}-${i + 1}`,
      secretValue: "update-value",
      secretComment: "comment",
      secretPath: secretTestCases[0].path
    }));
    const secondBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
      secretKey: `BULK-KEY-${secretTestCases[1].secret.key}-${i + 1}`,
      secretValue: "update-value",
      secretComment: "comment",
      secretPath: secretTestCases[1].path
    }));
    const testSecrets = [...firstBatchSecrets, ...secondBatchSecrets];

    const updateSharedSecRes = await testServer.inject({
      method: "PATCH",
      url: `/api/v3/secrets/batch/raw`,
      headers: {
        authorization: `Bearer ${authToken}`
      },
      body: {
        workspaceId: seedData1.projectV3.id,
        environment: seedData1.environment.slug,
        mode: "upsert",
        secrets: testSecrets
      }
    });
    expect(updateSharedSecRes.statusCode).toBe(200);
    const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
    expect(updateSharedSecPayload).toHaveProperty("secrets");

    // bulk ones should exist
    const firstBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[0].path);
    expect(firstBatchSecretsOnInfisical).toEqual(
      expect.arrayContaining(
        firstBatchSecrets.map((el) =>
          expect.objectContaining({
            secretKey: el.secretKey,
            secretValue: "update-value",
            type: SecretType.Shared
          })
        )
      )
    );
    const secondBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[1].path);
    expect(secondBatchSecretsOnInfisical).toEqual(
      expect.arrayContaining(
        secondBatchSecrets.map((el) =>
          expect.objectContaining({
            secretKey: el.secretKey,
            secretValue: "update-value",
            type: SecretType.Shared
          })
        )
      )
    );
    await Promise.all(testSecrets.map((el) => deleteSecret({ path: el.secretPath, key: el.secretKey })));
  });

  test.each(secretTestCases)("Bulk delete secrets in path $path", async ({ secret, path }) => {
    await Promise.all(
      Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path }))
103 backend/package-lock.json generated
@ -21,7 +21,7 @@
|
||||
"@fastify/etag": "^5.1.0",
|
||||
"@fastify/formbody": "^7.4.0",
|
||||
"@fastify/helmet": "^11.1.1",
|
||||
"@fastify/multipart": "8.3.0",
|
||||
"@fastify/multipart": "^8.3.1",
|
||||
"@fastify/passport": "^2.4.0",
|
||||
"@fastify/rate-limit": "^9.0.0",
|
||||
"@fastify/request-context": "^5.1.0",
|
||||
@ -48,8 +48,8 @@
|
||||
"@peculiar/x509": "^1.12.1",
|
||||
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
|
||||
"@sindresorhus/slugify": "1.1.0",
|
||||
"@slack/oauth": "^3.0.1",
|
||||
"@slack/web-api": "^7.3.4",
|
||||
"@slack/oauth": "^3.0.2",
|
||||
"@slack/web-api": "^7.8.0",
|
||||
"@ucast/mongo2js": "^1.3.4",
|
||||
"ajv": "^8.12.0",
|
||||
"argon2": "^0.31.2",
|
||||
@ -81,7 +81,7 @@
|
||||
"mongodb": "^6.8.1",
|
||||
"ms": "^2.1.3",
|
||||
"mysql2": "^3.9.8",
|
||||
"nanoid": "^3.3.4",
|
||||
"nanoid": "^3.3.8",
|
||||
"nodemailer": "^6.9.9",
|
||||
"odbc": "^2.4.9",
|
||||
"openid-client": "^5.6.5",
|
||||
@ -5423,13 +5423,10 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@fastify/busboy": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz",
|
||||
"integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
}
|
||||
"version": "3.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-3.1.1.tgz",
|
||||
"integrity": "sha512-5DGmA8FTdB2XbDeEwc/5ZXBl6UbBAyBOOLlPuBnZ/N1SwdH9Ii+cOX3tBROlDgcTXxjOYnLMVoKk9+FXAw0CJw==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@fastify/cookie": {
|
||||
"version": "9.3.1",
|
||||
@ -5502,19 +5499,41 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@fastify/multipart": {
|
||||
"version": "8.3.0",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/multipart/-/multipart-8.3.0.tgz",
|
||||
"integrity": "sha512-A8h80TTyqUzaMVH0Cr9Qcm6RxSkVqmhK/MVBYHYeRRSUbUYv08WecjWKSlG2aSnD4aGI841pVxAjC+G1GafUeQ==",
|
||||
"version": "8.3.1",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/multipart/-/multipart-8.3.1.tgz",
|
||||
"integrity": "sha512-pncbnG28S6MIskFSVRtzTKE9dK+GrKAJl0NbaQ/CG8ded80okWFsYKzSlP9haaLNQhNRDOoHqmGQNvgbiPVpWQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@fastify/busboy": "^2.1.0",
|
||||
"@fastify/deepmerge": "^1.0.0",
|
||||
"@fastify/error": "^3.0.0",
|
||||
"@fastify/busboy": "^3.0.0",
|
||||
"@fastify/deepmerge": "^2.0.0",
|
||||
"@fastify/error": "^4.0.0",
|
||||
"fastify-plugin": "^4.0.0",
|
||||
"secure-json-parse": "^2.4.0",
|
||||
"stream-wormhole": "^1.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@fastify/multipart/node_modules/@fastify/deepmerge": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/deepmerge/-/deepmerge-2.0.1.tgz",
|
||||
"integrity": "sha512-hx+wJQr9Ph1hY/dyzY0SxqjumMyqZDlIF6oe71dpRKDHUg7dFQfjG94qqwQ274XRjmUrwKiYadex8XplNHx3CA==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fastify"
|
||||
},
|
||||
{
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/fastify"
|
||||
}
|
||||
],
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@fastify/multipart/node_modules/@fastify/error": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/error/-/error-4.0.0.tgz",
|
||||
"integrity": "sha512-OO/SA8As24JtT1usTUTKgGH7uLvhfwZPwlptRi2Dp5P4KKmJI3gvsZ8MIHnNwDs4sLf/aai5LzTyl66xr7qMxA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@fastify/passport": {
|
||||
"version": "2.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/passport/-/passport-2.4.0.tgz",
|
||||
@ -9049,6 +9068,7 @@
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@slack/logger/-/logger-4.0.0.tgz",
|
||||
"integrity": "sha512-Wz7QYfPAlG/DR+DfABddUZeNgoeY7d1J39OCR2jR+v7VBsB8ezulDK5szTnDDPDwLH5IWhLvXIHlCFZV7MSKgA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/node": ">=18.0.0"
|
||||
},
|
||||
@ -9058,12 +9078,13 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@slack/oauth": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@slack/oauth/-/oauth-3.0.1.tgz",
|
||||
"integrity": "sha512-TuR9PI6bYKX6qHC7FQI4keMnhj45TNfSNQtTU3mtnHUX4XLM2dYLvRkUNADyiLTle2qu2rsOQtCIsZJw6H0sDA==",
|
||||
"version": "3.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@slack/oauth/-/oauth-3.0.2.tgz",
|
||||
"integrity": "sha512-MdPS8AP9n3u/hBeqRFu+waArJLD/q+wOSZ48ktMTwxQLc6HJyaWPf8soqAyS/b0D6IlvI5TxAdyRyyv3wQ5IVw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@slack/logger": "^4",
|
||||
"@slack/web-api": "^7.3.4",
|
||||
"@slack/web-api": "^7.8.0",
|
||||
"@types/jsonwebtoken": "^9",
|
||||
"@types/node": ">=18",
|
||||
"jsonwebtoken": "^9",
|
||||
@ -9075,24 +9096,26 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@slack/types": {
|
||||
"version": "2.12.0",
|
||||
"resolved": "https://registry.npmjs.org/@slack/types/-/types-2.12.0.tgz",
|
||||
"integrity": "sha512-yFewzUomYZ2BYaGJidPuIgjoYj5wqPDmi7DLSaGIkf+rCi4YZ2Z3DaiYIbz7qb/PL2NmamWjCvB7e9ArI5HkKg==",
|
||||
"version": "2.14.0",
|
||||
"resolved": "https://registry.npmjs.org/@slack/types/-/types-2.14.0.tgz",
|
||||
"integrity": "sha512-n0EGm7ENQRxlXbgKSrQZL69grzg1gHLAVd+GlRVQJ1NSORo0FrApR7wql/gaKdu2n4TO83Sq/AmeUOqD60aXUA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 12.13.0",
|
||||
"npm": ">= 6.12.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@slack/web-api": {
|
||||
"version": "7.3.4",
|
||||
"resolved": "https://registry.npmjs.org/@slack/web-api/-/web-api-7.3.4.tgz",
|
||||
"integrity": "sha512-KwLK8dlz2lhr3NO7kbYQ7zgPTXPKrhq1JfQc0etJ0K8LSJhYYnf8GbVznvgDT/Uz1/pBXfFQnoXjrQIOKAdSuw==",
|
||||
"version": "7.8.0",
|
||||
"resolved": "https://registry.npmjs.org/@slack/web-api/-/web-api-7.8.0.tgz",
|
||||
"integrity": "sha512-d4SdG+6UmGdzWw38a4sN3lF/nTEzsDxhzU13wm10ejOpPehtmRoqBKnPztQUfFiWbNvSb4czkWYJD4kt+5+Fuw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@slack/logger": "^4.0.0",
|
||||
"@slack/types": "^2.9.0",
|
||||
"@types/node": ">=18.0.0",
|
||||
"@types/retry": "0.12.0",
|
||||
"axios": "^1.7.4",
|
||||
"axios": "^1.7.8",
|
||||
"eventemitter3": "^5.0.1",
|
||||
"form-data": "^4.0.0",
|
||||
"is-electron": "2.2.2",
|
||||
@ -9110,6 +9133,7 @@
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
|
||||
"integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
},
|
||||
@ -10526,7 +10550,8 @@
|
||||
"node_modules/@types/retry": {
|
||||
"version": "0.12.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/retry/-/retry-0.12.0.tgz",
|
||||
"integrity": "sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA=="
|
||||
"integrity": "sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/safe-regex": {
|
||||
"version": "1.1.6",
|
||||
@ -11969,9 +11994,10 @@
|
||||
}
|
||||
},
|
||||
"node_modules/axios": {
|
||||
"version": "1.7.4",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.4.tgz",
|
||||
"integrity": "sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw==",
|
||||
"version": "1.7.9",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz",
|
||||
"integrity": "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"follow-redirects": "^1.15.6",
|
||||
"form-data": "^4.0.0",
|
||||
@ -13926,7 +13952,8 @@
|
||||
"node_modules/eventemitter3": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz",
|
||||
"integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA=="
|
||||
"integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/events": {
|
||||
"version": "3.3.0",
|
||||
@ -15942,7 +15969,8 @@
|
||||
"node_modules/is-electron": {
|
||||
"version": "2.2.2",
|
||||
"resolved": "https://registry.npmjs.org/is-electron/-/is-electron-2.2.2.tgz",
|
||||
"integrity": "sha512-FO/Rhvz5tuw4MCWkpMzHFKWD2LsfHzIb7i6MdPYZ/KW7AlxawyLkqdy+jPZP1WubqEADE3O4FUENlJHDfQASRg=="
|
||||
"integrity": "sha512-FO/Rhvz5tuw4MCWkpMzHFKWD2LsfHzIb7i6MdPYZ/KW7AlxawyLkqdy+jPZP1WubqEADE3O4FUENlJHDfQASRg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/is-extglob": {
|
||||
"version": "2.1.1",
|
||||
@ -18182,6 +18210,7 @@
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz",
|
||||
"integrity": "sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
@ -18228,6 +18257,7 @@
|
||||
"version": "6.6.2",
|
||||
"resolved": "https://registry.npmjs.org/p-queue/-/p-queue-6.6.2.tgz",
|
||||
"integrity": "sha512-RwFpb72c/BhQLEXIZ5K2e+AhgNVmIejGlTgiB9MzZ0e93GRvqZ7uSi0dvRF7/XIXDeNkra2fNHBxTyPDGySpjQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"eventemitter3": "^4.0.4",
|
||||
"p-timeout": "^3.2.0"
|
||||
@ -18242,12 +18272,14 @@
|
||||
"node_modules/p-queue/node_modules/eventemitter3": {
|
||||
"version": "4.0.7",
|
||||
"resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz",
|
||||
"integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw=="
|
||||
"integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/p-retry": {
|
||||
"version": "4.6.2",
|
||||
"resolved": "https://registry.npmjs.org/p-retry/-/p-retry-4.6.2.tgz",
|
||||
"integrity": "sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/retry": "0.12.0",
|
||||
"retry": "^0.13.1"
|
||||
@ -18271,6 +18303,7 @@
|
||||
"version": "3.2.0",
|
||||
"resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-3.2.0.tgz",
|
||||
"integrity": "sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"p-finally": "^1.0.0"
|
||||
},
|
||||
|
@@ -129,7 +129,7 @@
    "@fastify/etag": "^5.1.0",
    "@fastify/formbody": "^7.4.0",
    "@fastify/helmet": "^11.1.1",
    "@fastify/multipart": "8.3.0",
    "@fastify/multipart": "8.3.1",
    "@fastify/passport": "^2.4.0",
    "@fastify/rate-limit": "^9.0.0",
    "@fastify/request-context": "^5.1.0",
@@ -156,8 +156,8 @@
    "@peculiar/x509": "^1.12.1",
    "@serdnam/pino-cloudwatch-transport": "^1.0.4",
    "@sindresorhus/slugify": "1.1.0",
    "@slack/oauth": "^3.0.1",
    "@slack/web-api": "^7.3.4",
    "@slack/oauth": "^3.0.2",
    "@slack/web-api": "^7.8.0",
    "@ucast/mongo2js": "^1.3.4",
    "ajv": "^8.12.0",
    "argon2": "^0.31.2",
@@ -189,7 +189,7 @@
    "mongodb": "^6.8.1",
    "ms": "^2.1.3",
    "mysql2": "^3.9.8",
    "nanoid": "^3.3.4",
    "nanoid": "^3.3.8",
    "nodemailer": "^6.9.9",
    "odbc": "^2.4.9",
    "openid-client": "^5.6.5",
@@ -42,7 +42,7 @@ export const getMigrationEnvConfig = () => {
    console.error("Invalid environment variables. Check the error below");
    // eslint-disable-next-line no-console
    console.error(
      "Migration is now automatic at startup. Please remove this step from your workflow and start the application as normal."
      "Infisical now automatically runs database migrations during boot up, so you no longer need to run them separately."
    );
    // eslint-disable-next-line no-console
    console.error(parsedEnv.error.issues);
@@ -352,6 +352,7 @@ interface CreateSecretBatchEvent {
    secrets: Array<{
      secretId: string;
      secretKey: string;
      secretPath?: string;
      secretVersion: number;
      secretMetadata?: TSecretMetadata;
    }>;
@@ -374,8 +375,14 @@ interface UpdateSecretBatchEvent {
  type: EventType.UPDATE_SECRETS;
  metadata: {
    environment: string;
    secretPath: string;
    secrets: Array<{ secretId: string; secretKey: string; secretVersion: number; secretMetadata?: TSecretMetadata }>;
    secretPath?: string;
    secrets: Array<{
      secretId: string;
      secretKey: string;
      secretVersion: number;
      secretMetadata?: TSecretMetadata;
      secretPath?: string;
    }>;
  };
}
@@ -721,7 +721,8 @@ export const RAW_SECRETS = {
    secretName: "The name of the secret to update.",
    secretComment: "Update comment to the secret.",
    environment: "The slug of the environment where the secret is located.",
    secretPath: "The path of the secret to update.",
    mode: "Defines how the system should handle missing secrets during an update.",
    secretPath: "The default path for secrets to update or upsert, if not provided in the secret details.",
    secretValue: "The new value of the secret.",
    skipMultilineEncoding: "Skip multiline encoding for the secret value.",
    type: "The type of the secret to update.",
@@ -20,6 +20,7 @@ import { ActorType, AuthMode } from "@app/services/auth/auth-type";
import { ProjectFilterType } from "@app/services/project/project-types";
import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";
import { SecretOperations, SecretProtectionType } from "@app/services/secret/secret-types";
import { SecretUpdateMode } from "@app/services/secret-v2-bridge/secret-v2-bridge-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";

import { secretRawSchema } from "../sanitizedSchemas";
@@ -2030,6 +2031,11 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
            .default("/")
            .transform(removeTrailingSlash)
            .describe(RAW_SECRETS.UPDATE.secretPath),
          mode: z
            .nativeEnum(SecretUpdateMode)
            .optional()
            .default(SecretUpdateMode.FailOnNotFound)
            .describe(RAW_SECRETS.UPDATE.mode),
          secrets: z
            .object({
              secretKey: SecretNameSchema.describe(RAW_SECRETS.UPDATE.secretName),
@@ -2037,6 +2043,12 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
                .string()
                .transform((val) => (val.at(-1) === "\n" ? `${val.trim()}\n` : val.trim()))
                .describe(RAW_SECRETS.UPDATE.secretValue),
              secretPath: z
                .string()
                .trim()
                .transform(removeTrailingSlash)
                .optional()
                .describe(RAW_SECRETS.UPDATE.secretPath),
              secretComment: z.string().trim().optional().describe(RAW_SECRETS.UPDATE.secretComment),
              skipMultilineEncoding: z.boolean().optional().describe(RAW_SECRETS.UPDATE.skipMultilineEncoding),
              newSecretName: SecretNameSchema.optional().describe(RAW_SECRETS.UPDATE.newSecretName),
@@ -2073,7 +2085,8 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
        environment,
        projectSlug,
        projectId: req.body.workspaceId,
        secrets: inputSecrets
        secrets: inputSecrets,
        mode: req.body.mode
      });
      if (secretOperation.type === SecretProtectionType.Approval) {
        return { approval: secretOperation.approval };
@@ -2092,8 +2105,11 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
          metadata: {
            environment: req.body.environment,
            secretPath: req.body.secretPath,
            secrets: secrets.map((secret) => ({
            secrets: secrets
              .filter((el) => el.version > 1)
              .map((secret) => ({
                secretId: secret.id,
                secretPath: secret.secretPath,
                secretKey: secret.secretKey,
                secretVersion: secret.version,
                secretMetadata: secretMetadataMap.get(secret.secretKey)
@@ -2101,6 +2117,27 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
          }
        }
      });
      const createdSecrets = secrets.filter((el) => el.version === 1);
      if (createdSecrets.length) {
        await server.services.auditLog.createAuditLog({
          projectId: secrets[0].workspace,
          ...req.auditLogInfo,
          event: {
            type: EventType.CREATE_SECRETS,
            metadata: {
              environment: req.body.environment,
              secretPath: req.body.secretPath,
              secrets: createdSecrets.map((secret) => ({
                secretId: secret.id,
                secretPath: secret.secretPath,
                secretKey: secret.secretKey,
                secretVersion: secret.version,
                secretMetadata: secretMetadataMap.get(secret.secretKey)
              }))
            }
          }
        });
      }

      await server.services.telemetry.sendPostHogEvents({
        event: PostHogEventTypes.SecretUpdated,
@ -1,7 +1,15 @@
|
||||
import { ForbiddenError, PureAbility, subject } from "@casl/ability";
|
||||
import { Knex } from "knex";
|
||||
import { z } from "zod";
|
||||
|
||||
import { ActionProjectType, ProjectMembershipRole, SecretsV2Schema, SecretType, TableName } from "@app/db/schemas";
|
||||
import {
|
||||
ActionProjectType,
|
||||
ProjectMembershipRole,
|
||||
SecretsV2Schema,
|
||||
SecretType,
|
||||
TableName,
|
||||
TSecretsV2
|
||||
} from "@app/db/schemas";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
|
||||
@ -36,6 +44,7 @@ import {
|
||||
} from "./secret-v2-bridge-fns";
|
||||
import {
|
||||
SecretOperations,
|
||||
SecretUpdateMode,
|
||||
TBackFillSecretReferencesDTO,
|
||||
TCreateManySecretDTO,
|
||||
TCreateSecretDTO,
|
||||
@ -103,12 +112,13 @@ export const secretV2BridgeServiceFactory = ({
|
||||
const $validateSecretReferences = async (
|
||||
projectId: string,
|
||||
permission: PureAbility,
|
||||
references: ReturnType<typeof getAllSecretReferences>["nestedReferences"]
|
||||
references: ReturnType<typeof getAllSecretReferences>["nestedReferences"],
|
||||
tx?: Knex
|
||||
) => {
|
||||
if (!references.length) return;
|
||||
|
||||
const uniqueReferenceEnvironmentSlugs = Array.from(new Set(references.map((el) => el.environment)));
|
||||
const referencesEnvironments = await projectEnvDAL.findBySlugs(projectId, uniqueReferenceEnvironmentSlugs);
|
||||
const referencesEnvironments = await projectEnvDAL.findBySlugs(projectId, uniqueReferenceEnvironmentSlugs, tx);
|
||||
if (referencesEnvironments.length !== uniqueReferenceEnvironmentSlugs.length)
|
||||
throw new BadRequestError({
|
||||
message: `Referenced environment not found. Missing ${diff(
|
||||
@ -122,16 +132,19 @@ export const secretV2BridgeServiceFactory = ({
|
||||
references.map((el) => ({
|
||||
secretPath: el.secretPath,
|
||||
envId: referencesEnvironmentGroupBySlug[el.environment][0].id
|
||||
}))
|
||||
})),
|
||||
tx
|
||||
);
|
||||
const referencesFolderGroupByPath = groupBy(referredFolders.filter(Boolean), (i) => `${i?.envId}-${i?.path}`);
|
||||
const referredSecrets = await secretDAL.find({
|
||||
const referredSecrets = await secretDAL.find(
|
||||
{
|
||||
$complex: {
|
||||
operator: "or",
|
||||
value: references.map((el) => {
|
||||
const folderId =
|
||||
referencesFolderGroupByPath[`${referencesEnvironmentGroupBySlug[el.environment][0].id}-${el.secretPath}`][0]
|
||||
?.id;
|
||||
referencesFolderGroupByPath[
|
||||
`${referencesEnvironmentGroupBySlug[el.environment][0].id}-${el.secretPath}`
|
||||
][0]?.id;
|
||||
if (!folderId) throw new BadRequestError({ message: `Referenced path ${el.secretPath} doesn't exist` });
|
||||
|
||||
return {
|
||||
@ -151,7 +164,9 @@ export const secretV2BridgeServiceFactory = ({
|
||||
};
|
||||
})
|
||||
}
|
||||
});
|
||||
},
|
||||
{ tx }
|
||||
);
|
||||
|
||||
if (
|
||||
referredSecrets.length !==
|
||||
@ -1245,8 +1260,9 @@ export const secretV2BridgeServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
environment,
|
||||
projectId,
|
||||
secretPath,
|
||||
secrets: inputSecrets
|
||||
secretPath: defaultSecretPath = "/",
|
||||
secrets: inputSecrets,
|
||||
mode: updateMode
|
||||
}: TUpdateManySecretDTO) => {
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
@ -1257,22 +1273,43 @@ export const secretV2BridgeServiceFactory = ({
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
|
||||
if (!folder)
|
||||
const secretsToUpdateGroupByPath = groupBy(inputSecrets, (el) => el.secretPath || defaultSecretPath);
|
||||
const projectEnvironment = await projectEnvDAL.findOne({ projectId, slug: environment });
|
||||
if (!projectEnvironment) {
|
||||
throw new NotFoundError({
|
||||
message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found`,
|
||||
message: `Environment with slug '${environment}' in project with ID '${projectId}' not found`
|
||||
});
|
||||
}
|
||||
|
||||
const folders = await folderDAL.findByManySecretPath(
|
||||
Object.keys(secretsToUpdateGroupByPath).map((el) => ({ envId: projectEnvironment.id, secretPath: el }))
|
||||
);
|
||||
if (folders.length !== Object.keys(secretsToUpdateGroupByPath).length)
|
||||
throw new NotFoundError({
|
||||
message: `Folder with path '${null}' in environment with slug '${environment}' not found`,
|
||||
name: "UpdateManySecret"
|
||||
});
|
||||
const folderId = folder.id;
|
||||
|
||||
const secretsToUpdate = await secretDAL.find({
|
||||
const { encryptor: secretManagerEncryptor, decryptor: secretManagerDecryptor } =
|
||||
await kmsService.createCipherPairWithDataKey({ type: KmsDataKey.SecretManager, projectId });
|
||||
|
||||
const updatedSecrets: Array<TSecretsV2 & { secretPath: string }> = [];
|
||||
await secretDAL.transaction(async (tx) => {
|
||||
for await (const folder of folders) {
|
||||
if (!folder) throw new NotFoundError({ message: "Folder not found" });
|
||||
|
||||
const folderId = folder.id;
|
||||
const secretPath = folder.path;
|
||||
let secretsToUpdate = secretsToUpdateGroupByPath[secretPath];
|
||||
const secretsToUpdateInDB = await secretDAL.find(
|
||||
{
|
||||
folderId,
|
||||
$complex: {
|
||||
operator: "and",
|
||||
value: [
|
||||
{
|
||||
operator: "or",
|
||||
value: inputSecrets.map((el) => ({
|
||||
value: secretsToUpdate.map((el) => ({
|
||||
operator: "and",
|
||||
value: [
|
||||
{
|
||||
@ -1290,17 +1327,22 @@ export const secretV2BridgeServiceFactory = ({
|
||||
}
|
||||
]
|
||||
}
|
||||
});
|
||||
if (secretsToUpdate.length !== inputSecrets.length) {
|
||||
const secretsToUpdateNames = secretsToUpdate.map((secret) => secret.key);
|
||||
const invalidSecrets = inputSecrets.filter((secret) => !secretsToUpdateNames.includes(secret.secretKey));
|
||||
},
|
||||
{ tx }
|
||||
);
|
||||
if (secretsToUpdateInDB.length !== secretsToUpdate.length && updateMode === SecretUpdateMode.FailOnNotFound)
|
||||
throw new NotFoundError({
|
||||
message: `Secret does not exist: ${invalidSecrets.map((el) => el.secretKey).join(",")}`
|
||||
message: `Secret does not exist: ${diff(
|
||||
secretsToUpdate.map((el) => el.secretKey),
|
||||
secretsToUpdateInDB.map((el) => el.key)
|
||||
).join(", ")} in path ${folder.path}`
|
||||
});
|
||||
}
|
||||
const secretsToUpdateInDBGroupedByKey = groupBy(secretsToUpdate, (i) => i.key);
|
||||
|
||||
secretsToUpdate.forEach((el) => {
|
||||
const secretsToUpdateInDBGroupedByKey = groupBy(secretsToUpdateInDB, (i) => i.key);
|
||||
const secretsToCreate = secretsToUpdate.filter((el) => !secretsToUpdateInDBGroupedByKey?.[el.secretKey]);
|
||||
secretsToUpdate = secretsToUpdate.filter((el) => secretsToUpdateInDBGroupedByKey?.[el.secretKey]);
|
||||
|
||||
secretsToUpdateInDB.forEach((el) => {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
@ -1313,13 +1355,28 @@ export const secretV2BridgeServiceFactory = ({
|
||||
});
|
||||
|
||||
// get all tags
|
||||
const sanitizedTagIds = inputSecrets.flatMap(({ tagIds = [] }) => tagIds);
|
||||
const tags = sanitizedTagIds.length ? await secretTagDAL.findManyTagsById(projectId, sanitizedTagIds) : [];
|
||||
const sanitizedTagIds = secretsToUpdate.flatMap(({ tagIds = [] }) => tagIds);
|
||||
const tags = sanitizedTagIds.length ? await secretTagDAL.findManyTagsById(projectId, sanitizedTagIds, tx) : [];
|
||||
if (tags.length !== sanitizedTagIds.length) throw new NotFoundError({ message: "Tag not found" });
|
||||
const tagsGroupByID = groupBy(tags, (i) => i.id);
|
||||
|
||||
// check create permission allowed in upsert mode
|
||||
if (updateMode === SecretUpdateMode.Upsert) {
|
||||
secretsToCreate.forEach((el) => {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Create,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment,
|
||||
secretPath,
|
||||
secretName: el.secretKey,
|
||||
secretTags: (el.tagIds || []).map((i) => tagsGroupByID[i][0].slug)
|
||||
})
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
// check again to avoid non authorized tags are removed
|
||||
inputSecrets.forEach((el) => {
|
||||
secretsToUpdate.forEach((el) => {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
@ -1333,9 +1390,10 @@ export const secretV2BridgeServiceFactory = ({
|
||||
|
||||
// now find any secret that needs to update its name
|
||||
// same process as above
|
||||
const secretsWithNewName = inputSecrets.filter(({ newSecretName }) => Boolean(newSecretName));
|
||||
const secretsWithNewName = secretsToUpdate.filter(({ newSecretName }) => Boolean(newSecretName));
|
||||
if (secretsWithNewName.length) {
|
||||
const secrets = await secretDAL.find({
|
||||
const secrets = await secretDAL.find(
|
||||
{
|
||||
folderId,
|
||||
$complex: {
|
||||
operator: "and",
|
||||
@ -1360,10 +1418,14 @@ export const secretV2BridgeServiceFactory = ({
|
||||
}
|
||||
]
|
||||
}
|
||||
});
|
||||
},
|
||||
{ tx }
|
||||
);
|
||||
if (secrets.length)
|
||||
throw new BadRequestError({
|
||||
message: `Secret with new name already exists: ${secretsWithNewName.map((el) => el.newSecretName).join(",")}`
|
||||
message: `Secret with new name already exists: ${secretsWithNewName
|
||||
.map((el) => el.newSecretName)
|
||||
.join(", ")}`
|
||||
});
|
||||
|
||||
secretsWithNewName.forEach((el) => {
|
||||
@ -1381,7 +1443,7 @@ export const secretV2BridgeServiceFactory = ({
|
||||
// now get all secret references made and validate the permission
|
||||
const secretReferencesGroupByInputSecretKey: Record<string, ReturnType<typeof getAllSecretReferences>> = {};
|
||||
const secretReferences: TSecretReference[] = [];
|
||||
inputSecrets.forEach((el) => {
|
||||
secretsToUpdate.concat(SecretUpdateMode.Upsert === updateMode ? secretsToCreate : []).forEach((el) => {
|
||||
if (el.secretValue) {
|
||||
const references = getAllSecretReferences(el.secretValue);
|
||||
secretReferencesGroupByInputSecretKey[el.secretKey] = references;
|
||||
@ -1391,17 +1453,13 @@ export const secretV2BridgeServiceFactory = ({
|
||||
});
|
||||
}
|
||||
});
|
||||
await $validateSecretReferences(projectId, permission, secretReferences);
|
||||
await $validateSecretReferences(projectId, permission, secretReferences, tx);
|
||||
|
||||
const { encryptor: secretManagerEncryptor, decryptor: secretManagerDecryptor } =
|
||||
await kmsService.createCipherPairWithDataKey({ type: KmsDataKey.SecretManager, projectId });
|
||||
|
||||
const secrets = await secretDAL.transaction(async (tx) =>
|
||||
fnSecretBulkUpdate({
|
||||
const bulkUpdatedSecrets = await fnSecretBulkUpdate({
|
||||
folderId,
|
||||
orgId: actorOrgId,
|
||||
tx,
|
||||
inputSecrets: inputSecrets.map((el) => {
|
||||
inputSecrets: secretsToUpdate.map((el) => {
|
||||
const originalSecret = secretsToUpdateInDBGroupedByKey[el.secretKey][0];
|
||||
const encryptedValue =
|
||||
typeof el.secretValue !== "undefined"
|
||||
@ -1433,20 +1491,62 @@ export const secretV2BridgeServiceFactory = ({
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL,
|
||||
resourceMetadataDAL
|
||||
})
|
||||
);
|
||||
await snapshotService.performSnapshot(folderId);
|
||||
await secretQueueService.syncSecrets({
|
||||
actor,
|
||||
actorId,
|
||||
secretPath,
|
||||
projectId,
|
||||
});
|
||||
updatedSecrets.push(...bulkUpdatedSecrets.map((el) => ({ ...el, secretPath: folder.path })));
|
||||
if (updateMode === SecretUpdateMode.Upsert) {
|
||||
const bulkInsertedSecrets = await fnSecretBulkInsert({
|
||||
inputSecrets: secretsToCreate.map((el) => {
|
||||
const references = secretReferencesGroupByInputSecretKey[el.secretKey]?.nestedReferences;
|
||||
|
||||
return {
|
||||
version: 1,
|
||||
encryptedComment: setKnexStringValue(
|
||||
el.secretComment,
|
||||
(value) => secretManagerEncryptor({ plainText: Buffer.from(value) }).cipherTextBlob
|
||||
),
|
||||
encryptedValue: el.secretValue
|
||||
? secretManagerEncryptor({ plainText: Buffer.from(el.secretValue) }).cipherTextBlob
|
||||
: undefined,
|
||||
skipMultilineEncoding: el.skipMultilineEncoding,
|
||||
key: el.secretKey,
|
||||
tagIds: el.tagIds,
|
||||
references,
|
||||
secretMetadata: el.secretMetadata,
|
||||
type: SecretType.Shared
|
||||
};
|
||||
}),
|
||||
folderId,
|
||||
orgId: actorOrgId,
|
||||
environmentSlug: folder.environment.slug
|
||||
secretDAL,
|
||||
resourceMetadataDAL,
|
||||
secretVersionDAL,
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL,
|
||||
tx
|
||||
});
|
||||
updatedSecrets.push(...bulkInsertedSecrets.map((el) => ({ ...el, secretPath: folder.path })));
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return secrets.map((el) =>
|
||||
reshapeBridgeSecret(projectId, environment, secretPath, {
|
||||
await Promise.allSettled(folders.map((el) => (el?.id ? snapshotService.performSnapshot(el.id) : undefined)));
|
||||
await Promise.allSettled(
|
||||
folders.map((el) =>
|
||||
el
|
||||
? secretQueueService.syncSecrets({
|
||||
actor,
|
||||
actorId,
|
||||
secretPath: el.path,
|
||||
projectId,
|
||||
orgId: actorOrgId,
|
||||
environmentSlug: environment
|
||||
})
|
||||
: undefined
|
||||
)
|
||||
);
|
||||
|
||||
return updatedSecrets.map((el) =>
|
||||
reshapeBridgeSecret(projectId, environment, el.secretPath, {
|
||||
...el,
|
||||
value: el.encryptedValue ? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString() : "",
|
||||
comment: el.encryptedComment ? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString() : ""
|
||||
|
@@ -23,6 +23,12 @@ export type TSecretReferenceDTO = {
  secretKey: string;
};

export enum SecretUpdateMode {
  Ignore = "ignore",
  Upsert = "upsert",
  FailOnNotFound = "failOnNotFound"
}

export type TGetSecretsDTO = {
  expandSecretReferences?: boolean;
  path: string;
@@ -113,6 +119,7 @@ export type TUpdateManySecretDTO = Omit<TProjectPermission, "projectId"> & {
  secretPath: string;
  projectId: string;
  environment: string;
  mode: SecretUpdateMode;
  secrets: {
    secretKey: string;
    newSecretName?: string;
@@ -123,6 +130,7 @@ export type TUpdateManySecretDTO = Omit<TProjectPermission, "projectId"> & {
    secretReminderRepeatDays?: number | null;
    secretReminderNote?: string | null;
    secretMetadata?: ResourceMetadataDTO;
    secretPath?: string;
  }[];
};
||||
|
@ -30,7 +30,10 @@ import { groupBy, pick } from "@app/lib/fn";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { OrgServiceActor } from "@app/lib/types";
|
||||
import { TGetSecretsRawByFolderMappingsDTO } from "@app/services/secret-v2-bridge/secret-v2-bridge-types";
|
||||
import {
|
||||
SecretUpdateMode,
|
||||
TGetSecretsRawByFolderMappingsDTO
|
||||
} from "@app/services/secret-v2-bridge/secret-v2-bridge-types";
|
||||
|
||||
import { ActorType } from "../auth/auth-type";
|
||||
import { TProjectDALFactory } from "../project/project-dal";
|
||||
@ -2012,6 +2015,7 @@ export const secretServiceFactory = ({
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
secretPath,
|
||||
mode = SecretUpdateMode.FailOnNotFound,
|
||||
secrets: inputSecrets = []
|
||||
}: TUpdateManySecretRawDTO) => {
|
||||
if (!projectSlug && !optionalProjectId)
|
||||
@ -2076,7 +2080,8 @@ export const secretServiceFactory = ({
|
||||
actorOrgId,
|
||||
actor,
|
||||
actorId,
|
||||
secrets: inputSecrets
|
||||
secrets: inputSecrets,
|
||||
mode
|
||||
});
|
||||
return { type: SecretProtectionType.Direct as const, secrets };
|
||||
}
|
||||
|
@@ -17,6 +17,7 @@ import { TKmsServiceFactory } from "../kms/kms-service";
import { TResourceMetadataDALFactory } from "../resource-metadata/resource-metadata-dal";
import { ResourceMetadataDTO } from "../resource-metadata/resource-metadata-schema";
import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal";
import { SecretUpdateMode } from "../secret-v2-bridge/secret-v2-bridge-types";
import { TSecretVersionV2DALFactory } from "../secret-v2-bridge/secret-version-dal";
import { TSecretVersionV2TagDALFactory } from "../secret-v2-bridge/secret-version-tag-dal";

@@ -274,6 +275,7 @@ export type TUpdateManySecretRawDTO = Omit<TProjectPermission, "projectId"> & {
  projectId?: string;
  projectSlug?: string;
  environment: string;
  mode: SecretUpdateMode;
  secrets: {
    secretKey: string;
    newSecretName?: string;
@@ -4,9 +4,6 @@ title: "Backend development guide"

Suppose you're interested in implementing a new feature in Infisical's backend, let's call it "feature-x." Here are the general steps you should follow.

## Database schema migration
In order to run [schema migrations](https://en.wikipedia.org/wiki/Schema_migration#:~:text=A%20schema%20migration%20is%20performed,some%20newer%20or%20older%20version) you need to expose your database connection string. Create a `.env.migration` file to set the database connection URI for migration scripts, or alternatively, export the `DB_CONNECTION_URI` environment variable.

## Creating new database model
If your feature involves a change in the database, you need to first address this by generating the necessary database schemas.
@@ -0,0 +1,87 @@
---
title: "Terraform Cloud"
description: "How to authenticate with Infisical from Terraform Cloud using OIDC."
---

This guide will walk you through setting up Terraform Cloud to inject a [workload identity token](https://developer.hashicorp.com/terraform/cloud-docs/workspaces/dynamic-provider-credentials/workload-identity-tokens) and use it for OIDC-based authentication with the Infisical Terraform provider. You'll start by creating a machine identity in Infisical, then configure Terraform Cloud to pass the injected token into your Terraform runs.

<Steps>
  <Step title="Create a Machine Identity in Infisical">
    Follow the instructions [in this documentation](/documentation/platform/identities/oidc-auth/general) to create a machine identity with OIDC auth. Use the following Infisical OIDC configuration values for Terraform Cloud:
    1. Set the OIDC Discovery URL to https://app.terraform.io.
    2. Set the Issuer to https://app.terraform.io.
    3. Configure the Audience to match the value you will use for **TFC_WORKLOAD_IDENTITY_AUDIENCE** in Terraform Cloud in the next step.

    To view all possible claims available from Terraform Cloud, visit [HashiCorp’s documentation](https://developer.hashicorp.com/terraform/cloud-docs/workspaces/dynamic-provider-credentials/workload-identity-tokens#token-structure).

  </Step>
  <Step title="Enable Workload Identity Token Injection in Terraform Cloud">

    <Tabs>
      <Tab title="Generate single token">
        1. **Navigate to your workspace** in Terraform Cloud.
        2. **Add a workspace variable** named `TFC_WORKLOAD_IDENTITY_AUDIENCE`:
           - **Key**: `TFC_WORKLOAD_IDENTITY_AUDIENCE`
           - **Value**: For example, `my-infisical-audience`
           - **Category**: Environment

        > **Important**:
        > - The presence of `TFC_WORKLOAD_IDENTITY_AUDIENCE` is required for Terraform Cloud to inject a token.
        > - If you are self-hosting HCP Terraform agents, ensure they are **v1.7.0 or above**.

        Once set, Terraform Cloud will inject a workload identity token into the run environment as `TFC_WORKLOAD_IDENTITY_TOKEN`.
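        If you manage your Terraform Cloud workspaces as code, the same variable can also be created with the `tfe` provider instead of through the UI. This is only a sketch: the workspace reference and audience value are hypothetical and should be replaced with your own.

        ```hcl
        # Hypothetical example: set TFC_WORKLOAD_IDENTITY_AUDIENCE on a workspace
        # that is itself managed with the tfe provider.
        resource "tfe_variable" "infisical_oidc_audience" {
          key          = "TFC_WORKLOAD_IDENTITY_AUDIENCE"
          value        = "my-infisical-audience" # must match the Audience configured in Infisical
          category     = "env"
          workspace_id = tfe_workspace.example.id
          description  = "Audience for the Infisical OIDC machine identity"
        }
        ```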
      </Tab>
      <Tab title="(Optional) Generate Multiple Tokens">
        If you need multiple tokens (each with a different audience), create additional variables:

        ```
        TFC_WORKLOAD_IDENTITY_AUDIENCE_[YOUR_TAG_HERE]
        ```

        For example:
        - `TFC_WORKLOAD_IDENTITY_AUDIENCE_INFISICAL`
        - `TFC_WORKLOAD_IDENTITY_AUDIENCE_OTHER_SERVICE`

        Terraform Cloud will then inject:
        - `TFC_WORKLOAD_IDENTITY_TOKEN_INFISICAL`
        - `TFC_WORKLOAD_IDENTITY_TOKEN_OTHER_SERVICE`

        > **Note**:
        > - The `[YOUR_TAG_HERE]` can only contain letters, numbers, and underscores.
        > - You **cannot** use the reserved keyword `TYPE`.
        > - Generating multiple tokens requires **v1.12.0 or later** if you are self-hosting agents.
      </Tab>
    </Tabs>

    <Warning>
      If you are running on self-hosted HCP Terraform agents, you must use v1.7.0 or later to enable token injection. If you need to generate multiple tokens, you must use v1.12.0 or later.
    </Warning>
  </Step>
  <Step title="Configure the Infisical Provider">
    In your Terraform configuration, reference the injected token by name. For example:

    ```hcl
    provider "infisical" {
      host = "https://app.infisical.com"

      auth = {
        oidc = {
          identity_id = "<identity-id>"
          # This must match the environment variable Terraform injects:
          token_environment_variable_name = "TFC_WORKLOAD_IDENTITY_TOKEN"
        }
      }
    }
    ```

    - **`host`**: Defaults to `https://app.infisical.com`. Override it if you are using a self-hosted Infisical instance.
    - **`identity_id`**: The OIDC identity ID from Infisical.
    - **`token_environment_variable_name`**: Must match the injected variable name from Terraform Cloud. If using a single token, use `TFC_WORKLOAD_IDENTITY_TOKEN`. If using multiple tokens, choose the one you want to use (e.g., `TFC_WORKLOAD_IDENTITY_TOKEN_INFISICAL`).
  </Step>
  <Step title="Validate Your Setup">
    1. Run a plan and apply in Terraform Cloud.
    2. Verify that the Infisical provider authenticates successfully without issues. If you run into authentication errors, double-check that the Infisical identity has the correct roles/permissions in Infisical.
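    As a quick end-to-end check, you can also fetch a folder of secrets and expose only a count, so no secret values appear in run output. This is a sketch with hypothetical values: swap in your own project ID and environment slug.

    ```hcl
    # Hypothetical smoke test: confirms the OIDC identity can read secrets
    # without printing any secret values in the Terraform Cloud run log.
    data "infisical_secrets" "smoke_test" {
      env_slug     = "dev"
      workspace_id = "<your-project-id>"
      folder_path  = "/"
    }

    output "fetched_secret_count" {
      value     = length(data.infisical_secrets.smoke_test.secrets)
      sensitive = true
    }
    ```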
  </Step>
</Steps>
36 docs/documentation/setup/networking.mdx Normal file
@@ -0,0 +1,36 @@
---
title: "Networking"
sidebarTitle: "Networking"
description: "Network configuration details for Infisical Cloud"
---

## Overview

When integrating your infrastructure with Infisical Cloud, you may need to configure network access controls. This page provides the IP addresses that Infisical uses to communicate with your services.

## Egress IP Addresses

Infisical Cloud operates from two regions: US and EU. If your infrastructure has strict network policies, you may need to allow traffic from Infisical by adding the following IP addresses to your ingress rules. These are the egress IPs Infisical uses when making outbound requests to your services.

### US Region

To allow connections from Infisical US, add these IP addresses to your ingress rules:

- `3.213.63.16`
- `54.164.68.7`

### EU Region

To allow connections from Infisical EU, add these IP addresses to your ingress rules:

- `3.77.89.19`
- `3.125.209.189`

## Common Use Cases

You may need to allow Infisical’s egress IPs if your services require inbound connections for:

- Secret rotation - When Infisical needs to send requests to your systems to automatically rotate credentials
- Dynamic secrets - When Infisical generates and manages temporary credentials for your cloud services
- Secret integrations - When syncing secrets with third-party services like Azure Key Vault
- Native authentication with machine identities - When using methods like Kubernetes authentication
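For example, if the service Infisical needs to reach sits behind an AWS security group, an ingress rule like the following admits only Infisical Cloud's egress IPs. This is a sketch with a hypothetical security group and port; the US addresses are shown, so substitute the EU ones if your Infisical project lives in the EU region.

```hcl
# Hypothetical rule: allow inbound HTTPS from Infisical Cloud (US region) only.
resource "aws_security_group_rule" "allow_infisical_us" {
  type              = "ingress"
  from_port         = 443
  to_port           = 443
  protocol          = "tcp"
  cidr_blocks       = ["3.213.63.16/32", "54.164.68.7/32"]
  security_group_id = aws_security_group.internal_service.id
  description       = "Inbound requests from Infisical Cloud US egress IPs"
}
```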
@@ -1,102 +1,237 @@
---
title: "Terraform Provider"
description: "Learn how to fetch Secrets From Infisical With Terraform."
url: "https://registry.terraform.io/providers/Infisical/infisical/latest/docs"
title: "Terraform"
description: "Learn how to fetch secrets from Infisical with Terraform using both traditional data sources and ephemeral resources"
---
{/*
This guide provides step-by-step guidance on how to fetch secrets from Infisical using Terraform.

This guide demonstrates how to use Infisical to manage secrets in your Terraform infrastructure code, supporting both traditional data sources and ephemeral resources for enhanced security. It uses:

- Infisical (you can use [Infisical Cloud](https://app.infisical.com) or a [self-hosted instance of Infisical](https://infisical.com/docs/self-hosting/overview)) to store your secrets
- The [Terraform Provider](https://registry.terraform.io/providers/Infisical/infisical/latest) to fetch secrets for your infrastructure

## Prerequisites

- Basic understanding of Terraform
- Install [Terraform](https://www.terraform.io/downloads.html)
Before you begin, make sure you have:

## Steps
- [Terraform](https://www.terraform.io/downloads.html) installed (v1.10.0+ for ephemeral resources)
- An Infisical account with access to a project
- Basic understanding of Terraform and infrastructure as code

### 1. Define Required Providers
## Project Setup

Specify `infisical` in the `required_providers` block within the `terraform` block of your configuration file. If you would like to use a specific version of the provider, uncomment and replace `<latest version>` with the version of the Infisical provider that you want to use.
### Configure Provider

```hcl main.tf
First, specify the Infisical provider in your Terraform configuration:

```hcl
terraform {
  required_providers {
    infisical = {
      # version = <latest version>
      source = "infisical/infisical"
    }
  }
}
```

### 2. Configure the Infisical Provider
### Authentication

Set up the Infisical provider by specifying the `host` and `service_token`. Replace `<>` in `service_token` with your actual token. The `host` is only required if you are using a self-hosted instance of Infisical.
Configure the provider using one of these authentication methods:

```hcl main.tf
#### Machine Identity (Recommended)

Using a Machine Identity, you can authenticate your Terraform provider using either [OIDC Auth](https://infisical.com/docs/documentation/platform/identities/oidc-auth/general) or [Universal Auth](https://infisical.com/docs/documentation/platform/identities/universal-auth) methods.

```hcl
provider "infisical" {
  host = "https://app.infisical.com" # Only required if using a self-hosted instance of Infisical, default is https://app.infisical.com
  client_id = "<>"
  client_secret = "<>"
  service_token = "<>" # DEPRECATED, USE MACHINE IDENTITY AUTH INSTEAD
  host = "https://app.infisical.com" # Optional for cloud, required for self-hosted
  auth {
    universal { # or use oidc authentication method by providing an identity_id
      client_id     = var.infisical_client_id
      client_secret = var.infisical_client_secret
    }
  }
}
```
Learn more about [machine identities](/documentation/platform/identities/machine-identities).

#### Service Token (Legacy)

<Warning>
  Machine Identity authentication is strongly recommended as the secure and modern method. Service tokens are considered legacy and will be deprecated in a future release.
</Warning>

```hcl
provider "infisical" {
  host          = "https://app.infisical.com"
  service_token = var.infisical_service_token
}
```

## Using Secrets in Terraform

Infisical provides two methods to fetch and use secrets in your Terraform configurations:

### Method 1: Ephemeral Resources (Recommended)

Ephemeral resources, introduced in Terraform v1.10, provide enhanced security by ensuring sensitive values are never persisted in state files. This is the recommended approach for handling secrets in your infrastructure code.

```hcl
# Fetch database credentials ephemerally
ephemeral "infisical_secret" "db_creds" {
  name         = "DB_CREDENTIALS"
  env_slug     = "prod"
  workspace_id = var.infisical_workspace_id
  folder_path  = "/database"
}

# Use the credentials to configure a provider
provider "postgresql" {
  host     = data.aws_db_instance.example.address
  port     = data.aws_db_instance.example.port
  username = jsondecode(ephemeral.infisical_secret.db_creds.value)["username"]
  password = jsondecode(ephemeral.infisical_secret.db_creds.value)["password"]
}
```

Key benefits:
- Values are never stored in state files
- Secrets are fetched on-demand during each Terraform operation
- Perfect for GitOps workflows
- Improved security posture for your infrastructure as code

### Method 2: Data Sources

For backwards compatibility or when working with older Terraform versions, you can use the traditional data source approach:

```hcl
# Fetch all secrets in a folder
data "infisical_secrets" "my_secrets" {
  env_slug     = "dev"
  workspace_id = var.infisical_workspace_id
  folder_path  = "/api"
}

# Use individual secrets
resource "aws_db_instance" "example" {
  username = data.infisical_secrets.my_secrets.secrets["DB_USER"]
  password = data.infisical_secrets.my_secrets.secrets["DB_PASS"]
}
```

<Warning>
  It is recommended to use Terraform variables to pass your service token dynamically to avoid hard coding it
  When using data sources, secret values are stored in Terraform's state file. Ensure your state file is properly secured.
</Warning>
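If you do have to rely on data sources, one concrete way to follow the state-security advice above is to keep state in an encrypted remote backend rather than on developer machines. This is only a sketch; the bucket and lock table names below are hypothetical placeholders.

```hcl
# Sketch: remote state with encryption at rest and locking (assumes an existing
# S3 bucket and DynamoDB table; adjust names and region to your environment).
terraform {
  backend "s3" {
    bucket         = "my-terraform-state-bucket"
    key            = "infisical/terraform.tfstate"
    region         = "us-east-1"
    encrypt        = true
    dynamodb_table = "terraform-state-lock"
  }
}
```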
### 3. Fetch Infisical Secrets
|
||||
## Common Use Cases
|
||||
|
||||
Use the `infisical_secrets` data source to fetch your secrets. In this block, you must set the `env_slug` and `folder_path` to scope the secrets you want.
|
||||
### Secure Database Credential Management
|
||||
|
||||
`env_slug` is the slug of the environment name. This slug name can be found under the project settings page on the Infisical dashboard.
|
||||
|
||||
`folder_path` is the path to the folder in a given environment. The path `/` for root of the environment where as `/folder1` is the folder at the root of the environment.
|
||||
|
||||
```hcl main.tf
|
||||
data "infisical_secrets" "my-secrets" {
|
||||
env_slug = "dev"
|
||||
folder_path = "/some-folder/another-folder"
|
||||
workspace_id = "your-project-id"
|
||||
}
|
||||
```
|
||||
|
||||
### 4. Define Outputs
|
||||
|
||||
As an example, we are going to output your fetched secrets. Replace `SECRET-NAME` with the actual name of your secret.
|
||||
|
||||
For a single secret:
|
||||
|
||||
```hcl main.tf
|
||||
output "single-secret" {
|
||||
value = data.infisical_secrets.my-secrets.secrets["SECRET-NAME"]
|
||||
}
|
||||
```
|
||||
|
||||
For all secrets:
|
||||
Manage database credentials securely without exposing sensitive information in your state files:
|
||||
|
||||
```hcl
|
||||
output "all-secrets" {
|
||||
value = data.infisical_secrets.my-secrets.secrets
|
||||
# Fetch database credentials securely
|
||||
ephemeral "infisical_secret" "db_creds" {
|
||||
name = "DB_CREDENTIALS"
|
||||
env_slug = "prod"
|
||||
workspace_id = var.infisical_workspace_id
|
||||
folder_path = "/database"
|
||||
}
|
||||
|
||||
# Use the credentials in your database instance
|
||||
resource "aws_db_instance" "example" {
|
||||
identifier = "my-database"
|
||||
allocated_storage = 20
|
||||
engine = "postgres"
|
||||
engine_version = "14.0"
|
||||
instance_class = "db.t3.micro"
|
||||
|
||||
# Securely inject credentials from Infisical
|
||||
username = jsondecode(ephemeral.infisical_secret.db_creds.value)["username"]
|
||||
password = jsondecode(ephemeral.infisical_secret.db_creds.value)["password"]
|
||||
}
|
||||
```
|
||||
|
||||
### 5. Run Terraform

Once your configuration is complete, initialize your Terraform working directory:

```bash
$ terraform init
```

Then, run the plan command to view the fetched secrets:

```bash
$ terraform plan
```

Terraform will now fetch your secrets from Infisical and display them as output according to your configuration.

### GitOps Workflow with OIDC

To eliminate the need for static credentials, you can authenticate your workflow using [OpenID Connect (OIDC)](https://infisical.com/docs/documentation/platform/identities/oidc-auth/general) through providers like the [Infisical Secrets GitHub Action](https://github.com/Infisical/secrets-action).

Once authenticated, you can securely access secrets through the Infisical provider:

```hcl
provider "infisical" {
  # Auth credentials automatically injected from the environment
}

# Fetch deployment credentials
ephemeral "infisical_secret" "deploy_token" {
  name         = "DEPLOY_TOKEN"
  env_slug     = "prod"
  workspace_id = var.infisical_workspace_id
  folder_path  = "/deployment"
}
```

For detailed instructions on setting up OIDC authentication with GitHub Actions, refer to our [GitHub Actions OIDC guide](https://infisical.com/docs/documentation/platform/identities/oidc-auth/github).

## Best Practices

1. **Use Ephemeral Resources**: Whenever possible, use ephemeral resources instead of data sources for improved security.
|
||||
2. **Organize Secrets**: Structure your secrets in Infisical using folders to maintain clean separation:
|
||||
```hcl
|
||||
ephemeral "infisical_secret" "db_secret" {
|
||||
folder_path = "/databases/postgresql" # Organized by service
|
||||
# ...
|
||||
}
|
||||
```
|
||||
|
||||
3. **Variable Usage**: Use Terraform variables for workspace IDs and environment slugs:
```hcl
variable "environment" {
  description = "Environment (dev, staging, prod)"
  type        = string
}

ephemeral "infisical_secret" "secret" {
  env_slug = var.environment
  # ...
}
```
|
||||
|
||||
4. **Error Handling**: Add lifecycle blocks for critical secrets:
|
||||
```hcl
|
||||
ephemeral "infisical_secret" "critical_secret" {
|
||||
# ...
|
||||
lifecycle {
|
||||
postcondition {
|
||||
condition = length(self.value) > 0
|
||||
error_message = "Critical secret must not be empty"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## FAQ
|
||||
|
||||
<AccordionGroup>
|
||||
<Accordion title="What happens if I'm using an older version of Terraform?">
|
||||
If you're using Terraform < v1.10.0, you'll need to use the data source approach.
|
||||
Consider upgrading to take advantage of the enhanced security features provided
|
||||
by ephemeral resources.
|
||||
</Accordion>
|
||||
<Accordion title="Can I mix ephemeral resources and data sources?">
|
||||
Yes, you can use both in the same configuration. However, we recommend using
|
||||
ephemeral resources for any sensitive values to ensure they're not stored in state.
|
||||
</Accordion>
|
||||
<Accordion title="How do I secure my state file when using data sources?">
|
||||
When using data sources, follow Terraform's best practices for state management:
|
||||
- Use remote state with encryption at rest
|
||||
- Implement proper access controls
|
||||
- Consider using state encryption
|
||||
- Treat the state like a secret
|
||||
|
||||
Better yet, use ephemeral resources to avoid storing sensitive values in state entirely.
|
||||
</Accordion>
|
||||
</AccordionGroup>
|
||||
|
||||
## Conclusion

You have now successfully set up and used the Infisical provider with Terraform to fetch secrets. For more information, visit the [Infisical documentation](https://registry.terraform.io/providers/Infisical/infisical/latest/docs).

See also:
|
||||
- [Machine Identity setup guide](/documentation/platform/identities/machine-identities)
|
||||
- [Terraform Provider Registry](https://registry.terraform.io/providers/Infisical/infisical/latest/docs)
|
||||
- [GitOps Best Practices](https://www.infisical.com/blog/gitops-best-practices)
|
||||
|
@ -1,28 +1,34 @@
|
||||
---
|
||||
title: "Components"
|
||||
description: "Infisical's components span multiple clients, an API, and a storage backend."
|
||||
description: "Understand Infisical's core architectural components and how they work together."
|
||||
---
|
||||
|
||||
## Infisical API
|
||||
## Overview
|
||||
|
||||
The Infisical API (sometimes referred to as the **backend**) contains the core platform logic.
|
||||
Infisical is architected around several key components that work in concert to provide a secure and streamlined secret management experience. These components span the client, API, and storage layers, ensuring that secrets are protected at every stage of their lifecycle.
|
||||
|
||||
## Storage backend
|
||||
## 1. API (Backend)
|
||||
|
||||
Infisical relies on a storage backend to store data including users and secrets. Infisical's storage backend is Postgres.
|
||||
Infisical exposes a well-documented [REST API](https://infisical.com/docs/api-reference/overview/introduction) that enables programmatic interaction with the platform, enabling a wide range of use cases.
|
||||
|
||||
## Redis
|
||||
## 2. Storage Backend
|
||||
|
||||
Infisical uses [Redis](https://redis.com) to enable more complex workflows including a queuing system to manage long running asynchronous tasks, cron jobs, as well as reliable cache for frequently used resources.
|
||||
Infisical relies on a robust storage backend to durably store secrets, users, and other platform data. Infisical's storage backend is [PostgreSQL](https://www.postgresql.org/).
|
||||
|
||||
## Infisical Web UI
|
||||
## 3. Caching Layer
|
||||
|
||||
The Web UI is the browser-based portal that connects to the Infisical API.
|
||||
Infisical uses [Redis](https://redis.com) to enable more complex workflows including a queuing system to manage long-running asynchronous tasks, cron jobs, as well as reliable cache for frequently used resources.
|
||||
|
||||
## Infisical clients
|
||||
## 4. Clients
|
||||
|
||||
Clients are any application or infrastructure that connects to the Infisical API using one of the methods below:
|
||||
- Public API: Making API requests directly to the Infisical API.
|
||||
- Client SDK: A platform-specific library with method abstractions for working with secrets. Currently, there are three official SDKs: [Node SDK](https://infisical.com/docs/sdks/languages/node), [Python SDK](https://infisical.com/docs/sdks/languages/python), and [Java SDK](https://infisical.com/docs/sdks/languages/java).
|
||||
- CLI: A terminal-based interface for interacting with the Infisical API.
|
||||
- Kubernetes Operator: This operator retrieves secrets from Infisical and securely stores them in your Kubernetes cluster.
|
||||
Clients are interfaces through which users and applications interact with the Infisical API:
|
||||
|
||||
- **Web UI**: A browser-based portal providing a user-friendly interface for managing secrets, configurations, and performing administrative tasks.
|
||||
|
||||
- [**CLI**](https://infisical.com/docs/cli): A terminal-based tool for interacting with the Infisical API, enabling automation, scripting, and integration into CI/CD pipelines.
|
||||
|
||||
- **SDKs (Software Development Kits)**: Platform-specific libraries with method abstractions for working with secrets. Supported languages include [Node.js](https://infisical.com/docs/sdks/languages/node), [Python](https://infisical.com/docs/sdks/languages/python), [Java](https://infisical.com/docs/sdks/languages/java), [Golang](https://infisical.com/docs/sdks/languages/go), [Ruby](https://infisical.com/docs/sdks/languages/ruby) and [.NET](https://infisical.com/docs/sdks/languages/csharp).
|
||||
|
||||
- [**Kubernetes Operator**](https://infisical.com/docs/integrations/platforms/kubernetes): A Kubernetes-native component that facilitates the secure retrieval and management of secrets within a Kubernetes cluster. The operator supports multiple custom resource definitions (CRDs) for syncing secrets.
|
||||
|
||||
- [**Infisical Agent**](https://infisical.com/docs/integrations/platforms/infisical-agent): Daemon that automatically fetches and manages access tokens and secrets to be used in various client resources.
|
||||
|
@ -85,6 +85,10 @@
|
||||
"documentation/guides/microsoft-power-apps",
|
||||
"documentation/guides/organization-structure"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Setup",
|
||||
"pages": ["documentation/setup/networking"]
|
||||
}
|
||||
]
|
||||
},
|
||||
@ -232,7 +236,8 @@
|
||||
"documentation/platform/identities/oidc-auth/general",
|
||||
"documentation/platform/identities/oidc-auth/github",
|
||||
"documentation/platform/identities/oidc-auth/circleci",
|
||||
"documentation/platform/identities/oidc-auth/gitlab"
|
||||
"documentation/platform/identities/oidc-auth/gitlab",
|
||||
"documentation/platform/identities/oidc-auth/terraform-cloud"
|
||||
]
|
||||
},
|
||||
"documentation/platform/mfa",
|
||||
@ -285,7 +290,7 @@
|
||||
"pages": [
|
||||
"self-hosting/overview",
|
||||
{
|
||||
"group": "Containerized installation methods",
|
||||
"group": "Installation methods",
|
||||
"pages": [
|
||||
"self-hosting/deployment-options/standalone-infisical",
|
||||
"self-hosting/deployment-options/docker-swarm",
|
||||
@ -293,12 +298,12 @@
|
||||
"self-hosting/deployment-options/kubernetes-helm"
|
||||
]
|
||||
},
|
||||
"self-hosting/guides/upgrading-infisical",
|
||||
"self-hosting/configuration/envars",
|
||||
"self-hosting/configuration/requirements",
|
||||
{
|
||||
"group": "Guides",
|
||||
"pages": [
|
||||
"self-hosting/configuration/schema-migrations",
|
||||
"self-hosting/guides/mongo-to-postgres",
|
||||
"self-hosting/guides/custom-certificates"
|
||||
]
|
||||
@ -633,7 +638,8 @@
|
||||
"api-reference/endpoints/oidc-auth/attach",
|
||||
"api-reference/endpoints/oidc-auth/retrieve",
|
||||
"api-reference/endpoints/oidc-auth/update",
|
||||
"api-reference/endpoints/oidc-auth/revoke"
|
||||
"api-reference/endpoints/oidc-auth/revoke",
|
||||
"integrations/frameworks/terraform-cloud"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
@ -4,8 +4,6 @@ sidebarTitle: "Go"
|
||||
icon: "golang"
|
||||
---
|
||||
|
||||
|
||||
|
||||
If you're working with Go, the official [Infisical Go SDK](https://github.com/infisical/go-sdk) package is the easiest way to fetch and work with secrets for your application.
|
||||
|
||||
- [Package](https://pkg.go.dev/github.com/infisical/go-sdk)
|
||||
@ -57,7 +55,9 @@ func main() {
|
||||
This example demonstrates how to use the Infisical Go SDK in a simple Go application. The application retrieves a secret named `API_KEY` from the `dev` environment of the `YOUR_PROJECT_ID` project.
|
||||
|
||||
<Warning>
|
||||
We do not recommend hardcoding your [Machine Identity Tokens](/platform/identities/overview). Setting it as an environment variable would be best.
|
||||
|
||||
</Warning>
|
||||
|
||||
# Installation
|
||||
@ -95,6 +95,10 @@ client := infisical.NewInfisicalClient(context.Background(), infisical.Config{
|
||||
<ParamField query="SilentMode" type="boolean" default={false} optional>
|
||||
Whether or not to suppress logs such as warnings from the token refreshing process. Defaults to false if not specified.
|
||||
</ParamField>
|
||||
|
||||
<ParamField query="CacheExpiryInSeconds" type="number" default={0} optional>
|
||||
Defines how long certain responses should be cached in memory, in seconds. When set to a positive value, responses from specific methods (like secret fetching) will be cached for this duration. Set to 0 to disable caching.
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
|
||||
</ParamField>
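As a rough illustration of how these configuration options fit together, here is a minimal sketch that sets `SilentMode` and `CacheExpiryInSeconds` when constructing the client. The exact field names and the `SiteUrl` value are assumptions based on the parameters documented above; verify them against the SDK version you are using.

```go
package main

import (
	"context"

	infisical "github.com/infisical/go-sdk"
)

func main() {
	// Sketch: suppress token-refresh warnings and cache secret reads
	// in memory for five minutes (300 seconds).
	client := infisical.NewInfisicalClient(context.Background(), infisical.Config{
		SiteUrl:              "https://app.infisical.com", // assumed cloud URL; change for self-hosted
		SilentMode:           true,
		CacheExpiryInSeconds: 300,
	})

	_ = client // authenticate and fetch secrets as shown in the sections below
}
```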
|
||||
@ -140,6 +144,7 @@ Call `.Auth().UniversalAuthLogin()` with empty arguments to use the following en
|
||||
- `INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET` - Your machine identity client secret.
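For example, a minimal sketch of the environment-variable flow (assuming the variables above are exported and `client` is the client from the setup example):

```go
// Empty arguments tell the SDK to read the credentials from
// INFISICAL_UNIVERSAL_AUTH_CLIENT_ID and INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET.
_, err := client.Auth().UniversalAuthLogin("", "")
if err != nil {
	log.Fatalf("universal auth login failed: %v", err)
}
```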
|
||||
|
||||
**Using the SDK directly**
|
||||
|
||||
```go
|
||||
_, err := client.Auth().UniversalAuthLogin("CLIENT_ID", "CLIENT_SECRET")
|
||||
|
||||
@ -150,9 +155,12 @@ if err != nil {
|
||||
```
|
||||
|
||||
#### GCP ID Token Auth
|
||||
|
||||
<Info>
|
||||
Please note that this authentication method will only work if you're running your application on Google Cloud Platform.
|
||||
Please [read more](/documentation/platform/identities/gcp-auth) about this authentication method.
|
||||
|
||||
</Info>
|
||||
|
||||
**Using environment variables**
|
||||
@ -162,6 +170,7 @@ Call `.Auth().GcpIdTokenAuthLogin()` with empty arguments to use the following e
|
||||
- `INFISICAL_GCP_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID.
|
||||
|
||||
**Using the SDK directly**
|
||||
|
||||
```go
|
||||
_, err := client.Auth().GcpIdTokenAuthLogin("YOUR_MACHINE_IDENTITY_ID")
|
||||
|
||||
@ -181,6 +190,7 @@ Call `.Auth().GcpIamAuthLogin()` with empty arguments to use the following envir
|
||||
- `INFISICAL_GCP_IAM_SERVICE_ACCOUNT_KEY_FILE_PATH` - The path to your GCP service account key file.
|
||||
|
||||
**Using the SDK directly**
|
||||
|
||||
```go
|
||||
_, err = client.Auth().GcpIamAuthLogin("MACHINE_IDENTITY_ID", "SERVICE_ACCOUNT_KEY_FILE_PATH")
|
||||
|
||||
@ -191,9 +201,12 @@ if err != nil {
|
||||
```
|
||||
|
||||
#### AWS IAM Auth
|
||||
|
||||
<Info>
|
||||
Please note that this authentication method will only work if you're running your application on AWS.
|
||||
Please [read more](/documentation/platform/identities/aws-auth) about this authentication method.
|
||||
|
||||
</Info>
|
||||
|
||||
**Using environment variables**
|
||||
@ -203,6 +216,7 @@ Call `.Auth().AwsIamAuthLogin()` with empty arguments to use the following envir
|
||||
- `INFISICAL_AWS_IAM_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID.
|
||||
|
||||
**Using the SDK directly**
|
||||
|
||||
```go
|
||||
_, err = client.Auth().AwsIamAuthLogin("MACHINE_IDENTITY_ID")
|
||||
|
||||
@ -212,11 +226,13 @@ if err != nil {
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
#### Azure Auth
|
||||
|
||||
<Info>
|
||||
Please note that this authentication method will only work if you're running your application on Azure.
|
||||
Please [read more](/documentation/platform/identities/azure-auth) about this authentication method.
|
||||
|
||||
</Info>
|
||||
|
||||
**Using environment variables**
|
||||
@ -226,6 +242,7 @@ Call `.Auth().AzureAuthLogin()` with empty arguments to use the following enviro
|
||||
- `INFISICAL_AZURE_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID.
|
||||
|
||||
**Using the SDK directly**
|
||||
|
||||
```go
|
||||
_, err = client.Auth().AzureAuthLogin("MACHINE_IDENTITY_ID")
|
||||
|
||||
@ -236,9 +253,12 @@ if err != nil {
|
||||
```
|
||||
|
||||
#### Kubernetes Auth
|
||||
|
||||
<Info>
|
||||
Please note that this authentication method will only work if you're running your application on Kubernetes.
|
||||
Please [read more](/documentation/platform/identities/kubernetes-auth) about this authentication method.
|
||||
|
||||
</Info>
|
||||
|
||||
**Using environment variables**
|
||||
@ -249,6 +269,7 @@ Call `.Auth().KubernetesAuthLogin()` with empty arguments to use the following e
|
||||
- `INFISICAL_KUBERNETES_SERVICE_ACCOUNT_TOKEN_PATH_ENV_NAME` - The environment variable name that contains the path to the service account token. This is optional and will default to `/var/run/secrets/kubernetes.io/serviceaccount/token`.
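As a hedged sketch of the environment-variable variant (assuming the variables listed above are set and `client` is the client from the setup example), empty arguments make the SDK fall back to the environment and to the default service account token path:

```go
// Empty arguments: the identity ID and token path are resolved from the
// environment variables above (the token path defaults to
// /var/run/secrets/kubernetes.io/serviceaccount/token).
_, err = client.Auth().KubernetesAuthLogin("", "")
if err != nil {
	log.Fatalf("kubernetes auth login failed: %v", err)
}
```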
|
||||
|
||||
**Using the SDK directly**
|
||||
|
||||
```go
|
||||
// Service account token path will default to /var/run/secrets/kubernetes.io/serviceaccount/token if empty value is passed
|
||||
_, err = client.Auth().KubernetesAuthLogin("MACHINE_IDENTITY_ID", "SERVICE_ACCOUNT_TOKEN_PATH")
|
||||
@ -262,6 +283,7 @@ if err != nil {
|
||||
## Working With Secrets
|
||||
|
||||
### List Secrets
|
||||
|
||||
`client.Secrets().List(options)`
|
||||
|
||||
Retrieve all secrets within the Infisical project and environment that the client is connected to.
|
||||
@ -311,7 +333,9 @@ secrets, err := client.Secrets().List(infisical.ListSecretsOptions{
|
||||
</ParamField>
|
||||
|
||||
###
|
||||
|
||||
### Retrieve Secret
|
||||
|
||||
`client.Secrets().Retrieve(options)`
|
||||
|
||||
Retrieve a secret from Infisical. By default `Secrets().Retrieve()` fetches and returns a shared secret.
|
||||
@ -335,19 +359,23 @@ secret, err := client.Secrets().Retrieve(infisical.RetrieveSecretOptions{
|
||||
The project ID where the secret lives in.
|
||||
</ParamField>
|
||||
<ParamField query="Environment" type="string" required>
|
||||
The slug name (dev, prod, etc) of the environment from where secrets should be fetched from.
|
||||
The slug name (dev, prod, etc) of the environment from where secrets
|
||||
should be fetched from.
|
||||
</ParamField>
|
||||
<ParamField query="SecretPath" type="string" optional>
|
||||
The path from where secret should be fetched from.
|
||||
</ParamField>
|
||||
<ParamField query="Type" type="string" optional>
|
||||
The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared".
|
||||
The type of the secret. Valid options are "shared" or "personal". If not
|
||||
specified, the default value is "shared".
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
###
|
||||
|
||||
### Create Secret
|
||||
|
||||
`client.Secrets().Create(options)`
|
||||
|
||||
Create a new secret in Infisical.
|
||||
@ -363,7 +391,6 @@ secret, err := client.Secrets().Create(infisical.CreateSecretOptions{
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
### Parameters
|
||||
|
||||
<ParamField query="Parameters" type="object" optional>
|
||||
@ -381,18 +408,21 @@ secret, err := client.Secrets().Create(infisical.CreateSecretOptions{
|
||||
The project ID where the secret lives in.
|
||||
</ParamField>
|
||||
<ParamField query="Environment" type="string" required>
|
||||
The slug name (dev, prod, etc) of the environment from where secrets should be fetched from.
|
||||
The slug name (dev, prod, etc) of the environment from where secrets
|
||||
should be fetched from.
|
||||
</ParamField>
|
||||
<ParamField query="SecretPath" type="string" optional>
|
||||
The path from where secret should be created.
|
||||
</ParamField>
|
||||
<ParamField query="Type" type="string" optional>
|
||||
The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared".
|
||||
The type of the secret. Valid options are "shared" or "personal". If not
|
||||
specified, the default value is "shared".
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
###
|
||||
|
||||
### Update Secret
|
||||
|
||||
`client.Secrets().Update(options)`
|
||||
@ -419,26 +449,35 @@ secret, err := client.Secrets().Update(infisical.UpdateSecretOptions{
|
||||
<ParamField query="NewSecretValue" type="string" required>
|
||||
The new value of the secret.
|
||||
</ParamField>
|
||||
<ParamField query="NewSkipMultilineEncoding" type="boolean" default="false" optional>
|
||||
|
||||
Whether or not to skip multiline encoding for the new secret value.
|
||||
</ParamField>
|
||||
<ParamField query="ProjectID" type="string" required>
|
||||
The project ID where the secret lives in.
|
||||
</ParamField>
|
||||
<ParamField query="Environment" type="string" required>
|
||||
The slug name (dev, prod, etc) of the environment from where secrets should be fetched from.
|
||||
The slug name (dev, prod, etc) of the environment from where secrets
|
||||
should be fetched from.
|
||||
</ParamField>
|
||||
<ParamField query="SecretPath" type="string" optional>
|
||||
The path from where secret should be updated.
|
||||
</ParamField>
|
||||
<ParamField query="Type" type="string" optional>
|
||||
The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared".
|
||||
The type of the secret. Valid options are "shared" or "personal". If not
|
||||
specified, the default value is "shared".
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
###
|
||||
|
||||
### Delete Secret
|
||||
|
||||
`client.Secrets().Delete(options)`
|
||||
|
||||
Delete a secret in Infisical.
|
||||
@ -462,22 +501,25 @@ secret, err := client.Secrets().Delete(infisical.DeleteSecretOptions{
|
||||
The project ID where the secret lives in.
|
||||
</ParamField>
|
||||
<ParamField query="Environment" type="string" required>
|
||||
The slug name (dev, prod, etc) of the environment from where secrets should be fetched from.
|
||||
The slug name (dev, prod, etc) of the environment from where secrets
|
||||
should be fetched from.
|
||||
</ParamField>
|
||||
<ParamField query="SecretPath" type="string" optional>
|
||||
The path from where secret should be deleted.
|
||||
</ParamField>
|
||||
<ParamField query="Type" type="string" optional>
|
||||
The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared".
|
||||
The type of the secret. Valid options are "shared" or "personal". If not
|
||||
specified, the default value is "shared".
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
## Working With folders
|
||||
|
||||
|
||||
###
|
||||
|
||||
### List Folders
|
||||
|
||||
`client.Folders().List(options)`
|
||||
|
||||
Retrieve all folders within the Infisical project and environment that the client is connected to.
|
||||
@ -510,7 +552,9 @@ folders, err := client.Folders().List(infisical.ListFoldersOptions{
|
||||
</ParamField>
|
||||
|
||||
###
|
||||
|
||||
### Create Folder
|
||||
|
||||
`client.Folders().Create(options)`
|
||||
|
||||
Create a new folder in Infisical.
|
||||
@ -532,7 +576,8 @@ folder, err := client.Folders().Create(infisical.CreateFolderOptions{
|
||||
The ID of the project where the folder will be created.
|
||||
</ParamField>
|
||||
<ParamField query="Environment" type="string" required>
|
||||
The slug name (dev, prod, etc) of the environment where the folder will be created.
|
||||
The slug name (dev, prod, etc) of the environment where the folder will be
|
||||
created.
|
||||
</ParamField>
|
||||
<ParamField query="Path" type="string" optional>
|
||||
The path to create the folder in. The root path is `/`.
|
||||
@ -543,9 +588,10 @@ folder, err := client.Folders().Create(infisical.CreateFolderOptions{
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
|
||||
###
|
||||
|
||||
### Update Folder
|
||||
|
||||
`client.Folders().Update(options)`
|
||||
|
||||
Update an existing folder in Infisical.
|
||||
@ -568,7 +614,8 @@ folder, err := client.Folders().Update(infisical.UpdateFolderOptions{
|
||||
The ID of the project where the folder will be updated.
|
||||
</ParamField>
|
||||
<ParamField query="Environment" type="string" required>
|
||||
The slug name (dev, prod, etc) of the environment from where the folder lives in.
|
||||
The slug name (dev, prod, etc) of the environment from where the folder
|
||||
lives in.
|
||||
</ParamField>
|
||||
<ParamField query="Path" type="string" optional>
|
||||
The path from where the folder should be updated.
|
||||
@ -583,7 +630,9 @@ folder, err := client.Folders().Update(infisical.UpdateFolderOptions{
|
||||
</ParamField>
|
||||
|
||||
###
|
||||
|
||||
### Delete Folder
|
||||
|
||||
`client.Folders().Delete(options)`
|
||||
|
||||
Delete a folder in Infisical.
|
||||
@ -620,6 +669,5 @@ deletedFolder, err := client.Folders().Delete(infisical.DeleteFolderOptions{
|
||||
The path from where the folder should be deleted.
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
|
||||
</ParamField>
|
||||
|
||||
|
||||
|
@ -22,33 +22,34 @@ The actual resource requirements will vary in direct proportion to the operation
|
||||
Infisical doesn’t require file storage as all persisted data is saved in the database.
|
||||
However, its logs and metrics are saved to disk for later viewing. As a result, we recommend provisioning 1-2 GB of storage.
|
||||
|
||||
### CPU
|
||||
### CPU and Memory (Per Container/Instance)
|
||||
|
||||
CPU requirements vary heavily on the volume of secret operations (reads and writes) you anticipate.
|
||||
Processing large volumes of secrets frequently and consistently will require higher CPU.
|
||||
Infisical is stateless and scales horizontally by running across multiple containers/instances. Each instance typically does **not** need more than **2–4 CPU cores** and **4–8 GB** of memory.
|
||||
If you need additional capacity, simply increase the **number** of containers/instances running in parallel.
|
||||
|
||||
Recommended minimum CPU hardware for different sizes of deployments:
|
||||
| **Deployment Size** | **CPU (Cores, per container)** | **Memory (GB, per container)** | **Recommended Number of Containers** |
|
||||
|---------------------|--------------------------------|--------------------------------|--------------------------------------|
|
||||
| **Small** | 2 | 4 | 2+ |
|
||||
| **Medium** | 2–4 | 4–8 | 5+ |
|
||||
| **Large** | 2–4 | 4–8 | 10+ |
|
||||
|
||||
- **small:** 2-4 core is the **recommended** minimum
|
||||
- **large:** 4-8 cores are suitable for larger deployments
|
||||
|
||||
### Memory Allocation
|
||||
|
||||
Memory needs depend on expected workload, including factors like user activity, automation level, and the frequency of secret operations.
|
||||
|
||||
Recommended minimum memory hardware for different sizes of deployments:
|
||||
- **small:** 4-8 GB is the **recommended** minimum
|
||||
- **large:** 16-32 GB are suitable for larger deployments
|
||||
> **Note:**
|
||||
> - Adding more containers (horizontal scaling) is generally the best way to handle spikes in secret operations.
|
||||
> - If you prefer, you can increase CPU/memory on a single container (vertical scaling), but horizontal scaling is more flexible and resilient.
|
||||
|
||||
## Database & caching layer
|
||||
|
||||
### Postgres
|
||||
|
||||
PostgreSQL is the only database supported by Infisical. Infisical has been extensively tested with Postgres version 16. We recommend using versions 14 and up for optimal compatibility.
|
||||
The compute required for Postgres is largely dependent on the number of secret operations (reads and writes) you expect. The more frequently you read and write secrets, the more compute you will need.
|
||||
You'll notice that storage requirements are high and this is because audit logs are by default stored in the database.
|
||||
|
||||
Recommended resource allocation based on deployment size:
|
||||
- **small:** 2 vCPU / 8 GB RAM / 20 GB Disk
|
||||
- **large:** 4vCPU / 16 GB RAM / 100 GB Disk
|
||||
|
||||
Recommended resource allocation based on deployment size. You may require more resources if you have a large number of secrets or high transaction volume:
|
||||
- **small:** 2 vCPU / 8 GB RAM / 100 GB Disk
|
||||
- **medium:** 4vCPU / 16 GB RAM / 200 GB Disk
|
||||
- **large:** 8vCPU / 32 GB RAM / 500 GB Disk
|
||||
|
||||
### Redis
|
||||
|
||||
|
@ -1,60 +0,0 @@
|
||||
---
|
||||
title: "Schema migration"
|
||||
description: "Learn how to run Postgres schema migrations."
|
||||
---
|
||||
|
||||
Running schema migrations is a requirement before deploying Infisical.
|
||||
Each time you decide to upgrade your version of Infisical, it's necessary to run schema migrations for that specific version.
|
||||
The guide below provides a step-by-step walkthrough to help you manually run schema migrations for Infisical.
|
||||
|
||||
### Prerequisites
|
||||
- Docker installed on your machine
|
||||
- An active PostgreSQL database
|
||||
- Postgres database connection string
|
||||
|
||||
<Steps>
|
||||
<Step title="Pull the Infisical Docker Image">
|
||||
First, ensure you have the correct version of the Infisical Docker image. You can pull it from Docker Hub using the following command:
|
||||
```bash
|
||||
docker pull infisical/infisical:<version>
|
||||
```
|
||||
Replace `<version>` with the specific version number you intend to deploy. View available versions [here](https://hub.docker.com/r/infisical/infisical/tags)
|
||||
</Step>
|
||||
|
||||
<Step title="Set Up the Environment Variable">
|
||||
The Docker image requires a `DB_CONNECTION_URI` environment variable. This connection string should point to your PostgreSQL database. The format generally looks like this: `postgresql://username:password@host:port/database`.
|
||||
</Step>
|
||||
|
||||
<Step title="Run the Migration ">
|
||||
To run the schema migration for the version of Infisical you want to deploy, use the following Docker command:
|
||||
|
||||
```bash
|
||||
docker run --env DB_CONNECTION_URI=<your_connection_string> infisical/infisical:<version> npm run migration:latest
|
||||
```
|
||||
Replace `<your_connection_string>` with your actual PostgreSQL connection string, and `<version>` with the desired version number.
|
||||
</Step>
|
||||
|
||||
<Step title="Verify the Migration">
|
||||
After running the migration, it's good practice to check if the migration was successful. You can do this by checking the logs or accessing your database to ensure the schema has been updated accordingly.
|
||||
</Step>
|
||||
<Step title="Rollback If Needed">
|
||||
If you need to rollback a migration by one step, use the following command:
|
||||
|
||||
```bash
|
||||
docker run --env DB_CONNECTION_URI=<your_connection_string> infisical/infisical:<version> npm run migration:rollback
|
||||
```
|
||||
</Step>
|
||||
|
||||
<Step title="Repeat for Each Version">
|
||||
It's important to run schema migrations for each version of the Infisical you deploy. For instance, if you're updating from `infisical/infisical:1` to `infisical/infisical:2`, ensure you run the schema migrations for `infisical/infisical:2` before deploying it.
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
<Tip>
|
||||
In a production setting, we recommend a more structured approach to deploying migrations prior to upgrading Infisical. This can be accomplished via CI automation.
|
||||
</Tip>
|
||||
|
||||
### Additional discussion
|
||||
- Always back up your database before running migrations, especially in a production environment.
|
||||
- Test the migration process in a staging environment before applying it to production.
|
||||
- Keep track of the versions and their corresponding migrations to avoid any inconsistencies.
|
@ -157,23 +157,6 @@ The [Docker stack file](https://github.com/Infisical/infisical/tree/main/docker-
|
||||
3lznscvk7k5t infisical_spolo2 replicated 1/1 ghcr.io/zalando/spilo-16:3.2-p2
|
||||
v04ml7rz2j5q infisical_spolo3 replicated 1/1 ghcr.io/zalando/spilo-16:3.2-p2
|
||||
```
|
||||
|
||||
<Note>
|
||||
You'll notice that service `infisical_infisical` will not be in running state.
|
||||
This is expected as the database does not yet have the desired schemas.
|
||||
Once the database schema migrations have been successfully applied, this issue should be resolved.
|
||||
</Note>
|
||||
</Step>
|
||||
|
||||
<Step title="Run schema migrations">
|
||||
Run the schema migration to initialize the database. Follow the [guide here](/self-hosting/configuration/schema-migrations) to learn how.
|
||||
|
||||
To run the migrations, you'll need to connect to the Postgres instance deployed on your Docker swarm. The default Postgres user credentials are defined in the Docker swarm: username: `postgres`, password: `postgres` and database: `postgres`.
|
||||
We recommend you change these credentials when deploying to production and create a separate DB for Infisical.
|
||||
|
||||
<Info>
|
||||
After running the schema migrations, be sure to update the `.env` file to have the correct `DB_CONNECTION_URI`.
|
||||
</Info>
|
||||
</Step>
|
||||
|
||||
<Step title="View service status">
|
||||
|
@ -78,18 +78,6 @@ description: "Learn how to use Helm chart to install Infisical on your Kubernete
|
||||
</Tabs>
|
||||
</Step>
|
||||
|
||||
<Step title="Database schema migration ">
|
||||
Infisical relies on a relational database, which means that database schemas need to be migrated before the instance can become operational.
|
||||
|
||||
To automate this process, the chart includes an option named `infisical.autoDatabaseSchemaMigration`.
|
||||
When this option is enabled, a deployment/upgrade will only occur _after_ a successful schema migration.
|
||||
|
||||
<Info>
|
||||
If you are using in-cluster Postgres, you may notice the migration job failing initially.
|
||||
This is expected as it is waiting for the database to be in ready state.
|
||||
</Info>
|
||||
</Step>
|
||||
|
||||
<Step title="Routing traffic to Infisical">
|
||||
By default, this chart uses Nginx as its Ingress controller to direct traffic to Infisical services.
|
||||
|
||||
|
@ -22,11 +22,6 @@ The following guide provides a detailed step-by-step walkthrough on how you can
|
||||
|
||||
Remember to replace `<version>` with the docker image tag of your choice.
|
||||
</Step>
|
||||
<Step title="Run Postgres schema migration ">
|
||||
Before you can start the instance of Infisical, you need to run the database schema migrations.
|
||||
Follow the step by [step guide here](/self-hosting/configuration/schema-migrations) on running schema migrations for Infisical.
|
||||
|
||||
</Step>
|
||||
<Step title="Start Infisical">
|
||||
For a minimal installation of Infisical, you must configure `ENCRYPTION_KEY`, `AUTH_SECRET`, `DB_CONNECTION_URI`, `SITE_URL`, and `REDIS_URL`. [View all available configurations](/self-hosting/configuration/envars).
|
||||
|
||||
|
docs/self-hosting/guides/upgrading-infisical.mdx (new file, 57 lines)
@ -0,0 +1,57 @@
|
||||
---
|
||||
|
||||
title: "Upgrade Infisical Instance"
|
||||
description: "How to upgrade Infisical self-hosted instance"
|
||||
|
||||
---
|
||||
|
||||
Keeping your Infisical instance up to date is key to making sure you receive the latest performance improvements, security patches, and feature updates.
|
||||
We release updates approximately once a week, which may include new features, bug fixes, performance enhancements, and critical security patches.
|
||||
|
||||
Since secrets management is a critical component of your infrastructure, we aim to avoid disruptive changes that will impact fetching secrets in downstream clients.
|
||||
If a release requires specific attention, a note will be attached to the corresponding [release](https://github.com/Infisical/infisical/releases) version.
|
||||
|
||||
During an upgrade, two key components are updated:
|
||||
|
||||
- **Infisical Application:** The core application code is updated.
|
||||
- **PostgreSQL Database Schema:** Schema migrations run automatically to ensure your database remains in sync with the updated application.
|
||||
|
||||
> **Before You Upgrade:**
|
||||
> **Always back up your database.** While our automated migration system is robust, having a backup ensures you can recover quickly in the event of an issue.
|
||||
|
||||
## Automated Schema Migrations
|
||||
|
||||
In previous versions (prior to `v0.111.0-postgres`), schema migrations had to be executed manually before starting the application.
|
||||
Now, migrations run automatically during boot-up. This improvement streamlines the upgrade process, reduces manual steps, and minimizes the risk of inconsistencies between your database schema and application code.
|
||||
|
||||
### Benefits of Automated Migrations
|
||||
|
||||
- **Seamless Integration:**
|
||||
Migrations are now part of the boot-up process, removing the need for manual intervention.
|
||||
|
||||
- **Synchronous Upgrades:**
|
||||
In multi-instance deployments, one instance acquires a lock and performs the migration while the others wait. This ensures that if a migration fails, the rollout is halted to prevent inconsistencies.
|
||||
|
||||
- **Reduced Room for Error:**
|
||||
Automatic migrations help ensure that your database schema always remains in sync with your application code.
|
||||
|
||||
## Upgrade Steps
|
||||
|
||||
1. **Back Up Your Data:**
|
||||
- Ensure you have a complete backup of your Postgres database.
|
||||
- Verify that your backup is current and accessible.
|
||||
|
||||
2. **Select the Upgrade Version:**
|
||||
- Visit the [Infisical releases page](https://github.com/Infisical/infisical/releases) for a list of available versions.
|
||||
- Look for releases with the prefix `infisical/` as there are other releases that are not related to the Infisical instance.
|
||||
|
||||
3. **Start the Upgrade Process:**
|
||||
- Launch the new version of Infisical. During startup, the application will automatically compare the current database schema with the updated schema in the code.
|
||||
- If any differences are detected, Infisical will apply the necessary migrations automatically.
|
||||
|
||||
4. **Multi-Instance Coordination:**
|
||||
- In environments with multiple instances, one instance will acquire a lock and perform the migration while the other instances wait.
|
||||
- Once the migration is complete, all instances will operate with the updated schema.
|
||||
|
||||
5. **Verify the Upgrade:**
|
||||
- Review the logs for any migration errors or warnings.
|
@ -48,9 +48,3 @@ This ensures that if there is a failure in one availability zone, the working re
|
||||
<Accordion title="Can Infisical run in an air-gapped environment without any internet access?" defaultOpen >
|
||||
Yes, Infisical can function in an air-gapped environment. To do so, update your ECS task to use the publicly available AWS Elastic Container Registry (ECR) image instead of the default Docker Hub image. Additionally, it's necessary to configure VPC endpoints, which allows your system to access AWS ECR via a private network route instead of the internet, ensuring all connectivity remains within the secure, private network.
|
||||
</Accordion>
|
||||
<Accordion title="Since RDS is in a private subnet, how do run the Postgres schema migrations?">
|
||||
Since the Amazon RDS instance is housed within a private network to enhance security, it is not directly accessible from the internet. This means that in order to run the required [Postgres schema migrations](/self-hosting/configuration/schema-migrations), you need to connect to this instance of RDS. There are many approaches you can take:
|
||||
- To automate schema migrations, you may set up a CI/CD pipeline with access to the same RDS network to run the schema migrations before deploying to ECS. This ensures that if migrations fail, your Infisical instances continue to run.
|
||||
- If you would like to run the migrations manually, consider using AWS Systems Manager Session Manager to access the RDS within the VPC on your local machine.
|
||||
- If your organization already has mechanisms in place for secure access to the VPC, such as VPNs or Direct Connect, these can also be utilized for performing database migrations manually.
|
||||
</Accordion>
|
||||
|
@ -57,6 +57,10 @@ export const ProjectPermissionCan: FunctionComponent<Props<ProjectPermissionSet>
|
||||
const finalChild =
|
||||
typeof children === "function" ? children(isAllowed, ability as any) : children;
|
||||
|
||||
if (!isAllowed && renderGuardBanner) {
|
||||
return <ProjectPermissionGuardBanner />;
|
||||
}
|
||||
|
||||
if (!isAllowed && passThrough) {
|
||||
return <Tooltip content={label}>{finalChild}</Tooltip>;
|
||||
}
|
||||
@ -65,10 +69,6 @@ export const ProjectPermissionCan: FunctionComponent<Props<ProjectPermissionSet>
|
||||
return <Tooltip content={allowedLabel}>{finalChild}</Tooltip>;
|
||||
}
|
||||
|
||||
if (!isAllowed && renderGuardBanner) {
|
||||
return <ProjectPermissionGuardBanner />;
|
||||
}
|
||||
|
||||
if (!isAllowed) return null;
|
||||
|
||||
return finalChild;
|
||||
|
@ -10,9 +10,9 @@ type Props = {
|
||||
};
|
||||
|
||||
export const SecretSyncSelect = ({ onSelect }: Props) => {
|
||||
const { isLoading, data: secretSyncOptions } = useSecretSyncOptions();
|
||||
const { isPending, data: secretSyncOptions } = useSecretSyncOptions();
|
||||
|
||||
if (isLoading) {
|
||||
if (isPending) {
|
||||
return (
|
||||
<div className="flex h-full flex-col items-center justify-center py-2.5">
|
||||
<Spinner size="lg" className="text-mineshaft-500" />
|
||||
|
@ -23,7 +23,7 @@ export const SecretSyncConnectionField = ({ onChange: callback }: Props) => {
|
||||
const destination = watch("destination");
|
||||
const app = SECRET_SYNC_CONNECTION_MAP[destination];
|
||||
|
||||
const { data: availableConnections, isLoading } = useListAvailableAppConnections(app);
|
||||
const { data: availableConnections, isPending } = useListAvailableAppConnections(app);
|
||||
|
||||
const connectionName = APP_CONNECTION_MAP[app].name;
|
||||
|
||||
@ -54,7 +54,7 @@ export const SecretSyncConnectionField = ({ onChange: callback }: Props) => {
|
||||
onChange(newValue);
|
||||
if (callback) callback();
|
||||
}}
|
||||
isLoading={isLoading}
|
||||
isLoading={isPending}
|
||||
options={availableConnections}
|
||||
placeholder="Select connection..."
|
||||
getOptionLabel={(option) => option.name}
|
||||
|
@ -323,12 +323,12 @@ export const MinimizedOrgSidebar = () => {
|
||||
<DropdownMenuContent align="start" side="right" className="p-1">
|
||||
<DropdownMenuLabel>Organization Options</DropdownMenuLabel>
|
||||
<Link to="/organization/access-management">
|
||||
<DropdownMenuItem icon={<FontAwesomeIcon icon={faUsers} />}>
|
||||
<DropdownMenuItem icon={<FontAwesomeIcon className="w-3" icon={faUsers} />}>
|
||||
Access Control
|
||||
</DropdownMenuItem>
|
||||
</Link>
|
||||
<Link to="/organization/app-connections">
|
||||
<DropdownMenuItem icon={<FontAwesomeIcon icon={faPlug} />}>
|
||||
<DropdownMenuItem icon={<FontAwesomeIcon className="w-3" icon={faPlug} />}>
|
||||
App Connections
|
||||
</DropdownMenuItem>
|
||||
</Link>
|
||||
@ -336,18 +336,20 @@ export const MinimizedOrgSidebar = () => {
|
||||
window.location.origin.includes("https://eu.infisical.com") ||
|
||||
window.location.origin.includes("https://gamma.infisical.com")) && (
|
||||
<Link to="/organization/billing">
|
||||
<DropdownMenuItem icon={<FontAwesomeIcon icon={faMoneyBill} />}>
|
||||
<DropdownMenuItem
|
||||
icon={<FontAwesomeIcon className="w-3" icon={faMoneyBill} />}
|
||||
>
|
||||
Usage & Billing
|
||||
</DropdownMenuItem>
|
||||
</Link>
|
||||
)}
|
||||
<Link to="/organization/audit-logs">
|
||||
<DropdownMenuItem icon={<FontAwesomeIcon icon={faBook} />}>
|
||||
<DropdownMenuItem icon={<FontAwesomeIcon className="w-3" icon={faBook} />}>
|
||||
Audit Logs
|
||||
</DropdownMenuItem>
|
||||
</Link>
|
||||
<Link to="/organization/settings">
|
||||
<DropdownMenuItem icon={<FontAwesomeIcon icon={faCog} />}>
|
||||
<DropdownMenuItem icon={<FontAwesomeIcon className="w-3" icon={faCog} />}>
|
||||
Organization Settings
|
||||
</DropdownMenuItem>
|
||||
</Link>
|
||||
|
@ -11,9 +11,9 @@ type Props = {
|
||||
};
|
||||
|
||||
export const AppConnectionsSelect = ({ onSelect }: Props) => {
|
||||
const { isLoading, data: appConnectionOptions } = useAppConnectionOptions();
|
||||
const { isPending, data: appConnectionOptions } = useAppConnectionOptions();
|
||||
|
||||
if (isLoading) {
|
||||
if (isPending) {
|
||||
return (
|
||||
<div className="flex h-full flex-col items-center justify-center py-2.5">
|
||||
<Spinner size="lg" className="text-mineshaft-500" />
|
||||
|
@ -51,7 +51,7 @@ type AppConnectionFilters = {
|
||||
};
|
||||
|
||||
export const AppConnectionsTable = () => {
|
||||
const { isLoading, data: appConnections = [] } = useListAppConnections();
|
||||
const { isPending, data: appConnections = [] } = useListAppConnections();
|
||||
|
||||
const { popUp, handlePopUpOpen, handlePopUpToggle } = usePopUp([
|
||||
"deleteConnection",
|
||||
@ -262,7 +262,7 @@ export const AppConnectionsTable = () => {
|
||||
</Tr>
|
||||
</THead>
|
||||
<TBody>
|
||||
{isLoading && (
|
||||
{isPending && (
|
||||
<TableSkeleton innerKey="app-connections-table" columns={4} key="app-connections" />
|
||||
)}
|
||||
{filteredAppConnections.slice(offset, perPage * page).map((connection) => (
|
||||
@ -285,7 +285,7 @@ export const AppConnectionsTable = () => {
|
||||
onChangePerPage={setPerPage}
|
||||
/>
|
||||
)}
|
||||
{!isLoading && !filteredAppConnections?.length && (
|
||||
{!isPending && !filteredAppConnections?.length && (
|
||||
<EmptyState
|
||||
title={
|
||||
appConnections.length
|
||||
|
@ -2,12 +2,15 @@ import { Helmet } from "react-helmet";
|
||||
import { useTranslation } from "react-i18next";
|
||||
import { faRefresh, faTrash } from "@fortawesome/free-solid-svg-icons";
|
||||
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
|
||||
import { useParams } from "@tanstack/react-router";
|
||||
import { useNavigate, useParams } from "@tanstack/react-router";
|
||||
import { twMerge } from "tailwind-merge";
|
||||
|
||||
import { createNotification } from "@app/components/notifications";
|
||||
import { OrgPermissionCan } from "@app/components/permissions";
|
||||
import {
|
||||
Button,
|
||||
Checkbox,
|
||||
DeleteActionModal,
|
||||
DropdownMenu,
|
||||
DropdownMenuContent,
|
||||
DropdownMenuItem,
|
||||
@ -18,8 +21,10 @@ import {
|
||||
} from "@app/components/v2";
|
||||
import { ROUTE_PATHS } from "@app/const/routes";
|
||||
import { OrgPermissionActions, OrgPermissionSubjects, useWorkspace } from "@app/context";
|
||||
import { usePopUp, useToggle } from "@app/hooks";
|
||||
import { useGetIntegration } from "@app/hooks/api";
|
||||
import { useSyncIntegration } from "@app/hooks/api/integrations/queries";
|
||||
import { useDeleteIntegration, useSyncIntegration } from "@app/hooks/api/integrations/queries";
|
||||
import { ProjectType } from "@app/hooks/api/workspace/types";
|
||||
|
||||
import { IntegrationAuditLogsSection } from "./components/IntegrationAuditLogsSection";
|
||||
import { IntegrationConnectionSection } from "./components/IntegrationConnectionSection";
|
||||
@ -38,9 +43,44 @@ export const IntegrationDetailsByIDPage = () => {
|
||||
refetchInterval: 4000
|
||||
});
|
||||
|
||||
const [shouldDeleteSecrets, setShouldDeleteSecrets] = useToggle(false);
|
||||
|
||||
const { currentWorkspace } = useWorkspace();
|
||||
const projectId = currentWorkspace.id;
|
||||
const { mutateAsync: syncIntegration } = useSyncIntegration();
|
||||
const { mutateAsync: deleteIntegration } = useDeleteIntegration();
|
||||
|
||||
const navigate = useNavigate();
|
||||
|
||||
const handleIntegrationDelete = async (shouldDeleteIntegrationSecrets: boolean) => {
|
||||
try {
|
||||
await deleteIntegration({
|
||||
id: integrationId,
|
||||
workspaceId: currentWorkspace.id,
|
||||
shouldDeleteIntegrationSecrets
|
||||
});
|
||||
|
||||
createNotification({
|
||||
type: "success",
|
||||
text: "Deleted integration"
|
||||
});
|
||||
|
||||
await navigate({
|
||||
to: `/${ProjectType.SecretManager}/${projectId}/integrations`
|
||||
});
|
||||
} catch (err) {
|
||||
console.log(err);
|
||||
createNotification({
|
||||
type: "error",
|
||||
text: "Failed to delete integration"
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const { popUp, handlePopUpOpen, handlePopUpClose, handlePopUpToggle } = usePopUp([
|
||||
"deleteConfirmation",
|
||||
"deleteSecretsConfirmation"
|
||||
] as const);
|
||||
|
||||
return (
|
||||
<>
|
||||
@ -90,7 +130,10 @@ export const IntegrationDetailsByIDPage = () => {
|
||||
? "hover:!bg-red-500 hover:!text-white"
|
||||
: "pointer-events-none cursor-not-allowed opacity-50"
|
||||
)}
|
||||
onClick={() => {}}
|
||||
onClick={() => {
|
||||
setShouldDeleteSecrets.off();
|
||||
handlePopUpOpen("deleteConfirmation", integration);
|
||||
}}
|
||||
disabled={!isAllowed}
|
||||
>
|
||||
<div className="flex items-center gap-2">
|
||||
@ -120,6 +163,57 @@ export const IntegrationDetailsByIDPage = () => {
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<DeleteActionModal
|
||||
isOpen={popUp.deleteConfirmation.isOpen}
|
||||
title={`Are you sure want to remove ${integration?.integration || " "} integration for ${
|
||||
integration?.app || "this project"
|
||||
}?`}
|
||||
onChange={(isOpen) => handlePopUpToggle("deleteConfirmation", isOpen)}
|
||||
deleteKey={
|
||||
(integration?.integration === "azure-app-configuration" &&
|
||||
integration?.app?.split("//")[1]?.split(".")[0]) ||
|
||||
integration?.app ||
|
||||
integration?.owner ||
|
||||
integration?.path ||
|
||||
integration?.integration ||
|
||||
""
|
||||
}
|
||||
onDeleteApproved={async () => {
|
||||
if (shouldDeleteSecrets) {
|
||||
handlePopUpOpen("deleteSecretsConfirmation");
|
||||
return;
|
||||
}
|
||||
|
||||
await handleIntegrationDelete(false);
|
||||
|
||||
handlePopUpClose("deleteConfirmation");
|
||||
}}
|
||||
>
|
||||
{integration?.integration === "github" && (
|
||||
<div className="mt-4">
|
||||
<Checkbox
|
||||
id="delete-integration-secrets"
|
||||
checkIndicatorBg="text-white"
|
||||
onCheckedChange={() => setShouldDeleteSecrets.toggle()}
|
||||
>
|
||||
Delete previously synced secrets from the destination
|
||||
</Checkbox>
|
||||
</div>
|
||||
)}
|
||||
</DeleteActionModal>
|
||||
<DeleteActionModal
|
||||
isOpen={popUp.deleteSecretsConfirmation.isOpen}
|
||||
title={`Are you sure you also want to delete secrets on ${integration?.integration}?`}
|
||||
subTitle="By confirming, you acknowledge that all secrets managed by this integration will be removed from the destination. This action is irreversible."
|
||||
onChange={(isOpen) => handlePopUpToggle("deleteSecretsConfirmation", isOpen)}
|
||||
deleteKey="confirm"
|
||||
onDeleteApproved={async () => {
|
||||
await handleIntegrationDelete(true);
|
||||
handlePopUpClose("deleteSecretsConfirmation");
|
||||
handlePopUpClose("deleteConfirmation");
|
||||
}}
|
||||
/>
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
@ -8,6 +8,7 @@ import { ProjectPermissionCan } from "@app/components/permissions";
|
||||
import { Badge, PageHeader, Tab, TabList, TabPanel, Tabs } from "@app/components/v2";
|
||||
import { ROUTE_PATHS } from "@app/const/routes";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub, useWorkspace } from "@app/context";
|
||||
import { ProjectPermissionSecretSyncActions } from "@app/context/ProjectPermissionContext/types";
|
||||
import { IntegrationsListPageTabs } from "@app/types/integrations";
|
||||
|
||||
import {
|
||||
@ -100,8 +101,7 @@ export const IntegrationsListPage = () => {
|
||||
<TabPanel value={IntegrationsListPageTabs.SecretSyncs}>
|
||||
<ProjectPermissionCan
|
||||
renderGuardBanner
|
||||
passThrough={false}
|
||||
I={ProjectPermissionActions.Read}
|
||||
I={ProjectPermissionSecretSyncActions.Read}
|
||||
a={ProjectPermissionSub.SecretSyncs}
|
||||
>
|
||||
<SecretSyncsTab />
|
||||
@ -110,7 +110,6 @@ export const IntegrationsListPage = () => {
|
||||
<TabPanel value={IntegrationsListPageTabs.NativeIntegrations}>
|
||||
<ProjectPermissionCan
|
||||
renderGuardBanner
|
||||
passThrough={false}
|
||||
I={ProjectPermissionActions.Read}
|
||||
a={ProjectPermissionSub.Integrations}
|
||||
>
|
||||
|
@ -3,7 +3,12 @@ import { zodValidator } from "@tanstack/zod-adapter";
|
||||
import { z } from "zod";
|
||||
|
||||
import { workspaceKeys } from "@app/hooks/api";
|
||||
import { fetchSecretSyncsByProjectId, secretSyncKeys } from "@app/hooks/api/secretSyncs";
|
||||
import { TIntegration } from "@app/hooks/api/integrations/types";
|
||||
import {
|
||||
fetchSecretSyncsByProjectId,
|
||||
secretSyncKeys,
|
||||
TSecretSync
|
||||
} from "@app/hooks/api/secretSyncs";
|
||||
import { fetchWorkspaceIntegrations } from "@app/hooks/api/workspace/queries";
|
||||
import { IntegrationsListPageTabs } from "@app/types/integrations";
|
||||
|
||||
@ -20,10 +25,22 @@ export const Route = createFileRoute(
|
||||
validateSearch: zodValidator(IntegrationsListPageQuerySchema),
|
||||
beforeLoad: async ({ context, search, params: { projectId } }) => {
|
||||
if (!search.selectedTab) {
|
||||
const secretSyncs = await context.queryClient.ensureQueryData({
|
||||
let secretSyncs: TSecretSync[];
|
||||
|
||||
try {
|
||||
secretSyncs = await context.queryClient.ensureQueryData({
|
||||
queryKey: secretSyncKeys.list(projectId),
|
||||
queryFn: () => fetchSecretSyncsByProjectId(projectId)
|
||||
});
|
||||
} catch {
|
||||
throw redirect({
|
||||
to: "/secret-manager/$projectId/integrations",
|
||||
params: {
|
||||
projectId
|
||||
},
|
||||
search: { selectedTab: IntegrationsListPageTabs.NativeIntegrations }
|
||||
});
|
||||
}
|
||||
|
||||
if (secretSyncs.length) {
|
||||
throw redirect({
|
||||
@ -35,10 +52,21 @@ export const Route = createFileRoute(
|
||||
});
|
||||
}
|
||||
|
||||
const integrations = await context.queryClient.ensureQueryData({
|
||||
let integrations: TIntegration[];
|
||||
try {
|
||||
integrations = await context.queryClient.ensureQueryData({
|
||||
queryKey: workspaceKeys.getWorkspaceIntegrations(projectId),
|
||||
queryFn: () => fetchWorkspaceIntegrations(projectId)
|
||||
});
|
||||
} catch {
|
||||
throw redirect({
|
||||
to: "/secret-manager/$projectId/integrations",
|
||||
params: {
|
||||
projectId
|
||||
},
|
||||
search: { selectedTab: IntegrationsListPageTabs.SecretSyncs }
|
||||
});
|
||||
}
|
||||
|
||||
if (integrations.length) {
|
||||
throw redirect({
|
||||
|
@ -88,7 +88,7 @@ export const OctopusDeployConfigurePage = () => {
|
||||
{
|
||||
integrationAuthId,
|
||||
spaceId: currentSpace?.Id,
|
||||
resourceId: currentResource.appId!,
|
||||
resourceId: currentResource?.appId || "",
|
||||
scope: currentScope
|
||||
},
|
||||
{ enabled: Boolean(currentSpace && currentResource) }
|
||||
|