Compare commits
113 Commits
gcp-iam-au... → daniel/aut...
SHA1 | Author | Date
---|---|---
3d65d121c0 | |||
663f8abc51 | |||
941a71efaf | |||
19bbc2ab26 | |||
f4de52e714 | |||
0b87121b67 | |||
e649667da8 | |||
6af4b3f64c | |||
efcc248486 | |||
82eeae6030 | |||
440c77965c | |||
880289217e | |||
d0947f1040 | |||
303edadb1e | |||
50155a610d | |||
c2830a56b6 | |||
b9a9b6b4d9 | |||
e7f7f271c8 | |||
b26e96c5a2 | |||
9b404c215b | |||
d6dae04959 | |||
629bd9b7c6 | |||
3d4aa0fdc9 | |||
711e30a6be | |||
7b1462fdee | |||
50915833ff | |||
44e37fd531 | |||
fa3f957738 | |||
224b26ced6 | |||
e833d9e67c | |||
dc08edb7d2 | |||
0b78e30848 | |||
9253c69325 | |||
7d3a62cc4c | |||
7e2147f14e | |||
32f39c98a7 | |||
ddf6db5a7e | |||
554dbf6c23 | |||
d1997f04c0 | |||
deefaa0961 | |||
a392c9f022 | |||
34222b83ee | |||
ef36852a47 | |||
d79fd826a4 | |||
18aaa423a9 | |||
32c33eaf6e | |||
702699b4f0 | |||
35ee03d347 | |||
9c5deee688 | |||
ce4cb39a2d | |||
84724e5f65 | |||
56c2e12760 | |||
21656a7ab6 | |||
2ccc77ef40 | |||
1438415d0c | |||
eca0e62764 | |||
e4186f0317 | |||
704c630797 | |||
f398fee2b8 | |||
7fce51e8c1 | |||
a6fe233122 | |||
5e678b1ad2 | |||
cf453e87d8 | |||
4af703df5b | |||
75b8b521b3 | |||
58c1d3b0ac | |||
6b5cafa631 | |||
4a35623956 | |||
74fe673724 | |||
2f92719771 | |||
399ca7a221 | |||
29f37295e1 | |||
e3184a5f40 | |||
ace008f44e | |||
4afd95fe1a | |||
3cd719f6b0 | |||
c6352cc970 | |||
d4555f9698 | |||
393964c4ae | |||
e4afbe8662 | |||
0d89aa8607 | |||
2b91ec5ae9 | |||
c438479246 | |||
9828cbbfbe | |||
cd910a2fac | |||
fc1dffd7e2 | |||
55f8198a2d | |||
4d166402df | |||
19edf83dbc | |||
13f6b238e7 | |||
8dee1f8fc7 | |||
3b23035dfb | |||
0c8ef13d8d | |||
389d51fa5c | |||
638208e9fa | |||
c176d1e4f7 | |||
91a23a608e | |||
c6a25271dd | |||
0f5c1340d3 | |||
ecbdae110d | |||
8ef727b4ec | |||
c6f24dbb5e | |||
c45dae4137 | |||
18c0d2fd6f | |||
c1fb8f47bf | |||
bd57a068d1 | |||
990eddeb32 | |||
ce01f8d099 | |||
faf6708b00 | |||
a58d6ebdac | |||
818b136836 | |||
0cdade6a2d | |||
c276c44c08 |
@@ -122,13 +122,13 @@ jobs:
        uses: pr-mpt/actions-commit-hash@v2
      - name: Download task definition
        run: |
-         aws ecs describe-task-definition --task-definition infisical-prod-platform --query taskDefinition > task-definition.json
+         aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
      - name: Render Amazon ECS task definition
        id: render-web-container
        uses: aws-actions/amazon-ecs-render-task-definition@v1
        with:
          task-definition: task-definition.json
-         container-name: infisical-prod-platform
+         container-name: infisical-core-platform
          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
          environment-variables: "LOG_LEVEL=info"
      - name: Deploy to Amazon ECS service
backend/package-lock.json (generated, 48 lines changed)
@@ -34,7 +34,7 @@
        "axios": "^1.6.7",
        "axios-retry": "^4.0.0",
        "bcrypt": "^5.1.1",
-       "bullmq": "^5.3.3",
+       "bullmq": "^5.4.2",
        "cassandra-driver": "^4.7.2",
        "dotenv": "^16.4.1",
        "fastify": "^4.26.0",
@ -2940,6 +2940,7 @@
|
||||
"version": "2.1.5",
|
||||
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
|
||||
"integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@nodelib/fs.stat": "2.0.5",
|
||||
"run-parallel": "^1.1.9"
|
||||
@ -2952,6 +2953,7 @@
|
||||
"version": "2.0.5",
|
||||
"resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz",
|
||||
"integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">= 8"
|
||||
}
|
||||
@ -2960,6 +2962,7 @@
|
||||
"version": "1.2.8",
|
||||
"resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz",
|
||||
"integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@nodelib/fs.scandir": "2.1.5",
|
||||
"fastq": "^1.6.0"
|
||||
@ -6295,6 +6298,7 @@
|
||||
"version": "3.0.2",
|
||||
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
|
||||
"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"fill-range": "^7.0.1"
|
||||
},
|
||||
@ -6344,15 +6348,13 @@
|
||||
}
|
||||
},
|
||||
"node_modules/bullmq": {
|
||||
"version": "5.3.3",
|
||||
"resolved": "https://registry.npmjs.org/bullmq/-/bullmq-5.3.3.tgz",
|
||||
"integrity": "sha512-Gc/68HxiCHLMPBiGIqtINxcf8HER/5wvBYMY/6x3tFejlvldUBFaAErMTLDv4TnPsTyzNPrfBKmFCEM58uVnJg==",
|
||||
"version": "5.4.2",
|
||||
"resolved": "https://registry.npmjs.org/bullmq/-/bullmq-5.4.2.tgz",
|
||||
"integrity": "sha512-dkR/KGUw18miLe3QWtvSlmGvEe08aZF+w1jZyqEHMWFW3RP4162qp6OGud0/QCAOjusiRI8UOxUhbnortPY+rA==",
|
||||
"dependencies": {
|
||||
"cron-parser": "^4.6.0",
|
||||
"fast-glob": "^3.3.2",
|
||||
"ioredis": "^5.3.2",
|
||||
"lodash": "^4.17.21",
|
||||
"minimatch": "^9.0.3",
|
||||
"msgpackr": "^1.10.1",
|
||||
"node-abort-controller": "^3.1.1",
|
||||
"semver": "^7.5.4",
|
||||
@ -6360,28 +6362,6 @@
|
||||
"uuid": "^9.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/bullmq/node_modules/brace-expansion": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
|
||||
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
|
||||
"dependencies": {
|
||||
"balanced-match": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/bullmq/node_modules/minimatch": {
|
||||
"version": "9.0.3",
|
||||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz",
|
||||
"integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==",
|
||||
"dependencies": {
|
||||
"brace-expansion": "^2.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16 || 14 >=14.17"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/isaacs"
|
||||
}
|
||||
},
|
||||
"node_modules/bundle-require": {
|
||||
"version": "4.0.2",
|
||||
"resolved": "https://registry.npmjs.org/bundle-require/-/bundle-require-4.0.2.tgz",
|
||||
@ -7813,6 +7793,7 @@
|
||||
"version": "3.3.2",
|
||||
"resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz",
|
||||
"integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@nodelib/fs.stat": "^2.0.2",
|
||||
"@nodelib/fs.walk": "^1.2.3",
|
||||
@ -7964,6 +7945,7 @@
|
||||
"version": "7.0.1",
|
||||
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
|
||||
"integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"to-regex-range": "^5.0.1"
|
||||
},
|
||||
@ -8497,6 +8479,7 @@
|
||||
"version": "5.1.2",
|
||||
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
|
||||
"integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"is-glob": "^4.0.1"
|
||||
},
|
||||
@ -9191,6 +9174,7 @@
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
|
||||
"integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
@ -9221,6 +9205,7 @@
|
||||
"version": "4.0.3",
|
||||
"resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
|
||||
"integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"is-extglob": "^2.1.1"
|
||||
},
|
||||
@ -9255,6 +9240,7 @@
|
||||
"version": "7.0.0",
|
||||
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
|
||||
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=0.12.0"
|
||||
}
|
||||
@ -10091,6 +10077,7 @@
|
||||
"version": "1.4.1",
|
||||
"resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz",
|
||||
"integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">= 8"
|
||||
}
|
||||
@ -10107,6 +10094,7 @@
|
||||
"version": "4.0.5",
|
||||
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz",
|
||||
"integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"braces": "^3.0.2",
|
||||
"picomatch": "^2.3.1"
|
||||
@ -10119,6 +10107,7 @@
|
||||
"version": "2.3.1",
|
||||
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
|
||||
"integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=8.6"
|
||||
},
|
||||
@ -11748,6 +11737,7 @@
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
|
||||
"integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==",
|
||||
"dev": true,
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
@ -12100,6 +12090,7 @@
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz",
|
||||
"integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==",
|
||||
"dev": true,
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
@ -12900,6 +12891,7 @@
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
|
||||
"integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"is-number": "^7.0.0"
|
||||
},
|
||||
|
@@ -95,7 +95,7 @@
        "axios": "^1.6.7",
        "axios-retry": "^4.0.0",
        "bcrypt": "^5.1.1",
-       "bullmq": "^5.3.3",
+       "bullmq": "^5.4.2",
        "cassandra-driver": "^4.7.2",
        "dotenv": "^16.4.1",
        "fastify": "^4.26.0",
backend/src/@types/fastify.d.ts (vendored, 2 lines changed)
@@ -34,6 +34,7 @@ import { TIdentityServiceFactory } from "@app/services/identity/identity-service
  import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
  import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
  import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
+ import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
  import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
  import { TIdentityUaServiceFactory } from "@app/services/identity-ua/identity-ua-service";
  import { TIntegrationServiceFactory } from "@app/services/integration/integration-service";
@@ -117,6 +118,7 @@ declare module "fastify" {
      identityAccessToken: TIdentityAccessTokenServiceFactory;
      identityProject: TIdentityProjectServiceFactory;
      identityUa: TIdentityUaServiceFactory;
+     identityKubernetesAuth: TIdentityKubernetesAuthServiceFactory;
      identityGcpAuth: TIdentityGcpAuthServiceFactory;
      identityAwsAuth: TIdentityAwsAuthServiceFactory;
      accessApprovalPolicy: TAccessApprovalPolicyServiceFactory;
backend/src/@types/knex.d.ts (vendored, 14 lines changed)
@@ -65,6 +65,9 @@ import {
    TIdentityGcpAuths,
    TIdentityGcpAuthsInsert,
    TIdentityGcpAuthsUpdate,
+   TIdentityKubernetesAuths,
+   TIdentityKubernetesAuthsInsert,
+   TIdentityKubernetesAuthsUpdate,
    TIdentityOrgMemberships,
    TIdentityOrgMembershipsInsert,
    TIdentityOrgMembershipsUpdate,
@@ -231,6 +234,7 @@ import {
    TWebhooksInsert,
    TWebhooksUpdate
  } from "@app/db/schemas";
+ import { TSecretReferences, TSecretReferencesInsert, TSecretReferencesUpdate } from "@app/db/schemas/secret-references";

  declare module "knex/types/tables" {
    interface Tables {
@@ -304,6 +308,11 @@ declare module "knex/types/tables" {
      >;
      [TableName.ProjectKeys]: Knex.CompositeTableType<TProjectKeys, TProjectKeysInsert, TProjectKeysUpdate>;
      [TableName.Secret]: Knex.CompositeTableType<TSecrets, TSecretsInsert, TSecretsUpdate>;
+     [TableName.SecretReference]: Knex.CompositeTableType<
+       TSecretReferences,
+       TSecretReferencesInsert,
+       TSecretReferencesUpdate
+     >;
      [TableName.SecretBlindIndex]: Knex.CompositeTableType<
        TSecretBlindIndexes,
        TSecretBlindIndexesInsert,
@@ -332,6 +341,11 @@ declare module "knex/types/tables" {
        TIdentityUniversalAuthsInsert,
        TIdentityUniversalAuthsUpdate
      >;
+     [TableName.IdentityKubernetesAuth]: Knex.CompositeTableType<
+       TIdentityKubernetesAuths,
+       TIdentityKubernetesAuthsInsert,
+       TIdentityKubernetesAuthsUpdate
+     >;
      [TableName.IdentityGcpAuth]: Knex.CompositeTableType<
        TIdentityGcpAuths,
        TIdentityGcpAuthsInsert,
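These `Knex.CompositeTableType` entries are what make queries against the new tables type-checked. A minimal sketch of what that buys a caller; the column values and the helper itself are illustrative, only the table and column names come from the migration below:

```ts
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";

// With the declaration merging above, knex narrows the row type for
// TableName.SecretReference, so this insert is verified at compile time.
async function recordReference(db: Knex, secretId: string) {
  await db(TableName.SecretReference).insert({
    environment: "dev", // hypothetical values
    secretPath: "/api",
    secretId
  });
}
```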
@@ -0,0 +1,24 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.SecretReference))) {
    await knex.schema.createTable(TableName.SecretReference, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("environment").notNullable();
      t.string("secretPath").notNullable();
      t.uuid("secretId").notNullable();
      t.foreign("secretId").references("id").inTable(TableName.Secret).onDelete("CASCADE");
      t.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.SecretReference);
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.SecretReference);
  await dropOnUpdateTrigger(knex, TableName.SecretReference);
}
backend/src/db/migrations/20240518142614_kubernetes-auth.ts (new file, 36 lines)
@@ -0,0 +1,36 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.IdentityKubernetesAuth))) {
    await knex.schema.createTable(TableName.IdentityKubernetesAuth, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
      t.jsonb("accessTokenTrustedIps").notNullable();
      t.timestamps(true, true, true);
      t.uuid("identityId").notNullable().unique();
      t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
      t.string("kubernetesHost").notNullable();
      t.text("encryptedCaCert").notNullable();
      t.string("caCertIV").notNullable();
      t.string("caCertTag").notNullable();
      t.text("encryptedTokenReviewerJwt").notNullable();
      t.string("tokenReviewerJwtIV").notNullable();
      t.string("tokenReviewerJwtTag").notNullable();
      t.string("allowedNamespaces").notNullable();
      t.string("allowedNames").notNullable();
      t.string("allowedAudience").notNullable();
    });
  }

  await createOnUpdateTrigger(knex, TableName.IdentityKubernetesAuth);
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.IdentityKubernetesAuth);
  await dropOnUpdateTrigger(knex, TableName.IdentityKubernetesAuth);
}
@ -0,0 +1,43 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasIsSyncedColumn = await knex.schema.hasColumn(TableName.Integration, "isSynced");
|
||||
const hasSyncMessageColumn = await knex.schema.hasColumn(TableName.Integration, "syncMessage");
|
||||
const hasLastSyncJobId = await knex.schema.hasColumn(TableName.Integration, "lastSyncJobId");
|
||||
|
||||
await knex.schema.alterTable(TableName.Integration, (t) => {
|
||||
if (!hasIsSyncedColumn) {
|
||||
t.boolean("isSynced").nullable();
|
||||
}
|
||||
|
||||
if (!hasSyncMessageColumn) {
|
||||
t.text("syncMessage").nullable();
|
||||
}
|
||||
|
||||
if (!hasLastSyncJobId) {
|
||||
t.string("lastSyncJobId").nullable();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasIsSyncedColumn = await knex.schema.hasColumn(TableName.Integration, "isSynced");
|
||||
const hasSyncMessageColumn = await knex.schema.hasColumn(TableName.Integration, "syncMessage");
|
||||
const hasLastSyncJobId = await knex.schema.hasColumn(TableName.Integration, "lastSyncJobId");
|
||||
|
||||
await knex.schema.alterTable(TableName.Integration, (t) => {
|
||||
if (hasIsSyncedColumn) {
|
||||
t.dropColumn("isSynced");
|
||||
}
|
||||
|
||||
if (hasSyncMessageColumn) {
|
||||
t.dropColumn("syncMessage");
|
||||
}
|
||||
|
||||
if (hasLastSyncJobId) {
|
||||
t.dropColumn("lastSyncJobId");
|
||||
}
|
||||
});
|
||||
}
|
@ -0,0 +1,26 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
|
||||
const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
|
||||
if (await knex.schema.hasTable(TableName.AuditLog)) {
|
||||
await knex.schema.alterTable(TableName.AuditLog, (t) => {
|
||||
if (doesProjectIdExist) t.index("projectId");
|
||||
if (doesOrgIdExist) t.index("orgId");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
|
||||
const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
|
||||
|
||||
if (await knex.schema.hasTable(TableName.AuditLog)) {
|
||||
await knex.schema.alterTable(TableName.AuditLog, (t) => {
|
||||
if (doesProjectIdExist) t.dropIndex("projectId");
|
||||
if (doesOrgIdExist) t.dropIndex("orgId");
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,22 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const doesEnvIdExist = await knex.schema.hasColumn(TableName.SnapshotSecret, "envId");
|
||||
if (await knex.schema.hasTable(TableName.SnapshotSecret)) {
|
||||
await knex.schema.alterTable(TableName.SnapshotSecret, (t) => {
|
||||
if (doesEnvIdExist) t.index("envId");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const doesEnvIdExist = await knex.schema.hasColumn(TableName.SnapshotSecret, "envId");
|
||||
|
||||
if (await knex.schema.hasTable(TableName.SnapshotSecret)) {
|
||||
await knex.schema.alterTable(TableName.SnapshotSecret, (t) => {
|
||||
if (doesEnvIdExist) t.dropIndex("envId");
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,22 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const doesEnvIdExist = await knex.schema.hasColumn(TableName.SecretVersion, "envId");
|
||||
if (await knex.schema.hasTable(TableName.SecretVersion)) {
|
||||
await knex.schema.alterTable(TableName.SecretVersion, (t) => {
|
||||
if (doesEnvIdExist) t.index("envId");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const doesEnvIdExist = await knex.schema.hasColumn(TableName.SecretVersion, "envId");
|
||||
|
||||
if (await knex.schema.hasTable(TableName.SecretVersion)) {
|
||||
await knex.schema.alterTable(TableName.SecretVersion, (t) => {
|
||||
if (doesEnvIdExist) t.dropIndex("envId");
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,21 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const doesSnapshotIdExist = await knex.schema.hasColumn(TableName.SnapshotSecret, "snapshotId");
|
||||
if (await knex.schema.hasTable(TableName.SnapshotSecret)) {
|
||||
await knex.schema.alterTable(TableName.SnapshotSecret, (t) => {
|
||||
if (doesSnapshotIdExist) t.index("snapshotId");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const doesSnapshotIdExist = await knex.schema.hasColumn(TableName.SnapshotSecret, "snapshotId");
|
||||
if (await knex.schema.hasTable(TableName.SnapshotSecret)) {
|
||||
await knex.schema.alterTable(TableName.SnapshotSecret, (t) => {
|
||||
if (doesSnapshotIdExist) t.dropIndex("snapshotId");
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,21 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const doesSnapshotIdExist = await knex.schema.hasColumn(TableName.SnapshotFolder, "snapshotId");
|
||||
if (await knex.schema.hasTable(TableName.SnapshotFolder)) {
|
||||
await knex.schema.alterTable(TableName.SnapshotFolder, (t) => {
|
||||
if (doesSnapshotIdExist) t.index("snapshotId");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const doesSnapshotIdExist = await knex.schema.hasColumn(TableName.SnapshotFolder, "snapshotId");
|
||||
if (await knex.schema.hasTable(TableName.SnapshotFolder)) {
|
||||
await knex.schema.alterTable(TableName.SnapshotFolder, (t) => {
|
||||
if (doesSnapshotIdExist) t.dropIndex("snapshotId");
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,24 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const doesFolderIdExist = await knex.schema.hasColumn(TableName.Secret, "folderId");
|
||||
const doesUserIdExist = await knex.schema.hasColumn(TableName.Secret, "userId");
|
||||
if (await knex.schema.hasTable(TableName.Secret)) {
|
||||
await knex.schema.alterTable(TableName.Secret, (t) => {
|
||||
if (doesFolderIdExist && doesUserIdExist) t.index(["folderId", "userId"]);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const doesFolderIdExist = await knex.schema.hasColumn(TableName.Secret, "folderId");
|
||||
const doesUserIdExist = await knex.schema.hasColumn(TableName.Secret, "userId");
|
||||
|
||||
if (await knex.schema.hasTable(TableName.Secret)) {
|
||||
await knex.schema.alterTable(TableName.Secret, (t) => {
|
||||
if (doesUserIdExist && doesFolderIdExist) t.dropIndex(["folderId", "userId"]);
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,22 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const doesExpireAtExist = await knex.schema.hasColumn(TableName.AuditLog, "expiresAt");
|
||||
if (await knex.schema.hasTable(TableName.AuditLog)) {
|
||||
await knex.schema.alterTable(TableName.AuditLog, (t) => {
|
||||
if (doesExpireAtExist) t.index("expiresAt");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const doesExpireAtExist = await knex.schema.hasColumn(TableName.AuditLog, "expiresAt");
|
||||
|
||||
if (await knex.schema.hasTable(TableName.AuditLog)) {
|
||||
await knex.schema.alterTable(TableName.AuditLog, (t) => {
|
||||
if (doesExpireAtExist) t.dropIndex("expiresAt");
|
||||
});
|
||||
}
|
||||
}
|
backend/src/db/schemas/identity-kubernetes-auths.ts (new file, 35 lines)
@@ -0,0 +1,35 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const IdentityKubernetesAuthsSchema = z.object({
  id: z.string().uuid(),
  accessTokenTTL: z.coerce.number().default(7200),
  accessTokenMaxTTL: z.coerce.number().default(7200),
  accessTokenNumUsesLimit: z.coerce.number().default(0),
  accessTokenTrustedIps: z.unknown(),
  createdAt: z.date(),
  updatedAt: z.date(),
  identityId: z.string().uuid(),
  kubernetesHost: z.string(),
  encryptedCaCert: z.string(),
  caCertIV: z.string(),
  caCertTag: z.string(),
  encryptedTokenReviewerJwt: z.string(),
  tokenReviewerJwtIV: z.string(),
  tokenReviewerJwtTag: z.string(),
  allowedNamespaces: z.string(),
  allowedNames: z.string(),
  allowedAudience: z.string()
});

export type TIdentityKubernetesAuths = z.infer<typeof IdentityKubernetesAuthsSchema>;
export type TIdentityKubernetesAuthsInsert = Omit<z.input<typeof IdentityKubernetesAuthsSchema>, TImmutableDBKeys>;
export type TIdentityKubernetesAuthsUpdate = Partial<
  Omit<z.input<typeof IdentityKubernetesAuthsSchema>, TImmutableDBKeys>
>;
@@ -19,6 +19,7 @@ export * from "./identities";
  export * from "./identity-access-tokens";
  export * from "./identity-aws-auths";
  export * from "./identity-gcp-auths";
+ export * from "./identity-kubernetes-auths";
  export * from "./identity-org-memberships";
  export * from "./identity-project-additional-privilege";
  export * from "./identity-project-membership-role";
@@ -28,7 +28,10 @@ export const IntegrationsSchema = z.object({
    secretPath: z.string().default("/"),
    createdAt: z.date(),
    updatedAt: z.date(),
-   lastUsed: z.date().nullable().optional()
+   lastUsed: z.date().nullable().optional(),
+   isSynced: z.boolean().nullable().optional(),
+   syncMessage: z.string().nullable().optional(),
+   lastSyncJobId: z.string().nullable().optional()
  });

  export type TIntegrations = z.infer<typeof IntegrationsSchema>;
@@ -28,6 +28,7 @@ export enum TableName {
    ProjectUserMembershipRole = "project_user_membership_roles",
    ProjectKeys = "project_keys",
    Secret = "secrets",
+   SecretReference = "secret_references",
    SecretBlindIndex = "secret_blind_indexes",
    SecretVersion = "secret_versions",
    SecretFolder = "secret_folders",
@@ -44,6 +45,7 @@ export enum TableName {
    Identity = "identities",
    IdentityAccessToken = "identity_access_tokens",
    IdentityUniversalAuth = "identity_universal_auths",
+   IdentityKubernetesAuth = "identity_kubernetes_auths",
    IdentityGcpAuth = "identity_gcp_auths",
    IdentityUaClientSecret = "identity_ua_client_secrets",
    IdentityAwsAuth = "identity_aws_auths",
@@ -145,6 +147,7 @@ export enum ProjectUpgradeStatus {

  export enum IdentityAuthMethod {
    Univeral = "universal-auth",
+   KUBERNETES_AUTH = "kubernetes-auth",
    GCP_AUTH = "gcp-auth",
    AWS_AUTH = "aws-auth"
  }
backend/src/db/schemas/secret-references.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SecretReferencesSchema = z.object({
  id: z.string().uuid(),
  environment: z.string(),
  secretPath: z.string(),
  secretId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TSecretReferences = z.infer<typeof SecretReferencesSchema>;
export type TSecretReferencesInsert = Omit<z.input<typeof SecretReferencesSchema>, TImmutableDBKeys>;
export type TSecretReferencesUpdate = Partial<Omit<z.input<typeof SecretReferencesSchema>, TImmutableDBKeys>>;
@@ -8,7 +8,7 @@ import { IDENTITY_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
  import { alphaNumericNanoId } from "@app/lib/nanoid";
  import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
  import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
- import { PermissionSchema, SanitizedIdentityPrivilegeSchema } from "@app/server/routes/sanitizedSchemas";
+ import { ProjectPermissionSchema, SanitizedIdentityPrivilegeSchema } from "@app/server/routes/sanitizedSchemas";
  import { AuthMode } from "@app/services/auth/auth-type";

  export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => {
@@ -39,7 +39,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
          })
          .optional()
          .describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
-       permissions: PermissionSchema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.permissions)
+       permissions: ProjectPermissionSchema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.permissions)
      }),
      response: {
        200: z.object({
@@ -90,7 +90,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
          })
          .optional()
          .describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
-       permissions: PermissionSchema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.permissions),
+       permissions: ProjectPermissionSchema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.permissions),
        temporaryMode: z
          .nativeEnum(IdentityProjectAdditionalPrivilegeTemporaryMode)
          .describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.temporaryMode),
@@ -155,7 +155,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
          message: "Slug must be a valid slug"
        })
        .describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.newSlug),
-     permissions: PermissionSchema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
+     permissions: ProjectPermissionSchema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
      isTemporary: z.boolean().describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.isTemporary),
      temporaryMode: z
        .nativeEnum(IdentityProjectAdditionalPrivilegeTemporaryMode)
@@ -51,6 +51,7 @@
    UNAUTHORIZE_INTEGRATION = "unauthorize-integration",
    CREATE_INTEGRATION = "create-integration",
    DELETE_INTEGRATION = "delete-integration",
+   MANUAL_SYNC_INTEGRATION = "manual-sync-integration",
    ADD_TRUSTED_IP = "add-trusted-ip",
    UPDATE_TRUSTED_IP = "update-trusted-ip",
    DELETE_TRUSTED_IP = "delete-trusted-ip",
@@ -63,6 +64,10 @@
    ADD_IDENTITY_UNIVERSAL_AUTH = "add-identity-universal-auth",
    UPDATE_IDENTITY_UNIVERSAL_AUTH = "update-identity-universal-auth",
    GET_IDENTITY_UNIVERSAL_AUTH = "get-identity-universal-auth",
+   LOGIN_IDENTITY_KUBERNETES_AUTH = "login-identity-kubernetes-auth",
+   ADD_IDENTITY_KUBERNETES_AUTH = "add-identity-kubernetes-auth",
+   UPDATE_IDENTITY_KUBENETES_AUTH = "update-identity-kubernetes-auth",
+   GET_IDENTITY_KUBERNETES_AUTH = "get-identity-kubernetes-auth",
    CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "create-identity-universal-auth-client-secret",
    REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "revoke-identity-universal-auth-client-secret",
    GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRETS = "get-identity-universal-auth-client-secret",
@ -277,6 +282,25 @@ interface DeleteIntegrationEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface ManualSyncIntegrationEvent {
|
||||
type: EventType.MANUAL_SYNC_INTEGRATION;
|
||||
metadata: {
|
||||
integrationId: string;
|
||||
integration: string;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
url?: string;
|
||||
app?: string;
|
||||
appId?: string;
|
||||
targetEnvironment?: string;
|
||||
targetEnvironmentId?: string;
|
||||
targetService?: string;
|
||||
targetServiceId?: string;
|
||||
path?: string;
|
||||
region?: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface AddTrustedIPEvent {
|
||||
type: EventType.ADD_TRUSTED_IP;
|
||||
metadata: {
|
||||
@ -391,6 +415,50 @@ interface GetIdentityUniversalAuthEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface LoginIdentityKubernetesAuthEvent {
|
||||
type: EventType.LOGIN_IDENTITY_KUBERNETES_AUTH;
|
||||
metadata: {
|
||||
identityId: string;
|
||||
identityKubernetesAuthId: string;
|
||||
identityAccessTokenId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface AddIdentityKubernetesAuthEvent {
|
||||
type: EventType.ADD_IDENTITY_KUBERNETES_AUTH;
|
||||
metadata: {
|
||||
identityId: string;
|
||||
kubernetesHost: string;
|
||||
allowedNamespaces: string;
|
||||
allowedNames: string;
|
||||
accessTokenTTL: number;
|
||||
accessTokenMaxTTL: number;
|
||||
accessTokenNumUsesLimit: number;
|
||||
accessTokenTrustedIps: Array<TIdentityTrustedIp>;
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdateIdentityKubernetesAuthEvent {
|
||||
type: EventType.UPDATE_IDENTITY_KUBENETES_AUTH;
|
||||
metadata: {
|
||||
identityId: string;
|
||||
kubernetesHost?: string;
|
||||
allowedNamespaces?: string;
|
||||
allowedNames?: string;
|
||||
accessTokenTTL?: number;
|
||||
accessTokenMaxTTL?: number;
|
||||
accessTokenNumUsesLimit?: number;
|
||||
accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetIdentityKubernetesAuthEvent {
|
||||
type: EventType.GET_IDENTITY_KUBERNETES_AUTH;
|
||||
metadata: {
|
||||
identityId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface CreateIdentityUniversalAuthClientSecretEvent {
|
||||
type: EventType.CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET;
|
||||
metadata: {
|
||||
@@ -743,6 +811,7 @@
    | UnauthorizeIntegrationEvent
    | CreateIntegrationEvent
    | DeleteIntegrationEvent
+   | ManualSyncIntegrationEvent
    | AddTrustedIPEvent
    | UpdateTrustedIPEvent
    | DeleteTrustedIPEvent
@@ -755,6 +824,10 @@
    | AddIdentityUniversalAuthEvent
    | UpdateIdentityUniversalAuthEvent
    | GetIdentityUniversalAuthEvent
+   | LoginIdentityKubernetesAuthEvent
+   | AddIdentityKubernetesAuthEvent
+   | UpdateIdentityKubernetesAuthEvent
+   | GetIdentityKubernetesAuthEvent
    | CreateIdentityUniversalAuthClientSecretEvent
    | GetIdentityUniversalAuthClientSecretsEvent
    | RevokeIdentityUniversalAuthClientSecretEvent
@@ -7,12 +7,15 @@
    SecretType,
    TSecretApprovalRequestsSecretsInsert
  } from "@app/db/schemas";
+ import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
  import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
  import { groupBy, pick, unique } from "@app/lib/fn";
  import { alphaNumericNanoId } from "@app/lib/nanoid";
  import { ActorType } from "@app/services/auth/auth-type";
  import { TProjectDALFactory } from "@app/services/project/project-dal";
+ import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
  import { TSecretDALFactory } from "@app/services/secret/secret-dal";
+ import { getAllNestedSecretReferences } from "@app/services/secret/secret-fns";
  import { TSecretQueueFactory } from "@app/services/secret/secret-queue";
  import { TSecretServiceFactory } from "@app/services/secret/secret-service";
  import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal";
@ -53,6 +56,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
|
||||
secretVersionDAL: Pick<TSecretVersionDALFactory, "findLatestVersionMany" | "insertMany">;
|
||||
secretVersionTagDAL: Pick<TSecretVersionTagDALFactory, "insertMany">;
|
||||
projectDAL: Pick<TProjectDALFactory, "checkProjectUpgradeStatus">;
|
||||
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
|
||||
secretService: Pick<
|
||||
TSecretServiceFactory,
|
||||
| "fnSecretBulkInsert"
|
||||
@ -80,7 +84,8 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
snapshotService,
|
||||
secretService,
|
||||
secretVersionDAL,
|
||||
secretQueueService
|
||||
secretQueueService,
|
||||
projectBotService
|
||||
}: TSecretApprovalRequestServiceFactoryDep) => {
|
||||
const requestCount = async ({ projectId, actor, actorId, actorOrgId, actorAuthMethod }: TApprovalRequestCountDTO) => {
|
||||
if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });
|
||||
@ -352,7 +357,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
}
|
||||
|
||||
const secretDeletionCommits = secretApprovalSecrets.filter(({ op }) => op === CommitType.Delete);
|
||||
|
||||
const botKey = await projectBotService.getBotKey(projectId).catch(() => null);
|
||||
const mergeStatus = await secretApprovalRequestDAL.transaction(async (tx) => {
|
||||
const newSecrets = secretCreationCommits.length
|
||||
? await secretService.fnSecretBulkInsert({
|
||||
@ -379,7 +384,17 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
]),
|
||||
tags: el?.tags.map(({ id }) => id),
|
||||
version: 1,
|
||||
type: SecretType.Shared
|
||||
type: SecretType.Shared,
|
||||
references: botKey
|
||||
? getAllNestedSecretReferences(
|
||||
decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: el.secretValueCiphertext,
|
||||
iv: el.secretValueIV,
|
||||
tag: el.secretValueTag,
|
||||
key: botKey
|
||||
})
|
||||
)
|
||||
: undefined
|
||||
})),
|
||||
secretDAL,
|
||||
secretVersionDAL,
|
||||
@ -414,7 +429,17 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
"secretReminderNote",
|
||||
"secretReminderRepeatDays",
|
||||
"secretBlindIndex"
|
||||
])
|
||||
]),
|
||||
references: botKey
|
||||
? getAllNestedSecretReferences(
|
||||
decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: el.secretValueCiphertext,
|
||||
iv: el.secretValueIV,
|
||||
tag: el.secretValueTag,
|
||||
key: botKey
|
||||
})
|
||||
)
|
||||
: undefined
|
||||
}
|
||||
})),
|
||||
secretDAL,
|
||||
|
@@ -90,15 +90,17 @@ export const secretScanningServiceFactory = ({
      const {
        data: { repositories }
      } = await octokit.apps.listReposAccessibleToInstallation();
-     await Promise.all(
-       repositories.map(({ id, full_name }) =>
-         secretScanningQueue.startFullRepoScan({
-           organizationId: session.orgId,
-           installationId,
-           repository: { id, fullName: full_name }
-         })
-       )
-     );
+     if (!appCfg.DISABLE_SECRET_SCANNING) {
+       await Promise.all(
+         repositories.map(({ id, full_name }) =>
+           secretScanningQueue.startFullRepoScan({
+             organizationId: session.orgId,
+             installationId,
+             repository: { id, fullName: full_name }
+           })
+         )
+       );
+     }
      return { installatedApp };
    };

@@ -151,6 +153,7 @@
    };

    const handleRepoPushEvent = async (payload: WebhookEventMap["push"]) => {
+     const appCfg = getConfig();
      const { commits, repository, installation, pusher } = payload;
      if (!commits || !repository || !installation || !pusher) {
        return;
@@ -161,13 +164,15 @@
      });
      if (!installationLink) return;

-     await secretScanningQueue.startPushEventScan({
-       commits,
-       pusher: { name: pusher.name, email: pusher.email },
-       repository: { fullName: repository.full_name, id: repository.id },
-       organizationId: installationLink.orgId,
-       installationId: String(installation?.id)
-     });
+     if (!appCfg.DISABLE_SECRET_SCANNING) {
+       await secretScanningQueue.startPushEventScan({
+         commits,
+         pusher: { name: pusher.name, email: pusher.email },
+         repository: { fullName: repository.full_name, id: repository.id },
+         organizationId: installationLink.orgId,
+         installationId: String(installation?.id)
+       });
+     }
    };

    const handleRepoDeleteEvent = async (installationId: string, repositoryIds: string[]) => {
@@ -89,6 +89,9 @@ export const UNIVERSAL_AUTH = {
    },
    RENEW_ACCESS_TOKEN: {
      accessToken: "The access token to renew."
+   },
+   REVOKE_ACCESS_TOKEN: {
+     accessToken: "The access token to revoke."
    }
  } as const;

@ -145,36 +148,6 @@ export const PROJECTS = {
|
||||
name: "The new name of the project.",
|
||||
autoCapitalization: "Disable or enable auto-capitalization for the project."
|
||||
},
|
||||
INVITE_MEMBER: {
|
||||
projectId: "The ID of the project to invite the member to.",
|
||||
emails: "A list of organization member emails to invite to the project.",
|
||||
usernames: "A list of usernames to invite to the project."
|
||||
},
|
||||
REMOVE_MEMBER: {
|
||||
projectId: "The ID of the project to remove the member from.",
|
||||
emails: "A list of organization member emails to remove from the project.",
|
||||
usernames: "A list of usernames to remove from the project."
|
||||
},
|
||||
GET_USER_MEMBERSHIPS: {
|
||||
workspaceId: "The ID of the project to get memberships from."
|
||||
},
|
||||
UPDATE_USER_MEMBERSHIP: {
|
||||
workspaceId: "The ID of the project to update the membership for.",
|
||||
membershipId: "The ID of the membership to update.",
|
||||
roles: "A list of roles to update the membership to."
|
||||
},
|
||||
LIST_IDENTITY_MEMBERSHIPS: {
|
||||
projectId: "The ID of the project to get identity memberships from."
|
||||
},
|
||||
UPDATE_IDENTITY_MEMBERSHIP: {
|
||||
projectId: "The ID of the project to update the identity membership for.",
|
||||
identityId: "The ID of the identity to update the membership for.",
|
||||
roles: "A list of roles to update the membership to."
|
||||
},
|
||||
DELETE_IDENTITY_MEMBERSHIP: {
|
||||
projectId: "The ID of the project to delete the identity membership from.",
|
||||
identityId: "The ID of the identity to delete the membership from."
|
||||
},
|
||||
GET_KEY: {
|
||||
workspaceId: "The ID of the project to get the key from."
|
||||
},
|
||||
@ -213,6 +186,70 @@ export const PROJECTS = {
|
||||
}
|
||||
} as const;
|
||||
|
||||
export const PROJECT_USERS = {
|
||||
INVITE_MEMBER: {
|
||||
projectId: "The ID of the project to invite the member to.",
|
||||
emails: "A list of organization member emails to invite to the project.",
|
||||
usernames: "A list of usernames to invite to the project."
|
||||
},
|
||||
REMOVE_MEMBER: {
|
||||
projectId: "The ID of the project to remove the member from.",
|
||||
emails: "A list of organization member emails to remove from the project.",
|
||||
usernames: "A list of usernames to remove from the project."
|
||||
},
|
||||
GET_USER_MEMBERSHIPS: {
|
||||
workspaceId: "The ID of the project to get memberships from."
|
||||
},
|
||||
GET_USER_MEMBERSHIP: {
|
||||
workspaceId: "The ID of the project to get memberships from.",
|
||||
username: "The username to get project membership of. Email is the default username."
|
||||
},
|
||||
UPDATE_USER_MEMBERSHIP: {
|
||||
workspaceId: "The ID of the project to update the membership for.",
|
||||
membershipId: "The ID of the membership to update.",
|
||||
roles: "A list of roles to update the membership to."
|
||||
}
|
||||
};
|
||||
|
||||
export const PROJECT_IDENTITIES = {
|
||||
LIST_IDENTITY_MEMBERSHIPS: {
|
||||
projectId: "The ID of the project to get identity memberships from."
|
||||
},
|
||||
GET_IDENTITY_MEMBERSHIP_BY_ID: {
|
||||
identityId: "The ID of the identity to get the membership for.",
|
||||
projectId: "The ID of the project to get the identity membership for."
|
||||
},
|
||||
UPDATE_IDENTITY_MEMBERSHIP: {
|
||||
projectId: "The ID of the project to update the identity membership for.",
|
||||
identityId: "The ID of the identity to update the membership for.",
|
||||
roles: {
|
||||
description: "A list of role slugs to assign to the identity project membership.",
|
||||
role: "The role slug to assign to the newly created identity project membership.",
|
||||
isTemporary: "Whether the assigned role is temporary.",
|
||||
temporaryMode: "Type of temporary expiry.",
|
||||
temporaryRange: "Expiry time for temporary access. In relative mode it could be 1s,2m,3h",
|
||||
temporaryAccessStartTime: "Time to which the temporary access starts"
|
||||
}
|
||||
},
|
||||
DELETE_IDENTITY_MEMBERSHIP: {
|
||||
projectId: "The ID of the project to delete the identity membership from.",
|
||||
identityId: "The ID of the identity to delete the membership from."
|
||||
},
|
||||
CREATE_IDENTITY_MEMBERSHIP: {
|
||||
projectId: "The ID of the project to create the identity membership from.",
|
||||
identityId: "The ID of the identity to create the membership from.",
|
||||
role: "The role slug to assign to the newly created identity project membership.",
|
||||
roles: {
|
||||
description: "A list of role slugs to assign to the newly created identity project membership.",
|
||||
role: "The role slug to assign to the newly created identity project membership.",
|
||||
isTemporary: "Whether the assigned role is temporary.",
|
||||
temporaryMode: "Type of temporary expiry.",
|
||||
temporaryRange: "Expiry time for temporary access. In relative mode it could be 1s,2m,3h",
|
||||
temporaryAccessStartTime: "Time to which the temporary access starts"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
export const ENVIRONMENTS = {
|
||||
CREATE: {
|
||||
workspaceId: "The ID of the project to create the environment in.",
|
||||
@@ -646,6 +683,9 @@
    },
    DELETE: {
      integrationId: "The ID of the integration object."
+   },
+   SYNC: {
+     integrationId: "The ID of the integration object to manually sync"
    }
  };

@@ -13,6 +13,10 @@ const zodStrBool = z
  const envSchema = z
    .object({
      PORT: z.coerce.number().default(4000),
+     DISABLE_SECRET_SCANNING: z
+       .enum(["true", "false"])
+       .default("false")
+       .transform((el) => el === "true"),
      REDIS_URL: zpStr(z.string()),
      HOST: zpStr(z.string().default("localhost")),
      DB_CONNECTION_URI: zpStr(z.string().describe("Postgres database connection string")).default(
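The `DISABLE_SECRET_SCANNING` flag arrives as a string from the environment and is turned into a boolean by the zod transform above. A minimal standalone sketch of that behaviour (the variable name is illustrative):

```ts
import { z } from "zod";

// Mirrors the transform added above: only the exact string "true" enables the flag.
const boolFromEnv = z.enum(["true", "false"]).default("false").transform((el) => el === "true");

console.log(boolFromEnv.parse(undefined)); // false – default applies
console.log(boolFromEnv.parse("true"));    // true
```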
@@ -30,6 +30,37 @@ const loggerConfig = z.object({
    NODE_ENV: z.enum(["development", "test", "production"]).default("production")
  });

+ const redactedKeys = [
+   "accessToken",
+   "authToken",
+   "serviceToken",
+   "identityAccessToken",
+   "token",
+   "privateKey",
+   "serverPrivateKey",
+   "plainPrivateKey",
+   "plainProjectKey",
+   "encryptedPrivateKey",
+   "userPrivateKey",
+   "protectedKey",
+   "decryptKey",
+   "encryptedProjectKey",
+   "encryptedSymmetricKey",
+   "encryptedPrivateKey",
+   "backupPrivateKey",
+   "secretKey",
+   "SecretKey",
+   "botPrivateKey",
+   "encryptedKey",
+   "plaintextProjectKey",
+   "accessKey",
+   "botKey",
+   "decryptedSecret",
+   "secrets",
+   "key",
+   "password"
+ ];
+
  export const initLogger = async () => {
    const cfg = loggerConfig.parse(process.env);
    const targets: pino.TransportMultiOptions["targets"][number][] = [
@@ -74,7 +105,9 @@
          hostname: bindings.hostname
          // node_version: process.version
        })
      }
    },
+   // redact until depth of three
+   redact: [...redactedKeys, ...redactedKeys.map((key) => `*.${key}`), ...redactedKeys.map((key) => `*.*.${key}`)]
  },
  // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
  transport
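pino applies these redaction paths when serializing log objects, and the `*.` and `*.*.` prefixes generated above extend the masking to nested keys up to three levels deep. A small sketch of the same idea outside the factory (the key list is abbreviated for illustration):

```ts
import pino from "pino";

const redactedKeys = ["accessToken", "secretKey"]; // abbreviated list
const logger = pino({
  // redact until depth of three, as in the change above
  redact: [...redactedKeys, ...redactedKeys.map((k) => `*.${k}`), ...redactedKeys.map((k) => `*.*.${k}`)]
});

// Both values are replaced with "[Redacted]" in the emitted log line.
logger.info({ accessToken: "abc", user: { secretKey: "xyz" } }, "sensitive fields are masked");
```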
@@ -65,7 +65,13 @@ export type TQueueJobTypes = {
    };
    [QueueName.IntegrationSync]: {
      name: QueueJobs.IntegrationSync;
-     payload: { projectId: string; environment: string; secretPath: string; depth?: number };
+     payload: {
+       projectId: string;
+       environment: string;
+       secretPath: string;
+       depth?: number;
+       deDupeQueue?: Record<string, boolean>;
+     };
    };
    [QueueName.SecretFullRepoScan]: {
      name: QueueJobs.SecretScan;
@@ -36,7 +36,7 @@ export const writeLimit: RateLimitOptions = {
  export const secretsLimit: RateLimitOptions = {
    // secrets, folders, secret imports
    timeWindow: 60 * 1000,
-   max: 1000,
+   max: 60,
    keyGenerator: (req) => req.realIp
  };

@ -1,5 +1,6 @@
|
||||
import fp from "fastify-plugin";
|
||||
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
|
||||
// inject permission type needed based on auth extracted
|
||||
@ -15,6 +16,10 @@ export const injectPermission = fp(async (server) => {
|
||||
orgId: req.auth.orgId, // if the req.auth.authMode is AuthMode.API_KEY, the orgId will be "API_KEY"
|
||||
authMethod: req.auth.authMethod // if the req.auth.authMode is AuthMode.API_KEY, the authMethod will be null
|
||||
};
|
||||
|
||||
logger.info(
|
||||
`injectPermission: Injecting permissions for [permissionsForIdentity=${req.auth.userId}] [type=${ActorType.USER}]`
|
||||
);
|
||||
} else if (req.auth.actor === ActorType.IDENTITY) {
|
||||
req.permission = {
|
||||
type: ActorType.IDENTITY,
|
||||
@ -22,6 +27,10 @@ export const injectPermission = fp(async (server) => {
|
||||
orgId: req.auth.orgId,
|
||||
authMethod: null
|
||||
};
|
||||
|
||||
logger.info(
|
||||
`injectPermission: Injecting permissions for [permissionsForIdentity=${req.auth.identityId}] [type=${ActorType.IDENTITY}]`
|
||||
);
|
||||
} else if (req.auth.actor === ActorType.SERVICE) {
|
||||
req.permission = {
|
||||
type: ActorType.SERVICE,
|
||||
@ -29,6 +38,10 @@ export const injectPermission = fp(async (server) => {
|
||||
orgId: req.auth.orgId,
|
||||
authMethod: null
|
||||
};
|
||||
|
||||
logger.info(
|
||||
`injectPermission: Injecting permissions for [permissionsForIdentity=${req.auth.serviceTokenId}] [type=${ActorType.SERVICE}]`
|
||||
);
|
||||
} else if (req.auth.actor === ActorType.SCIM_CLIENT) {
|
||||
req.permission = {
|
||||
type: ActorType.SCIM_CLIENT,
|
||||
@ -36,6 +49,10 @@ export const injectPermission = fp(async (server) => {
|
||||
orgId: req.auth.orgId,
|
||||
authMethod: null
|
||||
};
|
||||
|
||||
logger.info(
|
||||
`injectPermission: Injecting permissions for [permissionsForIdentity=${req.auth.scimTokenId}] [type=${ActorType.SCIM_CLIENT}]`
|
||||
);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
@@ -5,8 +5,13 @@ import { getConfig } from "@app/lib/config/env";
  export const maintenanceMode = fp(async (fastify) => {
    fastify.addHook("onRequest", async (req) => {
      const serverEnvs = getConfig();
-     if (req.url !== "/api/v1/auth/checkAuth" && req.method !== "GET" && serverEnvs.MAINTENANCE_MODE) {
-       throw new Error("Infisical is in maintenance mode. Please try again later.");
+     if (serverEnvs.MAINTENANCE_MODE) {
+       // skip if its universal auth login or renew
+       if (req.url === "/api/v1/auth/universal-auth/login" && req.method === "POST") return;
+       if (req.url === "/api/v1/auth/token/renew" && req.method === "POST") return;
+       if (req.url !== "/api/v1/auth/checkAuth" && req.method !== "GET") {
+         throw new Error("Infisical is in maintenance mode. Please try again later.");
+       }
      }
    });
  });
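The reworked hook only blocks mutating requests while maintenance mode is on, and now exempts machine-identity login and token renewal. A hedged restatement of that decision logic as a pure function (the function name is illustrative, not part of the change):

```ts
// Returns true when a request should be rejected while MAINTENANCE_MODE is set.
const isBlockedDuringMaintenance = (url: string, method: string): boolean => {
  if (url === "/api/v1/auth/universal-auth/login" && method === "POST") return false;
  if (url === "/api/v1/auth/token/renew" && method === "POST") return false;
  return url !== "/api/v1/auth/checkAuth" && method !== "GET";
};
```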
@@ -82,6 +82,8 @@ import { identityAwsAuthDALFactory } from "@app/services/identity-aws-auth/ident
  import { identityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
  import { identityGcpAuthDALFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-dal";
  import { identityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
+ import { identityKubernetesAuthDALFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-dal";
+ import { identityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
  import { identityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
  import { identityProjectMembershipRoleDALFactory } from "@app/services/identity-project/identity-project-membership-role-dal";
  import { identityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
@@ -158,7 +160,10 @@ export const registerRoutes = async (
      keyStore
    }: { db: Knex; smtp: TSmtpService; queue: TQueueServiceFactory; keyStore: TKeyStoreFactory }
  ) => {
-   await server.register(registerSecretScannerGhApp, { prefix: "/ss-webhook" });
+   const appCfg = getConfig();
+   if (!appCfg.DISABLE_SECRET_SCANNING) {
+     await server.register(registerSecretScannerGhApp, { prefix: "/ss-webhook" });
+   }

    // db layers
    const userDAL = userDALFactory(db);
@@ -204,6 +209,7 @@ export const registerRoutes = async (
    const identityProjectAdditionalPrivilegeDAL = identityProjectAdditionalPrivilegeDALFactory(db);

    const identityUaDAL = identityUaDALFactory(db);
+   const identityKubernetesAuthDAL = identityKubernetesAuthDALFactory(db);
    const identityUaClientSecretDAL = identityUaClientSecretDALFactory(db);
    const identityAwsAuthDAL = identityAwsAuthDALFactory(db);

@@ -604,6 +610,7 @@
    });
    const sarService = secretApprovalRequestServiceFactory({
      permissionService,
+     projectBotService,
      folderDAL,
      secretDAL,
      secretTagDAL,
@@ -708,6 +715,15 @@
      identityUaDAL,
      licenseService
    });
+   const identityKubernetesAuthService = identityKubernetesAuthServiceFactory({
+     identityKubernetesAuthDAL,
+     identityOrgMembershipDAL,
+     identityAccessTokenDAL,
+     identityDAL,
+     orgBotDAL,
+     permissionService,
+     licenseService
+   });
    const identityGcpAuthService = identityGcpAuthServiceFactory({
      identityGcpAuthDAL,
      identityOrgMembershipDAL,
@@ -794,6 +810,7 @@
      identityAccessToken: identityAccessTokenService,
      identityProject: identityProjectService,
      identityUa: identityUaService,
+     identityKubernetesAuth: identityKubernetesAuthService,
      identityGcpAuth: identityGcpAuthService,
      identityAwsAuth: identityAwsAuthService,
      secretApprovalPolicy: sapService,
@@ -8,6 +8,7 @@ import {
    UsersSchema
  } from "@app/db/schemas";
  import { UnpackedPermissionSchema } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
+ import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";

  // sometimes the return data must be santizied to avoid leaking important values
  // always prefer pick over omit in zod
@@ -64,14 +65,12 @@ export const secretRawSchema = z.object({
    secretComment: z.string().optional()
  });

- export const PermissionSchema = z.object({
+ export const ProjectPermissionSchema = z.object({
    action: z
-     .string()
-     .min(1)
+     .nativeEnum(ProjectPermissionActions)
      .describe("Describe what action an entity can take. Possible actions: create, edit, delete, and read"),
    subject: z
-     .string()
-     .min(1)
+     .nativeEnum(ProjectPermissionSub)
      .describe("The entity this permission pertains to. Possible options: secrets, environments"),
    conditions: z
      .object({
@ -20,16 +20,23 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
|
||||
schema: {
|
||||
response: {
|
||||
200: z.object({
|
||||
config: SuperAdminSchema.omit({ createdAt: true, updatedAt: true }).merge(
|
||||
z.object({ isMigrationModeOn: z.boolean() })
|
||||
)
|
||||
config: SuperAdminSchema.omit({ createdAt: true, updatedAt: true }).extend({
|
||||
isMigrationModeOn: z.boolean(),
|
||||
isSecretScanningDisabled: z.boolean()
|
||||
})
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async () => {
|
||||
const config = await getServerCfg();
|
||||
const serverEnvs = getConfig();
|
||||
return { config: { ...config, isMigrationModeOn: serverEnvs.MAINTENANCE_MODE } };
|
||||
return {
|
||||
config: {
|
||||
...config,
|
||||
isMigrationModeOn: serverEnvs.MAINTENANCE_MODE,
|
||||
isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING
|
||||
}
|
||||
};
|
||||
}
|
||||
});
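For illustration, the handler above now returns the stored server config merged with the two runtime flags; a response body would look roughly like the sketch below (other SuperAdminSchema fields are elided and all values are invented):

// Hypothetical response body for the admin config route (values are placeholders).
const exampleAdminConfigResponse = {
  config: {
    // ...remaining SuperAdminSchema fields, minus createdAt/updatedAt per the omit() above
    isMigrationModeOn: false, // mirrors serverEnvs.MAINTENANCE_MODE
    isSecretScanningDisabled: false // mirrors serverEnvs.DISABLE_SECRET_SCANNING
  }
};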
|
||||
|
||||
|
@ -36,4 +36,29 @@ export const registerIdentityAccessTokenRouter = async (server: FastifyZodProvid
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/token/revoke",
|
||||
method: "POST",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Revoke access token",
|
||||
body: z.object({
|
||||
accessToken: z.string().trim().describe(UNIVERSAL_AUTH.REVOKE_ACCESS_TOKEN.accessToken)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
message: z.string()
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
await server.services.identityAccessToken.revokeAccessToken(req.body.accessToken);
|
||||
return {
|
||||
message: "Successfully revoked access token"
|
||||
};
|
||||
}
|
||||
});
|
||||
};
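A minimal client-side sketch of the new revoke route; the /api/v1/auth prefix is an assumption about where registerIdentityAccessTokenRouter is mounted, and note that the token to revoke travels in the request body rather than in an Authorization header:

// Illustrative only: revoke a machine identity access token.
const revokeIdentityAccessToken = async (baseUrl: string, accessToken: string) => {
  const res = await fetch(`${baseUrl}/api/v1/auth/token/revoke`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ accessToken })
  });
  if (!res.ok) throw new Error(`Revoke failed with status ${res.status}`);
  return (await res.json()) as { message: string };
};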
|
||||
|
backend/src/server/routes/v1/identity-kubernetes-auth-router.ts
@ -0,0 +1,283 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { IdentityKubernetesAuthsSchema } from "@app/db/schemas";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
|
||||
|
||||
const IdentityKubernetesAuthResponseSchema = IdentityKubernetesAuthsSchema.omit({
|
||||
encryptedCaCert: true,
|
||||
caCertIV: true,
|
||||
caCertTag: true,
|
||||
encryptedTokenReviewerJwt: true,
|
||||
tokenReviewerJwtIV: true,
|
||||
tokenReviewerJwtTag: true
|
||||
}).extend({
|
||||
caCert: z.string(),
|
||||
tokenReviewerJwt: z.string()
|
||||
});
|
||||
|
||||
export const registerIdentityKubernetesRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/kubernetes-auth/login",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Login with Kubernetes Auth",
|
||||
body: z.object({
|
||||
identityId: z.string().trim(),
|
||||
jwt: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
accessToken: z.string(),
|
||||
expiresIn: z.coerce.number(),
|
||||
accessTokenMaxTTL: z.coerce.number(),
|
||||
tokenType: z.literal("Bearer")
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { identityKubernetesAuth, accessToken, identityAccessToken, identityMembershipOrg } =
|
||||
await server.services.identityKubernetesAuth.login({
|
||||
identityId: req.body.identityId,
|
||||
jwt: req.body.jwt
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
orgId: identityMembershipOrg?.orgId,
|
||||
event: {
|
||||
type: EventType.LOGIN_IDENTITY_KUBERNETES_AUTH,
|
||||
metadata: {
|
||||
identityId: identityKubernetesAuth.identityId,
|
||||
identityAccessTokenId: identityAccessToken.id,
|
||||
identityKubernetesAuthId: identityKubernetesAuth.id
|
||||
}
|
||||
}
|
||||
});
|
||||
return {
|
||||
accessToken,
|
||||
tokenType: "Bearer" as const,
|
||||
expiresIn: identityKubernetesAuth.accessTokenTTL,
|
||||
accessTokenMaxTTL: identityKubernetesAuth.accessTokenMaxTTL
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/kubernetes-auth/identities/:identityId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Attach Kubernetes Auth configuration onto identity",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
identityId: z.string().trim()
|
||||
}),
|
||||
body: z.object({
|
||||
kubernetesHost: z.string().trim().min(1),
|
||||
caCert: z.string().trim().default(""),
|
||||
tokenReviewerJwt: z.string().trim().min(1),
|
||||
allowedNamespaces: z.string(), // TODO: validation
|
||||
allowedNames: z.string(),
|
||||
allowedAudience: z.string(),
|
||||
accessTokenTrustedIps: z
|
||||
.object({
|
||||
ipAddress: z.string().trim()
|
||||
})
|
||||
.array()
|
||||
.min(1)
|
||||
.default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }]),
|
||||
accessTokenTTL: z
|
||||
.number()
|
||||
.int()
|
||||
.min(1)
|
||||
.refine((value) => value !== 0, {
|
||||
message: "accessTokenTTL must have a non zero number"
|
||||
})
|
||||
.default(2592000),
|
||||
accessTokenMaxTTL: z
|
||||
.number()
|
||||
.int()
|
||||
.refine((value) => value !== 0, {
|
||||
message: "accessTokenMaxTTL must have a non zero number"
|
||||
})
|
||||
.default(2592000),
|
||||
accessTokenNumUsesLimit: z.number().int().min(0).default(0)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
identityKubernetesAuth: IdentityKubernetesAuthResponseSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const identityKubernetesAuth = await server.services.identityKubernetesAuth.attachKubernetesAuth({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body,
|
||||
identityId: req.params.identityId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
orgId: identityKubernetesAuth.orgId,
|
||||
event: {
|
||||
type: EventType.ADD_IDENTITY_KUBERNETES_AUTH,
|
||||
metadata: {
|
||||
identityId: identityKubernetesAuth.identityId,
|
||||
kubernetesHost: identityKubernetesAuth.kubernetesHost,
|
||||
allowedNamespaces: identityKubernetesAuth.allowedNamespaces,
|
||||
allowedNames: identityKubernetesAuth.allowedNames,
|
||||
accessTokenTTL: identityKubernetesAuth.accessTokenTTL,
|
||||
accessTokenMaxTTL: identityKubernetesAuth.accessTokenMaxTTL,
|
||||
accessTokenTrustedIps: identityKubernetesAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
|
||||
accessTokenNumUsesLimit: identityKubernetesAuth.accessTokenNumUsesLimit
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { identityKubernetesAuth: IdentityKubernetesAuthResponseSchema.parse(identityKubernetesAuth) };
|
||||
}
|
||||
});
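An illustrative payload for the attach route above. allowedNamespaces and allowedNames are plain strings that the service later splits on commas, so multiple values are passed comma-separated; the host, JWT, and namespace values below are placeholders:

// Hypothetical body for POST /kubernetes-auth/identities/:identityId.
const attachKubernetesAuthBody = {
  kubernetesHost: "https://kubernetes.default.svc",
  caCert: "", // optional PEM bundle; when empty the service skips TLS verification
  tokenReviewerJwt: "<token-reviewer-service-account-jwt>",
  allowedNamespaces: "default,infisical", // comma-separated
  allowedNames: "infisical-auth", // comma-separated
  allowedAudience: "",
  accessTokenTTL: 2592000,
  accessTokenMaxTTL: 2592000,
  accessTokenNumUsesLimit: 0,
  accessTokenTrustedIps: [{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }]
};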
|
||||
|
||||
server.route({
|
||||
method: "PATCH",
|
||||
url: "/kubernetes-auth/identities/:identityId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Update Kubernetes Auth configuration on identity",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
identityId: z.string()
|
||||
}),
|
||||
body: z.object({
|
||||
kubernetesHost: z.string().trim().min(1).optional(),
|
||||
caCert: z.string().trim().optional(),
|
||||
tokenReviewerJwt: z.string().trim().min(1).optional(),
|
||||
allowedNamespaces: z.string().optional(), // TODO: validation
|
||||
allowedNames: z.string().optional(),
|
||||
allowedAudience: z.string().optional(),
|
||||
accessTokenTrustedIps: z
|
||||
.object({
|
||||
ipAddress: z.string().trim()
|
||||
})
|
||||
.array()
|
||||
.min(1)
|
||||
.optional(),
|
||||
accessTokenTTL: z.number().int().min(0).optional(),
|
||||
accessTokenNumUsesLimit: z.number().int().min(0).optional(),
|
||||
accessTokenMaxTTL: z
|
||||
.number()
|
||||
.int()
|
||||
.refine((value) => value !== 0, {
|
||||
message: "accessTokenMaxTTL must have a non zero number"
|
||||
})
|
||||
.optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
identityKubernetesAuth: IdentityKubernetesAuthsSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const identityKubernetesAuth = await server.services.identityKubernetesAuth.updateKubernetesAuth({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body,
|
||||
identityId: req.params.identityId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
orgId: identityKubernetesAuth.orgId,
|
||||
event: {
|
||||
type: EventType.UPDATE_IDENTITY_KUBENETES_AUTH,
|
||||
metadata: {
|
||||
identityId: identityKubernetesAuth.identityId,
|
||||
kubernetesHost: identityKubernetesAuth.kubernetesHost,
|
||||
allowedNamespaces: identityKubernetesAuth.allowedNamespaces,
|
||||
allowedNames: identityKubernetesAuth.allowedNames,
|
||||
accessTokenTTL: identityKubernetesAuth.accessTokenTTL,
|
||||
accessTokenMaxTTL: identityKubernetesAuth.accessTokenMaxTTL,
|
||||
accessTokenTrustedIps: identityKubernetesAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
|
||||
accessTokenNumUsesLimit: identityKubernetesAuth.accessTokenNumUsesLimit
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { identityKubernetesAuth };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/kubernetes-auth/identities/:identityId",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Retrieve Kubernetes Auth configuration on identity",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
identityId: z.string()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
identityKubernetesAuth: IdentityKubernetesAuthResponseSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const identityKubernetesAuth = await server.services.identityKubernetesAuth.getKubernetesAuth({
|
||||
identityId: req.params.identityId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
orgId: identityKubernetesAuth.orgId,
|
||||
event: {
|
||||
type: EventType.GET_IDENTITY_KUBERNETES_AUTH,
|
||||
metadata: {
|
||||
identityId: identityKubernetesAuth.identityId
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { identityKubernetesAuth: IdentityKubernetesAuthResponseSchema.parse(identityKubernetesAuth) };
|
||||
}
|
||||
});
|
||||
};
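A hedged sketch of how a pod workload would use the login route above: read the projected service-account token and exchange it, together with the identity id, for an Infisical access token. The /api/v1/auth prefix and the token path are assumptions (the path shown is the Kubernetes default):

import { readFileSync } from "fs";

// Illustrative Kubernetes Auth login flow.
const loginWithKubernetesAuth = async (baseUrl: string, identityId: string) => {
  const serviceAccountJwt = readFileSync("/var/run/secrets/kubernetes.io/serviceaccount/token", "utf8");
  const res = await fetch(`${baseUrl}/api/v1/auth/kubernetes-auth/login`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ identityId, jwt: serviceAccountJwt })
  });
  if (!res.ok) throw new Error(`Kubernetes Auth login failed: ${res.status}`);
  return (await res.json()) as {
    accessToken: string;
    expiresIn: number;
    accessTokenMaxTTL: number;
    tokenType: "Bearer";
  };
};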
|
@ -4,6 +4,7 @@ import { registerProjectBotRouter } from "./bot-router";
|
||||
import { registerIdentityAccessTokenRouter } from "./identity-access-token-router";
|
||||
import { registerIdentityAwsAuthRouter } from "./identity-aws-iam-auth-router";
|
||||
import { registerIdentityGcpAuthRouter } from "./identity-gcp-auth-router";
|
||||
import { registerIdentityKubernetesRouter } from "./identity-kubernetes-auth-router";
|
||||
import { registerIdentityRouter } from "./identity-router";
|
||||
import { registerIdentityUaRouter } from "./identity-ua";
|
||||
import { registerIntegrationAuthRouter } from "./integration-auth-router";
|
||||
@ -29,6 +30,7 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
|
||||
async (authRouter) => {
|
||||
await authRouter.register(registerAuthRoutes);
|
||||
await authRouter.register(registerIdentityUaRouter);
|
||||
await authRouter.register(registerIdentityKubernetesRouter);
|
||||
await authRouter.register(registerIdentityGcpAuthRouter);
|
||||
await authRouter.register(registerIdentityAccessTokenRouter);
|
||||
await authRouter.register(registerIdentityAwsAuthRouter);
|
||||
|
@ -143,8 +143,8 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
|
||||
integrationId: z.string().trim().describe(INTEGRATION.UPDATE.integrationId)
|
||||
}),
|
||||
body: z.object({
|
||||
app: z.string().trim().describe(INTEGRATION.UPDATE.app),
|
||||
appId: z.string().trim().describe(INTEGRATION.UPDATE.appId),
|
||||
app: z.string().trim().optional().describe(INTEGRATION.UPDATE.app),
|
||||
appId: z.string().trim().optional().describe(INTEGRATION.UPDATE.appId),
|
||||
isActive: z.boolean().describe(INTEGRATION.UPDATE.isActive),
|
||||
secretPath: z
|
||||
.string()
|
||||
@ -154,7 +154,33 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
|
||||
.describe(INTEGRATION.UPDATE.secretPath),
|
||||
targetEnvironment: z.string().trim().describe(INTEGRATION.UPDATE.targetEnvironment),
|
||||
owner: z.string().trim().describe(INTEGRATION.UPDATE.owner),
|
||||
environment: z.string().trim().describe(INTEGRATION.UPDATE.environment)
|
||||
environment: z.string().trim().describe(INTEGRATION.UPDATE.environment),
|
||||
metadata: z
|
||||
.object({
|
||||
secretPrefix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretPrefix),
|
||||
secretSuffix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretSuffix),
|
||||
initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir),
|
||||
shouldAutoRedeploy: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldAutoRedeploy),
|
||||
secretGCPLabel: z
|
||||
.object({
|
||||
labelName: z.string(),
|
||||
labelValue: z.string()
|
||||
})
|
||||
.optional()
|
||||
.describe(INTEGRATION.CREATE.metadata.secretGCPLabel),
|
||||
secretAWSTag: z
|
||||
.array(
|
||||
z.object({
|
||||
key: z.string(),
|
||||
value: z.string()
|
||||
})
|
||||
)
|
||||
.optional()
|
||||
.describe(INTEGRATION.CREATE.metadata.secretAWSTag),
|
||||
kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId),
|
||||
shouldDisableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete)
|
||||
})
|
||||
.optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@ -236,5 +262,64 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
// TODO(akhilmhdh-pg): manual sync
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/:integrationId/sync",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Manually trigger sync of an integration by integration id",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
integrationId: z.string().trim().describe(INTEGRATION.SYNC.integrationId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
integration: IntegrationsSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const integration = await server.services.integration.syncIntegration({
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
id: req.params.integrationId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: integration.projectId,
|
||||
event: {
|
||||
type: EventType.MANUAL_SYNC_INTEGRATION,
|
||||
// eslint-disable-next-line
|
||||
metadata: shake({
|
||||
integrationId: integration.id,
|
||||
integration: integration.integration,
|
||||
environment: integration.environment.slug,
|
||||
secretPath: integration.secretPath,
|
||||
url: integration.url,
|
||||
app: integration.app,
|
||||
appId: integration.appId,
|
||||
targetEnvironment: integration.targetEnvironment,
|
||||
targetEnvironmentId: integration.targetEnvironmentId,
|
||||
targetService: integration.targetService,
|
||||
targetServiceId: integration.targetServiceId,
|
||||
path: integration.path,
|
||||
region: integration.region
|
||||
// eslint-disable-next-line
|
||||
}) as any
|
||||
}
|
||||
});
|
||||
|
||||
return { integration };
|
||||
}
|
||||
});
|
||||
};
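The audit-log metadata for the manual-sync event above is wrapped in shake, which, assuming it is the radash helper imported elsewhere in this file, drops keys whose values are undefined so that integrations without optional fields such as app or region do not pollute the log entry:

import { shake } from "radash"; // assumption: shake here is radash's helper

// Minimal sketch of the effect on the metadata object.
const metadata = shake({
  integrationId: "abc123",
  integration: "github",
  app: undefined, // optional integration fields are simply omitted
  region: "us-east-1"
});
// metadata === { integrationId: "abc123", integration: "github", region: "us-east-1" }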
|
||||
|
@ -9,7 +9,7 @@ import {
|
||||
UsersSchema
|
||||
} from "@app/db/schemas";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { PROJECTS } from "@app/lib/api-docs";
|
||||
import { PROJECT_USERS } from "@app/lib/api-docs";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
@ -30,7 +30,7 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
workspaceId: z.string().trim().describe(PROJECTS.GET_USER_MEMBERSHIPS.workspaceId)
|
||||
workspaceId: z.string().trim().describe(PROJECT_USERS.GET_USER_MEMBERSHIPS.workspaceId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@ -74,6 +74,66 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/:workspaceId/memberships/details",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Return project user memberships",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
workspaceId: z.string().min(1).trim().describe(PROJECT_USERS.GET_USER_MEMBERSHIP.workspaceId)
|
||||
}),
|
||||
body: z.object({
|
||||
username: z.string().min(1).trim().describe(PROJECT_USERS.GET_USER_MEMBERSHIP.username)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
membership: ProjectMembershipsSchema.extend({
|
||||
user: UsersSchema.pick({
|
||||
email: true,
|
||||
firstName: true,
|
||||
lastName: true,
|
||||
id: true
|
||||
}).merge(UserEncryptionKeysSchema.pick({ publicKey: true })),
|
||||
roles: z.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
role: z.string(),
|
||||
customRoleId: z.string().optional().nullable(),
|
||||
customRoleName: z.string().optional().nullable(),
|
||||
customRoleSlug: z.string().optional().nullable(),
|
||||
isTemporary: z.boolean(),
|
||||
temporaryMode: z.string().optional().nullable(),
|
||||
temporaryRange: z.string().nullable().optional(),
|
||||
temporaryAccessStartTime: z.date().nullable().optional(),
|
||||
temporaryAccessEndTime: z.date().nullable().optional()
|
||||
})
|
||||
)
|
||||
}).omit({ createdAt: true, updatedAt: true })
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const membership = await server.services.projectMembership.getProjectMembershipByUsername({
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
projectId: req.params.workspaceId,
|
||||
username: req.body.username
|
||||
});
|
||||
return { membership };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/:workspaceId/memberships",
|
||||
@ -142,8 +202,8 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
workspaceId: z.string().trim().describe(PROJECTS.UPDATE_USER_MEMBERSHIP.workspaceId),
|
||||
membershipId: z.string().trim().describe(PROJECTS.UPDATE_USER_MEMBERSHIP.membershipId)
|
||||
workspaceId: z.string().trim().describe(PROJECT_USERS.UPDATE_USER_MEMBERSHIP.workspaceId),
|
||||
membershipId: z.string().trim().describe(PROJECT_USERS.UPDATE_USER_MEMBERSHIP.membershipId)
|
||||
}),
|
||||
body: z.object({
|
||||
roles: z
|
||||
@ -164,7 +224,7 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
|
||||
)
|
||||
.min(1)
|
||||
.refine((data) => data.some(({ isTemporary }) => !isTemporary), "At least one long lived role is required")
|
||||
.describe(PROJECTS.UPDATE_USER_MEMBERSHIP.roles)
|
||||
.describe(PROJECT_USERS.UPDATE_USER_MEMBERSHIP.roles)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
|
@ -7,7 +7,8 @@ import {
|
||||
ProjectMembershipRole,
|
||||
ProjectUserMembershipRolesSchema
|
||||
} from "@app/db/schemas";
|
||||
import { PROJECTS } from "@app/lib/api-docs";
|
||||
import { PROJECT_IDENTITIES } from "@app/lib/api-docs";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
@ -22,12 +23,48 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Create project identity membership",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
projectId: z.string().trim(),
|
||||
identityId: z.string().trim()
|
||||
}),
|
||||
body: z.object({
|
||||
role: z.string().trim().min(1).default(ProjectMembershipRole.NoAccess)
|
||||
// @deprecated
|
||||
role: z.string().trim().optional().default(ProjectMembershipRole.NoAccess),
|
||||
roles: z
|
||||
.array(
|
||||
z.union([
|
||||
z.object({
|
||||
role: z.string().describe(PROJECT_IDENTITIES.CREATE_IDENTITY_MEMBERSHIP.roles.role),
|
||||
isTemporary: z
|
||||
.literal(false)
|
||||
.default(false)
|
||||
.describe(PROJECT_IDENTITIES.CREATE_IDENTITY_MEMBERSHIP.roles.role)
|
||||
}),
|
||||
z.object({
|
||||
role: z.string().describe(PROJECT_IDENTITIES.CREATE_IDENTITY_MEMBERSHIP.roles.role),
|
||||
isTemporary: z.literal(true).describe(PROJECT_IDENTITIES.CREATE_IDENTITY_MEMBERSHIP.roles.role),
|
||||
temporaryMode: z
|
||||
.nativeEnum(ProjectUserMembershipTemporaryMode)
|
||||
.describe(PROJECT_IDENTITIES.CREATE_IDENTITY_MEMBERSHIP.roles.role),
|
||||
temporaryRange: z
|
||||
.string()
|
||||
.refine((val) => ms(val) > 0, "Temporary range must be a positive number")
|
||||
.describe(PROJECT_IDENTITIES.CREATE_IDENTITY_MEMBERSHIP.roles.role),
|
||||
temporaryAccessStartTime: z
|
||||
.string()
|
||||
.datetime()
|
||||
.describe(PROJECT_IDENTITIES.CREATE_IDENTITY_MEMBERSHIP.roles.role)
|
||||
})
|
||||
])
|
||||
)
|
||||
.describe(PROJECT_IDENTITIES.CREATE_IDENTITY_MEMBERSHIP.roles.description)
|
||||
.optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@ -36,6 +73,9 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { role, roles } = req.body;
|
||||
if (!role && !roles) throw new BadRequestError({ message: "You must provide either role or roles field" });
|
||||
|
||||
const identityMembership = await server.services.identityProject.createProjectIdentity({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
@ -43,7 +83,7 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
|
||||
actorOrgId: req.permission.orgId,
|
||||
identityId: req.params.identityId,
|
||||
projectId: req.params.projectId,
|
||||
role: req.body.role
|
||||
roles: roles || [{ role }]
|
||||
});
|
||||
return { identityMembership };
|
||||
}
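To make the union above concrete, a request may mix permanent and temporary role assignments; the structure mirrors the Zod schema, while the role slugs, mode value, and timestamp below are placeholders:

// Hypothetical body for creating a project identity membership with roles.
const exampleCreateMembershipBody = {
  roles: [
    { role: "viewer" }, // isTemporary defaults to false
    {
      role: "admin",
      isTemporary: true,
      temporaryMode: "relative", // assumed value of ProjectUserMembershipTemporaryMode
      temporaryRange: "1h", // parsed with ms(); must be a positive duration
      temporaryAccessStartTime: "2024-01-01T00:00:00.000Z"
    }
  ]
};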
|
||||
@ -64,28 +104,39 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
projectId: z.string().trim().describe(PROJECTS.UPDATE_IDENTITY_MEMBERSHIP.projectId),
|
||||
identityId: z.string().trim().describe(PROJECTS.UPDATE_IDENTITY_MEMBERSHIP.identityId)
|
||||
projectId: z.string().trim().describe(PROJECT_IDENTITIES.UPDATE_IDENTITY_MEMBERSHIP.projectId),
|
||||
identityId: z.string().trim().describe(PROJECT_IDENTITIES.UPDATE_IDENTITY_MEMBERSHIP.identityId)
|
||||
}),
|
||||
body: z.object({
|
||||
roles: z
|
||||
.array(
|
||||
z.union([
|
||||
z.object({
|
||||
role: z.string(),
|
||||
isTemporary: z.literal(false).default(false)
|
||||
role: z.string().describe(PROJECT_IDENTITIES.UPDATE_IDENTITY_MEMBERSHIP.roles.role),
|
||||
isTemporary: z
|
||||
.literal(false)
|
||||
.default(false)
|
||||
.describe(PROJECT_IDENTITIES.UPDATE_IDENTITY_MEMBERSHIP.roles.isTemporary)
|
||||
}),
|
||||
z.object({
|
||||
role: z.string(),
|
||||
isTemporary: z.literal(true),
|
||||
temporaryMode: z.nativeEnum(ProjectUserMembershipTemporaryMode),
|
||||
temporaryRange: z.string().refine((val) => ms(val) > 0, "Temporary range must be a positive number"),
|
||||
temporaryAccessStartTime: z.string().datetime()
|
||||
role: z.string().describe(PROJECT_IDENTITIES.UPDATE_IDENTITY_MEMBERSHIP.roles.role),
|
||||
isTemporary: z.literal(true).describe(PROJECT_IDENTITIES.UPDATE_IDENTITY_MEMBERSHIP.roles.isTemporary),
|
||||
temporaryMode: z
|
||||
.nativeEnum(ProjectUserMembershipTemporaryMode)
|
||||
.describe(PROJECT_IDENTITIES.UPDATE_IDENTITY_MEMBERSHIP.roles.temporaryMode),
|
||||
temporaryRange: z
|
||||
.string()
|
||||
.refine((val) => ms(val) > 0, "Temporary range must be a positive number")
|
||||
.describe(PROJECT_IDENTITIES.UPDATE_IDENTITY_MEMBERSHIP.roles.temporaryRange),
|
||||
temporaryAccessStartTime: z
|
||||
.string()
|
||||
.datetime()
|
||||
.describe(PROJECT_IDENTITIES.UPDATE_IDENTITY_MEMBERSHIP.roles.temporaryAccessStartTime)
|
||||
})
|
||||
])
|
||||
)
|
||||
.min(1)
|
||||
.describe(PROJECTS.UPDATE_IDENTITY_MEMBERSHIP.roles)
|
||||
.describe(PROJECT_IDENTITIES.UPDATE_IDENTITY_MEMBERSHIP.roles.description)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@ -122,8 +173,8 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
projectId: z.string().trim().describe(PROJECTS.DELETE_IDENTITY_MEMBERSHIP.projectId),
|
||||
identityId: z.string().trim().describe(PROJECTS.DELETE_IDENTITY_MEMBERSHIP.identityId)
|
||||
projectId: z.string().trim().describe(PROJECT_IDENTITIES.DELETE_IDENTITY_MEMBERSHIP.projectId),
|
||||
identityId: z.string().trim().describe(PROJECT_IDENTITIES.DELETE_IDENTITY_MEMBERSHIP.identityId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@ -159,7 +210,7 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
projectId: z.string().trim().describe(PROJECTS.LIST_IDENTITY_MEMBERSHIPS.projectId)
|
||||
projectId: z.string().trim().describe(PROJECT_IDENTITIES.LIST_IDENTITY_MEMBERSHIPS.projectId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@ -200,4 +251,61 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
|
||||
return { identityMemberships };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:projectId/identity-memberships/:identityId",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Return project identity membership",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
projectId: z.string().trim().describe(PROJECT_IDENTITIES.GET_IDENTITY_MEMBERSHIP_BY_ID.projectId),
|
||||
identityId: z.string().trim().describe(PROJECT_IDENTITIES.GET_IDENTITY_MEMBERSHIP_BY_ID.identityId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
identityMembership: z.object({
|
||||
id: z.string(),
|
||||
identityId: z.string(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
roles: z.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
role: z.string(),
|
||||
customRoleId: z.string().optional().nullable(),
|
||||
customRoleName: z.string().optional().nullable(),
|
||||
customRoleSlug: z.string().optional().nullable(),
|
||||
isTemporary: z.boolean(),
|
||||
temporaryMode: z.string().optional().nullable(),
|
||||
temporaryRange: z.string().nullable().optional(),
|
||||
temporaryAccessStartTime: z.date().nullable().optional(),
|
||||
temporaryAccessEndTime: z.date().nullable().optional()
|
||||
})
|
||||
),
|
||||
identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true })
|
||||
})
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const identityMembership = await server.services.identityProject.getProjectIdentityByIdentityId({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
projectId: req.params.projectId,
|
||||
identityId: req.params.identityId
|
||||
});
|
||||
return { identityMembership };
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@ -2,7 +2,7 @@ import { z } from "zod";
|
||||
|
||||
import { ProjectMembershipsSchema } from "@app/db/schemas";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { PROJECTS } from "@app/lib/api-docs";
|
||||
import { PROJECT_USERS } from "@app/lib/api-docs";
|
||||
import { writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
@ -22,11 +22,11 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
projectId: z.string().describe(PROJECTS.INVITE_MEMBER.projectId)
|
||||
projectId: z.string().describe(PROJECT_USERS.INVITE_MEMBER.projectId)
|
||||
}),
|
||||
body: z.object({
|
||||
emails: z.string().email().array().default([]).describe(PROJECTS.INVITE_MEMBER.emails),
|
||||
usernames: z.string().array().default([]).describe(PROJECTS.INVITE_MEMBER.usernames)
|
||||
emails: z.string().email().array().default([]).describe(PROJECT_USERS.INVITE_MEMBER.emails),
|
||||
usernames: z.string().array().default([]).describe(PROJECT_USERS.INVITE_MEMBER.usernames)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@ -77,11 +77,11 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
projectId: z.string().describe(PROJECTS.REMOVE_MEMBER.projectId)
|
||||
projectId: z.string().describe(PROJECT_USERS.REMOVE_MEMBER.projectId)
|
||||
}),
|
||||
body: z.object({
|
||||
emails: z.string().email().array().default([]).describe(PROJECTS.REMOVE_MEMBER.emails),
|
||||
usernames: z.string().array().default([]).describe(PROJECTS.REMOVE_MEMBER.usernames)
|
||||
emails: z.string().email().array().default([]).describe(PROJECT_USERS.REMOVE_MEMBER.emails),
|
||||
usernames: z.string().array().default([]).describe(PROJECT_USERS.REMOVE_MEMBER.usernames)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
|
@ -1926,4 +1926,41 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
return { secrets };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/backfill-secret-references",
|
||||
config: {
|
||||
rateLimit: secretsLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Backfill secret references",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
body: z.object({
|
||||
projectId: z.string().trim().min(1)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
message: z.string()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const { projectId } = req.body;
|
||||
const message = await server.services.secret.backfillSecretReferences({
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
projectId
|
||||
});
|
||||
|
||||
return message;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@ -1,7 +1,7 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TDbClient } from "@app/db";
|
||||
import { TableName, TIdentityAccessTokens } from "@app/db/schemas";
|
||||
import { IdentityAuthMethod, TableName, TIdentityAccessTokens } from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import { ormify, selectAllTableCols } from "@app/lib/knex";
|
||||
|
||||
@ -15,23 +15,56 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
|
||||
const doc = await (tx || db)(TableName.IdentityAccessToken)
|
||||
.where(filter)
|
||||
.join(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityAccessToken}.identityId`)
|
||||
.leftJoin(
|
||||
TableName.IdentityUaClientSecret,
|
||||
`${TableName.IdentityAccessToken}.identityUAClientSecretId`,
|
||||
`${TableName.IdentityUaClientSecret}.id`
|
||||
)
|
||||
.leftJoin(
|
||||
TableName.IdentityUniversalAuth,
|
||||
`${TableName.IdentityUaClientSecret}.identityUAId`,
|
||||
`${TableName.IdentityUniversalAuth}.id`
|
||||
)
|
||||
.leftJoin(TableName.IdentityUaClientSecret, (qb) => {
|
||||
qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.Univeral])).andOn(
|
||||
`${TableName.IdentityAccessToken}.identityUAClientSecretId`,
|
||||
`${TableName.IdentityUaClientSecret}.id`
|
||||
);
|
||||
})
|
||||
.leftJoin(TableName.IdentityUniversalAuth, (qb) => {
|
||||
qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.Univeral])).andOn(
|
||||
`${TableName.IdentityUaClientSecret}.identityUAId`,
|
||||
`${TableName.IdentityUniversalAuth}.id`
|
||||
);
|
||||
})
|
||||
.leftJoin(TableName.IdentityGcpAuth, (qb) => {
|
||||
qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.GCP_AUTH])).andOn(
|
||||
`${TableName.Identity}.id`,
|
||||
`${TableName.IdentityGcpAuth}.identityId`
|
||||
);
|
||||
})
|
||||
.leftJoin(TableName.IdentityAwsAuth, (qb) => {
|
||||
qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.AWS_AUTH])).andOn(
|
||||
`${TableName.Identity}.id`,
|
||||
`${TableName.IdentityAwsAuth}.identityId`
|
||||
);
|
||||
})
|
||||
.leftJoin(TableName.IdentityKubernetesAuth, (qb) => {
|
||||
qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.KUBERNETES_AUTH])).andOn(
|
||||
`${TableName.Identity}.id`,
|
||||
`${TableName.IdentityKubernetesAuth}.identityId`
|
||||
);
|
||||
})
|
||||
.select(selectAllTableCols(TableName.IdentityAccessToken))
|
||||
.select(
|
||||
db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityUniversalAuth),
|
||||
db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityUniversalAuth).as("accessTokenTrustedIpsUa"),
|
||||
db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityGcpAuth).as("accessTokenTrustedIpsGcp"),
|
||||
db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityAwsAuth).as("accessTokenTrustedIpsAws"),
|
||||
db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityKubernetesAuth).as("accessTokenTrustedIpsK8s"),
|
||||
db.ref("name").withSchema(TableName.Identity)
|
||||
)
|
||||
.first();
|
||||
return doc;
|
||||
|
||||
if (!doc) return;
|
||||
|
||||
return {
|
||||
...doc,
|
||||
accessTokenTrustedIps:
|
||||
doc.accessTokenTrustedIpsUa ||
|
||||
doc.accessTokenTrustedIpsGcp ||
|
||||
doc.accessTokenTrustedIpsAws ||
|
||||
doc.accessTokenTrustedIpsK8s
|
||||
};
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "IdAccessTokenFindOne" });
|
||||
}
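To make the fallback chain above concrete: after the aliased selects run, at most one of the four per-auth-method columns is non-null (the joins are gated on the identity's authMethod), and that value becomes accessTokenTrustedIps. The element shape below is an assumption about what extractIPDetails stores; everything else mirrors the query:

// Illustrative row for a GCP-auth identity (shapes and values assumed).
const exampleDoc = {
  id: "token-id",
  identityId: "identity-id",
  name: "ci-runner",
  accessTokenTrustedIpsUa: null,
  accessTokenTrustedIpsGcp: [{ ipAddress: "0.0.0.0", type: "ipv4", prefix: 0 }],
  accessTokenTrustedIpsAws: null,
  accessTokenTrustedIpsK8s: null
};
const accessTokenTrustedIps =
  exampleDoc.accessTokenTrustedIpsUa ||
  exampleDoc.accessTokenTrustedIpsGcp ||
  exampleDoc.accessTokenTrustedIpsAws ||
  exampleDoc.accessTokenTrustedIpsK8s;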
|
||||
|
@ -106,6 +106,24 @@ export const identityAccessTokenServiceFactory = ({
|
||||
return { accessToken, identityAccessToken: updatedIdentityAccessToken };
|
||||
};
|
||||
|
||||
const revokeAccessToken = async (accessToken: string) => {
|
||||
const appCfg = getConfig();
|
||||
|
||||
const decodedToken = jwt.verify(accessToken, appCfg.AUTH_SECRET) as JwtPayload & {
|
||||
identityAccessTokenId: string;
|
||||
};
|
||||
if (decodedToken.authTokenType !== AuthTokenType.IDENTITY_ACCESS_TOKEN) throw new UnauthorizedError();
|
||||
|
||||
const identityAccessToken = await identityAccessTokenDAL.findOne({
|
||||
[`${TableName.IdentityAccessToken}.id` as "id"]: decodedToken.identityAccessTokenId,
|
||||
isAccessTokenRevoked: false
|
||||
});
|
||||
if (!identityAccessToken) throw new UnauthorizedError();
|
||||
|
||||
const revokedToken = await identityAccessTokenDAL.deleteById(identityAccessToken.id);
|
||||
return { revokedToken };
|
||||
};
|
||||
|
||||
const fnValidateIdentityAccessToken = async (token: TIdentityAccessTokenJwtPayload, ipAddress?: string) => {
|
||||
const identityAccessToken = await identityAccessTokenDAL.findOne({
|
||||
[`${TableName.IdentityAccessToken}.id` as "id"]: token.identityAccessTokenId,
|
||||
@ -132,5 +150,5 @@ export const identityAccessTokenServiceFactory = ({
|
||||
return { ...identityAccessToken, orgId: identityOrgMembership.orgId };
|
||||
};
|
||||
|
||||
return { renewAccessToken, fnValidateIdentityAccessToken };
|
||||
return { renewAccessToken, revokeAccessToken, fnValidateIdentityAccessToken };
|
||||
};
|
||||
|
@ -0,0 +1,10 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TIdentityKubernetesAuthDALFactory = ReturnType<typeof identityKubernetesAuthDALFactory>;

export const identityKubernetesAuthDALFactory = (db: TDbClient) => {
  const kubernetesAuthOrm = ormify(db, TableName.IdentityKubernetesAuth);
  return kubernetesAuthOrm;
};
|
@ -0,0 +1,15 @@
/**
 * Extracts the K8s service account name and namespace
 * from the username in this format: system:serviceaccount:default:infisical-auth
 */
export const extractK8sUsername = (username: string) => {
  const parts = username.split(":");
  // Ensure that the username format is correct
  if (parts.length === 4 && parts[0] === "system" && parts[1] === "serviceaccount") {
    return {
      namespace: parts[2],
      name: parts[3]
    };
  }
  throw new Error("Invalid username format");
};
|
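A small usage sketch for the helper above, using the example subject from its doc comment:

// The TokenReview username is split into its namespace and service-account name.
const { namespace, name } = extractK8sUsername("system:serviceaccount:default:infisical-auth");
// namespace === "default", name === "infisical-auth"
// Any other shape (e.g. a regular user subject) throws "Invalid username format".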
@ -0,0 +1,515 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import axios from "axios";
|
||||
import https from "https";
|
||||
import jwt from "jsonwebtoken";
|
||||
|
||||
import { IdentityAuthMethod, SecretKeyEncoding, TIdentityKubernetesAuthsUpdate } from "@app/db/schemas";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import {
|
||||
decryptSymmetric,
|
||||
encryptSymmetric,
|
||||
generateAsymmetricKeyPair,
|
||||
generateSymmetricKey,
|
||||
infisicalSymmetricDecrypt,
|
||||
infisicalSymmetricEncypt
|
||||
} from "@app/lib/crypto/encryption";
|
||||
import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
|
||||
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
|
||||
|
||||
import { AuthTokenType } from "../auth/auth-type";
|
||||
import { TIdentityDALFactory } from "../identity/identity-dal";
|
||||
import { TIdentityOrgDALFactory } from "../identity/identity-org-dal";
|
||||
import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal";
|
||||
import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identity-access-token-types";
|
||||
import { TIdentityKubernetesAuthDALFactory } from "./identity-kubernetes-auth-dal";
|
||||
import { extractK8sUsername } from "./identity-kubernetes-auth-fns";
|
||||
import {
|
||||
TAttachKubernetesAuthDTO,
|
||||
TCreateTokenReviewResponse,
|
||||
TGetKubernetesAuthDTO,
|
||||
TLoginKubernetesAuthDTO,
|
||||
TUpdateKubernetesAuthDTO
|
||||
} from "./identity-kubernetes-auth-types";
|
||||
|
||||
type TIdentityKubernetesAuthServiceFactoryDep = {
|
||||
identityKubernetesAuthDAL: Pick<
|
||||
TIdentityKubernetesAuthDALFactory,
|
||||
"create" | "findOne" | "transaction" | "updateById"
|
||||
>;
|
||||
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
|
||||
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "findById">;
|
||||
identityDAL: Pick<TIdentityDALFactory, "updateById">;
|
||||
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "transaction" | "create">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
};
|
||||
|
||||
export type TIdentityKubernetesAuthServiceFactory = ReturnType<typeof identityKubernetesAuthServiceFactory>;
|
||||
|
||||
export const identityKubernetesAuthServiceFactory = ({
|
||||
identityKubernetesAuthDAL,
|
||||
identityOrgMembershipDAL,
|
||||
identityAccessTokenDAL,
|
||||
identityDAL,
|
||||
orgBotDAL,
|
||||
permissionService,
|
||||
licenseService
|
||||
}: TIdentityKubernetesAuthServiceFactoryDep) => {
|
||||
const login = async ({ identityId, jwt: serviceAccountJwt }: TLoginKubernetesAuthDTO) => {
|
||||
const identityKubernetesAuth = await identityKubernetesAuthDAL.findOne({ identityId });
|
||||
if (!identityKubernetesAuth) throw new UnauthorizedError();
|
||||
|
||||
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({
|
||||
identityId: identityKubernetesAuth.identityId
|
||||
});
|
||||
if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" });
|
||||
|
||||
const orgBot = await orgBotDAL.findOne({ orgId: identityMembershipOrg.orgId });
|
||||
if (!orgBot) throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" });
|
||||
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: orgBot.encryptedSymmetricKey,
|
||||
iv: orgBot.symmetricKeyIV,
|
||||
tag: orgBot.symmetricKeyTag,
|
||||
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
});
|
||||
|
||||
const { encryptedCaCert, caCertIV, caCertTag, encryptedTokenReviewerJwt, tokenReviewerJwtIV, tokenReviewerJwtTag } =
|
||||
identityKubernetesAuth;
|
||||
|
||||
let caCert = "";
|
||||
if (encryptedCaCert && caCertIV && caCertTag) {
|
||||
caCert = decryptSymmetric({
|
||||
ciphertext: encryptedCaCert,
|
||||
iv: caCertIV,
|
||||
tag: caCertTag,
|
||||
key
|
||||
});
|
||||
}
|
||||
|
||||
let tokenReviewerJwt = "";
|
||||
if (encryptedTokenReviewerJwt && tokenReviewerJwtIV && tokenReviewerJwtTag) {
|
||||
tokenReviewerJwt = decryptSymmetric({
|
||||
ciphertext: encryptedTokenReviewerJwt,
|
||||
iv: tokenReviewerJwtIV,
|
||||
tag: tokenReviewerJwtTag,
|
||||
key
|
||||
});
|
||||
}
|
||||
|
||||
const { data }: { data: TCreateTokenReviewResponse } = await axios.post(
|
||||
`${identityKubernetesAuth.kubernetesHost}/apis/authentication.k8s.io/v1/tokenreviews`,
|
||||
{
|
||||
apiVersion: "authentication.k8s.io/v1",
|
||||
kind: "TokenReview",
|
||||
spec: {
|
||||
token: serviceAccountJwt
|
||||
}
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: `Bearer ${tokenReviewerJwt}`
|
||||
},
|
||||
httpsAgent: new https.Agent({
|
||||
ca: caCert,
|
||||
rejectUnauthorized: !!caCert
|
||||
})
|
||||
}
|
||||
);
|
||||
|
||||
if ("error" in data.status) throw new UnauthorizedError({ message: data.status.error });
|
||||
|
||||
// check the response to determine if the token is valid
|
||||
if (!(data.status && data.status.authenticated)) throw new UnauthorizedError();
|
||||
|
||||
const { namespace: targetNamespace, name: targetName } = extractK8sUsername(data.status.user.username);
|
||||
|
||||
if (identityKubernetesAuth.allowedNamespaces) {
|
||||
// validate if [targetNamespace] is in the list of allowed namespaces
|
||||
|
||||
const isNamespaceAllowed = identityKubernetesAuth.allowedNamespaces
|
||||
.split(",")
|
||||
.map((namespace) => namespace.trim())
|
||||
.some((namespace) => namespace === targetNamespace);
|
||||
|
||||
if (!isNamespaceAllowed) throw new UnauthorizedError();
|
||||
}
|
||||
|
||||
if (identityKubernetesAuth.allowedNames) {
|
||||
// validate if [targetName] is in the list of allowed names
|
||||
|
||||
const isNameAllowed = identityKubernetesAuth.allowedNames
|
||||
.split(",")
|
||||
.map((name) => name.trim())
|
||||
.some((name) => name === targetName);
|
||||
|
||||
if (!isNameAllowed) throw new UnauthorizedError();
|
||||
}
|
||||
|
||||
if (identityKubernetesAuth.allowedAudience) {
|
||||
// validate if [audience] is in the list of allowed audiences
|
||||
const isAudienceAllowed = data.status.audiences.some(
|
||||
(audience) => audience === identityKubernetesAuth.allowedAudience
|
||||
);
|
||||
|
||||
if (!isAudienceAllowed) throw new UnauthorizedError();
|
||||
}
|
||||
|
||||
const identityAccessToken = await identityKubernetesAuthDAL.transaction(async (tx) => {
|
||||
const newToken = await identityAccessTokenDAL.create(
|
||||
{
|
||||
identityId: identityKubernetesAuth.identityId,
|
||||
isAccessTokenRevoked: false,
|
||||
accessTokenTTL: identityKubernetesAuth.accessTokenTTL,
|
||||
accessTokenMaxTTL: identityKubernetesAuth.accessTokenMaxTTL,
|
||||
accessTokenNumUses: 0,
|
||||
accessTokenNumUsesLimit: identityKubernetesAuth.accessTokenNumUsesLimit
|
||||
},
|
||||
tx
|
||||
);
|
||||
return newToken;
|
||||
});
|
||||
|
||||
const appCfg = getConfig();
|
||||
const accessToken = jwt.sign(
|
||||
{
|
||||
identityId: identityKubernetesAuth.identityId,
|
||||
identityAccessTokenId: identityAccessToken.id,
|
||||
authTokenType: AuthTokenType.IDENTITY_ACCESS_TOKEN
|
||||
} as TIdentityAccessTokenJwtPayload,
|
||||
appCfg.AUTH_SECRET,
|
||||
{
|
||||
expiresIn:
|
||||
Number(identityAccessToken.accessTokenMaxTTL) === 0
|
||||
? undefined
|
||||
: Number(identityAccessToken.accessTokenMaxTTL)
|
||||
}
|
||||
);
|
||||
|
||||
return { accessToken, identityKubernetesAuth, identityAccessToken, identityMembershipOrg };
|
||||
};
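For reference, a successful TokenReview response from the Kubernetes API has roughly the shape below (values invented); the unsuccessful case carries status.error instead, which is what the "error" in data.status check above handles:

// Illustrative TokenReview response consumed by the login flow above.
const exampleTokenReview = {
  apiVersion: "authentication.k8s.io/v1",
  kind: "TokenReview",
  status: {
    authenticated: true,
    user: {
      username: "system:serviceaccount:default:infisical-auth",
      uid: "<service-account-uid>",
      groups: ["system:serviceaccounts", "system:serviceaccounts:default"]
    },
    audiences: ["https://kubernetes.default.svc"]
  }
};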
|
||||
|
||||
const attachKubernetesAuth = async ({
|
||||
identityId,
|
||||
kubernetesHost,
|
||||
caCert,
|
||||
tokenReviewerJwt,
|
||||
allowedNamespaces,
|
||||
allowedNames,
|
||||
allowedAudience,
|
||||
accessTokenTTL,
|
||||
accessTokenMaxTTL,
|
||||
accessTokenNumUsesLimit,
|
||||
accessTokenTrustedIps,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actor,
|
||||
actorOrgId
|
||||
}: TAttachKubernetesAuthDTO) => {
|
||||
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
|
||||
if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" });
|
||||
if (identityMembershipOrg.identity.authMethod)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to add Kubernetes Auth to already configured identity"
|
||||
});
|
||||
|
||||
if (accessTokenMaxTTL > 0 && accessTokenTTL > accessTokenMaxTTL) {
|
||||
throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" });
|
||||
}
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
identityMembershipOrg.orgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Identity);
|
||||
|
||||
const plan = await licenseService.getPlan(identityMembershipOrg.orgId);
|
||||
const reformattedAccessTokenTrustedIps = accessTokenTrustedIps.map((accessTokenTrustedIp) => {
|
||||
if (
|
||||
!plan.ipAllowlisting &&
|
||||
accessTokenTrustedIp.ipAddress !== "0.0.0.0/0" &&
|
||||
accessTokenTrustedIp.ipAddress !== "::/0"
|
||||
)
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to add IP access range to access token due to plan restriction. Upgrade plan to add IP access range."
|
||||
});
|
||||
if (!isValidIpOrCidr(accessTokenTrustedIp.ipAddress))
|
||||
throw new BadRequestError({
|
||||
message: "The IP is not a valid IPv4, IPv6, or CIDR block"
|
||||
});
|
||||
return extractIPDetails(accessTokenTrustedIp.ipAddress);
|
||||
});
|
||||
|
||||
const orgBot = await orgBotDAL.transaction(async (tx) => {
|
||||
const doc = await orgBotDAL.findOne({ orgId: identityMembershipOrg.orgId }, tx);
|
||||
if (doc) return doc;
|
||||
|
||||
const { privateKey, publicKey } = generateAsymmetricKeyPair();
|
||||
const key = generateSymmetricKey();
|
||||
const {
|
||||
ciphertext: encryptedPrivateKey,
|
||||
iv: privateKeyIV,
|
||||
tag: privateKeyTag,
|
||||
encoding: privateKeyKeyEncoding,
|
||||
algorithm: privateKeyAlgorithm
|
||||
} = infisicalSymmetricEncypt(privateKey);
|
||||
const {
|
||||
ciphertext: encryptedSymmetricKey,
|
||||
iv: symmetricKeyIV,
|
||||
tag: symmetricKeyTag,
|
||||
encoding: symmetricKeyKeyEncoding,
|
||||
algorithm: symmetricKeyAlgorithm
|
||||
} = infisicalSymmetricEncypt(key);
|
||||
|
||||
return orgBotDAL.create(
|
||||
{
|
||||
name: "Infisical org bot",
|
||||
publicKey,
|
||||
privateKeyIV,
|
||||
encryptedPrivateKey,
|
||||
symmetricKeyIV,
|
||||
symmetricKeyTag,
|
||||
encryptedSymmetricKey,
|
||||
symmetricKeyAlgorithm,
|
||||
orgId: identityMembershipOrg.orgId,
|
||||
privateKeyTag,
|
||||
privateKeyAlgorithm,
|
||||
privateKeyKeyEncoding,
|
||||
symmetricKeyKeyEncoding
|
||||
},
|
||||
tx
|
||||
);
|
||||
});
|
||||
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: orgBot.encryptedSymmetricKey,
|
||||
iv: orgBot.symmetricKeyIV,
|
||||
tag: orgBot.symmetricKeyTag,
|
||||
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
});
|
||||
|
||||
const { ciphertext: encryptedCaCert, iv: caCertIV, tag: caCertTag } = encryptSymmetric(caCert, key);
|
||||
const {
|
||||
ciphertext: encryptedTokenReviewerJwt,
|
||||
iv: tokenReviewerJwtIV,
|
||||
tag: tokenReviewerJwtTag
|
||||
} = encryptSymmetric(tokenReviewerJwt, key);
|
||||
|
||||
const identityKubernetesAuth = await identityKubernetesAuthDAL.transaction(async (tx) => {
|
||||
const doc = await identityKubernetesAuthDAL.create(
|
||||
{
|
||||
identityId: identityMembershipOrg.identityId,
|
||||
kubernetesHost,
|
||||
encryptedCaCert,
|
||||
caCertIV,
|
||||
caCertTag,
|
||||
encryptedTokenReviewerJwt,
|
||||
tokenReviewerJwtIV,
|
||||
tokenReviewerJwtTag,
|
||||
allowedNamespaces,
|
||||
allowedNames,
|
||||
allowedAudience,
|
||||
accessTokenMaxTTL,
|
||||
accessTokenTTL,
|
||||
accessTokenNumUsesLimit,
|
||||
accessTokenTrustedIps: JSON.stringify(reformattedAccessTokenTrustedIps)
|
||||
},
|
||||
tx
|
||||
);
|
||||
await identityDAL.updateById(
|
||||
identityMembershipOrg.identityId,
|
||||
{
|
||||
authMethod: IdentityAuthMethod.KUBERNETES_AUTH
|
||||
},
|
||||
tx
|
||||
);
|
||||
return doc;
|
||||
});
|
||||
|
||||
return { ...identityKubernetesAuth, caCert, tokenReviewerJwt, orgId: identityMembershipOrg.orgId };
|
||||
};
|
||||
|
||||
const updateKubernetesAuth = async ({
|
||||
identityId,
|
||||
kubernetesHost,
|
||||
caCert,
|
||||
tokenReviewerJwt,
|
||||
allowedNamespaces,
|
||||
allowedNames,
|
||||
allowedAudience,
|
||||
accessTokenTTL,
|
||||
accessTokenMaxTTL,
|
||||
accessTokenNumUsesLimit,
|
||||
accessTokenTrustedIps,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actor,
|
||||
actorOrgId
|
||||
}: TUpdateKubernetesAuthDTO) => {
|
||||
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
|
||||
if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" });
|
||||
if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.KUBERNETES_AUTH)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to update Kubernetes Auth"
|
||||
});
|
||||
|
||||
const identityKubernetesAuth = await identityKubernetesAuthDAL.findOne({ identityId });
|
||||
|
||||
if (
|
||||
(accessTokenMaxTTL || identityKubernetesAuth.accessTokenMaxTTL) > 0 &&
|
||||
(accessTokenTTL || identityKubernetesAuth.accessTokenTTL) >
|
||||
(accessTokenMaxTTL || identityKubernetesAuth.accessTokenMaxTTL)
|
||||
) {
|
||||
throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" });
}

const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
identityMembershipOrg.orgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);

const plan = await licenseService.getPlan(identityMembershipOrg.orgId);
const reformattedAccessTokenTrustedIps = accessTokenTrustedIps?.map((accessTokenTrustedIp) => {
if (
!plan.ipAllowlisting &&
accessTokenTrustedIp.ipAddress !== "0.0.0.0/0" &&
accessTokenTrustedIp.ipAddress !== "::/0"
)
throw new BadRequestError({
message:
"Failed to add IP access range to access token due to plan restriction. Upgrade plan to add IP access range."
});
if (!isValidIpOrCidr(accessTokenTrustedIp.ipAddress))
throw new BadRequestError({
message: "The IP is not a valid IPv4, IPv6, or CIDR block"
});
return extractIPDetails(accessTokenTrustedIp.ipAddress);
});

const updateQuery: TIdentityKubernetesAuthsUpdate = {
kubernetesHost,
allowedNamespaces,
allowedNames,
allowedAudience,
accessTokenMaxTTL,
accessTokenTTL,
accessTokenNumUsesLimit,
accessTokenTrustedIps: reformattedAccessTokenTrustedIps
? JSON.stringify(reformattedAccessTokenTrustedIps)
: undefined
};

const orgBot = await orgBotDAL.findOne({ orgId: identityMembershipOrg.orgId });
if (!orgBot) throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" });

const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
});

if (caCert !== undefined) {
const { ciphertext: encryptedCACert, iv: caCertIV, tag: caCertTag } = encryptSymmetric(caCert, key);
updateQuery.encryptedCaCert = encryptedCACert;
updateQuery.caCertIV = caCertIV;
updateQuery.caCertTag = caCertTag;
}

if (tokenReviewerJwt !== undefined) {
const {
ciphertext: encryptedTokenReviewerJwt,
iv: tokenReviewerJwtIV,
tag: tokenReviewerJwtTag
} = encryptSymmetric(tokenReviewerJwt, key);
updateQuery.encryptedTokenReviewerJwt = encryptedTokenReviewerJwt;
updateQuery.tokenReviewerJwtIV = tokenReviewerJwtIV;
updateQuery.tokenReviewerJwtTag = tokenReviewerJwtTag;
}

const updatedKubernetesAuth = await identityKubernetesAuthDAL.updateById(identityKubernetesAuth.id, updateQuery);

return { ...updatedKubernetesAuth, orgId: identityMembershipOrg.orgId };
};

const getKubernetesAuth = async ({
identityId,
actorId,
actor,
actorAuthMethod,
actorOrgId
}: TGetKubernetesAuthDTO) => {
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" });
if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.KUBERNETES_AUTH)
throw new BadRequestError({
message: "The identity does not have Kubernetes Auth attached"
});

const identityKubernetesAuth = await identityKubernetesAuthDAL.findOne({ identityId });

const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
identityMembershipOrg.orgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Identity);

const orgBot = await orgBotDAL.findOne({ orgId: identityMembershipOrg.orgId });
if (!orgBot) throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" });

const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
});

const { encryptedCaCert, caCertIV, caCertTag, encryptedTokenReviewerJwt, tokenReviewerJwtIV, tokenReviewerJwtTag } =
identityKubernetesAuth;

let caCert = "";
if (encryptedCaCert && caCertIV && caCertTag) {
caCert = decryptSymmetric({
ciphertext: encryptedCaCert,
iv: caCertIV,
tag: caCertTag,
key
});
}

let tokenReviewerJwt = "";
if (encryptedTokenReviewerJwt && tokenReviewerJwtIV && tokenReviewerJwtTag) {
tokenReviewerJwt = decryptSymmetric({
ciphertext: encryptedTokenReviewerJwt,
iv: tokenReviewerJwtIV,
tag: tokenReviewerJwtTag,
key
});
}

return { ...identityKubernetesAuth, caCert, tokenReviewerJwt, orgId: identityMembershipOrg.orgId };
};

return {
login,
attachKubernetesAuth,
updateKubernetesAuth,
getKubernetesAuth
};
};
@ -0,0 +1,61 @@
import { TProjectPermission } from "@app/lib/types";

export type TLoginKubernetesAuthDTO = {
identityId: string;
jwt: string;
};

export type TAttachKubernetesAuthDTO = {
identityId: string;
kubernetesHost: string;
caCert: string;
tokenReviewerJwt: string;
allowedNamespaces: string;
allowedNames: string;
allowedAudience: string;
accessTokenTTL: number;
accessTokenMaxTTL: number;
accessTokenNumUsesLimit: number;
accessTokenTrustedIps: { ipAddress: string }[];
} & Omit<TProjectPermission, "projectId">;

export type TUpdateKubernetesAuthDTO = {
identityId: string;
kubernetesHost?: string;
caCert?: string;
tokenReviewerJwt?: string;
allowedNamespaces?: string;
allowedNames?: string;
allowedAudience?: string;
accessTokenTTL?: number;
accessTokenMaxTTL?: number;
accessTokenNumUsesLimit?: number;
accessTokenTrustedIps?: { ipAddress: string }[];
} & Omit<TProjectPermission, "projectId">;

export type TGetKubernetesAuthDTO = {
identityId: string;
} & Omit<TProjectPermission, "projectId">;

type TCreateTokenReviewSuccessResponse = {
authenticated: true;
user: {
username: string;
uid: string;
groups: string[];
};
audiences: string[];
};

type TCreateTokenReviewErrorResponse = {
error: string;
};

export type TCreateTokenReviewResponse = {
apiVersion: "authentication.k8s.io/v1";
kind: "TokenReview";
spec: {
token: string;
};
status: TCreateTokenReviewSuccessResponse | TCreateTokenReviewErrorResponse;
};
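As an editor's aside (not part of the diff), here is a minimal TypeScript sketch of how a caller can narrow the TCreateTokenReviewResponse status union defined above; the helper name and the import path are illustrative assumptions.

// Editor's sketch: narrowing the TokenReview status union with an `in` check.
import { TCreateTokenReviewResponse } from "./identity-kubernetes-auth-types"; // assumed path

const getTokenReviewUser = (res: TCreateTokenReviewResponse) => {
  if ("error" in res.status) {
    // error branch of the union
    throw new Error(`Token review failed: ${res.status.error}`);
  }
  // success branch: user info is now strongly typed
  return res.status.user; // { username, uid, groups }
};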
@ -10,11 +10,16 @@ export type TIdentityProjectDALFactory = ReturnType<typeof identityProjectDALFac
export const identityProjectDALFactory = (db: TDbClient) => {
const identityProjectOrm = ormify(db, TableName.IdentityProjectMembership);

const findByProjectId = async (projectId: string, tx?: Knex) => {
const findByProjectId = async (projectId: string, filter: { identityId?: string } = {}, tx?: Knex) => {
try {
const docs = await (tx || db)(TableName.IdentityProjectMembership)
.where(`${TableName.IdentityProjectMembership}.projectId`, projectId)
.join(TableName.Identity, `${TableName.IdentityProjectMembership}.identityId`, `${TableName.Identity}.id`)
.where((qb) => {
if (filter.identityId) {
void qb.where("identityId", filter.identityId);
}
})
.join(
TableName.IdentityProjectMembershipRole,
`${TableName.IdentityProjectMembershipRole}.projectMembershipId`,
@ -18,6 +18,7 @@ import { TIdentityProjectMembershipRoleDALFactory } from "./identity-project-mem
import {
TCreateProjectIdentityDTO,
TDeleteProjectIdentityDTO,
TGetProjectIdentityByIdentityIdDTO,
TListProjectIdentityDTO,
TUpdateProjectIdentityDTO
} from "./identity-project-types";
@ -51,7 +52,7 @@ export const identityProjectServiceFactory = ({
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
projectId,
|
||||
role
|
||||
roles
|
||||
}: TCreateProjectIdentityDTO) => {
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
@ -78,17 +79,33 @@ export const identityProjectServiceFactory = ({
|
||||
message: `Failed to find identity with id ${identityId}`
|
||||
});
|
||||
|
||||
const { permission: rolePermission, role: customRole } = await permissionService.getProjectPermissionByRole(
|
||||
role,
|
||||
project.id
|
||||
);
|
||||
const hasPriviledge = isAtLeastAsPrivileged(permission, rolePermission);
|
||||
if (!hasPriviledge)
|
||||
throw new ForbiddenRequestError({
|
||||
message: "Failed to add identity to project with more privileged role"
|
||||
});
|
||||
const isCustomRole = Boolean(customRole);
|
||||
for await (const { role: requestedRoleChange } of roles) {
|
||||
const { permission: rolePermission } = await permissionService.getProjectPermissionByRole(
|
||||
requestedRoleChange,
|
||||
projectId
|
||||
);
|
||||
|
||||
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, rolePermission);
|
||||
|
||||
if (!hasRequiredPriviledges) {
|
||||
throw new ForbiddenRequestError({ message: "Failed to change to a more privileged role" });
|
||||
}
|
||||
}
|
||||
|
||||
// validate custom roles input
|
||||
const customInputRoles = roles.filter(
|
||||
({ role }) => !Object.values(ProjectMembershipRole).includes(role as ProjectMembershipRole)
|
||||
);
|
||||
const hasCustomRole = Boolean(customInputRoles.length);
|
||||
const customRoles = hasCustomRole
|
||||
? await projectRoleDAL.find({
|
||||
projectId,
|
||||
$in: { slug: customInputRoles.map(({ role }) => role) }
|
||||
})
|
||||
: [];
|
||||
if (customRoles.length !== customInputRoles.length) throw new BadRequestError({ message: "Custom role not found" });
|
||||
|
||||
const customRolesGroupBySlug = groupBy(customRoles, ({ slug }) => slug);
|
||||
const projectIdentity = await identityProjectDAL.transaction(async (tx) => {
|
||||
const identityProjectMembership = await identityProjectDAL.create(
|
||||
{
|
||||
@ -97,16 +114,32 @@ export const identityProjectServiceFactory = ({
|
||||
},
|
||||
tx
|
||||
);
|
||||
const sanitizedProjectMembershipRoles = roles.map((inputRole) => {
|
||||
const isCustomRole = Boolean(customRolesGroupBySlug?.[inputRole.role]?.[0]);
|
||||
if (!inputRole.isTemporary) {
|
||||
return {
|
||||
projectMembershipId: identityProjectMembership.id,
|
||||
role: isCustomRole ? ProjectMembershipRole.Custom : inputRole.role,
|
||||
customRoleId: customRolesGroupBySlug[inputRole.role] ? customRolesGroupBySlug[inputRole.role][0].id : null
|
||||
};
|
||||
}
|
||||
|
||||
await identityProjectMembershipRoleDAL.create(
|
||||
{
|
||||
// check cron or relative here later for now its just relative
|
||||
const relativeTimeInMs = ms(inputRole.temporaryRange);
|
||||
return {
|
||||
projectMembershipId: identityProjectMembership.id,
|
||||
role: isCustomRole ? ProjectMembershipRole.Custom : role,
|
||||
customRoleId: customRole?.id
|
||||
},
|
||||
tx
|
||||
);
|
||||
return identityProjectMembership;
|
||||
role: isCustomRole ? ProjectMembershipRole.Custom : inputRole.role,
|
||||
customRoleId: customRolesGroupBySlug[inputRole.role] ? customRolesGroupBySlug[inputRole.role][0].id : null,
|
||||
isTemporary: true,
|
||||
temporaryMode: ProjectUserMembershipTemporaryMode.Relative,
|
||||
temporaryRange: inputRole.temporaryRange,
|
||||
temporaryAccessStartTime: new Date(inputRole.temporaryAccessStartTime),
|
||||
temporaryAccessEndTime: new Date(new Date(inputRole.temporaryAccessStartTime).getTime() + relativeTimeInMs)
|
||||
};
|
||||
});
|
||||
|
||||
const identityRoles = await identityProjectMembershipRoleDAL.insertMany(sanitizedProjectMembershipRoles, tx);
|
||||
return { ...identityProjectMembership, roles: identityRoles };
|
||||
});
|
||||
return projectIdentity;
|
||||
};
|
||||
@ -135,16 +168,18 @@ export const identityProjectServiceFactory = ({
|
||||
message: `Identity with id ${identityId} doesn't exists in project with id ${projectId}`
|
||||
});
|
||||
|
||||
const { permission: identityRolePermission } = await permissionService.getProjectPermission(
|
||||
ActorType.IDENTITY,
|
||||
projectIdentity.identityId,
|
||||
projectIdentity.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, identityRolePermission);
|
||||
if (!hasRequiredPriviledges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to delete more privileged identity" });
|
||||
for await (const { role: requestedRoleChange } of roles) {
|
||||
const { permission: rolePermission } = await permissionService.getProjectPermissionByRole(
|
||||
requestedRoleChange,
|
||||
projectId
|
||||
);
|
||||
|
||||
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, rolePermission);
|
||||
|
||||
if (!hasRequiredPriviledges) {
|
||||
throw new ForbiddenRequestError({ message: "Failed to change to a more privileged role" });
|
||||
}
|
||||
}
|
||||
|
||||
// validate custom roles input
|
||||
const customInputRoles = roles.filter(
|
||||
@ -248,10 +283,33 @@ export const identityProjectServiceFactory = ({
return identityMemberships;
};

const getProjectIdentityByIdentityId = async ({
projectId,
actor,
actorId,
actorAuthMethod,
actorOrgId,
identityId
}: TGetProjectIdentityByIdentityIdDTO) => {
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);

const [identityMembership] = await identityProjectDAL.findByProjectId(projectId, { identityId });
if (!identityMembership) throw new BadRequestError({ message: `Membership not found for identity ${identityId}` });
return identityMembership;
};

return {
createProjectIdentity,
updateProjectIdentity,
deleteProjectIdentity,
listProjectIdentities
listProjectIdentities,
getProjectIdentityByIdentityId
};
};
@ -4,7 +4,19 @@ import { ProjectUserMembershipTemporaryMode } from "../project-membership/projec

export type TCreateProjectIdentityDTO = {
identityId: string;
role: string;
roles: (
| {
role: string;
isTemporary?: false;
}
| {
role: string;
isTemporary: true;
temporaryMode: ProjectUserMembershipTemporaryMode.Relative;
temporaryRange: string;
temporaryAccessStartTime: string;
}
)[];
} & TProjectPermission;

export type TUpdateProjectIdentityDTO = {
@ -29,3 +41,7 @@ export type TDeleteProjectIdentityDTO = {
} & TProjectPermission;

export type TListProjectIdentityDTO = TProjectPermission;

export type TGetProjectIdentityByIdentityIdDTO = {
identityId: string;
} & TProjectPermission;
@ -9,9 +9,12 @@

import {
CreateSecretCommand,
DescribeSecretCommand,
GetSecretValueCommand,
ResourceNotFoundException,
SecretsManagerClient,
TagResourceCommand,
UntagResourceCommand,
UpdateSecretCommand
} from "@aws-sdk/client-secrets-manager";
import { Octokit } from "@octokit/rest";
@ -459,27 +462,39 @@ const syncSecretsAWSParameterStore = async ({
ssm.config.update(config);

const metadata = z.record(z.any()).parse(integration.metadata || {});
const awsParameterStoreSecretsObj: Record<string, AWS.SSM.Parameter> = {};

const params = {
Path: integration.path as string,
Recursive: false,
WithDecryption: true
};
// now fetch all aws parameter store secrets
let hasNext = true;
let nextToken: string | undefined;
while (hasNext) {
const parameters = await ssm
.getParametersByPath({
Path: integration.path as string,
Recursive: false,
WithDecryption: true,
MaxResults: 10,
NextToken: nextToken
})
.promise();

const parameterList = (await ssm.getParametersByPath(params).promise()).Parameters;
if (parameters.Parameters) {
parameters.Parameters.forEach((parameter) => {
if (parameter.Name) {
const secKey = parameter.Name.substring((integration.path as string).length);
awsParameterStoreSecretsObj[secKey] = parameter;
}
});
}
hasNext = Boolean(parameters.NextToken);
nextToken = parameters.NextToken;
}

const awsParameterStoreSecretsObj = (parameterList || [])
.filter(({ Name }) => Boolean(Name))
.reduce(
(obj, secret) => ({
...obj,
[(secret.Name as string).substring((integration.path as string).length)]: secret
}),
{} as Record<string, AWS.SSM.Parameter>
);
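The hunk above swaps a single getParametersByPath call for a NextToken loop, so paths holding more than ten parameters are read completely. As an editor's self-contained sketch of that pagination pattern, using the AWS SDK v2 client this file already works with (helper name is illustrative):

import AWS from "aws-sdk";

// Editor's sketch: read every parameter under a path, ten at a time, following NextToken.
const fetchAllParametersUnderPath = async (ssm: AWS.SSM, path: string) => {
  const byKey: Record<string, AWS.SSM.Parameter> = {};
  let nextToken: string | undefined;
  do {
    const page = await ssm
      .getParametersByPath({
        Path: path,
        Recursive: false,
        WithDecryption: true,
        MaxResults: 10,
        NextToken: nextToken
      })
      .promise();
    (page.Parameters || []).forEach((p) => {
      if (p.Name) byKey[p.Name.substring(path.length)] = p;
    });
    nextToken = page.NextToken;
  } while (nextToken);
  return byKey;
};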
// Identify secrets to create
|
||||
await Promise.all(
|
||||
Object.keys(secrets).map(async (key) => {
|
||||
// don't use Promise.all() and promise map here
|
||||
// it will cause rate limit
|
||||
for (const key in secrets) {
|
||||
if (Object.hasOwn(secrets, key)) {
|
||||
if (!(key in awsParameterStoreSecretsObj)) {
|
||||
// case: secret does not exist in AWS parameter store
|
||||
// -> create secret
|
||||
@ -514,13 +529,16 @@ const syncSecretsAWSParameterStore = async ({
|
||||
})
|
||||
.promise();
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 50);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (!metadata.shouldDisableDelete) {
|
||||
// Identify secrets to delete
|
||||
await Promise.all(
|
||||
Object.keys(awsParameterStoreSecretsObj).map(async (key) => {
|
||||
for (const key in awsParameterStoreSecretsObj) {
|
||||
if (Object.hasOwn(awsParameterStoreSecretsObj, key)) {
|
||||
if (!(key in secrets)) {
|
||||
// case:
|
||||
// -> delete secret
|
||||
@ -530,8 +548,11 @@ const syncSecretsAWSParameterStore = async ({
|
||||
})
|
||||
.promise();
|
||||
}
|
||||
})
|
||||
);
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 50);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
@ -574,6 +595,7 @@ const syncSecretsAWSSecretManager = async ({
|
||||
if (awsSecretManagerSecret?.SecretString) {
|
||||
awsSecretManagerSecretObj = JSON.parse(awsSecretManagerSecret.SecretString);
|
||||
}
|
||||
|
||||
if (!isEqual(awsSecretManagerSecretObj, secKeyVal)) {
|
||||
await secretsManager.send(
|
||||
new UpdateSecretCommand({
|
||||
@ -582,7 +604,88 @@ const syncSecretsAWSSecretManager = async ({
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
const secretAWSTag = metadata.secretAWSTag as { key: string; value: string }[] | undefined;
|
||||
|
||||
if (secretAWSTag && secretAWSTag.length) {
|
||||
const describedSecret = await secretsManager.send(
|
||||
// requires secretsmanager:DescribeSecret policy
|
||||
new DescribeSecretCommand({
|
||||
SecretId: integration.app as string
|
||||
})
|
||||
);
|
||||
|
||||
if (!describedSecret.Tags) return;
|
||||
|
||||
const integrationTagObj = secretAWSTag.reduce(
|
||||
(acc, item) => {
|
||||
acc[item.key] = item.value;
|
||||
return acc;
|
||||
},
|
||||
{} as Record<string, string>
|
||||
);
|
||||
|
||||
const awsTagObj = (describedSecret.Tags || []).reduce(
|
||||
(acc, item) => {
|
||||
if (item.Key && item.Value) {
|
||||
acc[item.Key] = item.Value;
|
||||
}
|
||||
return acc;
|
||||
},
|
||||
{} as Record<string, string>
|
||||
);
|
||||
|
||||
const tagsToUpdate: { Key: string; Value: string }[] = [];
|
||||
const tagsToDelete: { Key: string; Value: string }[] = [];
|
||||
|
||||
describedSecret.Tags?.forEach((tag) => {
|
||||
if (tag.Key && tag.Value) {
|
||||
if (!(tag.Key in integrationTagObj)) {
|
||||
// delete tag from AWS secret manager
|
||||
tagsToDelete.push({
|
||||
Key: tag.Key,
|
||||
Value: tag.Value
|
||||
});
|
||||
} else if (tag.Value !== integrationTagObj[tag.Key]) {
|
||||
// update tag in AWS secret manager
|
||||
tagsToUpdate.push({
|
||||
Key: tag.Key,
|
||||
Value: integrationTagObj[tag.Key]
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
secretAWSTag?.forEach((tag) => {
|
||||
if (!(tag.key in awsTagObj)) {
|
||||
// create tag in AWS secret manager
|
||||
tagsToUpdate.push({
|
||||
Key: tag.key,
|
||||
Value: tag.value
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
if (tagsToUpdate.length) {
|
||||
await secretsManager.send(
|
||||
new TagResourceCommand({
|
||||
SecretId: integration.app as string,
|
||||
Tags: tagsToUpdate
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
if (tagsToDelete.length) {
|
||||
await secretsManager.send(
|
||||
new UntagResourceCommand({
|
||||
SecretId: integration.app as string,
|
||||
TagKeys: tagsToDelete.map((tag) => tag.Key)
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
// case when AWS manager can't find the specified secret
|
||||
if (err instanceof ResourceNotFoundException && secretsManager) {
|
||||
await secretsManager.send(
|
||||
new CreateSecretCommand({
|
||||
@ -2862,7 +2965,7 @@ const syncSecretsDigitalOceanAppPlatform = async ({
|
||||
spec: {
|
||||
name: integration.app,
|
||||
...appSettings,
|
||||
envs: Object.entries(secrets).map(([key, data]) => ({ key, value: data.value }))
|
||||
envs: Object.entries(secrets).map(([key, data]) => ({ key, value: data.value, type: "SECRET" }))
|
||||
}
|
||||
},
|
||||
{
|
||||
|
@ -9,7 +9,12 @@ import { TIntegrationAuthDALFactory } from "../integration-auth/integration-auth
import { TSecretQueueFactory } from "../secret/secret-queue";
import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
import { TIntegrationDALFactory } from "./integration-dal";
import { TCreateIntegrationDTO, TDeleteIntegrationDTO, TUpdateIntegrationDTO } from "./integration-types";
import {
TCreateIntegrationDTO,
TDeleteIntegrationDTO,
TSyncIntegrationDTO,
TUpdateIntegrationDTO
} from "./integration-types";

type TIntegrationServiceFactoryDep = {
integrationDAL: TIntegrationDALFactory;
@ -103,7 +108,8 @@ export const integrationServiceFactory = ({
owner,
isActive,
environment,
secretPath
secretPath,
metadata
}: TUpdateIntegrationDTO) => {
const integration = await integrationDAL.findById(id);
if (!integration) throw new BadRequestError({ message: "Integration auth not found" });
@ -127,7 +133,17 @@ export const integrationServiceFactory = ({
appId,
targetEnvironment,
owner,
secretPath
secretPath,
metadata: {
...(integration.metadata as object),
...metadata
}
});

await secretQueueService.syncIntegrations({
environment: folder.environment.slug,
secretPath,
projectId: folder.projectId
});

return updatedIntegration;
@ -190,10 +206,35 @@ export const integrationServiceFactory = ({
return integrations;
};

const syncIntegration = async ({ id, actorId, actor, actorOrgId, actorAuthMethod }: TSyncIntegrationDTO) => {
const integration = await integrationDAL.findById(id);
if (!integration) {
throw new BadRequestError({ message: "Integration not found" });
}

const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
integration.projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);

await secretQueueService.syncIntegrations({
environment: integration.environment.slug,
secretPath: integration.secretPath,
projectId: integration.projectId
});

return { ...integration, envId: integration.environment.id };
};

return {
createIntegration,
updateIntegration,
deleteIntegration,
listIntegrationByProject
listIntegrationByProject,
syncIntegration
};
};
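A hedged editor's sketch of how the new syncIntegration method could be exposed over HTTP with Fastify (which this backend uses); the route path, the way the service function is injected, and the actor placeholder values are assumptions for illustration, not taken from this diff.

import type { FastifyInstance } from "fastify";

// Editor's illustration only; the real route registration lives elsewhere in the codebase.
type TSyncIntegrationFn = (dto: {
  id: string;
  actor: string;
  actorId: string;
  actorAuthMethod: string;
  actorOrgId?: string;
}) => Promise<unknown>;

export const registerManualSyncRoute = (server: FastifyInstance, syncIntegration: TSyncIntegrationFn) => {
  server.post<{ Params: { integrationId: string } }>("/integrations/:integrationId/sync", async (req) => {
    // In the real app the actor fields come from the auth layer; placeholders here.
    const integration = await syncIntegration({
      id: req.params.integrationId,
      actor: "user",
      actorId: "user-id",
      actorAuthMethod: "jwt"
    });
    return { integration };
  });
};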
@ -33,15 +33,33 @@ export type TCreateIntegrationDTO = {

export type TUpdateIntegrationDTO = {
id: string;
app: string;
appId: string;
app?: string;
appId?: string;
isActive?: boolean;
secretPath: string;
targetEnvironment: string;
owner: string;
environment: string;
metadata?: {
secretPrefix?: string;
secretSuffix?: string;
secretGCPLabel?: {
labelName: string;
labelValue: string;
};
secretAWSTag?: {
key: string;
value: string;
}[];
kmsKeyId?: string;
shouldDisableDelete?: boolean;
};
} & Omit<TProjectPermission, "projectId">;

export type TDeleteIntegrationDTO = {
id: string;
} & Omit<TProjectPermission, "projectId">;

export type TSyncIntegrationDTO = {
id: string;
} & Omit<TProjectPermission, "projectId">;
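For orientation, a small hedged editor's example of a payload shape that exercises the widened TUpdateIntegrationDTO above; every value is illustrative and the actor fields from TProjectPermission are omitted for brevity.

// Editor's sketch: app/appId may now be omitted, and metadata can carry sync behaviour.
const updateBody: Omit<TUpdateIntegrationDTO, "actor" | "actorId" | "actorAuthMethod" | "actorOrgId"> = {
  id: "integration-id",
  isActive: true,
  secretPath: "/",
  targetEnvironment: "production",
  owner: "",
  environment: "prod",
  metadata: {
    secretPrefix: "APP_",
    secretAWSTag: [{ key: "managed-by", value: "infisical" }],
    shouldDisableDelete: true
  }
};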
@ -1,3 +1,5 @@
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import { TableName, TUserEncryptionKeys } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
@ -9,11 +11,19 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
const projectMemberOrm = ormify(db, TableName.ProjectMembership);

// special query
const findAllProjectMembers = async (projectId: string) => {
const findAllProjectMembers = async (projectId: string, filter: { usernames?: string[]; username?: string } = {}) => {
try {
const docs = await db(TableName.ProjectMembership)
.where({ [`${TableName.ProjectMembership}.projectId` as "projectId"]: projectId })
.join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`)
.where((qb) => {
if (filter.usernames) {
void qb.whereIn("username", filter.usernames);
}
if (filter.username) {
void qb.where("username", filter.username);
}
})
.join<TUserEncryptionKeys>(
TableName.UserEncryptionKey,
`${TableName.UserEncryptionKey}.userId`,
@ -96,9 +106,9 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
}
};

const findProjectGhostUser = async (projectId: string) => {
const findProjectGhostUser = async (projectId: string, tx?: Knex) => {
try {
const ghostUser = await db(TableName.ProjectMembership)
const ghostUser = await (tx || db)(TableName.ProjectMembership)
.where({ projectId })
.join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`)
.select(selectAllTableCols(TableName.Users))
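A brief usage sketch from this editor (the membership service shown further below does exactly this) of the new filter argument on findAllProjectMembers; projectMembershipDAL is assumed to be in scope.

// Editor's sketch: fetch a single project membership by username via the new filter.
const findMemberByUsername = async (projectId: string, username: string) => {
  const [membership] = await projectMembershipDAL.findAllProjectMembers(projectId, { username });
  if (!membership) throw new Error(`Project membership not found for user ${username}`);
  return membership;
};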
|
@ -34,6 +34,7 @@ import {
|
||||
TAddUsersToWorkspaceNonE2EEDTO,
|
||||
TDeleteProjectMembershipOldDTO,
|
||||
TDeleteProjectMembershipsDTO,
|
||||
TGetProjectMembershipByUsernameDTO,
|
||||
TGetProjectMembershipDTO,
|
||||
TUpdateProjectMembershipDTO
|
||||
} from "./project-membership-types";
|
||||
@ -89,6 +90,28 @@ export const projectMembershipServiceFactory = ({
|
||||
return projectMembershipDAL.findAllProjectMembers(projectId);
|
||||
};
|
||||
|
||||
const getProjectMembershipByUsername = async ({
|
||||
actorId,
|
||||
actor,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
projectId,
|
||||
username
|
||||
}: TGetProjectMembershipByUsernameDTO) => {
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Member);
|
||||
|
||||
const [membership] = await projectMembershipDAL.findAllProjectMembers(projectId, { username });
|
||||
if (!membership) throw new BadRequestError({ message: `Project membership not found for user ${username}` });
|
||||
return membership;
|
||||
};
|
||||
|
||||
const addUsersToProject = async ({
|
||||
projectId,
|
||||
actorId,
|
||||
@ -510,6 +533,7 @@ export const projectMembershipServiceFactory = ({
|
||||
|
||||
return {
|
||||
getProjectMemberships,
|
||||
getProjectMembershipByUsername,
|
||||
updateProjectMembership,
|
||||
addUsersToProjectNonE2EE,
|
||||
deleteProjectMemberships,
|
||||
|
@ -9,6 +9,10 @@ export type TInviteUserToProjectDTO = {
|
||||
emails: string[];
|
||||
} & TProjectPermission;
|
||||
|
||||
export type TGetProjectMembershipByUsernameDTO = {
|
||||
username: string;
|
||||
} & TProjectPermission;
|
||||
|
||||
export type TUpdateProjectMembershipDTO = {
|
||||
membershipId: string;
|
||||
roles: (
|
||||
|
@ -340,7 +340,7 @@ export const projectServiceFactory = ({

const deletedProject = await projectDAL.transaction(async (tx) => {
const delProject = await projectDAL.deleteById(project.id, tx);
const projectGhostUser = await projectMembershipDAL.findProjectGhostUser(project.id).catch(() => null);
const projectGhostUser = await projectMembershipDAL.findProjectGhostUser(project.id, tx).catch(() => null);

// Delete the org membership for the ghost user if it's found.
if (projectGhostUser) {
@ -243,6 +243,74 @@ export const secretDALFactory = (db: TDbClient) => {
}
};

const upsertSecretReferences = async (
data: {
secretId: string;
references: Array<{ environment: string; secretPath: string }>;
}[] = [],
tx?: Knex
) => {
try {
if (!data.length) return;

await (tx || db)(TableName.SecretReference)
.whereIn(
"secretId",
data.map(({ secretId }) => secretId)
)
.delete();
const newSecretReferences = data
.filter(({ references }) => references.length)
.flatMap(({ secretId, references }) =>
references.map(({ environment, secretPath }) => ({
secretPath,
secretId,
environment
}))
);
if (!newSecretReferences.length) return;
const secretReferences = await (tx || db)(TableName.SecretReference).insert(newSecretReferences);
return secretReferences;
} catch (error) {
throw new DatabaseError({ error, name: "UpsertSecretReference" });
}
};

const findReferencedSecretReferences = async (projectId: string, envSlug: string, secretPath: string, tx?: Knex) => {
try {
const docs = await (tx || db)(TableName.SecretReference)
.where({
secretPath,
environment: envSlug
})
.join(TableName.Secret, `${TableName.Secret}.id`, `${TableName.SecretReference}.secretId`)
.join(TableName.SecretFolder, `${TableName.Secret}.folderId`, `${TableName.SecretFolder}.id`)
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.where("projectId", projectId)
.select(selectAllTableCols(TableName.SecretReference))
.select("folderId");
return docs;
} catch (error) {
throw new DatabaseError({ error, name: "FindReferencedSecretReferences" });
}
};

// special query to backfill secret value
const findAllProjectSecretValues = async (projectId: string, tx?: Knex) => {
try {
const docs = await (tx || db)(TableName.Secret)
.join(TableName.SecretFolder, `${TableName.Secret}.folderId`, `${TableName.SecretFolder}.id`)
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.where("projectId", projectId)
// not empty
.whereNotNull("secretValueCiphertext")
.select("secretValueTag", "secretValueCiphertext", "secretValueIV", `${TableName.Secret}.id` as "id");
return docs;
} catch (error) {
throw new DatabaseError({ error, name: "FindAllProjectSecretValues" });
}
};

return {
...secretOrm,
update,
@ -252,6 +320,9 @@ export const secretDALFactory = (db: TDbClient) => {
getSecretTags,
findByFolderId,
findByFolderIds,
findByBlindIndexes
findByBlindIndexes,
upsertSecretReferences,
findReferencedSecretReferences,
findAllProjectSecretValues
};
};
@ -194,6 +194,7 @@ type TInterpolateSecretArg = {
|
||||
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath">;
|
||||
};
|
||||
|
||||
const INTERPOLATION_SYNTAX_REG = /\${([^}]+)}/g;
|
||||
export const interpolateSecrets = ({ projectId, secretEncKey, secretDAL, folderDAL }: TInterpolateSecretArg) => {
|
||||
const fetchSecretsCrossEnv = () => {
|
||||
const fetchCache: Record<string, Record<string, string>> = {};
|
||||
@ -235,7 +236,6 @@ export const interpolateSecrets = ({ projectId, secretEncKey, secretDAL, folderD
|
||||
};
|
||||
};
|
||||
|
||||
const INTERPOLATION_SYNTAX_REG = /\${([^}]+)}/g;
|
||||
const recursivelyExpandSecret = async (
|
||||
expandedSec: Record<string, string>,
|
||||
interpolatedSec: Record<string, string>,
|
||||
@ -353,7 +353,7 @@ export const interpolateSecrets = ({ projectId, secretEncKey, secretDAL, folderD
|
||||
};
|
||||
|
||||
export const decryptSecretRaw = (
|
||||
secret: TSecrets & { workspace: string; environment: string; secretPath?: string },
|
||||
secret: TSecrets & { workspace: string; environment: string; secretPath: string },
|
||||
key: string
|
||||
) => {
|
||||
const secretKey = decryptSymmetric128BitHexKeyUTF8({
|
||||
@ -396,6 +396,37 @@ export const decryptSecretRaw = (
|
||||
};
|
||||
};
|
||||
|
||||
/**
* Grabs and processes nested secret references from a string
*
* This function looks for patterns that match the interpolation syntax in the input string.
* It filters out references that include nested paths, splits them into environment and
* secret path parts, and then returns an array of objects with the environment and the
* joined secret path.
*
* @param {string} maybeSecretReference - The string that has the potential secret references.
* @returns {Array<{ environment: string, secretPath: string }>} - An array of objects
* with the environment and joined secret path.
*
* @example
* const value = "Hello ${dev.someFolder.OtherFolder.SECRET_NAME} and ${prod.anotherFolder.SECRET_NAME}";
* const result = getAllNestedSecretReferences(value);
* // result will be:
* // [
* // { environment: 'dev', secretPath: '/someFolder/OtherFolder' },
* // { environment: 'prod', secretPath: '/anotherFolder' }
* // ]
*/
export const getAllNestedSecretReferences = (maybeSecretReference: string) => {
const references = Array.from(maybeSecretReference.matchAll(INTERPOLATION_SYNTAX_REG), (m) => m[1]);
return references
.filter((el) => el.includes("."))
.map((el) => {
const [environment, ...secretPathList] = el.split(".");
return { environment, secretPath: path.join("/", ...secretPathList.slice(0, -1)) };
});
};
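To connect the pieces, an editor's sketch (names are taken from this diff; the secretId value and the tx variable are placeholders) of how extracted references are persisted with secretDAL.upsertSecretReferences so the queue changes further below can fan syncs out to the referenced environments and paths.

// Editor's sketch: parse ${env.folder.KEY} references out of a secret value and store them.
const value = "postgres://${prod.db.DB_USER}:${prod.db.DB_PASSWORD}@db:5432/app";
const references = getAllNestedSecretReferences(value);
// => [ { environment: "prod", secretPath: "/db" }, { environment: "prod", secretPath: "/db" } ]

await secretDAL.upsertSecretReferences(
  [{ secretId: "secret-id", references }],
  tx // optional Knex transaction, as passed through fnSecretBulkInsert
);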
/**
|
||||
* Checks and handles secrets using a blind index method.
|
||||
* The function generates mappings between secret names and their blind indexes, validates user IDs for personal secrets, and retrieves secrets from the database based on their blind indexes.
|
||||
@ -467,7 +498,7 @@ export const fnSecretBulkInsert = async ({
|
||||
tx
|
||||
}: TFnSecretBulkInsert) => {
|
||||
const newSecrets = await secretDAL.insertMany(
|
||||
inputSecrets.map(({ tags, ...el }) => ({ ...el, folderId })),
|
||||
inputSecrets.map(({ tags, references, ...el }) => ({ ...el, folderId })),
|
||||
tx
|
||||
);
|
||||
const newSecretGroupByBlindIndex = groupBy(newSecrets, (item) => item.secretBlindIndex as string);
|
||||
@ -478,13 +509,20 @@ export const fnSecretBulkInsert = async ({
|
||||
}))
|
||||
);
|
||||
const secretVersions = await secretVersionDAL.insertMany(
|
||||
inputSecrets.map(({ tags, ...el }) => ({
|
||||
inputSecrets.map(({ tags, references, ...el }) => ({
|
||||
...el,
|
||||
folderId,
|
||||
secretId: newSecretGroupByBlindIndex[el.secretBlindIndex as string][0].id
|
||||
})),
|
||||
tx
|
||||
);
|
||||
await secretDAL.upsertSecretReferences(
|
||||
inputSecrets.map(({ references = [], secretBlindIndex }) => ({
|
||||
secretId: newSecretGroupByBlindIndex[secretBlindIndex as string][0].id,
|
||||
references
|
||||
})),
|
||||
tx
|
||||
);
|
||||
if (newSecretTags.length) {
|
||||
const secTags = await secretTagDAL.saveTagsToSecret(newSecretTags, tx);
|
||||
const secVersionsGroupBySecId = groupBy(secretVersions, (i) => i.secretId);
|
||||
@ -509,7 +547,7 @@ export const fnSecretBulkUpdate = async ({
|
||||
secretVersionTagDAL
|
||||
}: TFnSecretBulkUpdate) => {
|
||||
const newSecrets = await secretDAL.bulkUpdate(
|
||||
inputSecrets.map(({ filter, data: { tags, ...data } }) => ({
|
||||
inputSecrets.map(({ filter, data: { tags, references, ...data } }) => ({
|
||||
filter: { ...filter, folderId },
|
||||
data
|
||||
})),
|
||||
@ -522,6 +560,15 @@ export const fnSecretBulkUpdate = async ({
|
||||
})),
|
||||
tx
|
||||
);
|
||||
await secretDAL.upsertSecretReferences(
|
||||
inputSecrets
|
||||
.filter(({ data: { references } }) => Boolean(references))
|
||||
.map(({ data: { references = [] } }, i) => ({
|
||||
secretId: newSecrets[i].id,
|
||||
references
|
||||
})),
|
||||
tx
|
||||
);
|
||||
const secsUpdatedTag = inputSecrets.flatMap(({ data: { tags } }, i) =>
|
||||
tags !== undefined ? { tags, secretId: newSecrets[i].id } : []
|
||||
);
|
||||
@ -591,50 +638,39 @@ export const createManySecretsRawFnFactory = ({
|
||||
folderId,
|
||||
isNew: true,
|
||||
blindIndexCfg,
|
||||
userId,
|
||||
secretDAL
|
||||
});
|
||||
|
||||
const inputSecrets = await Promise.all(
|
||||
secrets.map(async (secret) => {
|
||||
const secretKeyEncrypted = encryptSymmetric128BitHexKeyUTF8(secret.secretName, botKey);
|
||||
const secretValueEncrypted = encryptSymmetric128BitHexKeyUTF8(secret.secretValue || "", botKey);
|
||||
const secretCommentEncrypted = encryptSymmetric128BitHexKeyUTF8(secret.secretComment || "", botKey);
|
||||
const inputSecrets = secrets.map((secret) => {
|
||||
const secretKeyEncrypted = encryptSymmetric128BitHexKeyUTF8(secret.secretName, botKey);
|
||||
const secretValueEncrypted = encryptSymmetric128BitHexKeyUTF8(secret.secretValue || "", botKey);
|
||||
const secretReferences = getAllNestedSecretReferences(secret.secretValue || "");
|
||||
const secretCommentEncrypted = encryptSymmetric128BitHexKeyUTF8(secret.secretComment || "", botKey);
|
||||
|
||||
if (secret.type === SecretType.Personal) {
|
||||
if (!userId) throw new BadRequestError({ message: "Missing user id for personal secret" });
|
||||
const sharedExist = await secretDAL.findOne({
|
||||
secretBlindIndex: keyName2BlindIndex[secret.secretName],
|
||||
folderId,
|
||||
type: SecretType.Shared
|
||||
});
|
||||
return {
|
||||
type: secret.type,
|
||||
userId: secret.type === SecretType.Personal ? userId : null,
|
||||
secretName: secret.secretName,
|
||||
secretKeyCiphertext: secretKeyEncrypted.ciphertext,
|
||||
secretKeyIV: secretKeyEncrypted.iv,
|
||||
secretKeyTag: secretKeyEncrypted.tag,
|
||||
secretValueCiphertext: secretValueEncrypted.ciphertext,
|
||||
secretValueIV: secretValueEncrypted.iv,
|
||||
secretValueTag: secretValueEncrypted.tag,
|
||||
secretCommentCiphertext: secretCommentEncrypted.ciphertext,
|
||||
secretCommentIV: secretCommentEncrypted.iv,
|
||||
secretCommentTag: secretCommentEncrypted.tag,
|
||||
skipMultilineEncoding: secret.skipMultilineEncoding,
|
||||
tags: secret.tags,
|
||||
references: secretReferences
|
||||
};
|
||||
});
|
||||
|
||||
if (!sharedExist)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to create personal secret override for no corresponding shared secret"
|
||||
});
|
||||
}
|
||||
|
||||
const tags = secret.tags ? await secretTagDAL.findManyTagsById(projectId, secret.tags) : [];
|
||||
if ((secret.tags || []).length !== tags.length) throw new BadRequestError({ message: "Tag not found" });
|
||||
|
||||
return {
|
||||
type: secret.type,
|
||||
userId: secret.type === SecretType.Personal ? userId : null,
|
||||
secretName: secret.secretName,
|
||||
secretKeyCiphertext: secretKeyEncrypted.ciphertext,
|
||||
secretKeyIV: secretKeyEncrypted.iv,
|
||||
secretKeyTag: secretKeyEncrypted.tag,
|
||||
secretValueCiphertext: secretValueEncrypted.ciphertext,
|
||||
secretValueIV: secretValueEncrypted.iv,
|
||||
secretValueTag: secretValueEncrypted.tag,
|
||||
secretCommentCiphertext: secretCommentEncrypted.ciphertext,
|
||||
secretCommentIV: secretCommentEncrypted.iv,
|
||||
secretCommentTag: secretCommentEncrypted.tag,
|
||||
skipMultilineEncoding: secret.skipMultilineEncoding,
|
||||
tags: secret.tags
|
||||
};
|
||||
})
|
||||
);
|
||||
// get all tags
|
||||
const tagIds = inputSecrets.flatMap(({ tags = [] }) => tags);
|
||||
const tags = tagIds.length ? await secretTagDAL.findManyTagsById(projectId, tagIds) : [];
|
||||
if (tags.length !== tagIds.length) throw new BadRequestError({ message: "Tag not found" });
|
||||
|
||||
const newSecrets = await secretDAL.transaction(async (tx) =>
|
||||
fnSecretBulkInsert({
|
||||
@ -703,56 +739,35 @@ export const updateManySecretsRawFnFactory = ({
|
||||
userId
|
||||
});
|
||||
|
||||
const inputSecrets = await Promise.all(
|
||||
secrets.map(async (secret) => {
|
||||
if (secret.newSecretName === "") {
|
||||
throw new BadRequestError({ message: "New secret name cannot be empty" });
|
||||
}
|
||||
const inputSecrets = secrets.map((secret) => {
|
||||
if (secret.newSecretName === "") {
|
||||
throw new BadRequestError({ message: "New secret name cannot be empty" });
|
||||
}
|
||||
|
||||
const secretKeyEncrypted = encryptSymmetric128BitHexKeyUTF8(secret.secretName, botKey);
|
||||
const secretValueEncrypted = encryptSymmetric128BitHexKeyUTF8(secret.secretValue || "", botKey);
|
||||
const secretCommentEncrypted = encryptSymmetric128BitHexKeyUTF8(secret.secretComment || "", botKey);
|
||||
const secretKeyEncrypted = encryptSymmetric128BitHexKeyUTF8(secret.secretName, botKey);
|
||||
const secretValueEncrypted = encryptSymmetric128BitHexKeyUTF8(secret.secretValue || "", botKey);
|
||||
const secretReferences = getAllNestedSecretReferences(secret.secretValue || "");
|
||||
const secretCommentEncrypted = encryptSymmetric128BitHexKeyUTF8(secret.secretComment || "", botKey);
|
||||
|
||||
if (secret.type === SecretType.Personal) {
|
||||
if (!userId) throw new BadRequestError({ message: "Missing user id for personal secret" });
|
||||
|
||||
const sharedExist = await secretDAL.findOne({
|
||||
secretBlindIndex: keyName2BlindIndex[secret.secretName],
|
||||
folderId,
|
||||
type: SecretType.Shared
|
||||
});
|
||||
|
||||
if (!sharedExist)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to update personal secret override for no corresponding shared secret"
|
||||
});
|
||||
|
||||
if (secret.newSecretName)
|
||||
throw new BadRequestError({ message: "Personal secret cannot change the key name" });
|
||||
}
|
||||
|
||||
const tags = secret.tags ? await secretTagDAL.findManyTagsById(projectId, secret.tags) : [];
|
||||
if ((secret.tags || []).length !== tags.length) throw new BadRequestError({ message: "Tag not found" });
|
||||
|
||||
return {
|
||||
type: secret.type,
|
||||
userId: secret.type === SecretType.Personal ? userId : null,
|
||||
secretName: secret.secretName,
|
||||
newSecretName: secret.newSecretName,
|
||||
secretKeyCiphertext: secretKeyEncrypted.ciphertext,
|
||||
secretKeyIV: secretKeyEncrypted.iv,
|
||||
secretKeyTag: secretKeyEncrypted.tag,
|
||||
secretValueCiphertext: secretValueEncrypted.ciphertext,
|
||||
secretValueIV: secretValueEncrypted.iv,
|
||||
secretValueTag: secretValueEncrypted.tag,
|
||||
secretCommentCiphertext: secretCommentEncrypted.ciphertext,
|
||||
secretCommentIV: secretCommentEncrypted.iv,
|
||||
secretCommentTag: secretCommentEncrypted.tag,
|
||||
skipMultilineEncoding: secret.skipMultilineEncoding,
|
||||
tags: secret.tags
|
||||
};
|
||||
})
|
||||
);
|
||||
return {
|
||||
type: secret.type,
|
||||
userId: secret.type === SecretType.Personal ? userId : null,
|
||||
secretName: secret.secretName,
|
||||
newSecretName: secret.newSecretName,
|
||||
secretKeyCiphertext: secretKeyEncrypted.ciphertext,
|
||||
secretKeyIV: secretKeyEncrypted.iv,
|
||||
secretKeyTag: secretKeyEncrypted.tag,
|
||||
secretValueCiphertext: secretValueEncrypted.ciphertext,
|
||||
secretValueIV: secretValueEncrypted.iv,
|
||||
secretValueTag: secretValueEncrypted.tag,
|
||||
secretCommentCiphertext: secretCommentEncrypted.ciphertext,
|
||||
secretCommentIV: secretCommentEncrypted.iv,
|
||||
secretCommentTag: secretCommentEncrypted.tag,
|
||||
skipMultilineEncoding: secret.skipMultilineEncoding,
|
||||
tags: secret.tags,
|
||||
references: secretReferences
|
||||
};
|
||||
});
|
||||
|
||||
const tagIds = inputSecrets.flatMap(({ tags = [] }) => tags);
|
||||
const tags = tagIds.length ? await secretTagDAL.findManyTagsById(projectId, tagIds) : [];
|
||||
|
@ -59,6 +59,7 @@ export type TGetSecrets = {
|
||||
};
|
||||
|
||||
const MAX_SYNC_SECRET_DEPTH = 5;
|
||||
const uniqueIntegrationKey = (environment: string, secretPath: string) => `integration-${environment}-${secretPath}`;
|
||||
|
||||
export const secretQueueFactory = ({
|
||||
queueService,
|
||||
@ -102,28 +103,35 @@ export const secretQueueFactory = ({
|
||||
folderDAL
|
||||
});
|
||||
|
||||
const syncIntegrations = async (dto: TGetSecrets) => {
|
||||
const syncIntegrations = async (dto: TGetSecrets & { deDupeQueue?: Record<string, boolean> }) => {
|
||||
await queueService.queue(QueueName.IntegrationSync, QueueJobs.IntegrationSync, dto, {
|
||||
attempts: 5,
|
||||
attempts: 3,
|
||||
delay: 1000,
|
||||
backoff: {
|
||||
type: "exponential",
|
||||
delay: 3000
|
||||
},
|
||||
removeOnComplete: true,
|
||||
removeOnFail: {
|
||||
count: 5 // keep the most recent jobs
|
||||
}
|
||||
removeOnFail: true
|
||||
});
|
||||
};
|
||||
|
||||
const syncSecrets = async (dto: TGetSecrets & { depth?: number }) => {
|
||||
const syncSecrets = async ({
|
||||
deDupeQueue = {},
|
||||
...dto
|
||||
}: TGetSecrets & { depth?: number; deDupeQueue?: Record<string, boolean> }) => {
|
||||
const deDuplicationKey = uniqueIntegrationKey(dto.environment, dto.secretPath);
|
||||
if (deDupeQueue?.[deDuplicationKey]) {
|
||||
return;
|
||||
}
|
||||
// eslint-disable-next-line
|
||||
deDupeQueue[deDuplicationKey] = true;
|
||||
logger.info(
|
||||
`syncSecrets: syncing project secrets where [projectId=${dto.projectId}] [environment=${dto.environment}] [path=${dto.secretPath}]`
|
||||
);
|
||||
await queueService.queue(QueueName.SecretWebhook, QueueJobs.SecWebhook, dto, {
|
||||
jobId: `secret-webhook-${dto.environment}-${dto.projectId}-${dto.secretPath}`,
|
||||
removeOnFail: { count: 5 },
|
||||
removeOnFail: true,
|
||||
removeOnComplete: true,
|
||||
delay: 1000,
|
||||
attempts: 5,
|
||||
@ -132,7 +140,7 @@ export const secretQueueFactory = ({
|
||||
delay: 3000
|
||||
}
|
||||
});
|
||||
await syncIntegrations(dto);
|
||||
await syncIntegrations({ ...dto, deDupeQueue });
|
||||
};
|
||||
|
||||
const removeSecretReminder = async (dto: TRemoveSecretReminderDTO) => {
|
||||
@ -326,7 +334,7 @@ export const secretQueueFactory = ({
|
||||
};
|
||||
|
||||
queueService.start(QueueName.IntegrationSync, async (job) => {
|
||||
const { environment, projectId, secretPath, depth = 1 } = job.data;
|
||||
const { environment, projectId, secretPath, depth = 1, deDupeQueue = {} } = job.data;
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
|
||||
if (!folder) {
|
||||
@ -349,21 +357,68 @@ export const secretQueueFactory = ({
|
||||
const importedFolderIds = unique(imports, (i) => i.folderId).map(({ folderId }) => folderId);
|
||||
const importedFolders = await folderDAL.findSecretPathByFolderIds(projectId, importedFolderIds);
|
||||
const foldersGroupedById = groupBy(importedFolders, (i) => i.child || i.id);
|
||||
logger.info(
|
||||
`getIntegrationSecrets: Syncing secret due to link change [jobId=${job.id}] [projectId=${job.data.projectId}] [environment=${job.data.environment}] [secretPath=${job.data.secretPath}] [depth=${depth}]`
|
||||
);
|
||||
await Promise.all(
|
||||
imports
|
||||
.filter(({ folderId }) => Boolean(foldersGroupedById[folderId][0].path))
|
||||
.map(({ folderId }) => {
|
||||
const syncDto = {
|
||||
// filter out already synced ones
|
||||
.filter(
|
||||
({ folderId }) =>
|
||||
!deDupeQueue[
|
||||
uniqueIntegrationKey(
|
||||
foldersGroupedById[folderId][0].environmentSlug,
|
||||
foldersGroupedById[folderId][0].path
|
||||
)
|
||||
]
|
||||
)
|
||||
.map(({ folderId }) =>
|
||||
syncSecrets({
|
||||
depth: depth + 1,
|
||||
projectId,
|
||||
secretPath: foldersGroupedById[folderId][0].path,
|
||||
environment: foldersGroupedById[folderId][0].environmentSlug
|
||||
};
|
||||
logger.info(
|
||||
`getIntegrationSecrets: Syncing secret due to link change [jobId=${job.id}] [projectId=${job.data.projectId}] [environment=${job.data.environment}] [secretPath=${job.data.secretPath}] [depth=${depth}]`
|
||||
);
|
||||
return syncSecrets(syncDto);
|
||||
})
|
||||
environment: foldersGroupedById[folderId][0].environmentSlug,
|
||||
deDupeQueue
|
||||
})
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
const secretReferences = await secretDAL.findReferencedSecretReferences(
|
||||
projectId,
|
||||
folder.environment.slug,
|
||||
secretPath
|
||||
);
|
||||
if (secretReferences.length) {
|
||||
const referencedFolderIds = unique(secretReferences, (i) => i.folderId).map(({ folderId }) => folderId);
|
||||
const referencedFolders = await folderDAL.findSecretPathByFolderIds(projectId, referencedFolderIds);
|
||||
const referencedFoldersGroupedById = groupBy(referencedFolders, (i) => i.child || i.id);
|
||||
logger.info(
|
||||
`getIntegrationSecrets: Syncing secret due to reference change [jobId=${job.id}] [projectId=${job.data.projectId}] [environment=${job.data.environment}] [secretPath=${job.data.secretPath}] [depth=${depth}]`
|
||||
);
|
||||
await Promise.all(
|
||||
secretReferences
|
||||
.filter(({ folderId }) => Boolean(referencedFoldersGroupedById[folderId][0].path))
|
||||
// filter out already synced ones
|
||||
.filter(
|
||||
({ folderId }) =>
|
||||
!deDupeQueue[
|
||||
uniqueIntegrationKey(
|
||||
referencedFoldersGroupedById[folderId][0].environmentSlug,
|
||||
referencedFoldersGroupedById[folderId][0].path
|
||||
)
|
||||
]
|
||||
)
|
||||
.map(({ folderId }) =>
|
||||
syncSecrets({
|
||||
depth: depth + 1,
|
||||
projectId,
|
||||
secretPath: referencedFoldersGroupedById[folderId][0].path,
|
||||
environment: referencedFoldersGroupedById[folderId][0].environmentSlug,
|
||||
deDupeQueue
|
||||
})
|
||||
)
|
||||
);
|
||||
}
|
||||
} else {
|
||||
@ -408,20 +463,37 @@ export const secretQueueFactory = ({
|
||||
});
|
||||
}
|
||||
|
||||
await syncIntegrationSecrets({
|
||||
createManySecretsRawFn,
|
||||
updateManySecretsRawFn,
|
||||
integrationDAL,
|
||||
integration,
|
||||
integrationAuth,
|
||||
secrets: Object.keys(suffixedSecrets).length !== 0 ? suffixedSecrets : secrets,
|
||||
accessId: accessId as string,
|
||||
accessToken,
|
||||
appendices: {
|
||||
prefix: metadata?.secretPrefix || "",
|
||||
suffix: metadata?.secretSuffix || ""
|
||||
}
|
||||
});
|
||||
try {
|
||||
await syncIntegrationSecrets({
|
||||
createManySecretsRawFn,
|
||||
updateManySecretsRawFn,
|
||||
integrationDAL,
|
||||
integration,
|
||||
integrationAuth,
|
||||
secrets: Object.keys(suffixedSecrets).length !== 0 ? suffixedSecrets : secrets,
|
||||
accessId: accessId as string,
|
||||
accessToken,
|
||||
appendices: {
|
||||
prefix: metadata?.secretPrefix || "",
|
||||
suffix: metadata?.secretSuffix || ""
|
||||
}
|
||||
});
|
||||
|
||||
await integrationDAL.updateById(integration.id, {
|
||||
lastSyncJobId: job.id,
|
||||
lastUsed: new Date(),
|
||||
syncMessage: "",
|
||||
isSynced: true
|
||||
});
|
||||
} catch (err: unknown) {
|
||||
logger.info("Secret integration sync error:", err);
|
||||
await integrationDAL.updateById(integration.id, {
|
||||
lastSyncJobId: job.id,
|
||||
lastUsed: new Date(),
|
||||
syncMessage: (err as Error)?.message,
|
||||
isSynced: false
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
logger.info("Secret integration sync ended: %s", job.id);
|
||||
|
@ -2,12 +2,22 @@
|
||||
/* eslint-disable no-await-in-loop */
|
||||
import { ForbiddenError, subject } from "@casl/ability";
|
||||
|
||||
import { SecretEncryptionAlgo, SecretKeyEncoding, SecretsSchema, SecretType } from "@app/db/schemas";
|
||||
import {
|
||||
ProjectMembershipRole,
|
||||
SecretEncryptionAlgo,
|
||||
SecretKeyEncoding,
|
||||
SecretsSchema,
|
||||
SecretType
|
||||
} from "@app/db/schemas";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { buildSecretBlindIndexFromName, encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
|
||||
import {
|
||||
buildSecretBlindIndexFromName,
|
||||
decryptSymmetric128BitHexKeyUTF8,
|
||||
encryptSymmetric128BitHexKeyUTF8
|
||||
} from "@app/lib/crypto";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { groupBy, pick } from "@app/lib/fn";
|
||||
import { logger } from "@app/lib/logger";
|
||||
@ -27,12 +37,14 @@ import {
|
||||
fnSecretBlindIndexCheck,
|
||||
fnSecretBulkInsert,
|
||||
fnSecretBulkUpdate,
|
||||
getAllNestedSecretReferences,
|
||||
interpolateSecrets,
|
||||
recursivelyGetSecretPaths
|
||||
} from "./secret-fns";
|
||||
import { TSecretQueueFactory } from "./secret-queue";
|
||||
import {
|
||||
TAttachSecretTagsDTO,
|
||||
TBackFillSecretReferencesDTO,
|
||||
TCreateBulkSecretDTO,
|
||||
TCreateManySecretRawDTO,
|
||||
TCreateSecretDTO,
|
||||
@ -91,6 +103,22 @@ export const secretServiceFactory = ({
|
||||
secretImportDAL,
|
||||
secretVersionTagDAL
|
||||
}: TSecretServiceFactoryDep) => {
|
||||
const getSecretReference = async (projectId: string) => {
|
||||
// if bot key missing means e2e still exist
|
||||
const botKey = await projectBotService.getBotKey(projectId).catch(() => null);
|
||||
return (el: { ciphertext?: string; iv: string; tag: string }) =>
|
||||
botKey
|
||||
? getAllNestedSecretReferences(
|
||||
decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: el.ciphertext || "",
|
||||
iv: el.iv,
|
||||
tag: el.tag,
|
||||
key: botKey
|
||||
})
|
||||
)
|
||||
: undefined;
|
||||
};
|
||||
|
||||
// utility function to get secret blind index data
|
||||
const interalGenSecBlindIndexByName = async (projectId: string, secretName: string) => {
|
||||
const appCfg = getConfig();
|
||||
@ -225,6 +253,7 @@ export const secretServiceFactory = ({
|
||||
if ((inputSecret.tags || []).length !== tags.length) throw new BadRequestError({ message: "Tag not found" });
|
||||
|
||||
const { secretName, type, ...el } = inputSecret;
|
||||
const references = await getSecretReference(projectId);
|
||||
const secret = await secretDAL.transaction((tx) =>
|
||||
fnSecretBulkInsert({
|
||||
folderId,
|
||||
@ -237,7 +266,12 @@ export const secretServiceFactory = ({
|
||||
userId: inputSecret.type === SecretType.Personal ? actorId : null,
|
||||
algorithm: SecretEncryptionAlgo.AES_256_GCM,
|
||||
keyEncoding: SecretKeyEncoding.UTF8,
|
||||
tags: inputSecret.tags
|
||||
tags: inputSecret.tags,
|
||||
references: references({
|
||||
ciphertext: inputSecret.secretValueCiphertext,
|
||||
iv: inputSecret.secretValueIV,
|
||||
tag: inputSecret.secretValueTag
|
||||
})
|
||||
}
|
||||
],
|
||||
secretDAL,
|
||||
@ -251,7 +285,7 @@ export const secretServiceFactory = ({
|
||||
await snapshotService.performSnapshot(folderId);
|
||||
await secretQueueService.syncSecrets({ secretPath: path, projectId, environment });
|
||||
// TODO(akhilmhdh-pg): licence check, posthog service and snapshot
|
||||
return { ...secret[0], environment, workspace: projectId, tags };
|
||||
return { ...secret[0], environment, workspace: projectId, tags, secretPath: path };
|
||||
};
|
||||
|
||||
const updateSecret = async ({
|
||||
@ -335,6 +369,7 @@ export const secretServiceFactory = ({
|
||||
|
||||
const { secretName, ...el } = inputSecret;
|
||||
|
||||
const references = await getSecretReference(projectId);
|
||||
const updatedSecret = await secretDAL.transaction(async (tx) =>
|
||||
fnSecretBulkUpdate({
|
||||
folderId,
|
||||
@ -360,7 +395,12 @@ export const secretServiceFactory = ({
|
||||
"secretReminderRepeatDays",
|
||||
"tags"
|
||||
]),
|
||||
secretBlindIndex: newSecretNameBlindIndex || keyName2BlindIndex[secretName]
|
||||
secretBlindIndex: newSecretNameBlindIndex || keyName2BlindIndex[secretName],
|
||||
references: references({
|
||||
ciphertext: inputSecret.secretValueCiphertext,
|
||||
iv: inputSecret.secretValueIV,
|
||||
tag: inputSecret.secretValueTag
|
||||
})
|
||||
}
|
||||
}
|
||||
],
|
||||
@ -375,7 +415,7 @@ export const secretServiceFactory = ({
|
||||
await snapshotService.performSnapshot(folderId);
|
||||
await secretQueueService.syncSecrets({ secretPath: path, projectId, environment });
|
||||
// TODO(akhilmhdh-pg): licence check, posthog service and snapshot
|
||||
return { ...updatedSecret[0], workspace: projectId, environment };
|
||||
return { ...updatedSecret[0], workspace: projectId, environment, secretPath: path };
|
||||
};
|
||||
|
||||
const deleteSecret = async ({
|
||||
@ -444,7 +484,7 @@ export const secretServiceFactory = ({
|
||||
await secretQueueService.syncSecrets({ secretPath: path, projectId, environment });
|
||||
|
||||
// TODO(akhilmhdh-pg): licence check, posthog service and snapshot
|
||||
return { ...deletedSecret[0], _id: deletedSecret[0].id, workspace: projectId, environment };
|
||||
return { ...deletedSecret[0], _id: deletedSecret[0].id, workspace: projectId, environment, secretPath: path };
|
||||
};
|
||||
|
||||
const getSecrets = async ({
|
||||
@ -641,7 +681,8 @@ export const secretServiceFactory = ({
|
||||
return {
|
||||
...importedSecrets[i].secrets[j],
|
||||
workspace: projectId,
|
||||
environment: importedSecrets[i].environment
|
||||
environment: importedSecrets[i].environment,
|
||||
secretPath: importedSecrets[i].secretPath
|
||||
};
|
||||
}
|
||||
}
|
||||
@ -649,7 +690,7 @@ export const secretServiceFactory = ({
|
||||
}
|
||||
if (!secret) throw new BadRequestError({ message: "Secret not found" });
|
||||
|
||||
return { ...secret, workspace: projectId, environment };
|
||||
return { ...secret, workspace: projectId, environment, secretPath: path };
|
||||
};
|
||||
|
||||
const createManySecret = async ({
|
||||
@ -700,6 +741,7 @@ export const secretServiceFactory = ({
|
||||
const tags = tagIds.length ? await secretTagDAL.findManyTagsById(projectId, tagIds) : [];
|
||||
if (tags.length !== tagIds.length) throw new BadRequestError({ message: "Tag not found" });
|
||||
|
||||
const references = await getSecretReference(projectId);
|
||||
const newSecrets = await secretDAL.transaction(async (tx) =>
|
||||
fnSecretBulkInsert({
|
||||
inputSecrets: inputSecrets.map(({ secretName, ...el }) => ({
|
||||
@ -708,7 +750,12 @@ export const secretServiceFactory = ({
|
||||
secretBlindIndex: keyName2BlindIndex[secretName],
|
||||
type: SecretType.Shared,
|
||||
algorithm: SecretEncryptionAlgo.AES_256_GCM,
|
||||
keyEncoding: SecretKeyEncoding.UTF8
|
||||
keyEncoding: SecretKeyEncoding.UTF8,
|
||||
references: references({
|
||||
ciphertext: el.secretValueCiphertext,
|
||||
iv: el.secretValueIV,
|
||||
tag: el.secretValueTag
|
||||
})
|
||||
})),
|
||||
folderId,
|
||||
secretDAL,
|
||||
@ -783,6 +830,8 @@ export const secretServiceFactory = ({
|
||||
const tagIds = inputSecrets.flatMap(({ tags = [] }) => tags);
|
||||
const tags = tagIds.length ? await secretTagDAL.findManyTagsById(projectId, tagIds) : [];
|
||||
if (tagIds.length !== tags.length) throw new BadRequestError({ message: "Tag not found" });
|
||||
|
||||
const references = await getSecretReference(projectId);
|
||||
const secrets = await secretDAL.transaction(async (tx) =>
|
||||
fnSecretBulkUpdate({
|
||||
folderId,
|
||||
@ -799,7 +848,15 @@ export const secretServiceFactory = ({
|
||||
? newKeyName2BlindIndex[newSecretName]
|
||||
: keyName2BlindIndex[secretName],
|
||||
algorithm: SecretEncryptionAlgo.AES_256_GCM,
|
||||
keyEncoding: SecretKeyEncoding.UTF8
|
||||
keyEncoding: SecretKeyEncoding.UTF8,
|
||||
references:
|
||||
el.secretValueIV && el.secretValueTag
|
||||
? references({
|
||||
ciphertext: el.secretValueCiphertext,
|
||||
iv: el.secretValueIV,
|
||||
tag: el.secretValueTag
|
||||
})
|
||||
: undefined
|
||||
}
|
||||
})),
|
||||
secretDAL,
|
||||
@ -924,34 +981,40 @@ export const secretServiceFactory = ({
|
||||
});
|
||||
|
||||
const batchSecretsExpand = async (
|
||||
secretBatch: {
|
||||
secretKey: string;
|
||||
secretValue: string;
|
||||
secretComment?: string;
|
||||
}[]
|
||||
secretBatch: { secretKey: string; secretValue: string; secretComment?: string; secretPath: string }[]
|
||||
) => {
|
||||
const secretRecord: Record<
|
||||
string,
|
||||
{
|
||||
value: string;
|
||||
comment?: string;
|
||||
skipMultilineEncoding?: boolean;
|
||||
// Group secrets by secretPath
|
||||
const secretsByPath: Record<string, { secretKey: string; secretValue: string; secretComment?: string }[]> = {};
|
||||
|
||||
secretBatch.forEach((secret) => {
|
||||
if (!secretsByPath[secret.secretPath]) {
|
||||
secretsByPath[secret.secretPath] = [];
|
||||
}
|
||||
> = {};
|
||||
|
||||
secretBatch.forEach((decryptedSecret) => {
|
||||
secretRecord[decryptedSecret.secretKey] = {
|
||||
value: decryptedSecret.secretValue,
|
||||
comment: decryptedSecret.secretComment
|
||||
};
|
||||
secretsByPath[secret.secretPath].push(secret);
|
||||
});
|
||||
|
||||
await expandSecrets(secretRecord);
|
||||
// Expand secrets for each group
|
||||
for (const secPath in secretsByPath) {
|
||||
if (!Object.hasOwn(secretsByPath, secPath)) {
|
||||
// eslint-disable-next-line no-continue
|
||||
continue;
|
||||
}
|
||||
|
||||
secretBatch.forEach((decryptedSecret, index) => {
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
secretBatch[index].secretValue = secretRecord[decryptedSecret.secretKey].value;
|
||||
});
|
||||
const secretRecord: Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean }> = {};
|
||||
secretsByPath[secPath].forEach((decryptedSecret) => {
|
||||
secretRecord[decryptedSecret.secretKey] = {
|
||||
value: decryptedSecret.secretValue,
|
||||
comment: decryptedSecret.secretComment
|
||||
};
|
||||
});
|
||||
|
||||
await expandSecrets(secretRecord);
|
||||
|
||||
secretsByPath[secPath].forEach((decryptedSecret) => {
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
decryptedSecret.secretValue = secretRecord[decryptedSecret.secretKey].value;
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
// expand secrets
|
||||
@ -999,6 +1062,7 @@ export const secretServiceFactory = ({
|
||||
includeImports,
|
||||
version
|
||||
});
|
||||
|
||||
return decryptSecretRaw(secret, botKey);
|
||||
};
|
||||
|
||||
@ -1171,7 +1235,9 @@ export const secretServiceFactory = ({
|
||||
await snapshotService.performSnapshot(secrets[0].folderId);
|
||||
await secretQueueService.syncSecrets({ secretPath, projectId, environment });
|
||||
|
||||
return secrets.map((secret) => decryptSecretRaw({ ...secret, workspace: projectId, environment }, botKey));
|
||||
return secrets.map((secret) =>
|
||||
decryptSecretRaw({ ...secret, workspace: projectId, environment, secretPath }, botKey)
|
||||
);
|
||||
};
|
||||
|
||||
const updateManySecretsRaw = async ({
|
||||
@ -1223,7 +1289,9 @@ export const secretServiceFactory = ({
|
||||
await snapshotService.performSnapshot(secrets[0].folderId);
|
||||
await secretQueueService.syncSecrets({ secretPath, projectId, environment });
|
||||
|
||||
return secrets.map((secret) => decryptSecretRaw({ ...secret, workspace: projectId, environment }, botKey));
|
||||
return secrets.map((secret) =>
|
||||
decryptSecretRaw({ ...secret, workspace: projectId, environment, secretPath }, botKey)
|
||||
);
|
||||
};
|
||||
|
||||
const deleteManySecretsRaw = async ({
|
||||
@ -1257,7 +1325,9 @@ export const secretServiceFactory = ({
|
||||
await snapshotService.performSnapshot(secrets[0].folderId);
|
||||
await secretQueueService.syncSecrets({ secretPath, projectId, environment });
|
||||
|
||||
return secrets.map((secret) => decryptSecretRaw({ ...secret, workspace: projectId, environment }, botKey));
|
||||
return secrets.map((secret) =>
|
||||
decryptSecretRaw({ ...secret, workspace: projectId, environment, secretPath }, botKey)
|
||||
);
|
||||
};
|
||||
|
||||
const getSecretVersions = async ({
|
||||
@ -1488,6 +1558,52 @@ export const secretServiceFactory = ({
|
||||
};
|
||||
};
|
||||
|
||||
// this is a backfilling API for secret references
|
||||
// it goes through all the secret values and parses out any secret references
|
||||
// then populates the secret references used for syncing integrations
|
||||
const backfillSecretReferences = async ({
|
||||
projectId,
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod
|
||||
}: TBackFillSecretReferencesDTO) => {
|
||||
const { hasRole } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
if (!hasRole(ProjectMembershipRole.Admin))
|
||||
throw new BadRequestError({ message: "Only admins are allowed to take this action" });
|
||||
|
||||
const botKey = await projectBotService.getBotKey(projectId);
|
||||
if (!botKey)
|
||||
throw new BadRequestError({ message: "Please upgrade your project first", name: "bot_not_found_error" });
|
||||
|
||||
await secretDAL.transaction(async (tx) => {
|
||||
const secrets = await secretDAL.findAllProjectSecretValues(projectId, tx);
|
||||
await secretDAL.upsertSecretReferences(
|
||||
secrets.map(({ id, secretValueCiphertext, secretValueIV, secretValueTag }) => ({
|
||||
secretId: id,
|
||||
references: getAllNestedSecretReferences(
|
||||
decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secretValueCiphertext,
|
||||
iv: secretValueIV,
|
||||
tag: secretValueTag,
|
||||
key: botKey
|
||||
})
|
||||
)
|
||||
})),
|
||||
tx
|
||||
);
|
||||
});
|
||||
|
||||
return { message: "Successfully backfilled secret references" };
|
||||
};
|
||||
|
||||
return {
|
||||
attachTags,
|
||||
detachTags,
|
||||
@ -1508,6 +1624,7 @@ export const secretServiceFactory = ({
|
||||
updateManySecretsRaw,
|
||||
deleteManySecretsRaw,
|
||||
getSecretVersions,
|
||||
backfillSecretReferences,
|
||||
// external services function
|
||||
fnSecretBulkDelete,
|
||||
fnSecretBulkUpdate,
|
||||
|
@ -223,11 +223,13 @@ export type TGetSecretVersionsDTO = Omit<TProjectPermission, "projectId"> & {
|
||||
secretId: string;
|
||||
};
|
||||
|
||||
export type TSecretReference = { environment: string; secretPath: string };
|
||||
|
||||
export type TFnSecretBulkInsert = {
|
||||
folderId: string;
|
||||
tx?: Knex;
|
||||
inputSecrets: Array<Omit<TSecretsInsert, "folderId"> & { tags?: string[] }>;
|
||||
secretDAL: Pick<TSecretDALFactory, "insertMany">;
|
||||
inputSecrets: Array<Omit<TSecretsInsert, "folderId"> & { tags?: string[]; references?: TSecretReference[] }>;
|
||||
secretDAL: Pick<TSecretDALFactory, "insertMany" | "upsertSecretReferences">;
|
||||
secretVersionDAL: Pick<TSecretVersionDALFactory, "insertMany">;
|
||||
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecret">;
|
||||
secretVersionTagDAL: Pick<TSecretVersionTagDALFactory, "insertMany">;
|
||||
@ -236,8 +238,11 @@ export type TFnSecretBulkInsert = {
|
||||
export type TFnSecretBulkUpdate = {
|
||||
folderId: string;
|
||||
projectId: string;
|
||||
inputSecrets: { filter: Partial<TSecrets>; data: TSecretsUpdate & { tags?: string[] } }[];
|
||||
secretDAL: Pick<TSecretDALFactory, "bulkUpdate">;
|
||||
inputSecrets: {
|
||||
filter: Partial<TSecrets>;
|
||||
data: TSecretsUpdate & { tags?: string[]; references?: TSecretReference[] };
|
||||
}[];
|
||||
secretDAL: Pick<TSecretDALFactory, "bulkUpdate" | "upsertSecretReferences">;
|
||||
secretVersionDAL: Pick<TSecretVersionDALFactory, "insertMany">;
|
||||
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecret" | "deleteTagsManySecret">;
|
||||
secretVersionTagDAL: Pick<TSecretVersionTagDALFactory, "insertMany">;
|
||||
@ -294,6 +299,8 @@ export type TRemoveSecretReminderDTO = {
|
||||
repeatDays: number;
|
||||
};
|
||||
|
||||
export type TBackFillSecretReferencesDTO = TProjectPermission;
|
||||
|
||||
// ---
|
||||
|
||||
export type TCreateManySecretsRawFnFactory = {
|
||||
|
@ -91,6 +91,8 @@ services:
|
||||
- TELEMETRY_ENABLED=false
|
||||
volumes:
|
||||
- ./backend/src:/app/src
|
||||
extra_hosts:
|
||||
- "host.docker.internal:host-gateway"
|
||||
|
||||
frontend:
|
||||
container_name: infisical-dev-frontend
|
||||
@ -128,7 +130,7 @@ services:
|
||||
ports:
|
||||
- 1025:1025 # SMTP server
|
||||
- 8025:8025 # Web UI
|
||||
|
||||
|
||||
openldap: # note: more advanced configuration is available
|
||||
image: osixia/openldap:1.5.0
|
||||
restart: always
|
||||
|
@ -0,0 +1,4 @@
|
||||
---
|
||||
title: "Create Identity Membership"
|
||||
openapi: "POST /api/v2/workspace/{projectId}/identity-memberships/{identityId}"
|
||||
---
|
@ -0,0 +1,4 @@
|
||||
---
|
||||
title: "Get Identity by ID"
|
||||
openapi: "GET /api/v2/workspace/{projectId}/identity-memberships/{identityId}"
|
||||
---
|
@ -0,0 +1,4 @@
|
||||
---
|
||||
title: "Get By Username"
|
||||
openapi: "POST /api/v1/workspace/{workspaceId}/memberships/details"
|
||||
---
|
@ -1,4 +1,4 @@
|
||||
---
|
||||
title: "Invite Member"
|
||||
openapi: "POST /api/v2/workspace/{projectId}/memberships"
|
||||
---
|
||||
---
|
@ -0,0 +1,4 @@
|
||||
---
|
||||
title: "Revoke Access Token"
|
||||
openapi: "POST /api/v1/auth/token/revoke"
|
||||
---
|
@ -128,6 +128,12 @@ infisical export --template=<path to template>
|
||||
|
||||
</Accordion>
|
||||
|
||||
<Accordion title="--include-imports">
|
||||
By default, imported secrets are included; you can disable this by setting the option to `false`.
|
||||
|
||||
Default value: `true`
|
||||
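For instance, a minimal sketch (the flag is as documented above; any other flags you normally pass stay the same):

```bash
# Export secrets without pulling in values from secret imports
infisical export --include-imports=false
```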
</Accordion>
|
||||
|
||||
<Accordion title="--format">
|
||||
Format of the output file. Accepted values: `dotenv`, `dotenv-export`, `csv`, `json` and `yaml`
|
||||
|
||||
|
@ -126,6 +126,12 @@ $ infisical run -- npm run dev
|
||||
|
||||
</Accordion>
|
||||
|
||||
<Accordion title="--include-imports">
|
||||
By default, imported secrets are included; you can disable this by setting the option to `false`.
|
||||
|
||||
Default value: `true`
|
||||
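A minimal sketch, reusing the run command shown earlier on this page:

```bash
# Start the app without secrets coming from secret imports
infisical run --include-imports=false -- npm run dev
```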
</Accordion>
|
||||
|
||||
{" "}
|
||||
|
||||
<Accordion title="--env">
|
||||
|
@ -13,6 +13,7 @@ If none of the available stores work for you, you can try using the `file` store
|
||||
If you are still experiencing trouble, please seek support.
|
||||
|
||||
[Learn more about vault command](./commands/vault)
|
||||
|
||||
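A quick sketch of switching to the file store, assuming the `vault set` subcommand described in the linked command reference:

```bash
# Switch the CLI's credential store to the file-based vault (assumed subcommand; see the vault command docs)
infisical vault set file
```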
</Accordion>
|
||||
|
||||
<Accordion title="Can I fetch secrets with Infisical if I am offline?">
|
||||
|
@ -36,7 +36,7 @@ Initialize a new Node.js project with a default `package.json` file.
|
||||
npm init -y
|
||||
```
|
||||
|
||||
Install `express` and [infisical-node](https://github.com/Infisical/infisical-node), the client Node SDK for Infisical.
|
||||
Install `express` and [@infisical/sdk](https://www.npmjs.com/package/@infisical/sdk), the client Node SDK for Infisical.
|
||||
|
||||
```console
|
||||
npm install express @infisical/sdk
|
||||
|
@ -5,7 +5,7 @@ title: "Python"
|
||||
This guide demonstrates how to use Infisical to manage secrets for your Python stack from local development to production. It uses:
|
||||
|
||||
- Infisical (you can use [Infisical Cloud](https://app.infisical.com) or a [self-hosted instance of Infisical](https://infisical.com/docs/self-hosting/overview)) to store your secrets.
|
||||
- The [infisical-python](https://github.com/Infisical/sdk/tree/main/crates/infisical-py) Python client SDK to fetch secrets back to your Python application on demand.
|
||||
- The [infisical-python](https://pypi.org/project/infisical-python/) Python client SDK to fetch secrets back to your Python application on demand.
|
||||
|
||||
## Project Setup
|
||||
|
||||
@ -36,23 +36,27 @@ python3 -m venv env
|
||||
source env/bin/activate
|
||||
```
|
||||
|
||||
Install Flask and [infisical-python](https://github.com/Infisical/sdk/tree/main/crates/infisical-py), the client Python SDK for Infisical.
|
||||
Install Flask and [infisical-python](https://pypi.org/project/infisical-python/), the client Python SDK for Infisical.
|
||||
|
||||
```console
|
||||
pip install Flask infisical-python
|
||||
pip install flask infisical-python
|
||||
```
|
||||
|
||||
Finally, create an `app.py` file containing the application code.
|
||||
|
||||
```py
|
||||
from flask import Flask
|
||||
from infisical_client import ClientSettings, InfisicalClient, GetSecretOptions
|
||||
from infisical_client import ClientSettings, InfisicalClient, GetSecretOptions, AuthenticationOptions, UniversalAuthMethod
|
||||
|
||||
app = Flask(__name__)
|
||||
|
||||
client = InfisicalClient(ClientSettings(
|
||||
client_id="MACHINE_IDENTITY_CLIENT_ID",
|
||||
client_secret="MACHINE_IDENTITY_CLIENT_SECRET",
|
||||
auth=AuthenticationOptions(
|
||||
universal_auth=UniversalAuthMethod(
|
||||
client_id="CLIENT_ID",
|
||||
client_secret="CLIENT_SECRET",
|
||||
)
|
||||
)
|
||||
))
|
||||
|
||||
@app.route("/")
|
||||
|
@ -39,7 +39,7 @@ then Infisical returns a short-lived access token that can be used to make authe
|
||||
To be more specific:
|
||||
|
||||
1. The client IAM principal signs a `GetCallerIdentity` query using the [AWS Signature v4 algorithm](https://docs.aws.amazon.com/IAM/latest/UserGuide/create-signed-request.html); this is done using the credentials from the AWS environment where the IAM principal is running.
|
||||
2. The client sends the signed query data to Infisical including the request method, request body, and request headers.
|
||||
2. The client sends the signed query data to Infisical including the request method, request body, and request headers at the `/api/v1/auth/aws-auth/login` endpoint.
|
||||
3. Infisical reconstructs the query and sends it to AWS STS API via the [sts:GetCallerIdentity](https://docs.aws.amazon.com/STS/latest/APIReference/API_GetCallerIdentity.html) method for verification and obtains the identity associated with the IAM principal.
|
||||
4. Infisical checks the identity's properties against set criteria such **Allowed Principal ARNs**.
|
||||
5. If all is well, Infisical returns a short-lived access token that the IAM principal can use to make authenticated requests to the Infisical API.
|
||||
@ -83,7 +83,7 @@ access the Infisical API using the AWS Auth authentication method.
|
||||
|
||||
- Allowed Principal ARNs: A comma-separated list of trusted IAM principal ARNs that are allowed to authenticate with Infisical. The values should take one of three forms: `arn:aws:iam::123456789012:user/MyUserName`, `arn:aws:iam::123456789012:role/MyRoleName`, or `arn:aws:iam::123456789012:*`. Using a wildcard in this case allows any IAM principal in the account `123456789012` to authenticate with Infisical under the identity.
|
||||
- Allowed Account IDs: A comma-separated list of trusted AWS account IDs that are allowed to authenticate with Infisical.
|
||||
- STS Endpoint (default is `https://sts.amazonaws.com/`): The endpoint URL for the AWS STS API. This is useful for AWS GovCloud or other AWS regions that have different STS endpoints.
|
||||
- STS Endpoint (default is `https://sts.amazonaws.com/`): The endpoint URL for the AWS STS API. This value should be adjusted based on the AWS region you are operating in (e.g. `https://sts.us-east-1.amazonaws.com/`); refer to the list of regional STS endpoints [here](https://docs.aws.amazon.com/general/latest/gr/sts.html).
|
||||
- Access Token TTL (default is `2592000` equivalent to 30 days): The lifetime for an acccess token in seconds. This value will be referenced at renewal time.
|
||||
- Access Token Max TTL (default is `2592000` equivalent to 30 days): The maximum lifetime for an acccess token in seconds. This value will be referenced at renewal time.
|
||||
- Access Token Max Number of Uses (default is `0`): The maximum number of times that an access token can be used; a value of `0` implies infinite number of uses.
|
||||
@ -264,6 +264,9 @@ access the Infisical API using the AWS Auth authentication method.
|
||||
request.headers["X-Amz-Date"] = AWS.util.date.iso8601(new Date()).replace(/[:-]|\.\d{3}/g, "");
|
||||
request.body = iamRequestBody;
|
||||
request.headers["Content-Length"] = Buffer.byteLength(iamRequestBody);
|
||||
|
||||
const signer = new AWS.Signers.V4(request, "sts");
|
||||
signer.addAuthorization(AWS.config.credentials, new Date());
|
||||
````
|
||||
|
||||
#### Sample request
|
||||
|
@ -46,7 +46,7 @@ then Infisical returns a short-lived access token that can be used to make authe
|
||||
To be more specific:
|
||||
|
||||
1. The client running on a GCP service obtains an [ID token](https://cloud.google.com/docs/authentication/get-id-token) constituting the identity for a GCP resource such as a GCE instance or Cloud Function; this is a unique JWT token that includes details about the instance as well as Google's [RS256 signature](https://datatracker.ietf.org/doc/html/rfc7518#section-3.3).
|
||||
2. The client sends the ID token to Infisical.
|
||||
2. The client sends the ID token to Infisical at the `/api/v1/auth/gcp-auth/login` endpoint.
|
||||
3. Infisical verifies the token against Google's [public OAuth2 certificates](https://www.googleapis.com/oauth2/v3/certs).
|
||||
4. Infisical checks if the entity behind the ID token is allowed to authenticate with Infisical based on set criteria such as **Allowed Service Account Emails**.
|
||||
5. If all is well, Infisical returns a short-lived access token that the client can use to make authenticated requests to the Infisical API.
|
||||
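As a rough sketch of step 2, the ID token can be exchanged for an Infisical access token with a plain HTTP call. The `identityId` and `jwt` field names below are assumptions based on the other identity auth methods, so confirm them against the API reference before relying on them:

```bash
# Hedged sketch: exchange a GCP ID token for an Infisical access token
curl -s -X POST "https://app.infisical.com/api/v1/auth/gcp-auth/login" \
  -H "Content-Type: application/json" \
  -d '{"identityId": "<identityId>", "jwt": "<gcp-id-token>"}'
```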
@ -123,7 +123,7 @@ access the Infisical API using the GCP ID Token authentication method.
|
||||
<CodeGroup>
|
||||
```bash curl
|
||||
curl -H "Metadata-Flavor: Google" \
|
||||
'http://metadata/computeMetadata/v1/instance/service-accounts/default/identity?audience=<identityId>'
|
||||
'http://metadata/computeMetadata/v1/instance/service-accounts/default/identity?audience=<identityId>&format=full'
|
||||
```
|
||||
</CodeGroup>
|
||||
|
||||
@ -208,7 +208,7 @@ then Infisical returns a short-lived access token that can be used to make authe
|
||||
To be more specific:
|
||||
|
||||
1. The client generates a signed JWT token using the `projects.serviceAccounts.signJwt` [API method](https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signJwt); this is done using the service account credentials associated with the client.
|
||||
2. The client sends the signed JWT token to Infisical.
|
||||
2. The client sends the signed JWT token to Infisical at the `/api/v1/auth/gcp-auth/login` endpoint.
|
||||
3. Infisical verifies the signed JWT token.
|
||||
4. Infisical checks if the service account behind the JWT token is allowed to authenticate with Infisical based **Allowed Service Account Emails**.
|
||||
5. If all is well, Infisical returns a short-lived access token that the client can use to make authenticated requests to the Infisical API.
|
||||
|
247
docs/documentation/platform/identities/kubernetes-auth.mdx
Normal file
@ -0,0 +1,247 @@
|
||||
---
|
||||
title: Kubernetes Auth
|
||||
description: "Learn how to authenticate with Infisical in Kubernetes"
|
||||
---
|
||||
|
||||
**Kubernetes Auth** is a Kubernetes-native authentication method for applications (e.g. pods) to access Infisical.
|
||||
|
||||
## Diagram
|
||||
|
||||
The following sequence diagram illustrates the Kubernetes Auth workflow for authenticating applications running in pods with Infisical.
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant Pod as Pod
|
||||
participant Infis as Infisical
|
||||
participant KubernetesServer as K8s API Server
|
||||
|
||||
Note over Pod: Step 1: Service Account JWT Token Retrieval
|
||||
|
||||
Note over Pod,Infis: Step 2: JWT Token Login Operation
|
||||
Pod->>Infis: Send JWT token to /api/v1/auth/kubernetes-auth/login
|
||||
Infis->>KubernetesServer: Forward JWT token for validation
|
||||
KubernetesServer-->>Infis: Return identity info for JWT
|
||||
|
||||
Note over Infis: Step 3: Identity Property Verification
|
||||
Infis->>Pod: Return short-lived access token
|
||||
|
||||
Note over Pod,Infis: Step 4: Access Infisical API with Token
|
||||
Pod->>Infis: Make authenticated requests using the short-lived access token
|
||||
```
|
||||
|
||||
## Concept
|
||||
|
||||
At a high-level, Infisical authenticates an application in Kubernetes by verifying its identity and checking that it meets specific requirements (e.g. it is bound to an allowed service account) at the `/api/v1/auth/kubernetes-auth/login` endpoint. If successful,
|
||||
then Infisical returns a short-lived access token that can be used to make authenticated requests to the Infisical API.
|
||||
|
||||
To be more specific:
|
||||
|
||||
1. The application deployed on Kubernetes retrieves its [service account credential](https://kubernetes.io/docs/tasks/configure-pod-container/configure-service-account/#opt-out-of-api-credential-automounting), which is a JWT token mounted at the `/var/run/secrets/kubernetes.io/serviceaccount/token` path inside the pod.
|
||||
2. The application sends the JWT token to Infisical at the `/api/v1/auth/kubernetes-auth/login` endpoint, after which Infisical forwards the JWT token to the Kubernetes API Server via the [TokenReview API](https://kubernetes.io/docs/reference/kubernetes-api/authentication-resources/token-review-v1/) to verify it and obtain the service account information associated with it. Infisical authenticates with the TokenReview API using a long-lived service account JWT token of its own (referred to from here on as the token reviewer JWT token).
|
||||
3. Infisical checks the service account properties against set criteria such as **Allowed Service Account Names** and **Allowed Namespaces**.
|
||||
4. If all is well, Infisical returns a short-lived access token that the application can use to make authenticated requests to the Infisical API.
|
||||
|
||||
<Note>
|
||||
We recommend using one of Infisical's clients like SDKs or the Infisical Agent
|
||||
to authenticate with Infisical using Kubernetes Auth as they handle the
|
||||
authentication process including service account credential retrieval for you.
|
||||
</Note>
|
||||
|
||||
## Guide
|
||||
|
||||
In the following steps, we explore how to create and use identities for your applications in Kubernetes to access the Infisical API using the Kubernetes Auth authentication method.
|
||||
|
||||
<Steps>
|
||||
<Step title="Obtaining the token reviewer JWT for Infisical">
|
||||
1.1. Start by creating a service account in your Kubernetes cluster that will be used by Infisical to authenticate with the Kubernetes API Server.
|
||||
|
||||
```yaml infisical-service-account.yaml
|
||||
apiVersion: v1
|
||||
kind: ServiceAccount
|
||||
metadata:
|
||||
name: infisical-auth
|
||||
namespace: default
|
||||
|
||||
```
|
||||
|
||||
```
|
||||
kubectl apply -f infisical-service-account.yaml
|
||||
```
|
||||
|
||||
1.2. Bind the service account to the `system:auth-delegator` cluster role. As described [here](https://kubernetes.io/docs/reference/access-authn-authz/rbac/#other-component-roles), this role allows delegated authentication and authorization checks, specifically for Infisical to access the [TokenReview API](https://kubernetes.io/docs/reference/kubernetes-api/authentication-resources/token-review-v1/). You can apply the following configuration file:
|
||||
|
||||
```yaml cluster-role-binding.yaml
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: ClusterRoleBinding
|
||||
metadata:
|
||||
name: role-tokenreview-binding
|
||||
namespace: default
|
||||
roleRef:
|
||||
apiGroup: rbac.authorization.k8s.io
|
||||
kind: ClusterRole
|
||||
name: system:auth-delegator
|
||||
subjects:
|
||||
- kind: ServiceAccount
|
||||
name: infisical-auth
|
||||
namespace: default
|
||||
```
|
||||
|
||||
```
|
||||
kubectl apply -f cluster-role-binding.yaml
|
||||
```
|
||||
|
||||
1.3. Next, create a long-lived service account JWT token (i.e. the token reviewer JWT token) for the service account using this configuration file for a new `Secret` resource:
|
||||
|
||||
```yaml service-account-token.yaml
|
||||
apiVersion: v1
|
||||
kind: Secret
|
||||
type: kubernetes.io/service-account-token
|
||||
metadata:
|
||||
name: infisical-auth-token
|
||||
annotations:
|
||||
kubernetes.io/service-account.name: "infisical-auth"
|
||||
```
|
||||
|
||||
|
||||
```
|
||||
kubectl apply -f service-account-token.yaml
|
||||
```
|
||||
|
||||
1.4. Link the secret in step 1.3 to the service account in step 1.1:
|
||||
|
||||
```bash
|
||||
kubectl patch serviceaccount infisical-auth -p '{"secrets": [{"name": "infisical-auth-token"}]}' -n default
|
||||
```
|
||||
|
||||
1.5. Finally, retrieve the token reviewer JWT token from the secret.
|
||||
|
||||
```bash
|
||||
kubectl get secret infisical-auth-token -n default -o=jsonpath='{.data.token}' | base64 --decode
|
||||
```
|
||||
|
||||
Keep this JWT token handy as you will need it for the **Token Reviewer JWT** field when configuring the Kubernetes Auth authentication method for the identity in step 2.
|
||||
|
||||
</Step>
|
||||
|
||||
<Step title="Creating an identity">
|
||||
To create an identity, head to your Organization Settings > Access Control > Machine Identities and press **Create identity**.
|
||||
|
||||

|
||||
|
||||
When creating an identity, you specify an organization level [role](/documentation/platform/role-based-access-controls) for it to assume; you can configure roles in Organization Settings > Access Control > Organization Roles.
|
||||
|
||||

|
||||
|
||||
Now input a few details for your new identity. Here's some guidance for each field:
|
||||
|
||||
- Name (required): A friendly name for the identity.
|
||||
- Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to.
|
||||
|
||||
Once you've created an identity, you'll be prompted to configure the authentication method for it. Here, select **Kubernetes Auth**.
|
||||
|
||||

|
||||
|
||||
Here's some more guidance on each field:
|
||||
|
||||
- Kubernetes Host / Base Kubernetes API URL: The host string, host:port pair, or URL to the base of the Kubernetes API server. This can usually be obtained by running `kubectl cluster-info` (see the sketch after this list).
|
||||
- Token Reviewer JWT: A long-lived service account JWT token for Infisical to access the [TokenReview API](https://kubernetes.io/docs/reference/kubernetes-api/authentication-resources/token-review-v1/) to validate other service account JWT tokens submitted by applications/pods. This is the JWT token obtained from step 1.5.
|
||||
- Allowed Service Account Names: A comma-separated list of trusted service account names that are allowed to authenticate with Infisical.
|
||||
- Allowed Namespaces: A comma-separated list of trusted namespaces that service accounts must belong to in order to authenticate with Infisical.
|
||||
- Allowed Audience: An optional audience claim that the service account JWT token must have to authenticate with Infisical.
|
||||
- CA Certificate: The PEM-encoded CA cert for the Kubernetes API server. This is used by the TLS client for secure communication with the Kubernetes API server.
|
||||
- Access Token TTL (default is `2592000` equivalent to 30 days): The lifetime for an access token in seconds. This value will be referenced at renewal time.
|
||||
- Access Token Max TTL (default is `2592000` equivalent to 30 days): The maximum lifetime for an access token in seconds. This value will be referenced at renewal time.
|
||||
- Access Token Max Number of Uses (default is `0`): The maximum number of times that an access token can be used; a value of `0` implies infinite number of uses.
|
||||
- Access Token Trusted IPs: The IPs or CIDR ranges that access tokens can be used from. By default, each token is given the `0.0.0.0/0`, allowing usage from any network address.
|
||||
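A quick sketch for grabbing the **Kubernetes Host** value mentioned in the list above:

```bash
# Print the cluster's API server address; use the control plane URL as the Kubernetes Host value
kubectl cluster-info
# Typical output: "Kubernetes control plane is running at https://<host>:<port>"
```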
|
||||
</Step>
|
||||
<Step title="Adding an identity to a project">
|
||||
To enable the identity to access project-level resources such as secrets within a specific project, you should add it to that project.
|
||||
|
||||
To do this, head over to the project you want to add the identity to and go to Project Settings > Access Control > Machine Identities and press **Add identity**.
|
||||
|
||||
Next, select the identity you want to add to the project and the project level role you want to allow it to assume. The project role assigned will determine what project level resources this identity can have access to.
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
</Step>
|
||||
<Step title="Accessing the Infisical API with the identity">
|
||||
To access the Infisical API as the identity, you should first make sure that the pod running your application is bound to a service account specified in the **Allowed Service Account Names** field of the identity's Kubernetes Auth authentication method configuration in step 2.
|
||||
|
||||
Once bound, the pod receives an automatically mounted service account credential, which is a JWT token at the `/var/run/secrets/kubernetes.io/serviceaccount/token` path. This token should be used to authenticate with Infisical at the `/api/v1/auth/kubernetes-auth/login` endpoint.
|
||||
|
||||
For information on how to configure service accounts for pods, refer to the guide [here](https://kubernetes.io/docs/tasks/configure-pod-container/configure-service-account/).
|
||||
|
||||
We provide a code example below of how you might retrieve the JWT token and use it to authenticate with Infisical to gain access to the [Infisical API](/api-reference/overview/introduction).
|
||||
<Accordion
|
||||
title="Sample code for inside an application"
|
||||
>
|
||||
The example shown uses Node.js, but you can use any other language to retrieve the service account JWT token and use it to authenticate with Infisical.
|
||||
|
||||
```javascript
const fs = require("fs");
const axios = require("axios");

(async () => {
  try {
    // Read the service account JWT token mounted into the pod
    const tokenPath = "/var/run/secrets/kubernetes.io/serviceaccount/token";
    const jwtToken = fs.readFileSync(tokenPath, "utf8");

    const infisicalUrl = "https://app.infisical.com"; // or your self-hosted Infisical URL
    const identityId = "<your-identity-id>";

    // Exchange the service account JWT for a short-lived Infisical access token
    const { data } = await axios.post(
      `${infisicalUrl}/api/v1/auth/kubernetes-auth/login`,
      {
        identityId,
        jwt: jwtToken
      }
    );

    console.log("result data: ", data); // access token here
  } catch (err) {
    console.error(err);
  }
})();
```
|
||||
</Accordion>
|
||||
|
||||
<Tip>
|
||||
We recommend using one of Infisical's clients like SDKs or the Infisical Agent to authenticate with Infisical using Kubernetes Auth as they handle the authentication process including service account credential retrieval for you.
|
||||
</Tip>
|
||||
|
||||
<Note>
|
||||
Each identity access token has a time-to-live (TTL), which you can infer from the response of the login operation;
|
||||
the default TTL is `7200` seconds which can be adjusted.
|
||||
|
||||
If an identity access token exceeds its max TTL, it can no longer authenticate with the Infisical API. In this case,
|
||||
a new access token should be obtained by performing another login operation.
|
||||
</Note>
|
||||
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
**FAQ**
|
||||
|
||||
<AccordionGroup>
|
||||
<Accordion title="Why is the Infisical API rejecting my service account JWT token?">
|
||||
There are a few reasons for why this might happen:
|
||||
- The Kubernetes Auth authentication method configuration is invalid.
|
||||
- The service account JWT token has expired, is malformed, or is otherwise invalid.
|
||||
- The service account associated with the JWT token does not meet the criteria set forth in the Kubernetes Auth authentication method configuration such as **Allowed Service Account Names** and **Allowed Namespaces**.
|
||||
</Accordion>
|
||||
<Accordion title="Why is the Infisical API rejecting my access token?">
|
||||
There are a few reasons for why this might happen:
|
||||
|
||||
- The access token has expired.
|
||||
- The identity is insufficiently permissioned to interact with the resources you wish to access.
|
||||
- The client access token is being used from an untrusted IP.
|
||||
</Accordion>
|
||||
<Accordion title="What is access token renewal and TTL/Max TTL?">
|
||||
An identity access token can have a time-to-live (TTL), or incremental lifetime, after which it expires.
|
||||
|
||||
In certain cases, you may want to extend the lifespan of an access token; to do so, you must set a max TTL parameter.
|
||||
|
||||
A token can be renewed any number of times, and each renewal extends the token's lifetime by increments of the access token TTL.
|
||||
Regardless of how frequently an access token is renewed, its lifespan remains bound to the maximum TTL determined at its creation.
|
||||
|
||||
</Accordion>
|
||||
</AccordionGroup>
|
@ -7,7 +7,7 @@ description: "Learn how to use Machine Identities to programmatically interact w
|
||||
|
||||
An Infisical machine identity is an entity that represents a workload or application that requires access to various resources in Infisical. This is conceptually similar to an IAM user in AWS or a service account in Google Cloud Platform (GCP).
|
||||
|
||||
Each identity must authenticate with the Infisical API using a supported authentication method like [Universal Auth](/documentation/platform/identities/universal-auth), [AWS Auth](/documentation/platform/identities/aws-auth), or [GCP Auth](/documentation/platform/identities/gcp-auth) to get back a short-lived access token to be used in subsequent requests.
|
||||
Each identity must authenticate with the Infisical API using a supported authentication method like [Universal Auth](/documentation/platform/identities/universal-auth), [Kubernetes Auth](/documentation/platform/identities/kubernetes-auth), [AWS Auth](/documentation/platform/identities/aws-auth), or [GCP Auth](/documentation/platform/identities/gcp-auth) to get back a short-lived access token to be used in subsequent requests.
|
||||
|
||||

|
||||
|
||||
@ -38,6 +38,7 @@ Machine Identity support for the rest of the clients is planned to be released i
|
||||
To interact with various resources in Infisical, Machine Identities are able to authenticate using:
|
||||
|
||||
- [Universal Auth](/documentation/platform/identities/universal-auth): A platform-agnostic authentication method that can be configured on an identity, suitable for authenticating from any platform/environment.
|
||||
- [Kubernetes Auth](/documentation/platform/identities/kubernetes-auth): A Kubernetes-native authentication method for applications (e.g. pods) to authenticate with Infisical.
|
||||
- [AWS Auth](/documentation/platform/identities/aws-auth): An AWS-native authentication method for IAM principals like EC2 instances or Lambda functions to authenticate with Infisical.
|
||||
- [GCP Auth](/documentation/platform/identities/gcp-auth): A GCP-native authentication method for GCP resources (e.g. Compute Engine, App Engine, Cloud Run, Google Kubernetes Engine, IAM service accounts, etc.) to authenticate with Infisical.
|
||||
|
||||
|
@ -31,7 +31,7 @@ then Infisical returns a short-lived access token that can be used to make authe
|
||||
|
||||
To be more specific:
|
||||
|
||||
1. The client submits a **Client ID** and **Client Secret** to Infisical.
|
||||
1. The client submits a **Client ID** and **Client Secret** to Infisical at the `/api/v1/auth/universal-auth/login` endpoint.
|
||||
2. Infisical verifies the credential pair.
|
||||
3. If all is well, Infisical returns a short-lived access token that the client can use to make authenticated requests to the Infisical API.
|
||||
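As a minimal sketch of step 1, the exchange can be done with a single HTTP call; the `clientId`/`clientSecret` body field names below are assumptions, so confirm them against the API reference:

```bash
# Hedged sketch: exchange Universal Auth credentials for a short-lived access token
curl -s -X POST "https://app.infisical.com/api/v1/auth/universal-auth/login" \
  -H "Content-Type: application/json" \
  -d '{"clientId": "<CLIENT_ID>", "clientSecret": "<CLIENT_SECRET>"}'
```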
|
||||
|
@ -3,7 +3,7 @@ title: "Secret Versioning"
|
||||
description: "Learn how secret versioning works in Infisical."
|
||||
---
|
||||
|
||||
Every time a secret change is persformed, a new version of the same secret is created.
|
||||
Every time a secret change is performed, a new version of the same secret is created.
|
||||
|
||||
Such versions can be accessed visually by opening up the [secret sidebar](/documentation/platform/project#drawer) (as seen below) or [retrieved via API](/api-reference/endpoints/secrets/read)
|
||||
by specifying the `version` query parameter.
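For example, a hedged sketch of fetching an older version over the API; the exact route and query parameter names should be taken from the linked API reference, as the ones below are assumptions:

```bash
# Hedged sketch: read version 2 of a secret (route and parameters assumed; see the API reference)
curl -s "https://app.infisical.com/api/v3/secrets/raw/DB_PASSWORD?workspaceId=<projectId>&environment=dev&version=2" \
  -H "Authorization: Bearer <access-token>"
```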
|
||||
|
BIN
docs/images/integrations/jenkins/plugin/add-infisical-secret.png
Normal file
After Width: | Height: | Size: 98 KiB |
BIN
docs/images/integrations/jenkins/plugin/install-plugin.png
Normal file
After Width: | Height: | Size: 60 KiB |
After Width: | Height: | Size: 154 KiB |
After Width: | Height: | Size: 96 KiB |
BIN
docs/images/integrations/jenkins/plugin/plugin-checked.png
Normal file
After Width: | Height: | Size: 210 KiB |
After Width: | Height: | Size: 194 KiB |
After Width: | Height: | Size: 484 KiB |
@ -1,5 +1,5 @@
|
||||
---
|
||||
title: "Jenkins"
|
||||
title: "Jenkins Plugin"
|
||||
description: "How to effectively and securely manage secrets in Jenkins using Infisical"
|
||||
---
|
||||
|
||||
@ -18,144 +18,135 @@ Prerequisites:
|
||||
|
||||
<Tabs>
|
||||
|
||||
<Tab title="Machine Identity (Recommended)">
|
||||
## Add Infisical Machine Identity to Jenkins
|
||||
<Tab title="Using Plugin with Machine Identities (Recommended)">
|
||||
|
||||
## Jenkins Infisical Plugin
|
||||
|
||||
After setting up your project in Infisical and installing the Infisical CLI to the environment where your Jenkins builds will run, you will need to add the Infisical Machine Identity to Jenkins.
|
||||
This plugin adds a build wrapper to set environment variables from [Infisical](https://infisical.com). Secrets are generally masked in the build log, so you can't accidentally print them.
|
||||
|
||||
To generate a Infisical machine identity, follow the guide [here](/documentation/platform/identities/machine-identities).
|
||||
Once you have generated the token, navigate to **Manage Jenkins > Manage Credentials** in your Jenkins instance.
|
||||
## Installation
|
||||
|
||||

|
||||
To install the plugin, navigate to `Manage Jenkins -> Plugins -> Available plugins` and search for `Infisical`. Install the plugin and restart Jenkins.
|
||||
|
||||
Click on the credential store you want to store the Infisical Machine Identity in. In this case, we're using the default Jenkins global store.
|
||||

|
||||
|
||||
<Info>
|
||||
Each of your projects will have a different `INFISICAL_TOKEN`.
|
||||
As a result, it may make sense to spread these out into separate credential domains depending on your use case.
|
||||
</Info>
|
||||
## Infisical Authentication
|
||||
|
||||

|
||||
Authenticating with Infisical is done through the use of [Machine Identities](https://infisical.com/docs/documentation/platform/identities/machine-identities).
|
||||
Currently the Jenkins plugin only supports [Universal Auth](https://infisical.com/docs/documentation/platform/identities/universal-auth) for authentication. More methods will be added soon.
|
||||
|
||||
Now, click Add Credentials.
|
||||
|
||||

|
||||
|
||||
Choose **Secret text** for the **Kind** option from the dropdown list and enter the Infisical Service Token in the **Secret** field.
|
||||
Although the **ID** can be any value, we'll set it to `infisical-machine-identity-client-id` and `infisical-machine-identity-client-secret` for the sake of this guide.
|
||||
The description is optional and can be any text you prefer.
|
||||
### How does Universal Auth work?
|
||||
To use Universal Auth, you'll need to create a new Credential _(Infisical Universal Auth Credential)_. The credential should contain your Universal Auth client ID, and your Universal Auth client secret.
|
||||
Please [read more here](https://infisical.com/docs/documentation/platform/identities/universal-auth) on how to set up a Machine Identity to use universal auth.
|
||||
|
||||
|
||||

|
||||

|
||||
### Creating a Universal Auth credential
|
||||
|
||||
When you're done, you should see two credentials similar to the one below:
|
||||
Creating a universal auth credential inside Jenkins is very straightforward.
|
||||
|
||||

|
||||
Simply navigate to<br/>
|
||||
`Dashboard -> Manage Jenkins -> Credentials -> System -> Global credentials (unrestricted)`.
|
||||
|
||||
Press the `Add Credentials` button and select `Infisical Universal Auth Credential` in the `Kind` field.
|
||||
|
||||
## Use Infisical in a Freestyle Project
|
||||
The `ID` and `Description` fields don't matter much in this case, as they aren't read anywhere else. The description field will be displayed as the credential name during the plugin configuration.
|
||||
|
||||
To fetch secrets with Infisical in a Freestyle Project job, you'll need to expose the credential you created above as an environment variable to the Infisical CLI.
|
||||
To do so, first click **New Item** from the dashboard navigation sidebar:
|
||||
|
||||

|
||||
|
||||
Enter the name of the job, choose the **Freestyle Project** option, and click **OK**.
|
||||
|
||||

|
||||
|
||||
Scroll down to the **Build Environment** section and enable the **Use secret text(s) or file(s)** option. Then click **Add** under the **Bindings** section and choose **Secret text** from the dropdown menu.
|
||||
|
||||

|
||||
|
||||
Enter `INFISICAL_MACHINE_IDENTITY_CLIENT_ID` in the **Variable** field for the client ID, and `INFISICAL_MACHINE_IDENTITY_CLIENT_SECRET` for the client secret. Then click the **Specific credentials** option from the Credentials section and select the credentials you created earlier.
|
||||
In this case, we saved it as `Infisical Machine Identity Client ID` and `Infisical Machine Identity Client Secret` so we'll choose those from the dropdown menu.
|
||||
|
||||
Make sure to add bindings for both the client ID and the client secret.
|
||||
|
||||

|
||||
|
||||
Scroll down to the **Build** section and choose **Execute shell** from the **Add build step** menu.
|
||||
|
||||

|
||||
|
||||
In the command field, you can now use the Infisical CLI to fetch secrets.
|
||||
The example command below will print the secrets using the service token passed as a credential. When done, click **Save**.
|
||||
|
||||
```bash
|
||||
export INFISICAL_TOKEN=$(infisical login --method=universal-auth --client-id=$INFISICAL_MACHINE_IDENTITY_CLIENT_ID --client-secret=$INFISICAL_MACHINE_IDENTITY_CLIENT_SECRET --silent --plain)
|
||||
infisical secrets --env dev --projectId=<your-project-id>
|
||||
```
|
||||
|
||||

|
||||
|
||||
Finally, click **Build Now** from the navigation sidebar to run your new job.
|
||||
|
||||
<Info>
|
||||
Running into issues? Join Infisical's [community Slack](https://infisical.com/slack) for quick support.
|
||||
</Info>
|
||||

|
||||
|
||||
|
||||
|
||||
## Use Infisical in a Jenkins Pipeline
|
||||
## Plugin Usage
|
||||
### Configuration
|
||||
|
||||
To fetch secrets using Infisical in a Pipeline job, you'll need to expose the Jenkins credential you created above as an environment variable.
|
||||
To do so, click **New Item** from the dashboard navigation sidebar:
|
||||
Configuration takes place on a job-level basis.
|
||||
|
||||

|
||||
Inside your job, you simply tick the `Infisical Plugin` checkbox under "Build Environment". After enabling the plugin, you'll see a new section appear where you'll have to configure the plugin.
|
||||
|
||||
Enter the name of the job, choose the **Pipeline** option, and click OK.
|
||||

|
||||
|
||||

|
||||
You'll be prompted with 4 options to fill:
|
||||
* Infisical URL
|
||||
* This defaults to https://app.infisical.com. This field is only relevant if you're running a managed or self-hosted instance. If you are using Infisical Cloud, leave this as-is, otherwise enter the URL of your Infisical instance.
|
||||
* Infisical Credential
|
||||
* This is where you select your Infisical credential to use for authentication. In the step above [Creating a Universal Auth credential](#creating-a-universal-auth-credential), you can read on how to configure the credential. Simply select the credential you have created for this field.
|
||||
* Infisical Project Slug
|
||||
* This is the slug of the project you wish to fetch secrets from. You can find this in your project settings on Infisical by clicking "Copy project slug".
|
||||
* Environment Slug
|
||||
* This is the slug of the environment to fetch secrets from. In most cases it's either `dev`, `staging`, or `prod`. You can however create custom environments in Infisical. If you are using custom environments, you need to enter the slug of the custom environment you wish to fetch secrets from.
|
||||
|
||||
That's it! Now you're ready to select which secrets you want to fetch into Jenkins.
|
||||
Do this by clicking the `Add an Infisical secret` button in the Jenkins UI, as seen in the screenshot below.
|
||||
|
||||
Scroll down to the **Pipeline** section, paste the following into the **Script** field, and click **Save**.
|
||||

|
||||
|
||||
```
|
||||
pipeline {
|
||||
agent any
|
||||
You need to select which secrets should be pulled into Jenkins.
|
||||
You start by specifying a [folder path from Infisical](https://infisical.com/docs/documentation/platform/folder#comparing-folders). The root path is simply `/`. You also need to select whether or not you want to [include imports](https://infisical.com/docs/documentation/platform/secret-reference#secret-imports). Now you can add the secret keys that you want to pull from Infisical into Jenkins. If you want to add multiple secrets, press "Add key/value pair".
|
||||
|
||||
environment {
|
||||
MACHINE_IDENTITY_CLIENT_ID = credentials('infisical-machine-identity-client-id')
|
||||
MACHINE_IDENTITY_CLIENT_SECRET = credentials('infisical-machine-identity-client-secret')
|
||||
If you wish to pull secrets from multiple paths, you can press the "Add an Infisical secret" button at the bottom, and configure a new set of secrets to pull.
|
||||
|
||||
}
|
||||
|
||||
stages {
|
||||
stage('Run Infisical') {
|
||||
steps {
|
||||
sh("export INFISICAL_TOKEN=$(infisical login --method=universal-auth --client-id=${MACHINE_IDENTITY_CLIENT_ID} --client-secret=${MACHINE_IDENTITY_CLIENT_SECRET} --silent --plain)")
|
||||
sh("infisical secrets --env=dev --path=/ --projectId=<your-project-id>")
|
||||
## Pipeline usage
|
||||
|
||||
// doesn't work
|
||||
// sh("docker run --rm test-container infisical secrets --projectId=<your-project-id>")
|
||||
|
||||
// works
|
||||
// sh("docker run -e INFISICAL_TOKEN=${INFISICAL_TOKEN} --rm test-container infisical secrets --env=dev --path=/ --projectId=<your-project-id>")
|
||||
### Generating pipeline block
|
||||
|
||||
// doesn't work
|
||||
// sh("docker-compose up -d")
|
||||
Using the Infisical Plugin in a Jenkins pipeline is very straightforward. To generate a block to use the Infisical Plugin in a Pipeline, simply go to `{JENKINS_URL}/jenkins/job/{JOB_ID}/pipeline-syntax/`.
|
||||
|
||||
You can find a direct link at the very bottom of the Pipeline configuration page; see the image below.
|
||||
|
||||

|
||||
|
||||
On the Snippet Generator page, simply configure the Infisical Plugin as documented in the [Configuration documentation](#configuration) step.
|
||||
|
||||
Once you have filled out the configuration, press `Generate Pipeline Script`, and it will generate a block you can use in your pipeline.
|
||||
|
||||

|
||||
|
||||
### Using Infisical in a Pipeline
|
||||
|
||||
Using the generated block in a pipeline is very straightforward. There are a few approaches to implementing the block in a Pipeline script.
|
||||
Here's an example of using the generated block in a pipeline script. Make sure to replace the placeholder values with your own values.
|
||||
|
||||
The script is formatted for clarity. All these fields will be pre-filled for you if you use the `Snippet Generator` as described in the [step above](#generating-pipeline-block).
|
||||
```groovy
|
||||
node {
|
||||
withInfisical(
|
||||
configuration: [
|
||||
infisicalCredentialId: 'YOUR_CREDENTIAL_ID',
|
||||
infisicalEnvironmentSlug: 'PROJECT_ENV_SLUG',
|
||||
infisicalProjectSlug: 'PROJECT_SLUG',
|
||||
infisicalUrl: 'https://app.infisical.com' // Change this to your Infisical instance URL if you aren't using Infisical Cloud.
|
||||
],
|
||||
infisicalSecrets: [
|
||||
infisicalSecret(
|
||||
includeImports: true,
|
||||
path: '/',
|
||||
secretValues: [
|
||||
[infisicalKey: 'DATABASE_URL'],
|
||||
[infisicalKey: "API_URL"],
|
||||
[infisicalKey: 'THIS_KEY_MIGHT_NOT_EXIST', isRequired: false],
|
||||
]
|
||||
)
|
||||
]
|
||||
) {
|
||||
// Code runs here
|
||||
sh "printenv"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
// works
|
||||
// sh("INFISICAL_TOKEN=${INFISICAL_TOKEN} docker-compose up -d")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
</Tab>
|
||||
|
||||
<Tab title="Service Token (Deprecated)">
|
||||
<Tab title="Using CLI with Service Tokens (Deprecated)">
|
||||
## Add Infisical Service Token to Jenkins
|
||||
|
||||
<Warning>
|
||||
<Warning>
|
||||
Service tokens are being deprecated in favor of [machine identities](/documentation/platform/identities/machine-identities).
|
||||
They will be removed in the future in accordance with the deprecation notice and timeline stated [here](https://infisical.com/blog/deprecating-api-keys).
|
||||
|
||||
Service tokens are being deprecated in favor of [machine identities](/documentation/platform/identities/machine-identities).
|
||||
|
||||
They will be removed in the future in accordance with the deprecation notice and timeline stated [here](https://infisical.com/blog/deprecating-api-keys).
|
||||
|
||||
</Warning>
|
||||
**Please use our Jenkins Plugin instead!**
|
||||
</Warning>
|
||||
|
||||
After setting up your project in Infisical and installing the Infisical CLI to the environment where your Jenkins builds will run, you will need to add the Infisical Service Token to Jenkins.
|
||||
|
||||
|
@ -29,7 +29,9 @@ Prerequisites:
|
||||
"secretsmanager:GetSecretValue",
|
||||
"secretsmanager:CreateSecret",
|
||||
"secretsmanager:UpdateSecret",
|
||||
"secretsmanager:DescribeSecret", // if you need to add tags to secrets
|
||||
"secretsmanager:TagResource", // if you need to add tags to secrets
|
||||
"secretsmanager:UntagResource", // if you need to add tags to secrets
|
||||
"kms:ListKeys", // if you need to specify the KMS key
|
||||
"kms:ListAliases", // if you need to specify the KMS key
|
||||
"kms:Encrypt", // if you need to specify the KMS key
|
||||
|
@ -32,7 +32,10 @@
|
||||
"thumbsRating": true
|
||||
},
|
||||
"api": {
|
||||
"baseUrl": ["https://app.infisical.com", "http://localhost:8080"]
|
||||
"baseUrl": [
|
||||
"https://app.infisical.com",
|
||||
"http://localhost:8080"
|
||||
]
|
||||
},
|
||||
"topbarLinks": [
|
||||
{
|
||||
@ -73,7 +76,9 @@
|
||||
"documentation/getting-started/introduction",
|
||||
{
|
||||
"group": "Quickstart",
|
||||
"pages": ["documentation/guides/local-development"]
|
||||
"pages": [
|
||||
"documentation/guides/local-development"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Guides",
|
||||
@ -153,6 +158,7 @@
|
||||
"documentation/platform/auth-methods/email-password",
|
||||
"documentation/platform/token",
|
||||
"documentation/platform/identities/universal-auth",
|
||||
"documentation/platform/identities/kubernetes-auth",
|
||||
"documentation/platform/identities/gcp-auth",
|
||||
"documentation/platform/identities/aws-auth",
|
||||
"documentation/platform/mfa",
|
||||
@ -213,7 +219,9 @@
|
||||
},
|
||||
{
|
||||
"group": "Reference architectures",
|
||||
"pages": ["self-hosting/reference-architectures/aws-ecs"]
|
||||
"pages": [
|
||||
"self-hosting/reference-architectures/aws-ecs"
|
||||
]
|
||||
},
|
||||
"self-hosting/ee",
|
||||
"self-hosting/faq"
|
||||
@ -369,11 +377,15 @@
|
||||
},
|
||||
{
|
||||
"group": "Build Tool Integrations",
|
||||
"pages": ["integrations/build-tools/gradle"]
|
||||
"pages": [
|
||||
"integrations/build-tools/gradle"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "",
|
||||
"pages": ["sdks/overview"]
|
||||
"pages": [
|
||||
"sdks/overview"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "SDK's",
|
||||
@ -391,7 +403,9 @@
|
||||
"api-reference/overview/authentication",
|
||||
{
|
||||
"group": "Examples",
|
||||
"pages": ["api-reference/overview/examples/integration"]
|
||||
"pages": [
|
||||
"api-reference/overview/examples/integration"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
@ -416,7 +430,8 @@
|
||||
"api-reference/endpoints/universal-auth/create-client-secret",
|
||||
"api-reference/endpoints/universal-auth/list-client-secrets",
|
||||
"api-reference/endpoints/universal-auth/revoke-client-secret",
|
||||
"api-reference/endpoints/universal-auth/renew-access-token"
|
||||
"api-reference/endpoints/universal-auth/renew-access-token",
|
||||
"api-reference/endpoints/universal-auth/revoke-access-token"
|
||||
]
|
||||
},
|
||||
{
|
||||
@ -436,17 +451,30 @@
|
||||
"api-reference/endpoints/workspaces/delete-workspace",
|
||||
"api-reference/endpoints/workspaces/get-workspace",
|
||||
"api-reference/endpoints/workspaces/update-workspace",
|
||||
"api-reference/endpoints/workspaces/invite-member-to-workspace",
|
||||
"api-reference/endpoints/workspaces/remove-member-from-workspace",
|
||||
"api-reference/endpoints/workspaces/memberships",
|
||||
"api-reference/endpoints/workspaces/update-membership",
|
||||
"api-reference/endpoints/workspaces/list-identity-memberships",
|
||||
"api-reference/endpoints/workspaces/update-identity-membership",
|
||||
"api-reference/endpoints/workspaces/delete-identity-membership",
|
||||
"api-reference/endpoints/workspaces/secret-snapshots",
|
||||
"api-reference/endpoints/workspaces/rollback-snapshot"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Project Users",
|
||||
"pages": [
|
||||
"api-reference/endpoints/project-users/invite-member-to-workspace",
|
||||
"api-reference/endpoints/project-users/remove-member-from-workspace",
|
||||
"api-reference/endpoints/project-users/memberships",
|
||||
"api-reference/endpoints/project-users/get-by-username",
|
||||
"api-reference/endpoints/project-users/update-membership"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Project Identities",
|
||||
"pages": [
|
||||
"api-reference/endpoints/project-identities/add-identity-membership",
|
||||
"api-reference/endpoints/project-identities/list-identity-memberships",
|
||||
"api-reference/endpoints/project-identities/get-by-id",
|
||||
"api-reference/endpoints/project-identities/update-identity-membership",
|
||||
"api-reference/endpoints/project-identities/delete-identity-membership"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Environments",
|
||||
"pages": [
|
||||
@ -523,11 +551,15 @@
|
||||
},
|
||||
{
|
||||
"group": "Service Tokens",
|
||||
"pages": ["api-reference/endpoints/service-tokens/get"]
|
||||
"pages": [
|
||||
"api-reference/endpoints/service-tokens/get"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Audit Logs",
|
||||
"pages": ["api-reference/endpoints/audit-logs/export-audit-log"]
|
||||
"pages": [
|
||||
"api-reference/endpoints/audit-logs/export-audit-log"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
@ -543,7 +575,9 @@
|
||||
},
|
||||
{
|
||||
"group": "",
|
||||
"pages": ["changelog/overview"]
|
||||
"pages": [
|
||||
"changelog/overview"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Contributing",
|
||||
@ -567,7 +601,9 @@
|
||||
},
|
||||
{
|
||||
"group": "Contributing to SDK",
|
||||
"pages": ["contributing/sdk/developing"]
|
||||
"pages": [
|
||||
"contributing/sdk/developing"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
@ -21,21 +21,28 @@ namespace Example
static void Main(string[] args)
{

var settings = new ClientSettings
ClientSettings settings = new ClientSettings
{
Auth = new AuthenticationOptions
{
ClientId = "CLIENT_ID",
ClientSecret = "CLIENT_SECRET",
// SiteUrl = "http://localhost:8080", <-- This line can be omitted if you're using Infisical Cloud.
};
var infisical = new InfisicalClient(settings);
UniversalAuth = new UniversalAuthMethod
{
ClientId = "your-client-id",
ClientSecret = "your-client-secret"
}
}
};

var options = new GetSecretOptions

var infisicalClient = new InfisicalClient(settings);

var getSecretOptions = new GetSecretOptions
{
SecretName = "TEST",
ProjectId = "PROJECT_ID",
Environment = "dev",
};
var secret = infisical.GetSecret(options);
var secret = infisical.GetSecret(getSecretOptions);

Console.WriteLine($"The value of secret '{secret.SecretKey}', is: {secret.SecretValue}");
@ -52,8 +59,6 @@ This example demonstrates how to use the Infisical C# SDK in a C# application. T

# Installation

Run the following command to add the `Infisical.Sdk` package to your project.

```console
$ dotnet add package Infisical.Sdk
```
@ -70,14 +75,20 @@ namespace Example
{
static void Main(string[] args)
{

var settings = new ClientSettings
ClientSettings settings = new ClientSettings
{
Auth = new AuthenticationOptions
{
ClientId = "CLIENT_ID",
ClientSecret = "CLIENT_SECRET",
};
UniversalAuth = new UniversalAuthMethod
{
ClientId = "your-client-id",
ClientSecret = "your-client-secret"
}
}
};

var infisical = new InfisicalClient(settings); // <-- Your SDK instance!

var infisicalClient = new InfisicalClient(settings); // <-- Your SDK client is now ready to use
}
}
}
@ -87,14 +98,14 @@ namespace Example

<ParamField query="options" type="object">
<Expandable title="properties">
<ParamField query="ClientId" type="string" optional>
<ParamField query="ClientId" deprecated type="string" optional>
Your machine identity client ID.
</ParamField>
<ParamField query="ClientSecret" type="string" optional>
<ParamField query="ClientSecret" deprecated type="string" optional>
Your machine identity client secret.
</ParamField>

<ParamField query="AccessToken" type="string" optional>
<ParamField query="AccessToken" deprecated type="string" optional>
An access token obtained from the machine identity login endpoint.
</ParamField>

@ -103,13 +114,120 @@ namespace Example
If manually set to 0, caching will be disabled; this is not recommended.
</ParamField>

<ParamField query="SiteUrl()" type="string" default="https://app.infisical.com" optional>
<ParamField query="SiteUrl" type="string" default="https://app.infisical.com" optional>
Your self-hosted absolute site URL including the protocol (e.g. `https://app.infisical.com`)
</ParamField>

<ParamField query="Auth" type="AuthenticationOptions">
The authentication object to use for the client. This is required unless you're using environment variables.
</ParamField>
</Expandable>

</ParamField>

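For a self-hosted instance, the `SiteUrl` setting above is simply passed alongside an `Auth` block when the client is constructed. A minimal sketch, assuming a hypothetical self-hosted URL and placeholder credentials:

```csharp
using Infisical.Sdk;

// Point the client at a self-hosted Infisical instance (hypothetical URL)
// and authenticate with Universal Auth, per the settings documented above.
var settings = new ClientSettings
{
    SiteUrl = "https://app.your-domain.com",
    Auth = new AuthenticationOptions
    {
        UniversalAuth = new UniversalAuthMethod
        {
            ClientId = "your-client-id",
            ClientSecret = "your-client-secret"
        }
    }
};

var infisicalClient = new InfisicalClient(settings);
```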
### Authentication

The SDK supports a variety of authentication methods. The most common authentication method is Universal Auth, which uses a client ID and client secret to authenticate.

#### Universal Auth

**Using environment variables** (see the sketch below)
- `INFISICAL_UNIVERSAL_AUTH_CLIENT_ID` - Your machine identity client ID.
- `INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET` - Your machine identity client secret.

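When both variables are exported in the environment, the `Auth` block can be left out entirely; this follows from the `Auth` field above being required only when environment variables are not used. A minimal sketch of that path:

```csharp
using Infisical.Sdk;

// INFISICAL_UNIVERSAL_AUTH_CLIENT_ID and INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET
// are assumed to already be set in the process environment, so no Auth block is passed.
var settings = new ClientSettings();

var infisicalClient = new InfisicalClient(settings);
```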
**Using the SDK directly**
```csharp
ClientSettings settings = new ClientSettings
{
Auth = new AuthenticationOptions
{
UniversalAuth = new UniversalAuthMethod
{
ClientId = "your-client-id",
ClientSecret = "your-client-secret"
}
}
};

var infisicalClient = new InfisicalClient(settings);
```

#### GCP ID Token Auth
<Info>
Please note that this authentication method will only work if you're running your application on Google Cloud Platform.
Please [read more](/documentation/platform/identities/gcp-auth) about this authentication method.
</Info>

**Using environment variables**
- `INFISICAL_GCP_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID.

**Using the SDK directly**
```csharp
ClientSettings settings = new ClientSettings
{
Auth = new AuthenticationOptions
{
GcpIdToken = new GcpIdTokenAuthMethod
{
IdentityId = "your-machine-identity-id",
}
}
};

var infisicalClient = new InfisicalClient(settings);
```

#### GCP IAM Auth

**Using environment variables**
- `INFISICAL_GCP_IAM_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID.
- `INFISICAL_GCP_IAM_SERVICE_ACCOUNT_KEY_FILE_PATH` - The path to your GCP service account key file.

**Using the SDK directly**
```csharp
ClientSettings settings = new ClientSettings
{
Auth = new AuthenticationOptions
{
GcpIam = new GcpIamAuthMethod
{
IdentityId = "your-machine-identity-id",
ServiceAccountKeyFilePath = "./path/to/your/service-account-key.json"
}
}
};

var infisicalClient = new InfisicalClient(settings);
```

#### AWS IAM Auth
<Info>
Please note that this authentication method will only work if you're running your application on AWS.
Please [read more](/documentation/platform/identities/aws-auth) about this authentication method.
</Info>

**Using environment variables**
- `INFISICAL_AWS_IAM_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID.

**Using the SDK directly**
```csharp
ClientSettings settings = new ClientSettings
{
Auth = new AuthenticationOptions
{
AwsIam = new AwsIamAuthMethod
{
IdentityId = "your-machine-identity-id",
}
}
};

var infisicalClient = new InfisicalClient(settings);
```

### Caching

To reduce the number of API requests, the SDK temporarily stores secrets it retrieves. By default, a secret remains cached for 5 minutes after it's first fetched. Each time it's fetched again, this 5-minute timer resets. You can adjust this caching duration by setting the "cacheTTL" option when creating the client.
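As a rough sketch of what that looks like, assuming the C# client exposes the cache TTL as a `CacheTtl` property on `ClientSettings` (the exact property name should be verified against the installed SDK version):

```csharp
var settings = new ClientSettings
{
    CacheTtl = 60, // cache fetched secrets for 60 seconds instead of the default 300; 0 disables caching (not recommended)
    Auth = new AuthenticationOptions
    {
        UniversalAuth = new UniversalAuthMethod
        {
            ClientId = "your-client-id",
            ClientSecret = "your-client-secret"
        }
    }
};

var infisicalClient = new InfisicalClient(settings);
```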
@ -155,6 +273,14 @@ Retrieve all secrets within the Infisical project and environment that client is
<ParamField query="IncludeImports" type="boolean" default="false" optional>
Whether or not to include imported secrets from the current path. Read about [secret import](/documentation/platform/secret-reference)
</ParamField>

<ParamField query="Recursive" type="boolean" default="false" optional>
Whether or not to fetch secrets recursively from the specified path. Please note that there's a 20-depth limit for recursive fetching.
</ParamField>

<ParamField query="ExpandSecretReferences" type="boolean" default="true" optional>
Whether or not to expand secret references in the fetched secrets. Read about [secret reference](/documentation/platform/secret-reference)
</ParamField>
</Expandable>

</ParamField>
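A short sketch of how these options might be passed when listing secrets with the C# client; the `ListSecretsOptions` type and `ListSecrets` method names are assumptions here, modeled on the parameters documented above and on the Java `listSecrets` example further down:

```csharp
// Hypothetical option/method names; verify against the installed SDK version.
var listSecretsOptions = new ListSecretsOptions
{
    ProjectId = "PROJECT_ID",
    Environment = "dev",
    Path = "/foo/bar",
    IncludeImports = false,
    Recursive = false,
    ExpandSecretReferences = true
};

var secrets = infisicalClient.ListSecrets(listSecretsOptions);
```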
@ -19,12 +19,19 @@ import com.infisical.sdk.schema.*;

public class Example {
public static void main(String[] args) {
// Create a new Infisical Client

// Create the authentication settings for the client
ClientSettings settings = new ClientSettings();
settings.setClientID("MACHINE_IDENTITY_CLIENT_ID");
settings.setClientSecret("MACHINE_IDENTITY_CLIENT_SECRET");
settings.setCacheTTL(Long.valueOf(300)); // 300 seconds, 5 minutes
AuthenticationOptions authOptions = new AuthenticationOptions();
UniversalAuthMethod authMethod = new UniversalAuthMethod();

authMethod.setClientID("YOUR_IDENTITY_ID");
authMethod.setClientSecret("YOUR_CLIENT_SECRET");

authOptions.setUniversalAuth(authMethod);
settings.setAuth(authOptions);

// Create a new Infisical Client
InfisicalClient client = new InfisicalClient(settings);

// Create the options for fetching the secret
@ -68,11 +75,18 @@ import com.infisical.sdk.schema.*;

public class App {
public static void main(String[] args) {

// Create the authentication settings for the client
ClientSettings settings = new ClientSettings();
settings.setClientID("MACHINE_IDENTITY_CLIENT_ID");
settings.setClientSecret("MACHINE_IDENTITY_CLIENT_SECRET");
AuthenticationOptions authOptions = new AuthenticationOptions();
UniversalAuthMethod authMethod = new UniversalAuthMethod();

authMethod.setClientID("YOUR_IDENTITY_ID");
authMethod.setClientSecret("YOUR_CLIENT_SECRET");

authOptions.setUniversalAuth(authMethod);
settings.setAuth(authOptions);

// Create a new Infisical Client
InfisicalClient client = new InfisicalClient(settings); // Your client!
}
}
@ -82,15 +96,21 @@ public class App {

<ParamField query="options" type="object">
<Expandable title="properties">
<ParamField query="setClientID()" type="string" optional>
<ParamField query="setClientID()" type="string" deprecated optional>
Your machine identity client ID.

**This field is deprecated and will be removed in future versions.** Please use the `setAuth()` method on the client settings instead.
</ParamField>
<ParamField query="setClientSecret()" type="string" optional>
<ParamField query="setClientSecret()" deprecated type="string" optional>
Your machine identity client secret.

**This field is deprecated and will be removed in future versions.** Please use the `setAuth()` method on the client settings instead.
</ParamField>

<ParamField query="setAccessToken()" type="string" optional>
<ParamField query="setAccessToken()" deprecatedtype="string" optional>
|
||||
An access token obtained from the machine identity login endpoint.

**This field is deprecated and will be removed in future versions.** Please use the `setAuth()` method on the client settings instead.
</ParamField>

<ParamField query="setCacheTTL()" type="number" default="300" optional>
@ -101,10 +121,106 @@ public class App {
<ParamField query="setSiteURL()" type="string" default="https://app.infisical.com" optional>
Your self-hosted absolute site URL including the protocol (e.g. `https://app.infisical.com`)
</ParamField>

<ParamField query="setAuth()" type="AuthenticationOptions">
The authentication object to use for the client. This is required unless you're using environment variables.
</ParamField>
</Expandable>

</ParamField>

### Authentication

The SDK supports a variety of authentication methods. The most common authentication method is Universal Auth, which uses a client ID and client secret to authenticate.

#### Universal Auth

**Using environment variables**
- `INFISICAL_UNIVERSAL_AUTH_CLIENT_ID` - Your machine identity client ID.
- `INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET` - Your machine identity client secret.

**Using the SDK directly**
```java
ClientSettings settings = new ClientSettings();
AuthenticationOptions authOptions = new AuthenticationOptions();
UniversalAuthMethod authMethod = new UniversalAuthMethod();

authMethod.setClientID("YOUR_IDENTITY_ID");
authMethod.setClientSecret("YOUR_CLIENT_SECRET");

authOptions.setUniversalAuth(authMethod);
settings.setAuth(authOptions);

InfisicalClient client = new InfisicalClient(settings);
```

#### GCP ID Token Auth
<Info>
Please note that this authentication method will only work if you're running your application on Google Cloud Platform.
Please [read more](/documentation/platform/identities/gcp-auth) about this authentication method.
</Info>

**Using environment variables**
- `INFISICAL_GCP_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID.

**Using the SDK directly**
```java
ClientSettings settings = new ClientSettings();
AuthenticationOptions authOptions = new AuthenticationOptions();
GCPIDTokenAuthMethod authMethod = new GCPIDTokenAuthMethod();

authMethod.setIdentityID("YOUR_MACHINE_IDENTITY_ID");

authOptions.setGcpIDToken(authMethod);
settings.setAuth(authOptions);

InfisicalClient client = new InfisicalClient(settings);
```

#### GCP IAM Auth

**Using environment variables**
- `INFISICAL_GCP_IAM_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID.
- `INFISICAL_GCP_IAM_SERVICE_ACCOUNT_KEY_FILE_PATH` - The path to your GCP service account key file.

**Using the SDK directly**
```java
ClientSettings settings = new ClientSettings();
AuthenticationOptions authOptions = new AuthenticationOptions();
GCPIamAuthMethod authMethod = new GCPIamAuthMethod();

authMethod.setIdentityID("YOUR_MACHINE_IDENTITY_ID");
authMethod.setServiceAccountKeyFilePath("./path/to/your/service-account-key.json");

authOptions.setGcpIam(authMethod);
settings.setAuth(authOptions);

InfisicalClient client = new InfisicalClient(settings);
```

#### AWS IAM Auth
<Info>
Please note that this authentication method will only work if you're running your application on AWS.
Please [read more](/documentation/platform/identities/aws-auth) about this authentication method.
</Info>

**Using environment variables**
- `INFISICAL_AWS_IAM_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID.

**Using the SDK directly**
```java
ClientSettings settings = new ClientSettings();
AuthenticationOptions authOptions = new AuthenticationOptions();
AWSIamAuthMethod authMethod = new AWSIamAuthMethod();

authMethod.setIdentityID("YOUR_MACHINE_IDENTITY_ID");

authOptions.setAwsIam(authMethod);
settings.setAuth(authOptions);

InfisicalClient client = new InfisicalClient(settings);
```

### Caching

To reduce the number of API requests, the SDK temporarily stores secrets it retrieves. By default, a secret remains cached for 5 minutes after it's first fetched. Each time it's fetched again, this 5-minute timer resets. You can adjust this caching duration by setting the "cacheTTL" option when creating the client.
@ -119,6 +235,8 @@ options.setEnvironment("dev");
options.setProjectID("PROJECT_ID");
options.setPath("/foo/bar");
options.setIncludeImports(false);
options.setRecursive(false);
options.setExpandSecretReferences(true);

SecretElement[] secrets = client.listSecrets(options);
```
@ -148,6 +266,14 @@ Retrieve all secrets within the Infisical project and environment that client is
<ParamField query="setIncludeImports()" type="boolean" default="false" optional>
Whether or not to include imported secrets from the current path. Read about [secret import](/documentation/platform/secret-reference)
</ParamField>

<ParamField query="setRecursive()" type="boolean" default="false" optional>
Whether or not to fetch secrets recursively from the specified path. Please note that there's a 20-depth limit for recursive fetching.
</ParamField>

<ParamField query="setExpandSecretReferences()" type="boolean" default="true" optional>
Whether or not to expand secret references in the fetched secrets. Read about [secret reference](/documentation/platform/secret-reference)
</ParamField>
</Expandable>

</ParamField>