Compare commits
202 Commits
identity-r
...
daniel/azu
SHA1 | Author | Date | |
---|---|---|---|
7ed8feee6f | |||
9a861499df | |||
d1f3c98f21 | |||
eac621db73 | |||
ab7983973e | |||
ff43773f37 | |||
68574be05b | |||
1d9966af76 | |||
4dddf764bd | |||
8b06215366 | |||
179edd98bf | |||
dc05b34fb1 | |||
20f6dbfbd1 | |||
8ff524a037 | |||
3913e2f462 | |||
ebbccdb857 | |||
28723e9a4e | |||
079e005f49 | |||
df90e4e6f0 | |||
6e9a624697 | |||
94b0cb4697 | |||
5a5226c82f | |||
09cfaec175 | |||
40abc184f2 | |||
3879edfab7 | |||
d20ae39f32 | |||
53c875424e | |||
05bf2e4696 | |||
a06dee66f8 | |||
0eab9233bb | |||
9bf358a57d | |||
93926cc6b7 | |||
59ccabec69 | |||
8b0678cfa1 | |||
3004de459f | |||
7d4e531e5f | |||
f66ef8b066 | |||
a116233979 | |||
454c0b62b9 | |||
2c6decaf6e | |||
d0f0dca3a3 | |||
9efbffe5d2 | |||
c1b242db67 | |||
845f71e8ed | |||
653fc367ac | |||
9f0867559a | |||
a37987b508 | |||
96e485910c | |||
eeb2e89d1a | |||
f3a8fda254 | |||
ccf0c3cd35 | |||
6e15979672 | |||
4e724d15f6 | |||
5eba61b647 | |||
98ef1614c6 | |||
f591f6d428 | |||
795b533fce | |||
35be8e1912 | |||
da70f23bf6 | |||
3ba90cc42d | |||
131ec81744 | |||
c84262b182 | |||
1ee9994df6 | |||
a3356b4bad | |||
f95092e083 | |||
982c51bdc7 | |||
9e7ec88d57 | |||
ce304b26d8 | |||
8deff5adfb | |||
1f8b3b6779 | |||
a87bc66b05 | |||
de57e1af35 | |||
09d8822816 | |||
13aaef4212 | |||
5e9193adda | |||
ec3e886624 | |||
36d30566fe | |||
dfbeac3dfe | |||
87e52ddd06 | |||
a62fbf088f | |||
f186cb4d7b | |||
2ee123c9f6 | |||
18b6c4f73e | |||
50409f0c48 | |||
54e5166bb6 | |||
b9b880d310 | |||
085d1d5a5e | |||
b02c37028b | |||
49248ee13f | |||
bafc6ee129 | |||
eb6dca425c | |||
99c1259f15 | |||
b4770116a8 | |||
eb90f503a9 | |||
e419983249 | |||
b030fe2e69 | |||
eff0604e9d | |||
e90f3af4ce | |||
baf2763287 | |||
d708a3f566 | |||
5b52c33f5f | |||
a116fc2bf3 | |||
39d09eea3d | |||
f7d071e398 | |||
0d4dd5a6fa | |||
b4de012047 | |||
b3720cdbfc | |||
0dc85dff33 | |||
a6e4e3c69a | |||
be9de82ef5 | |||
2566f4dc9e | |||
934bfbb624 | |||
509037e6d0 | |||
f041aa7557 | |||
266e2856e8 | |||
7109d2f785 | |||
2134d2e118 | |||
c2abc383d5 | |||
3a2336da44 | |||
1266949fb1 | |||
62d287f8a6 | |||
0b4e7f0096 | |||
7dda2937ba | |||
91d81bd20c | |||
f329a79771 | |||
31a31f556c | |||
1be2f806d9 | |||
38a6785ca4 | |||
377eb4cfd3 | |||
8df7401e06 | |||
0c79303582 | |||
e6edde57ba | |||
6634675b2a | |||
50840ce26b | |||
4c2f7fff5c | |||
f0a3792a64 | |||
70da6878c1 | |||
754404d905 | |||
85cfac512c | |||
d40b907308 | |||
a5b18cbb72 | |||
d4a2f4590b | |||
7add57ae78 | |||
e5879df7c7 | |||
04298bb1a7 | |||
1a6a5280a0 | |||
da0d8fdbfc | |||
d2759ea378 | |||
c4385af352 | |||
bbe2d2e053 | |||
2c9fdb7fad | |||
38eee5490e | |||
0aa7337ff4 | |||
98371f99e7 | |||
ddfc645cdd | |||
8bc6edd165 | |||
f4d9c61404 | |||
5342c85696 | |||
2497aada8a | |||
b05f3e0f1f | |||
9a2645b511 | |||
cb664bb042 | |||
5921f349a8 | |||
07db1d826b | |||
74db1b75b4 | |||
4927cc804a | |||
2153dd94eb | |||
d7023881e5 | |||
ef3cdd11ac | |||
612cf4f968 | |||
b6a9dc7f53 | |||
08322f46f9 | |||
fc9326272a | |||
b74595cf35 | |||
a45453629c | |||
f7626d03bf | |||
bc14153bb3 | |||
8915b4055b | |||
c90e423e4a | |||
935a3cb036 | |||
148a29db19 | |||
b12de3e4f5 | |||
9e9b9a7b94 | |||
776822d7d5 | |||
fe9af20d8c | |||
398a8f363d | |||
ce5dbca6e2 | |||
ed5a7d72ab | |||
0b4d4c008a | |||
ae953add3d | |||
5a1e43be44 | |||
04f54479cd | |||
59fc34412d | |||
d6881e2e68 | |||
92a663a17d | |||
b3463e0d0f | |||
c460f22665 | |||
db39d03713 | |||
9daa5badec | |||
e1ed37c713 | |||
98a15a901e | |||
3f6b84de3b |
@@ -67,3 +67,6 @@ CLIENT_SECRET_GITLAB_LOGIN=
```
CAPTCHA_SECRET=

NEXT_PUBLIC_CAPTCHA_SITE_KEY=

PLAIN_API_KEY=
PLAIN_WISH_LABEL_IDS=
```
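The hunk above adds CAPTCHA keys and Plain API keys to the example environment file. Purely as a hypothetical sketch (Infisical's backend validates env vars through its own zod-based config; the field names below simply mirror the keys added above), startup validation could look like:

```ts
import { z } from "zod";

// Hypothetical sketch only — not the repository's actual env schema.
const captchaAndPlainEnv = z.object({
  CAPTCHA_SECRET: z.string().optional(),
  NEXT_PUBLIC_CAPTCHA_SITE_KEY: z.string().optional(),
  PLAIN_API_KEY: z.string().optional(),
  PLAIN_WISH_LABEL_IDS: z.string().optional()
});

// e.g. const cfg = captchaAndPlainEnv.parse(process.env);
```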
99  .github/workflows/build-binaries.yml  vendored  Normal file
@@ -0,0 +1,99 @@
```yaml
name: Build Binaries and Deploy

on:
  workflow_dispatch:
    inputs:
      version:
        description: "Version number"
        required: true
        type: string

defaults:
  run:
    working-directory: ./backend

jobs:
  build-and-deploy:
    runs-on: ubuntu-20.04
    strategy:
      matrix:
        arch: [x64, arm64]
        os: [linux, win]
        include:
          - os: linux
            target: node20-linux
          - os: win
            target: node20-win

    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Set up Node.js
        uses: actions/setup-node@v3
        with:
          node-version: 20

      - name: Install pkg
        run: npm install -g @yao-pkg/pkg

      - name: Install dependencies (backend)
        run: npm install

      - name: Install dependencies (frontend)
        run: npm install --prefix ../frontend

      - name: Prerequisites for pkg
        run: npm run binary:build

      - name: Package into node binary
        run: |
          if [ "${{ matrix.os }}" != "linux" ]; then
            pkg --no-bytecode --public-packages "*" --public --compress Brotli --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }} .
          else
            pkg --no-bytecode --public-packages "*" --public --compress Brotli --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core .
          fi

      # Set up .deb package structure (Debian/Ubuntu only)
      - name: Set up .deb package structure
        if: matrix.os == 'linux'
        run: |
          mkdir -p infisical-core/DEBIAN
          mkdir -p infisical-core/usr/local/bin
          cp ./binary/infisical-core infisical-core/usr/local/bin/
          chmod +x infisical-core/usr/local/bin/infisical-core

      - name: Create control file
        if: matrix.os == 'linux'
        run: |
          cat <<EOF > infisical-core/DEBIAN/control
          Package: infisical-core
          Version: ${{ github.event.inputs.version }}
          Section: base
          Priority: optional
          Architecture: ${{ matrix.arch == 'x64' && 'amd64' || matrix.arch }}
          Maintainer: Infisical <daniel@infisical.com>
          Description: Infisical Core standalone executable (app.infisical.com)
          EOF

      # Build .deb file (Debian/Ubuntu only)
      - name: Build .deb package
        if: matrix.os == 'linux'
        run: |
          dpkg-deb --build infisical-core
          mv infisical-core.deb ./binary/infisical-core-${{matrix.arch}}.deb

      - uses: actions/setup-python@v4
      - run: pip install --upgrade cloudsmith-cli

      # Publish .deb file to Cloudsmith (Debian/Ubuntu only)
      - name: Publish to Cloudsmith (Debian/Ubuntu)
        if: matrix.os == 'linux'
        working-directory: ./backend
        run: cloudsmith push deb --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.deb

      # Publish .exe file to Cloudsmith (Windows only)
      - name: Publish to Cloudsmith (Windows)
        if: matrix.os == 'win'
        working-directory: ./backend
        run: cloudsmith push raw infisical/infisical-core ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }}.exe --republish --no-wait-for-sync --version ${{ github.event.inputs.version }} --api-key ${{ secrets.CLOUDSMITH_API_KEY }}
```
@ -1,57 +0,0 @@
|
||||
name: Rename Migrations
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [closed]
|
||||
paths:
|
||||
- 'backend/src/db/migrations/**'
|
||||
|
||||
jobs:
|
||||
rename:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.pull_request.merged == true
|
||||
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Get list of newly added files in migration folder
|
||||
run: |
|
||||
git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^A' || true | cut -f2 | xargs -r -n1 basename > added_files.txt
|
||||
if [ ! -s added_files.txt ]; then
|
||||
echo "No new files added. Skipping"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
- name: Script to rename migrations
|
||||
run: python .github/resources/rename_migration_files.py
|
||||
|
||||
- name: Commit and push changes
|
||||
run: |
|
||||
git config user.name github-actions
|
||||
git config user.email github-actions@github.com
|
||||
git add ./backend/src/db/migrations
|
||||
rm added_files.txt
|
||||
git commit -m "chore: renamed new migration files to latest timestamp (gh-action)"
|
||||
|
||||
- name: Get PR details
|
||||
id: pr_details
|
||||
run: |
|
||||
PR_NUMBER=${{ github.event.pull_request.number }}
|
||||
PR_MERGER=$(curl -s "https://api.github.com/repos/${{ github.repository }}/pulls/$PR_NUMBER" | jq -r '.merged_by.login')
|
||||
|
||||
echo "PR Number: $PR_NUMBER"
|
||||
echo "PR Merger: $PR_MERGER"
|
||||
echo "pr_merger=$PR_MERGER" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Create Pull Request
|
||||
if: env.SKIP_RENAME != 'true'
|
||||
uses: peter-evans/create-pull-request@v6
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
commit-message: 'chore: renamed new migration files to latest UTC (gh-action)'
|
||||
title: 'GH Action: rename new migration file timestamp'
|
||||
branch-suffix: timestamp
|
||||
reviewers: ${{ steps.pr_details.outputs.pr_merger }}
|
1  .gitignore  vendored
@@ -69,3 +69,4 @@ frontend-build
```
*.tgz
cli/infisical-merge
cli/test/infisical-merge
/backend/binary
```
4  backend/babel.config.json  Normal file
@@ -0,0 +1,4 @@
```json
{
  "presets": ["@babel/preset-env", "@babel/preset-react"],
  "plugins": ["@babel/plugin-syntax-import-attributes", "babel-plugin-transform-import-meta"]
}
```
4865
backend/package-lock.json
generated
@ -3,11 +3,39 @@
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "./dist/main.mjs",
|
||||
"bin": "dist/main.js",
|
||||
"pkg": {
|
||||
"scripts": [
|
||||
"dist/**/*.js",
|
||||
"../frontend/node_modules/next/**/*.js",
|
||||
"../frontend/.next/*/**/*.js",
|
||||
"../frontend/node_modules/next/dist/server/**/*.js",
|
||||
"../frontend/node_modules/@fortawesome/fontawesome-svg-core/**/*.js"
|
||||
],
|
||||
"assets": [
|
||||
"dist/**",
|
||||
"!dist/**/*.js",
|
||||
"node_modules/**",
|
||||
"../frontend/node_modules/**",
|
||||
"../frontend/.next/**",
|
||||
"!../frontend/node_modules/next/dist/server/**/*.js",
|
||||
"../frontend/node_modules/@fortawesome/fontawesome-svg-core/**/*",
|
||||
"../frontend/public/**"
|
||||
],
|
||||
"outputPath": "binary"
|
||||
},
|
||||
"scripts": {
|
||||
"binary:build": "npm run binary:clean && npm run build:frontend && npm run build && npm run binary:babel-frontend && npm run binary:babel-backend && npm run binary:rename-imports",
|
||||
"binary:package": "pkg --no-bytecode --public-packages \"*\" --public --target host .",
|
||||
"binary:babel-backend": " babel ./dist -d ./dist",
|
||||
"binary:babel-frontend": "babel --copy-files ../frontend/.next/server -d ../frontend/.next/server",
|
||||
"binary:clean": "rm -rf ./dist && rm -rf ./binary",
|
||||
"binary:rename-imports": "ts-node ./scripts/rename-mjs.ts",
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"dev": "tsx watch --clear-screen=false ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine",
|
||||
"dev:docker": "nodemon",
|
||||
"build": "tsup",
|
||||
"build:frontend": "npm run build --prefix ../frontend",
|
||||
"start": "node dist/main.mjs",
|
||||
"type:check": "tsc --noEmit",
|
||||
"lint:fix": "eslint --fix --ext js,ts ./src",
|
||||
@ -31,6 +59,11 @@
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.18.10",
|
||||
"@babel/core": "^7.18.10",
|
||||
"@babel/plugin-syntax-import-attributes": "^7.24.7",
|
||||
"@babel/preset-env": "^7.18.10",
|
||||
"@babel/preset-react": "^7.24.7",
|
||||
"@types/bcrypt": "^5.0.2",
|
||||
"@types/jmespath": "^0.15.2",
|
||||
"@types/jsonwebtoken": "^9.0.5",
|
||||
@ -48,6 +81,8 @@
|
||||
"@types/uuid": "^9.0.7",
|
||||
"@typescript-eslint/eslint-plugin": "^6.20.0",
|
||||
"@typescript-eslint/parser": "^6.20.0",
|
||||
"@yao-pkg/pkg": "^5.12.0",
|
||||
"babel-plugin-transform-import-meta": "^2.2.1",
|
||||
"eslint": "^8.56.0",
|
||||
"eslint-config-airbnb-base": "^15.0.0",
|
||||
"eslint-config-airbnb-typescript": "^17.1.0",
|
||||
@ -60,7 +95,7 @@
|
||||
"pino-pretty": "^10.2.3",
|
||||
"prompt-sync": "^4.2.0",
|
||||
"rimraf": "^5.0.5",
|
||||
"ts-node": "^10.9.1",
|
||||
"ts-node": "^10.9.2",
|
||||
"tsc-alias": "^1.8.8",
|
||||
"tsconfig-paths": "^4.2.0",
|
||||
"tsup": "^8.0.1",
|
||||
@ -90,7 +125,8 @@
|
||||
"@peculiar/asn1-schema": "^2.3.8",
|
||||
"@peculiar/x509": "^1.10.0",
|
||||
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
|
||||
"@sindresorhus/slugify": "^2.2.1",
|
||||
"@sindresorhus/slugify": "1.1.0",
|
||||
"@team-plain/typescript-sdk": "^4.6.1",
|
||||
"@ucast/mongo2js": "^1.3.4",
|
||||
"ajv": "^8.12.0",
|
||||
"argon2": "^0.31.2",
|
||||
@ -112,13 +148,14 @@
|
||||
"jmespath": "^0.16.0",
|
||||
"jsonwebtoken": "^9.0.2",
|
||||
"jsrp": "^0.2.4",
|
||||
"jwks-rsa": "^3.1.0",
|
||||
"knex": "^3.0.1",
|
||||
"ldapjs": "^3.0.7",
|
||||
"libsodium-wrappers": "^0.7.13",
|
||||
"lodash.isequal": "^4.5.0",
|
||||
"ms": "^2.1.3",
|
||||
"mysql2": "^3.9.8",
|
||||
"nanoid": "^5.0.4",
|
||||
"nanoid": "^3.3.4",
|
||||
"nodemailer": "^6.9.9",
|
||||
"openid-client": "^5.6.5",
|
||||
"ora": "^7.0.1",
|
||||
@ -134,6 +171,7 @@
|
||||
"posthog-node": "^3.6.2",
|
||||
"probot": "^13.0.0",
|
||||
"smee-client": "^2.0.0",
|
||||
"tedious": "^18.2.1",
|
||||
"tweetnacl": "^1.0.3",
|
||||
"tweetnacl-util": "^0.15.1",
|
||||
"uuid": "^9.0.1",
|
||||
|
27  backend/scripts/rename-mjs.ts  Normal file
@@ -0,0 +1,27 @@
```ts
/* eslint-disable @typescript-eslint/no-shadow */
import fs from "node:fs";
import path from "node:path";

function replaceMjsOccurrences(directory: string) {
  fs.readdir(directory, (err, files) => {
    if (err) throw err;
    files.forEach((file) => {
      const filePath = path.join(directory, file);
      if (fs.statSync(filePath).isDirectory()) {
        replaceMjsOccurrences(filePath);
      } else {
        fs.readFile(filePath, "utf8", (err, data) => {
          if (err) throw err;
          const result = data.replace(/\.mjs/g, ".js");
          fs.writeFile(filePath, result, "utf8", (err) => {
            if (err) throw err;
            // eslint-disable-next-line no-console
            console.log(`Updated: ${filePath}`);
          });
        });
      }
    });
  });
}

replaceMjsOccurrences("dist");
```
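`rename-mjs.ts` walks the compiled `dist` output and rewrites `.mjs` references to `.js`; the `binary:rename-imports` script above runs it via `ts-node` before `pkg` packages the backend (the `bin` entry points at `dist/main.js`). For comparison only — not part of this diff — the same traversal written with `fs/promises` might look like:

```ts
// Alternative sketch of the same traversal (sequential, Node 16+), shown only to
// illustrate what the callback-based script above does.
import { readdir, readFile, stat, writeFile } from "node:fs/promises";
import path from "node:path";

async function replaceMjsOccurrencesAsync(directory: string): Promise<void> {
  for (const file of await readdir(directory)) {
    const filePath = path.join(directory, file);
    if ((await stat(filePath)).isDirectory()) {
      await replaceMjsOccurrencesAsync(filePath);
    } else {
      const data = await readFile(filePath, "utf8");
      await writeFile(filePath, data.replace(/\.mjs/g, ".js"), "utf8");
    }
  }
}

// e.g. await replaceMjsOccurrencesAsync("dist");
```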
4
backend/src/@types/fastify.d.ts
vendored
@ -41,6 +41,7 @@ import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/
|
||||
import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
|
||||
import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
|
||||
import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
|
||||
import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
|
||||
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
|
||||
import { TIdentityTokenAuthServiceFactory } from "@app/services/identity-token-auth/identity-token-auth-service";
|
||||
import { TIdentityUaServiceFactory } from "@app/services/identity-ua/identity-ua-service";
|
||||
@ -66,6 +67,7 @@ import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin
|
||||
import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
|
||||
import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||
import { TUserServiceFactory } from "@app/services/user/user-service";
|
||||
import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
|
||||
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
|
||||
|
||||
declare module "fastify" {
|
||||
@ -134,6 +136,7 @@ declare module "fastify" {
|
||||
identityGcpAuth: TIdentityGcpAuthServiceFactory;
|
||||
identityAwsAuth: TIdentityAwsAuthServiceFactory;
|
||||
identityAzureAuth: TIdentityAzureAuthServiceFactory;
|
||||
identityOidcAuth: TIdentityOidcAuthServiceFactory;
|
||||
accessApprovalPolicy: TAccessApprovalPolicyServiceFactory;
|
||||
accessApprovalRequest: TAccessApprovalRequestServiceFactory;
|
||||
secretApprovalPolicy: TSecretApprovalPolicyServiceFactory;
|
||||
@ -159,6 +162,7 @@ declare module "fastify" {
|
||||
identityProjectAdditionalPrivilege: TIdentityProjectAdditionalPrivilegeServiceFactory;
|
||||
secretSharing: TSecretSharingServiceFactory;
|
||||
rateLimit: TRateLimitServiceFactory;
|
||||
userEngagement: TUserEngagementServiceFactory;
|
||||
};
|
||||
// this is exclusive use for middlewares in which we need to inject data
|
||||
// everywhere else access using service layer
|
||||
|
8
backend/src/@types/knex.d.ts
vendored
@ -92,6 +92,9 @@ import {
|
||||
TIdentityKubernetesAuths,
|
||||
TIdentityKubernetesAuthsInsert,
|
||||
TIdentityKubernetesAuthsUpdate,
|
||||
TIdentityOidcAuths,
|
||||
TIdentityOidcAuthsInsert,
|
||||
TIdentityOidcAuthsUpdate,
|
||||
TIdentityOrgMemberships,
|
||||
TIdentityOrgMembershipsInsert,
|
||||
TIdentityOrgMembershipsUpdate,
|
||||
@ -483,6 +486,11 @@ declare module "knex/types/tables" {
|
||||
TIdentityAzureAuthsInsert,
|
||||
TIdentityAzureAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityOidcAuth]: KnexOriginal.CompositeTableType<
|
||||
TIdentityOidcAuths,
|
||||
TIdentityOidcAuthsInsert,
|
||||
TIdentityOidcAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityUaClientSecret]: KnexOriginal.CompositeTableType<
|
||||
TIdentityUaClientSecrets,
|
||||
TIdentityUaClientSecretsInsert,
|
||||
|
@@ -0,0 +1,19 @@
```ts
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.SuperAdmin, "enabledLoginMethods"))) {
    await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
      tb.specificType("enabledLoginMethods", "text[]");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SuperAdmin, "enabledLoginMethods")) {
    await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
      t.dropColumn("enabledLoginMethods");
    });
  }
}
```
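This migration adds a nullable `text[]` column to the super-admin table, with `hasColumn` guards keeping both directions idempotent. As a rough illustration of writing to the new array column (not code from this diff; the literal table name and method values are assumptions):

```ts
import { Knex } from "knex";

// Illustrative only: node-postgres serializes a JS string[] into a Postgres text[] value,
// so the new column can be written directly from application code.
async function setEnabledLoginMethods(db: Knex, methods: string[]): Promise<number> {
  return db("super_admin").update({ enabledLoginMethods: methods });
}

// e.g. await setEnabledLoginMethods(db, ["email", "saml"]); // method names are hypothetical
```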
@ -0,0 +1,53 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { WebhookType } from "@app/services/webhook/webhook-types";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasUrlCipherText = await knex.schema.hasColumn(TableName.Webhook, "urlCipherText");
|
||||
const hasUrlIV = await knex.schema.hasColumn(TableName.Webhook, "urlIV");
|
||||
const hasUrlTag = await knex.schema.hasColumn(TableName.Webhook, "urlTag");
|
||||
const hasType = await knex.schema.hasColumn(TableName.Webhook, "type");
|
||||
|
||||
if (await knex.schema.hasTable(TableName.Webhook)) {
|
||||
await knex.schema.alterTable(TableName.Webhook, (tb) => {
|
||||
if (!hasUrlCipherText) {
|
||||
tb.text("urlCipherText");
|
||||
}
|
||||
if (!hasUrlIV) {
|
||||
tb.string("urlIV");
|
||||
}
|
||||
if (!hasUrlTag) {
|
||||
tb.string("urlTag");
|
||||
}
|
||||
if (!hasType) {
|
||||
tb.string("type").defaultTo(WebhookType.GENERAL);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasUrlCipherText = await knex.schema.hasColumn(TableName.Webhook, "urlCipherText");
|
||||
const hasUrlIV = await knex.schema.hasColumn(TableName.Webhook, "urlIV");
|
||||
const hasUrlTag = await knex.schema.hasColumn(TableName.Webhook, "urlTag");
|
||||
const hasType = await knex.schema.hasColumn(TableName.Webhook, "type");
|
||||
|
||||
if (await knex.schema.hasTable(TableName.Webhook)) {
|
||||
await knex.schema.alterTable(TableName.Webhook, (t) => {
|
||||
if (hasUrlCipherText) {
|
||||
t.dropColumn("urlCipherText");
|
||||
}
|
||||
if (hasUrlIV) {
|
||||
t.dropColumn("urlIV");
|
||||
}
|
||||
if (hasUrlTag) {
|
||||
t.dropColumn("urlTag");
|
||||
}
|
||||
if (hasType) {
|
||||
t.dropColumn("type");
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,188 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
// migrate secret approval policy approvers to user id
|
||||
const hasApproverUserId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverUserId");
|
||||
const hasApproverId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverId");
|
||||
if (!hasApproverUserId) {
|
||||
// add the new fields
|
||||
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
|
||||
// if (hasApproverId) tb.setNullable("approverId");
|
||||
tb.uuid("approverUserId");
|
||||
tb.foreign("approverUserId").references("id").inTable(TableName.Users).onDelete("CASCADE");
|
||||
});
|
||||
|
||||
// convert project membership id => user id
|
||||
await knex(TableName.SecretApprovalPolicyApprover).update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
approverUserId: knex(TableName.ProjectMembership)
|
||||
.select("userId")
|
||||
.where("id", knex.raw("??", [`${TableName.SecretApprovalPolicyApprover}.approverId`]))
|
||||
});
|
||||
// drop the old field
|
||||
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
|
||||
if (hasApproverId) tb.dropColumn("approverId");
|
||||
tb.uuid("approverUserId").notNullable().alter();
|
||||
});
|
||||
}
|
||||
|
||||
// migrate secret approval request committer and statusChangeBy to user id
|
||||
const hasSecretApprovalRequestTable = await knex.schema.hasTable(TableName.SecretApprovalRequest);
|
||||
const hasCommitterUserId = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "committerUserId");
|
||||
const hasCommitterId = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "committerId");
|
||||
const hasStatusChangeBy = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "statusChangeBy");
|
||||
const hasStatusChangedByUserId = await knex.schema.hasColumn(
|
||||
TableName.SecretApprovalRequest,
|
||||
"statusChangedByUserId"
|
||||
);
|
||||
if (hasSecretApprovalRequestTable) {
|
||||
// new fields
|
||||
await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
|
||||
// if (hasCommitterId) tb.setNullable("committerId");
|
||||
if (!hasCommitterUserId) {
|
||||
tb.uuid("committerUserId");
|
||||
tb.foreign("committerUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
|
||||
}
|
||||
if (!hasStatusChangedByUserId) {
|
||||
tb.uuid("statusChangedByUserId");
|
||||
tb.foreign("statusChangedByUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
|
||||
}
|
||||
});
|
||||
|
||||
// copy the assigned project membership => user id to new fields
|
||||
await knex(TableName.SecretApprovalRequest).update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
committerUserId: knex(TableName.ProjectMembership)
|
||||
.select("userId")
|
||||
.where("id", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerId`])),
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
statusChangedByUserId: knex(TableName.ProjectMembership)
|
||||
.select("userId")
|
||||
.where("id", knex.raw("??", [`${TableName.SecretApprovalRequest}.statusChangeBy`]))
|
||||
});
|
||||
// drop old fields
|
||||
await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
|
||||
if (hasStatusChangeBy) tb.dropColumn("statusChangeBy");
|
||||
if (hasCommitterId) tb.dropColumn("committerId");
|
||||
tb.uuid("committerUserId").notNullable().alter();
|
||||
});
|
||||
}
|
||||
|
||||
// migrate secret approval request reviewer to user id
|
||||
const hasMemberId = await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "member");
|
||||
const hasReviewerUserId = await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "reviewerUserId");
|
||||
if (!hasReviewerUserId) {
|
||||
// new fields
|
||||
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => {
|
||||
// if (hasMemberId) tb.setNullable("member");
|
||||
tb.uuid("reviewerUserId");
|
||||
tb.foreign("reviewerUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
|
||||
});
|
||||
// copy project membership => user id to new fields
|
||||
await knex(TableName.SecretApprovalRequestReviewer).update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
reviewerUserId: knex(TableName.ProjectMembership)
|
||||
.select("userId")
|
||||
.where("id", knex.raw("??", [`${TableName.SecretApprovalRequestReviewer}.member`]))
|
||||
});
|
||||
// drop old field
|
||||
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => {
|
||||
if (hasMemberId) tb.dropColumn("member");
|
||||
tb.uuid("reviewerUserId").notNullable().alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasApproverUserId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverUserId");
|
||||
const hasApproverId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverId");
|
||||
if (hasApproverUserId) {
|
||||
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
|
||||
if (!hasApproverId) {
|
||||
tb.uuid("approverId");
|
||||
tb.foreign("approverId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
|
||||
}
|
||||
});
|
||||
|
||||
if (!hasApproverId) {
|
||||
await knex(TableName.SecretApprovalPolicyApprover).update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
approverId: knex(TableName.ProjectMembership)
|
||||
.select("id")
|
||||
.where("userId", knex.raw("??", [`${TableName.SecretApprovalPolicyApprover}.approverUserId`]))
|
||||
});
|
||||
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
|
||||
tb.dropColumn("approverUserId");
|
||||
tb.uuid("approverId").notNullable().alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const hasSecretApprovalRequestTable = await knex.schema.hasTable(TableName.SecretApprovalRequest);
|
||||
const hasCommitterUserId = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "committerUserId");
|
||||
const hasCommitterId = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "committerId");
|
||||
const hasStatusChangeBy = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "statusChangeBy");
|
||||
const hasStatusChangedByUser = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "statusChangedByUserId");
|
||||
if (hasSecretApprovalRequestTable) {
|
||||
await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
|
||||
// if (hasCommitterId) tb.uuid("committerId").notNullable().alter();
|
||||
if (!hasCommitterId) {
|
||||
tb.uuid("committerId");
|
||||
tb.foreign("committerId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
|
||||
}
|
||||
if (!hasStatusChangeBy) {
|
||||
tb.uuid("statusChangeBy");
|
||||
tb.foreign("statusChangeBy").references("id").inTable(TableName.ProjectMembership).onDelete("SET NULL");
|
||||
}
|
||||
});
|
||||
|
||||
await knex(TableName.SecretApprovalRequest).update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
committerId: knex(TableName.ProjectMembership)
|
||||
.select("id")
|
||||
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`])),
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
statusChangeBy: knex(TableName.ProjectMembership)
|
||||
.select("id")
|
||||
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.statusChangedByUserId`]))
|
||||
});
|
||||
|
||||
await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
|
||||
if (hasCommitterUserId) tb.dropColumn("committerUserId");
|
||||
if (hasStatusChangedByUser) tb.dropColumn("statusChangedByUserId");
|
||||
if (hasCommitterId) tb.uuid("committerId").notNullable().alter();
|
||||
});
|
||||
}
|
||||
|
||||
const hasMemberId = await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "member");
|
||||
const hasReviewerUserId = await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "reviewerUserId");
|
||||
if (hasReviewerUserId) {
|
||||
if (!hasMemberId) {
|
||||
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => {
|
||||
// if (hasMemberId) tb.uuid("member").notNullable().alter();
|
||||
tb.uuid("member");
|
||||
tb.foreign("member").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
|
||||
});
|
||||
}
|
||||
await knex(TableName.SecretApprovalRequestReviewer).update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
member: knex(TableName.ProjectMembership)
|
||||
.select("id")
|
||||
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`]))
|
||||
});
|
||||
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => {
|
||||
tb.uuid("member").notNullable().alter();
|
||||
tb.dropColumn("reviewerUserId");
|
||||
});
|
||||
}
|
||||
}
|
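The migration above backfills the new `approverUserId`, `committerUserId`, `statusChangedByUserId`, and `reviewerUserId` columns by resolving the old project-membership ids through `ProjectMembership.userId` with a correlated subquery inside an `UPDATE`, then drops the membership-based columns. A stripped-down sketch of that backfill pattern, using illustrative raw table and column names rather than the repository's `TableName` enum:

```ts
import { Knex } from "knex";

// Roughly: UPDATE secret_approval_policies_approvers
//          SET "approverUserId" = (SELECT "userId" FROM project_memberships
//                                  WHERE project_memberships.id = secret_approval_policies_approvers."approverId");
// Table and column names here are illustrative assumptions.
async function backfillApproverUserId(db: Knex): Promise<void> {
  await db("secret_approval_policies_approvers").update({
    approverUserId: db("project_memberships")
      .select("userId")
      .where("id", db.raw("??", ["secret_approval_policies_approvers.approverId"]))
  });
}
```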
@@ -0,0 +1,34 @@
```ts
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.IdentityOidcAuth))) {
    await knex.schema.createTable(TableName.IdentityOidcAuth, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
      t.jsonb("accessTokenTrustedIps").notNullable();
      t.uuid("identityId").notNullable().unique();
      t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
      t.string("oidcDiscoveryUrl").notNullable();
      t.text("encryptedCaCert").notNullable();
      t.string("caCertIV").notNullable();
      t.string("caCertTag").notNullable();
      t.string("boundIssuer").notNullable();
      t.string("boundAudiences").notNullable();
      t.jsonb("boundClaims").notNullable();
      t.string("boundSubject");
      t.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.IdentityOidcAuth);
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.IdentityOidcAuth);
  await dropOnUpdateTrigger(knex, TableName.IdentityOidcAuth);
}
```
31  backend/src/db/schemas/identity-oidc-auths.ts  Normal file
@@ -0,0 +1,31 @@
```ts
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const IdentityOidcAuthsSchema = z.object({
  id: z.string().uuid(),
  accessTokenTTL: z.coerce.number().default(7200),
  accessTokenMaxTTL: z.coerce.number().default(7200),
  accessTokenNumUsesLimit: z.coerce.number().default(0),
  accessTokenTrustedIps: z.unknown(),
  identityId: z.string().uuid(),
  oidcDiscoveryUrl: z.string(),
  encryptedCaCert: z.string(),
  caCertIV: z.string(),
  caCertTag: z.string(),
  boundIssuer: z.string(),
  boundAudiences: z.string(),
  boundClaims: z.unknown(),
  boundSubject: z.string().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TIdentityOidcAuths = z.infer<typeof IdentityOidcAuthsSchema>;
export type TIdentityOidcAuthsInsert = Omit<z.input<typeof IdentityOidcAuthsSchema>, TImmutableDBKeys>;
export type TIdentityOidcAuthsUpdate = Partial<Omit<z.input<typeof IdentityOidcAuthsSchema>, TImmutableDBKeys>>;
```
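The generated `IdentityOidcAuthsSchema` mirrors the `identity_oidc_auths` table created in the migration above, including the `z.coerce.number()` wrappers that turn stringified bigint columns back into numbers. A small, hypothetical helper (not from the diff) showing how a raw row might be validated before use:

```ts
import { IdentityOidcAuthsSchema } from "@app/db/schemas";

// Hypothetical usage: `rawRow` stands in for a record selected from identity_oidc_auths.
function parseIdentityOidcAuthRow(rawRow: unknown) {
  const parsed = IdentityOidcAuthsSchema.safeParse(rawRow);
  if (!parsed.success) {
    throw new Error(`Unexpected identity_oidc_auths row shape: ${parsed.error.message}`);
  }
  // bigint columns such as accessTokenTTL come back from pg as strings and are
  // coerced to numbers by z.coerce.number().
  return parsed.data;
}
```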
@ -28,6 +28,7 @@ export * from "./identity-aws-auths";
|
||||
export * from "./identity-azure-auths";
|
||||
export * from "./identity-gcp-auths";
|
||||
export * from "./identity-kubernetes-auths";
|
||||
export * from "./identity-oidc-auths";
|
||||
export * from "./identity-org-memberships";
|
||||
export * from "./identity-project-additional-privilege";
|
||||
export * from "./identity-project-membership-role";
|
||||
|
@ -60,6 +60,7 @@ export enum TableName {
|
||||
IdentityAzureAuth = "identity_azure_auths",
|
||||
IdentityUaClientSecret = "identity_ua_client_secrets",
|
||||
IdentityAwsAuth = "identity_aws_auths",
|
||||
IdentityOidcAuth = "identity_oidc_auths",
|
||||
IdentityOrgMembership = "identity_org_memberships",
|
||||
IdentityProjectMembership = "identity_project_memberships",
|
||||
IdentityProjectMembershipRole = "identity_project_membership_role",
|
||||
@ -167,5 +168,6 @@ export enum IdentityAuthMethod {
|
||||
KUBERNETES_AUTH = "kubernetes-auth",
|
||||
GCP_AUTH = "gcp-auth",
|
||||
AWS_AUTH = "aws-auth",
|
||||
AZURE_AUTH = "azure-auth"
|
||||
AZURE_AUTH = "azure-auth",
|
||||
OIDC_AUTH = "oidc-auth"
|
||||
}
|
||||
|
@ -9,10 +9,10 @@ import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const SecretApprovalPoliciesApproversSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
approverId: z.string().uuid(),
|
||||
policyId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
updatedAt: z.date(),
|
||||
approverUserId: z.string().uuid()
|
||||
});
|
||||
|
||||
export type TSecretApprovalPoliciesApprovers = z.infer<typeof SecretApprovalPoliciesApproversSchema>;
|
||||
|
@ -9,11 +9,11 @@ import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const SecretApprovalRequestsReviewersSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
member: z.string().uuid(),
|
||||
status: z.string(),
|
||||
requestId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
updatedAt: z.date(),
|
||||
reviewerUserId: z.string().uuid()
|
||||
});
|
||||
|
||||
export type TSecretApprovalRequestsReviewers = z.infer<typeof SecretApprovalRequestsReviewersSchema>;
|
||||
|
@ -15,11 +15,11 @@ export const SecretApprovalRequestsSchema = z.object({
|
||||
conflicts: z.unknown().nullable().optional(),
|
||||
slug: z.string(),
|
||||
folderId: z.string().uuid(),
|
||||
statusChangeBy: z.string().uuid().nullable().optional(),
|
||||
committerId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
isReplicated: z.boolean().nullable().optional()
|
||||
isReplicated: z.boolean().nullable().optional(),
|
||||
committerUserId: z.string().uuid(),
|
||||
statusChangedByUserId: z.string().uuid().nullable().optional()
|
||||
});
|
||||
|
||||
export type TSecretApprovalRequests = z.infer<typeof SecretApprovalRequestsSchema>;
|
||||
|
@@ -18,7 +18,8 @@ export const SuperAdminSchema = z.object({
```ts
  trustSamlEmails: z.boolean().default(false).nullable().optional(),
  trustLdapEmails: z.boolean().default(false).nullable().optional(),
  trustOidcEmails: z.boolean().default(false).nullable().optional(),
  defaultAuthOrgId: z.string().uuid().nullable().optional()
  defaultAuthOrgId: z.string().uuid().nullable().optional(),
  enabledLoginMethods: z.string().array().nullable().optional()
});

export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;
```
@@ -21,7 +21,11 @@ export const WebhooksSchema = z.object({
```ts
  keyEncoding: z.string().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date(),
  envId: z.string().uuid()
  envId: z.string().uuid(),
  urlCipherText: z.string().nullable().optional(),
  urlIV: z.string().nullable().optional(),
  urlTag: z.string().nullable().optional(),
  type: z.string().default("general").nullable().optional()
});

export type TWebhooks = z.infer<typeof WebhooksSchema>;
```
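With this change a webhook row carries an encrypted URL triple (`urlCipherText` / `urlIV` / `urlTag`) plus a `type` discriminator defaulting to `"general"`, matching the column default added in the webhook migration earlier in this diff. A tiny, hypothetical guard over the new field (only `WebhookType.GENERAL` is referenced anywhere in this diff, so no other enum members are assumed):

```ts
import { TWebhooks } from "@app/db/schemas";
import { WebhookType } from "@app/services/webhook/webhook-types";

// Sketch only: treat a missing/legacy `type` as the default "general" webhook.
function isGeneralWebhook(webhook: TWebhooks): boolean {
  return (webhook.type ?? WebhookType.GENERAL) === WebhookType.GENERAL;
}
```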
@ -25,10 +25,10 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
.optional()
|
||||
.nullable()
|
||||
.transform((val) => (val ? removeTrailingSlash(val) : val)),
|
||||
approvers: z.string().array().min(1),
|
||||
approverUserIds: z.string().array().min(1),
|
||||
approvals: z.number().min(1).default(1)
|
||||
})
|
||||
.refine((data) => data.approvals <= data.approvers.length, {
|
||||
.refine((data) => data.approvals <= data.approverUserIds.length, {
|
||||
path: ["approvals"],
|
||||
message: "The number of approvals should be lower than the number of approvers."
|
||||
}),
|
||||
@ -66,7 +66,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
body: z
|
||||
.object({
|
||||
name: z.string().optional(),
|
||||
approvers: z.string().array().min(1),
|
||||
approverUserIds: z.string().array().min(1),
|
||||
approvals: z.number().min(1).default(1),
|
||||
secretPath: z
|
||||
.string()
|
||||
@ -74,7 +74,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
.nullable()
|
||||
.transform((val) => (val ? removeTrailingSlash(val) : val))
|
||||
})
|
||||
.refine((data) => data.approvals <= data.approvers.length, {
|
||||
.refine((data) => data.approvals <= data.approverUserIds.length, {
|
||||
path: ["approvals"],
|
||||
message: "The number of approvals should be lower than the number of approvers."
|
||||
}),
|
||||
@ -139,7 +139,15 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
approvals: sapPubSchema.merge(z.object({ approvers: z.string().array() })).array()
|
||||
approvals: sapPubSchema
|
||||
.extend({
|
||||
userApprovers: z
|
||||
.object({
|
||||
userId: z.string()
|
||||
})
|
||||
.array()
|
||||
})
|
||||
.array()
|
||||
})
|
||||
}
|
||||
},
|
||||
@ -170,7 +178,11 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
policy: sapPubSchema.merge(z.object({ approvers: z.string().array() })).optional()
|
||||
policy: sapPubSchema
|
||||
.extend({
|
||||
userApprovers: z.object({ userId: z.string() }).array()
|
||||
})
|
||||
.optional()
|
||||
})
|
||||
}
|
||||
},
|
||||
|
@ -6,7 +6,8 @@ import {
|
||||
SecretApprovalRequestsSecretsSchema,
|
||||
SecretsSchema,
|
||||
SecretTagsSchema,
|
||||
SecretVersionsSchema
|
||||
SecretVersionsSchema,
|
||||
UsersSchema
|
||||
} from "@app/db/schemas";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { ApprovalStatus, RequestState } from "@app/ee/services/secret-approval-request/secret-approval-request-types";
|
||||
@ -14,6 +15,15 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
const approvalRequestUser = z.object({ userId: z.string() }).merge(
|
||||
UsersSchema.pick({
|
||||
email: true,
|
||||
firstName: true,
|
||||
lastName: true,
|
||||
username: true
|
||||
})
|
||||
);
|
||||
|
||||
export const registerSecretApprovalRequestRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "GET",
|
||||
@ -41,9 +51,10 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
||||
approvers: z.string().array(),
|
||||
secretPath: z.string().optional().nullable()
|
||||
}),
|
||||
committerUser: approvalRequestUser,
|
||||
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
|
||||
environment: z.string(),
|
||||
reviewers: z.object({ member: z.string(), status: z.string() }).array(),
|
||||
reviewers: z.object({ userId: z.string(), status: z.string() }).array(),
|
||||
approvers: z.string().array()
|
||||
}).array()
|
||||
})
|
||||
@ -195,7 +206,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
||||
type: isClosing ? EventType.SECRET_APPROVAL_CLOSED : EventType.SECRET_APPROVAL_REOPENED,
|
||||
// eslint-disable-next-line
|
||||
metadata: {
|
||||
[isClosing ? ("closedBy" as const) : ("reopenedBy" as const)]: approval.statusChangeBy as string,
|
||||
[isClosing ? ("closedBy" as const) : ("reopenedBy" as const)]: approval.statusChangedByUserId as string,
|
||||
secretApprovalRequestId: approval.id,
|
||||
secretApprovalRequestSlug: approval.slug
|
||||
// eslint-disable-next-line
|
||||
@ -216,6 +227,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
||||
})
|
||||
.array()
|
||||
.optional();
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:id",
|
||||
@ -235,12 +247,13 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
approvals: z.number(),
|
||||
approvers: z.string().array(),
|
||||
approvers: approvalRequestUser.array(),
|
||||
secretPath: z.string().optional().nullable()
|
||||
}),
|
||||
environment: z.string(),
|
||||
reviewers: z.object({ member: z.string(), status: z.string() }).array(),
|
||||
approvers: z.string().array(),
|
||||
statusChangedByUser: approvalRequestUser.optional(),
|
||||
committerUser: approvalRequestUser,
|
||||
reviewers: approvalRequestUser.extend({ status: z.string() }).array(),
|
||||
secretPath: z.string(),
|
||||
commits: SecretApprovalRequestsSecretsSchema.omit({ secretBlindIndex: true })
|
||||
.merge(
|
||||
|
@@ -4,6 +4,7 @@ import { TDbClient } from "@app/db";
```ts
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, stripUndefinedInWhere } from "@app/lib/knex";
import { logger } from "@app/lib/logger";

export type TAuditLogDALFactory = ReturnType<typeof auditLogDALFactory>;
```
@@ -55,13 +56,34 @@ export const auditLogDALFactory = (db: TDbClient) => {
```ts

  // delete all audit log that have expired
  const pruneAuditLog = async (tx?: Knex) => {
    try {
      const today = new Date();
      const docs = await (tx || db)(TableName.AuditLog).where("expiresAt", "<", today).del();
      return docs;
    } catch (error) {
      throw new DatabaseError({ error, name: "PruneAuditLog" });
    }
    const AUDIT_LOG_PRUNE_BATCH_SIZE = 10000;
    const MAX_RETRY_ON_FAILURE = 3;

    const today = new Date();
    let deletedAuditLogIds: { id: string }[] = [];
    let numberOfRetryOnFailure = 0;

    do {
      try {
        const findExpiredLogSubQuery = (tx || db)(TableName.AuditLog)
          .where("expiresAt", "<", today)
          .select("id")
          .limit(AUDIT_LOG_PRUNE_BATCH_SIZE);
        // eslint-disable-next-line no-await-in-loop
        deletedAuditLogIds = await (tx || db)(TableName.AuditLog)
          .whereIn("id", findExpiredLogSubQuery)
          .del()
          .returning("id");
        numberOfRetryOnFailure = 0; // reset
        // eslint-disable-next-line no-await-in-loop
        await new Promise((resolve) => {
          setTimeout(resolve, 100); // time to breathe for db
        });
      } catch (error) {
        numberOfRetryOnFailure += 1;
        logger.error(error, "Failed to delete audit log on pruning");
      }
    } while (deletedAuditLogIds.length > 0 && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
  };

  return { ...auditLogOrm, pruneAuditLog, find };
```
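`pruneAuditLog` now deletes expired rows in bounded batches (an id subquery with `LIMIT`), pauses briefly between batches, and gives up after three consecutive failures instead of issuing one unbounded `DELETE`. The same idea extracted into a generic helper, purely for illustration and not part of the repository:

```ts
import { Knex } from "knex";

// Generalized sketch of the batching pattern above: delete rows matching `where`
// in fixed-size chunks, pausing between chunks and stopping after `maxRetries`
// consecutive failures.
async function batchedDelete(
  db: Knex,
  table: string,
  where: (qb: Knex.QueryBuilder) => Knex.QueryBuilder,
  batchSize = 10_000,
  maxRetries = 3
): Promise<void> {
  let deleted: { id: string }[] = [];
  let failures = 0;
  do {
    try {
      const idsSubQuery = where(db(table)).select("id").limit(batchSize);
      deleted = await db(table).whereIn("id", idsSubQuery).del().returning("id");
      failures = 0;
      await new Promise((r) => setTimeout(r, 100)); // let the DB breathe between chunks
    } catch (err) {
      failures += 1;
    }
  } while (deleted.length > 0 && failures < maxRetries);
}

// e.g. await batchedDelete(db, "audit_logs", (qb) => qb.where("expiresAt", "<", new Date()));
```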
@ -45,6 +45,7 @@ export enum EventType {
|
||||
CREATE_SECRETS = "create-secrets",
|
||||
UPDATE_SECRET = "update-secret",
|
||||
UPDATE_SECRETS = "update-secrets",
|
||||
MOVE_SECRETS = "move-secrets",
|
||||
DELETE_SECRET = "delete-secret",
|
||||
DELETE_SECRETS = "delete-secrets",
|
||||
GET_WORKSPACE_KEY = "get-workspace-key",
|
||||
@ -78,6 +79,11 @@ export enum EventType {
|
||||
UPDATE_IDENTITY_KUBENETES_AUTH = "update-identity-kubernetes-auth",
|
||||
GET_IDENTITY_KUBERNETES_AUTH = "get-identity-kubernetes-auth",
|
||||
REVOKE_IDENTITY_KUBERNETES_AUTH = "revoke-identity-kubernetes-auth",
|
||||
LOGIN_IDENTITY_OIDC_AUTH = "login-identity-oidc-auth",
|
||||
ADD_IDENTITY_OIDC_AUTH = "add-identity-oidc-auth",
|
||||
UPDATE_IDENTITY_OIDC_AUTH = "update-identity-oidc-auth",
|
||||
GET_IDENTITY_OIDC_AUTH = "get-identity-oidc-auth",
|
||||
REVOKE_IDENTITY_OIDC_AUTH = "revoke-identity-oidc-auth",
|
||||
CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "create-identity-universal-auth-client-secret",
|
||||
REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "revoke-identity-universal-auth-client-secret",
|
||||
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRETS = "get-identity-universal-auth-client-secret",
|
||||
@ -235,6 +241,17 @@ interface UpdateSecretBatchEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface MoveSecretsEvent {
|
||||
type: EventType.MOVE_SECRETS;
|
||||
metadata: {
|
||||
sourceEnvironment: string;
|
||||
sourceSecretPath: string;
|
||||
destinationEnvironment: string;
|
||||
destinationSecretPath: string;
|
||||
secretIds: string[];
|
||||
};
|
||||
}
|
||||
|
||||
interface DeleteSecretEvent {
|
||||
type: EventType.DELETE_SECRET;
|
||||
metadata: {
|
||||
@ -749,6 +766,63 @@ interface GetIdentityAzureAuthEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface LoginIdentityOidcAuthEvent {
|
||||
type: EventType.LOGIN_IDENTITY_OIDC_AUTH;
|
||||
metadata: {
|
||||
identityId: string;
|
||||
identityOidcAuthId: string;
|
||||
identityAccessTokenId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface AddIdentityOidcAuthEvent {
|
||||
type: EventType.ADD_IDENTITY_OIDC_AUTH;
|
||||
metadata: {
|
||||
identityId: string;
|
||||
oidcDiscoveryUrl: string;
|
||||
caCert: string;
|
||||
boundIssuer: string;
|
||||
boundAudiences: string;
|
||||
boundClaims: Record<string, string>;
|
||||
boundSubject: string;
|
||||
accessTokenTTL: number;
|
||||
accessTokenMaxTTL: number;
|
||||
accessTokenNumUsesLimit: number;
|
||||
accessTokenTrustedIps: Array<TIdentityTrustedIp>;
|
||||
};
|
||||
}
|
||||
|
||||
interface DeleteIdentityOidcAuthEvent {
|
||||
type: EventType.REVOKE_IDENTITY_OIDC_AUTH;
|
||||
metadata: {
|
||||
identityId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdateIdentityOidcAuthEvent {
|
||||
type: EventType.UPDATE_IDENTITY_OIDC_AUTH;
|
||||
metadata: {
|
||||
identityId: string;
|
||||
oidcDiscoveryUrl?: string;
|
||||
caCert?: string;
|
||||
boundIssuer?: string;
|
||||
boundAudiences?: string;
|
||||
boundClaims?: Record<string, string>;
|
||||
boundSubject?: string;
|
||||
accessTokenTTL?: number;
|
||||
accessTokenMaxTTL?: number;
|
||||
accessTokenNumUsesLimit?: number;
|
||||
accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetIdentityOidcAuthEvent {
|
||||
type: EventType.GET_IDENTITY_OIDC_AUTH;
|
||||
metadata: {
|
||||
identityId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface CreateEnvironmentEvent {
|
||||
type: EventType.CREATE_ENVIRONMENT;
|
||||
metadata: {
|
||||
@ -838,7 +912,6 @@ interface CreateWebhookEvent {
|
||||
webhookId: string;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
webhookUrl: string;
|
||||
isDisabled: boolean;
|
||||
};
|
||||
}
|
||||
@ -849,7 +922,6 @@ interface UpdateWebhookStatusEvent {
|
||||
webhookId: string;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
webhookUrl: string;
|
||||
isDisabled: boolean;
|
||||
};
|
||||
}
|
||||
@ -860,7 +932,6 @@ interface DeleteWebhookEvent {
|
||||
webhookId: string;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
webhookUrl: string;
|
||||
isDisabled: boolean;
|
||||
};
|
||||
}
|
||||
@ -1100,6 +1171,7 @@ export type Event =
|
||||
| CreateSecretBatchEvent
|
||||
| UpdateSecretEvent
|
||||
| UpdateSecretBatchEvent
|
||||
| MoveSecretsEvent
|
||||
| DeleteSecretEvent
|
||||
| DeleteSecretBatchEvent
|
||||
| GetWorkspaceKeyEvent
|
||||
@ -1152,6 +1224,11 @@ export type Event =
|
||||
| DeleteIdentityAzureAuthEvent
|
||||
| UpdateIdentityAzureAuthEvent
|
||||
| GetIdentityAzureAuthEvent
|
||||
| LoginIdentityOidcAuthEvent
|
||||
| AddIdentityOidcAuthEvent
|
||||
| DeleteIdentityOidcAuthEvent
|
||||
| UpdateIdentityOidcAuthEvent
|
||||
| GetIdentityOidcAuthEvent
|
||||
| CreateEnvironmentEvent
|
||||
| UpdateEnvironmentEvent
|
||||
| DeleteEnvironmentEvent
|
||||
|
@@ -3,7 +3,8 @@ import { z } from "zod";
```ts
export enum SqlProviders {
  Postgres = "postgres",
  MySQL = "mysql2",
  Oracle = "oracledb"
  Oracle = "oracledb",
  MsSQL = "mssql"
}

export const DynamicSecretSqlDBSchema = z.object({
```
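The new `MsSQL` member extends the dynamic-secret SQL providers; the enum values line up with knex client adapter names, so a connection for any provider can be built directly from the configured value. A hedged sketch (the import path for `SqlProviders` is not shown in this hunk and is assumed here):

```ts
import knex, { Knex } from "knex";

// Assumed import path — the hunk above does not show this file's location.
import { SqlProviders } from "@app/ee/services/dynamic-secret/dynamic-secret-types";

// "postgres", "mysql2", "oracledb", and "mssql" are all valid knex client names.
function buildDynamicSecretClient(provider: SqlProviders, connection: Knex.Config["connection"]): Knex {
  return knex({ client: provider, connection, pool: { min: 0, max: 1 } });
}
```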
@@ -34,6 +34,7 @@ import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal
```ts
import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
import { LoginMethod } from "@app/services/super-admin/super-admin-types";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { normalizeUsername } from "@app/services/user/user-fns";
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
```
@@ -417,6 +418,13 @@ export const ldapConfigServiceFactory = ({
```ts
  }: TLdapLoginDTO) => {
    const appCfg = getConfig();
    const serverCfg = await getServerCfg();

    if (serverCfg.enabledLoginMethods && !serverCfg.enabledLoginMethods.includes(LoginMethod.LDAP)) {
      throw new BadRequestError({
        message: "Login with LDAP is disabled by administrator."
      });
    }

    let userAlias = await userAliasDAL.findOne({
      externalId,
      orgId,
```
@@ -473,7 +481,7 @@ export const ldapConfigServiceFactory = ({
```ts

    userAlias = await userDAL.transaction(async (tx) => {
      let newUser: TUsers | undefined;
      if (serverCfg.trustSamlEmails) {
      if (serverCfg.trustLdapEmails) {
        newUser = await userDAL.findOne(
          {
            email,
```
@@ -5,6 +5,7 @@
```ts
// TODO(akhilmhdh): With tony find out the api structure and fill it here

import { ForbiddenError } from "@casl/ability";
import { Knex } from "knex";

import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
```
@@ -200,13 +201,13 @@ export const licenseServiceFactory = ({
```ts
    await licenseServerCloudApi.request.delete(`/api/license-server/v1/customers/${customerId}`);
  };

  const updateSubscriptionOrgMemberCount = async (orgId: string) => {
  const updateSubscriptionOrgMemberCount = async (orgId: string, tx?: Knex) => {
    if (instanceType === InstanceType.Cloud) {
      const org = await orgDAL.findOrgById(orgId);
      if (!org) throw new BadRequestError({ message: "Org not found" });

      const quantity = await licenseDAL.countOfOrgMembers(orgId);
      const quantityIdentities = await licenseDAL.countOrgUsersAndIdentities(orgId);
      const quantity = await licenseDAL.countOfOrgMembers(orgId, tx);
      const quantityIdentities = await licenseDAL.countOrgUsersAndIdentities(orgId, tx);
      if (org?.customerId) {
        await licenseServerCloudApi.request.patch(`/api/license-server/v1/customers/${org.customerId}/cloud-plan`, {
          quantity,
```
@@ -215,8 +216,8 @@ export const licenseServiceFactory = ({
```ts
      }
      await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId));
    } else if (instanceType === InstanceType.EnterpriseOnPrem) {
      const usedSeats = await licenseDAL.countOfOrgMembers(null);
      const usedIdentitySeats = await licenseDAL.countOrgUsersAndIdentities(null);
      const usedSeats = await licenseDAL.countOfOrgMembers(null, tx);
      const usedIdentitySeats = await licenseDAL.countOrgUsersAndIdentities(null, tx);
      await licenseServerOnPremApi.request.patch(`/api/license/v1/license`, {
        usedSeats,
        usedIdentitySeats
```
@@ -26,6 +26,7 @@ import { TOrgDALFactory } from "@app/services/org/org-dal";
```ts
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
import { LoginMethod } from "@app/services/super-admin/super-admin-types";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { normalizeUsername } from "@app/services/user/user-fns";
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
```
@@ -157,6 +158,13 @@ export const oidcConfigServiceFactory = ({
```ts

  const oidcLogin = async ({ externalId, email, firstName, lastName, orgId, callbackPort }: TOidcLoginDTO) => {
    const serverCfg = await getServerCfg();

    if (serverCfg.enabledLoginMethods && !serverCfg.enabledLoginMethods.includes(LoginMethod.OIDC)) {
      throw new BadRequestError({
        message: "Login with OIDC is disabled by administrator."
      });
    }

    const appCfg = getConfig();
    const userAlias = await userAliasDAL.findOne({
      externalId,
```
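The same `enabledLoginMethods` gate now appears in the LDAP, OIDC, and (below) SAML login paths: when an administrator has configured a list and the method is not in it, login is rejected; when the column is unset, every method stays allowed. If one wanted to factor the repeated check out, it could look roughly like this sketch (the helper itself and the exact import paths are assumptions, not part of the diff):

```ts
import { BadRequestError } from "@app/lib/errors";
import { LoginMethod } from "@app/services/super-admin/super-admin-types";

// Hypothetical consolidation of the repeated guard shown in the services above.
function assertLoginMethodEnabled(
  serverCfg: { enabledLoginMethods?: string[] | null },
  method: LoginMethod,
  label: string
): void {
  if (serverCfg.enabledLoginMethods && !serverCfg.enabledLoginMethods.includes(method)) {
    throw new BadRequestError({ message: `Login with ${label} is disabled by administrator.` });
  }
}

// e.g. assertLoginMethodEnabled(serverCfg, LoginMethod.OIDC, "OIDC");
```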
@@ -28,6 +28,7 @@ import { TOrgDALFactory } from "@app/services/org/org-dal";
```ts
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
import { LoginMethod } from "@app/services/super-admin/super-admin-types";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { normalizeUsername } from "@app/services/user/user-fns";
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
```
@@ -335,6 +336,13 @@ export const samlConfigServiceFactory = ({
```ts
  }: TSamlLoginDTO) => {
    const appCfg = getConfig();
    const serverCfg = await getServerCfg();

    if (serverCfg.enabledLoginMethods && !serverCfg.enabledLoginMethods.includes(LoginMethod.SAML)) {
      throw new BadRequestError({
        message: "Login with SAML is disabled by administrator."
      });
    }

    const userAlias = await userAliasDAL.findOne({
      externalId,
      orgId,
```
@ -1,49 +1,59 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TDbClient } from "@app/db";
|
||||
import { TableName, TSecretApprovalPolicies } from "@app/db/schemas";
|
||||
import { SecretApprovalPoliciesSchema, TableName, TSecretApprovalPolicies } from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import { buildFindFilter, mergeOneToManyRelation, ormify, selectAllTableCols, TFindFilter } from "@app/lib/knex";
|
||||
import { buildFindFilter, ormify, selectAllTableCols, sqlNestRelationships, TFindFilter } from "@app/lib/knex";
|
||||
|
||||
export type TSecretApprovalPolicyDALFactory = ReturnType<typeof secretApprovalPolicyDALFactory>;
|
||||
|
||||
export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
|
||||
const secretApprovalPolicyOrm = ormify(db, TableName.SecretApprovalPolicy);
|
||||
|
||||
const sapFindQuery = (tx: Knex, filter: TFindFilter<TSecretApprovalPolicies>) =>
|
||||
const secretApprovalPolicyFindQuery = (tx: Knex, filter: TFindFilter<TSecretApprovalPolicies>) =>
|
||||
tx(TableName.SecretApprovalPolicy)
|
||||
// eslint-disable-next-line
|
||||
.where(buildFindFilter(filter))
|
||||
.join(TableName.Environment, `${TableName.SecretApprovalPolicy}.envId`, `${TableName.Environment}.id`)
|
||||
.join(
|
||||
.leftJoin(
|
||||
TableName.SecretApprovalPolicyApprover,
|
||||
`${TableName.SecretApprovalPolicy}.id`,
|
||||
`${TableName.SecretApprovalPolicyApprover}.policyId`
|
||||
)
|
||||
.select(tx.ref("approverId").withSchema(TableName.SecretApprovalPolicyApprover))
|
||||
.select(tx.ref("name").withSchema(TableName.Environment).as("envName"))
|
||||
.select(tx.ref("slug").withSchema(TableName.Environment).as("envSlug"))
|
||||
.select(tx.ref("id").withSchema(TableName.Environment).as("envId"))
|
||||
.select(tx.ref("projectId").withSchema(TableName.Environment))
|
||||
.select(tx.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover))
|
||||
.select(
|
||||
tx.ref("name").withSchema(TableName.Environment).as("envName"),
|
||||
tx.ref("slug").withSchema(TableName.Environment).as("envSlug"),
|
||||
tx.ref("id").withSchema(TableName.Environment).as("envId"),
|
||||
tx.ref("projectId").withSchema(TableName.Environment)
|
||||
)
|
||||
.select(selectAllTableCols(TableName.SecretApprovalPolicy))
|
||||
.orderBy("createdAt", "asc");
|
||||
|
||||
const findById = async (id: string, tx?: Knex) => {
|
||||
try {
|
||||
const doc = await sapFindQuery(tx || db.replicaNode(), {
|
||||
const doc = await secretApprovalPolicyFindQuery(tx || db.replicaNode(), {
|
||||
[`${TableName.SecretApprovalPolicy}.id` as "id"]: id
|
||||
});
|
||||
const formatedDoc = mergeOneToManyRelation(
|
||||
doc,
|
||||
"id",
|
||||
({ approverId, envId, envName: name, envSlug: slug, ...el }) => ({
|
||||
...el,
|
||||
envId,
|
||||
environment: { id: envId, name, slug }
|
||||
const formatedDoc = sqlNestRelationships({
|
||||
data: doc,
|
||||
key: "id",
|
||||
parentMapper: (data) => ({
|
||||
environment: { id: data.envId, name: data.envName, slug: data.envSlug },
|
||||
projectId: data.projectId,
|
||||
...SecretApprovalPoliciesSchema.parse(data)
|
||||
}),
|
||||
({ approverId }) => approverId,
|
||||
"approvers"
|
||||
);
|
||||
childrenMapper: [
|
||||
{
|
||||
key: "approverUserId",
|
||||
label: "userApprovers" as const,
|
||||
mapper: ({ approverUserId }) => ({
|
||||
userId: approverUserId
|
||||
})
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
return formatedDoc?.[0];
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "FindById" });
|
||||
@ -52,18 +62,25 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
|
||||
|
||||
const find = async (filter: TFindFilter<TSecretApprovalPolicies & { projectId: string }>, tx?: Knex) => {
|
||||
try {
|
||||
const docs = await sapFindQuery(tx || db.replicaNode(), filter);
|
||||
const formatedDoc = mergeOneToManyRelation(
|
||||
docs,
|
||||
"id",
|
||||
({ approverId, envId, envName: name, envSlug: slug, ...el }) => ({
|
||||
...el,
|
||||
envId,
|
||||
environment: { id: envId, name, slug }
|
||||
const docs = await secretApprovalPolicyFindQuery(tx || db.replicaNode(), filter);
|
||||
const formatedDoc = sqlNestRelationships({
|
||||
data: docs,
|
||||
key: "id",
|
||||
parentMapper: (data) => ({
|
||||
environment: { id: data.envId, name: data.envName, slug: data.envSlug },
|
||||
projectId: data.projectId,
|
||||
...SecretApprovalPoliciesSchema.parse(data)
|
||||
}),
|
||||
({ approverId }) => approverId,
|
||||
"approvers"
|
||||
);
|
||||
childrenMapper: [
|
||||
{
|
||||
key: "approverUserId",
|
||||
label: "userApprovers" as const,
|
||||
mapper: ({ approverUserId }) => ({
|
||||
userId: approverUserId
|
||||
})
|
||||
}
|
||||
]
|
||||
});
|
||||
return formatedDoc;
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find" });
|
||||
|
@ -7,7 +7,6 @@ import { BadRequestError } from "@app/lib/errors";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { containsGlobPatterns } from "@app/lib/picomatch";
|
||||
import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal";
|
||||
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
|
||||
|
||||
import { TSecretApprovalPolicyApproverDALFactory } from "./secret-approval-policy-approver-dal";
|
||||
import { TSecretApprovalPolicyDALFactory } from "./secret-approval-policy-dal";
|
||||
@ -29,7 +28,6 @@ type TSecretApprovalPolicyServiceFactoryDep = {
|
||||
secretApprovalPolicyDAL: TSecretApprovalPolicyDALFactory;
|
||||
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
|
||||
secretApprovalPolicyApproverDAL: TSecretApprovalPolicyApproverDALFactory;
|
||||
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find">;
|
||||
};
|
||||
|
||||
export type TSecretApprovalPolicyServiceFactory = ReturnType<typeof secretApprovalPolicyServiceFactory>;
|
||||
@ -38,8 +36,7 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
secretApprovalPolicyDAL,
|
||||
permissionService,
|
||||
secretApprovalPolicyApproverDAL,
|
||||
projectEnvDAL,
|
||||
projectMembershipDAL
|
||||
projectEnvDAL
|
||||
}: TSecretApprovalPolicyServiceFactoryDep) => {
|
||||
const createSecretApprovalPolicy = async ({
|
||||
name,
|
||||
@ -48,12 +45,12 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
approvals,
|
||||
approvers,
|
||||
approverUserIds,
|
||||
projectId,
|
||||
secretPath,
|
||||
environment
|
||||
}: TCreateSapDTO) => {
|
||||
if (approvals > approvers.length)
|
||||
if (approvals > approverUserIds.length)
|
||||
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
@ -70,13 +67,6 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
const env = await projectEnvDAL.findOne({ slug: environment, projectId });
|
||||
if (!env) throw new BadRequestError({ message: "Environment not found" });
|
||||
|
||||
const secretApprovers = await projectMembershipDAL.find({
|
||||
projectId,
|
||||
$in: { id: approvers }
|
||||
});
|
||||
if (secretApprovers.length !== approvers.length)
|
||||
throw new BadRequestError({ message: "Approver not found in project" });
|
||||
|
||||
const secretApproval = await secretApprovalPolicyDAL.transaction(async (tx) => {
|
||||
const doc = await secretApprovalPolicyDAL.create(
|
||||
{
|
||||
@ -88,8 +78,8 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
tx
|
||||
);
|
||||
await secretApprovalPolicyApproverDAL.insertMany(
|
||||
secretApprovers.map(({ id }) => ({
|
||||
approverId: id,
|
||||
approverUserIds.map((approverUserId) => ({
|
||||
approverUserId,
|
||||
policyId: doc.id
|
||||
})),
|
||||
tx
|
||||
@ -100,7 +90,7 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
};
|
||||
|
||||
const updateSecretApprovalPolicy = async ({
|
||||
approvers,
|
||||
approverUserIds,
|
||||
secretPath,
|
||||
name,
|
||||
actorId,
|
||||
@ -132,22 +122,11 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
},
|
||||
tx
|
||||
);
|
||||
if (approvers) {
|
||||
const secretApprovers = await projectMembershipDAL.find(
|
||||
{
|
||||
projectId: secretApprovalPolicy.projectId,
|
||||
$in: { id: approvers }
|
||||
},
|
||||
{ tx }
|
||||
);
|
||||
if (secretApprovers.length !== approvers.length)
|
||||
throw new BadRequestError({ message: "Approver not found in project" });
|
||||
if (doc.approvals > secretApprovers.length)
|
||||
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
|
||||
if (approverUserIds) {
|
||||
await secretApprovalPolicyApproverDAL.delete({ policyId: doc.id }, tx);
|
||||
await secretApprovalPolicyApproverDAL.insertMany(
|
||||
secretApprovers.map(({ id }) => ({
|
||||
approverId: id,
|
||||
approverUserIds.map((approverUserId) => ({
|
||||
approverUserId,
|
||||
policyId: doc.id
|
||||
})),
|
||||
tx
|
||||
|
@ -4,7 +4,7 @@ export type TCreateSapDTO = {
|
||||
approvals: number;
|
||||
secretPath?: string | null;
|
||||
environment: string;
|
||||
approvers: string[];
|
||||
approverUserIds: string[];
|
||||
projectId: string;
|
||||
name: string;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
@ -13,7 +13,7 @@ export type TUpdateSapDTO = {
|
||||
secretPolicyId: string;
|
||||
approvals?: number;
|
||||
secretPath?: string | null;
|
||||
approvers: string[];
|
||||
approverUserIds: string[];
|
||||
name?: string;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
|
@ -5,7 +5,8 @@ import {
|
||||
SecretApprovalRequestsSchema,
|
||||
TableName,
|
||||
TSecretApprovalRequests,
|
||||
TSecretApprovalRequestsSecrets
|
||||
TSecretApprovalRequestsSecrets,
|
||||
TUsers
|
||||
} from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import { ormify, selectAllTableCols, sqlNestRelationships, stripUndefinedInWhere, TFindFilter } from "@app/lib/knex";
|
||||
@ -16,7 +17,7 @@ export type TSecretApprovalRequestDALFactory = ReturnType<typeof secretApprovalR
|
||||
|
||||
type TFindQueryFilter = {
|
||||
projectId: string;
|
||||
membershipId: string;
|
||||
userId: string;
|
||||
status?: RequestState;
|
||||
environment?: string;
|
||||
committer?: string;
|
||||
@ -37,27 +38,63 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
`${TableName.SecretApprovalRequest}.policyId`,
|
||||
`${TableName.SecretApprovalPolicy}.id`
|
||||
)
|
||||
.leftJoin<TUsers>(
|
||||
db(TableName.Users).as("statusChangedByUser"),
|
||||
`${TableName.SecretApprovalRequest}.statusChangedByUserId`,
|
||||
`statusChangedByUser.id`
|
||||
)
|
||||
.join<TUsers>(
|
||||
db(TableName.Users).as("committerUser"),
|
||||
`${TableName.SecretApprovalRequest}.committerUserId`,
|
||||
`committerUser.id`
|
||||
)
|
||||
.join(
|
||||
TableName.SecretApprovalPolicyApprover,
|
||||
`${TableName.SecretApprovalPolicy}.id`,
|
||||
`${TableName.SecretApprovalPolicyApprover}.policyId`
|
||||
)
|
||||
.join<TUsers>(
|
||||
db(TableName.Users).as("secretApprovalPolicyApproverUser"),
|
||||
`${TableName.SecretApprovalPolicyApprover}.approverUserId`,
|
||||
"secretApprovalPolicyApproverUser.id"
|
||||
)
|
||||
.leftJoin(
|
||||
TableName.SecretApprovalRequestReviewer,
|
||||
`${TableName.SecretApprovalRequest}.id`,
|
||||
`${TableName.SecretApprovalRequestReviewer}.requestId`
|
||||
)
|
||||
.leftJoin<TUsers>(
|
||||
db(TableName.Users).as("secretApprovalReviewerUser"),
|
||||
`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`,
|
||||
`secretApprovalReviewerUser.id`
|
||||
)
|
||||
.select(selectAllTableCols(TableName.SecretApprovalRequest))
|
||||
.select(
|
||||
tx.ref("member").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerMemberId"),
|
||||
tx.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover),
|
||||
tx.ref("email").withSchema("secretApprovalPolicyApproverUser").as("approverEmail"),
|
||||
tx.ref("username").withSchema("secretApprovalPolicyApproverUser").as("approverUsername"),
|
||||
tx.ref("firstName").withSchema("secretApprovalPolicyApproverUser").as("approverFirstName"),
|
||||
tx.ref("lastName").withSchema("secretApprovalPolicyApproverUser").as("approverLastName"),
|
||||
tx.ref("email").withSchema("statusChangedByUser").as("statusChangedByUserEmail"),
|
||||
tx.ref("username").withSchema("statusChangedByUser").as("statusChangedByUserUsername"),
|
||||
tx.ref("firstName").withSchema("statusChangedByUser").as("statusChangedByUserFirstName"),
|
||||
tx.ref("lastName").withSchema("statusChangedByUser").as("statusChangedByUserLastName"),
|
||||
tx.ref("email").withSchema("committerUser").as("committerUserEmail"),
|
||||
tx.ref("username").withSchema("committerUser").as("committerUserUsername"),
|
||||
tx.ref("firstName").withSchema("committerUser").as("committerUserFirstName"),
|
||||
tx.ref("lastName").withSchema("committerUser").as("committerUserLastName"),
|
||||
tx.ref("reviewerUserId").withSchema(TableName.SecretApprovalRequestReviewer),
|
||||
tx.ref("status").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerStatus"),
|
||||
tx.ref("email").withSchema("secretApprovalReviewerUser").as("reviewerEmail"),
|
||||
tx.ref("username").withSchema("secretApprovalReviewerUser").as("reviewerUsername"),
|
||||
tx.ref("firstName").withSchema("secretApprovalReviewerUser").as("reviewerFirstName"),
|
||||
tx.ref("lastName").withSchema("secretApprovalReviewerUser").as("reviewerLastName"),
|
||||
tx.ref("id").withSchema(TableName.SecretApprovalPolicy).as("policyId"),
|
||||
tx.ref("name").withSchema(TableName.SecretApprovalPolicy).as("policyName"),
|
||||
tx.ref("projectId").withSchema(TableName.Environment),
|
||||
tx.ref("slug").withSchema(TableName.Environment).as("environment"),
|
||||
tx.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
|
||||
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
|
||||
tx.ref("approverId").withSchema(TableName.SecretApprovalPolicyApprover)
|
||||
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals")
|
||||
);
|
||||
|
||||
const findById = async (id: string, tx?: Knex) => {
|
||||
@ -71,6 +108,22 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
...SecretApprovalRequestsSchema.parse(el),
|
||||
projectId: el.projectId,
|
||||
environment: el.environment,
|
||||
statusChangedByUser: el.statusChangedByUserId
|
||||
? {
|
||||
userId: el.statusChangedByUserId,
|
||||
email: el.statusChangedByUserEmail,
|
||||
firstName: el.statusChangedByUserFirstName,
|
||||
lastName: el.statusChangedByUserLastName,
|
||||
username: el.statusChangedByUserUsername
|
||||
}
|
||||
: undefined,
|
||||
committerUser: {
|
||||
userId: el.committerUserId,
|
||||
email: el.committerUserEmail,
|
||||
firstName: el.committerUserFirstName,
|
||||
lastName: el.committerUserLastName,
|
||||
username: el.committerUserUsername
|
||||
},
|
||||
policy: {
|
||||
id: el.policyId,
|
||||
name: el.policyName,
|
||||
@ -80,11 +133,34 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
}),
|
||||
childrenMapper: [
|
||||
{
|
||||
key: "reviewerMemberId",
|
||||
key: "reviewerUserId",
|
||||
label: "reviewers" as const,
|
||||
mapper: ({ reviewerMemberId: member, reviewerStatus: status }) => (member ? { member, status } : undefined)
|
||||
mapper: ({
|
||||
reviewerUserId: userId,
|
||||
reviewerStatus: status,
|
||||
reviewerEmail: email,
|
||||
reviewerLastName: lastName,
|
||||
reviewerUsername: username,
|
||||
reviewerFirstName: firstName
|
||||
}) => (userId ? { userId, status, email, firstName, lastName, username } : undefined)
|
||||
},
|
||||
{ key: "approverId", label: "approvers" as const, mapper: ({ approverId }) => approverId }
|
||||
{
|
||||
key: "approverUserId",
|
||||
label: "approvers" as const,
|
||||
mapper: ({
|
||||
approverUserId,
|
||||
approverEmail: email,
|
||||
approverUsername: username,
|
||||
approverLastName: lastName,
|
||||
approverFirstName: firstName
|
||||
}) => ({
|
||||
userId: approverUserId,
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
username
|
||||
})
|
||||
}
|
||||
]
|
||||
});
|
||||
if (!formatedDoc?.[0]) return;
|
||||
@ -97,7 +173,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
};
|
||||
|
||||
const findProjectRequestCount = async (projectId: string, membershipId: string, tx?: Knex) => {
|
||||
const findProjectRequestCount = async (projectId: string, userId: string, tx?: Knex) => {
|
||||
try {
|
||||
const docs = await (tx || db)
|
||||
.with(
|
||||
@ -114,8 +190,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
.andWhere(
|
||||
(bd) =>
|
||||
void bd
|
||||
.where(`${TableName.SecretApprovalPolicyApprover}.approverId`, membershipId)
|
||||
.orWhere(`${TableName.SecretApprovalRequest}.committerId`, membershipId)
|
||||
.where(`${TableName.SecretApprovalPolicyApprover}.approverUserId`, userId)
|
||||
.orWhere(`${TableName.SecretApprovalRequest}.committerUserId`, userId)
|
||||
)
|
||||
.select("status", `${TableName.SecretApprovalRequest}.id`)
|
||||
.groupBy(`${TableName.SecretApprovalRequest}.id`, "status")
|
||||
@ -142,7 +218,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
};
|
||||
|
||||
const findByProjectId = async (
|
||||
{ status, limit = 20, offset = 0, projectId, committer, environment, membershipId }: TFindQueryFilter,
|
||||
{ status, limit = 20, offset = 0, projectId, committer, environment, userId }: TFindQueryFilter,
|
||||
tx?: Knex
|
||||
) => {
|
||||
try {
|
||||
@ -161,6 +237,11 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
`${TableName.SecretApprovalPolicy}.id`,
|
||||
`${TableName.SecretApprovalPolicyApprover}.policyId`
|
||||
)
|
||||
.join<TUsers>(
|
||||
db(TableName.Users).as("committerUser"),
|
||||
`${TableName.SecretApprovalRequest}.committerUserId`,
|
||||
`committerUser.id`
|
||||
)
|
||||
.leftJoin(
|
||||
TableName.SecretApprovalRequestReviewer,
|
||||
`${TableName.SecretApprovalRequest}.id`,
|
||||
@ -176,20 +257,21 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
projectId,
|
||||
[`${TableName.Environment}.slug` as "slug"]: environment,
|
||||
[`${TableName.SecretApprovalRequest}.status`]: status,
|
||||
committerId: committer
|
||||
committerUserId: committer
|
||||
})
|
||||
)
|
||||
.andWhere(
|
||||
(bd) =>
|
||||
void bd
|
||||
.where(`${TableName.SecretApprovalPolicyApprover}.approverId`, membershipId)
|
||||
.orWhere(`${TableName.SecretApprovalRequest}.committerId`, membershipId)
|
||||
.where(`${TableName.SecretApprovalPolicyApprover}.approverUserId`, userId)
|
||||
.orWhere(`${TableName.SecretApprovalRequest}.committerUserId`, userId)
|
||||
)
|
||||
.select(selectAllTableCols(TableName.SecretApprovalRequest))
|
||||
.select(
|
||||
db.ref("projectId").withSchema(TableName.Environment),
|
||||
db.ref("slug").withSchema(TableName.Environment).as("environment"),
|
||||
db.ref("id").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerMemberId"),
|
||||
db.ref("id").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerId"),
|
||||
db.ref("reviewerUserId").withSchema(TableName.SecretApprovalRequestReviewer),
|
||||
db.ref("status").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerStatus"),
|
||||
db.ref("id").withSchema(TableName.SecretApprovalPolicy).as("policyId"),
|
||||
db.ref("name").withSchema(TableName.SecretApprovalPolicy).as("policyName"),
|
||||
@ -201,7 +283,11 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
),
|
||||
db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
|
||||
db.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
|
||||
db.ref("approverId").withSchema(TableName.SecretApprovalPolicyApprover)
|
||||
db.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover),
|
||||
db.ref("email").withSchema("committerUser").as("committerUserEmail"),
|
||||
db.ref("username").withSchema("committerUser").as("committerUserUsername"),
|
||||
db.ref("firstName").withSchema("committerUser").as("committerUserFirstName"),
|
||||
db.ref("lastName").withSchema("committerUser").as("committerUserLastName")
|
||||
)
|
||||
.orderBy("createdAt", "desc");
|
||||
|
||||
@ -223,18 +309,26 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
name: el.policyName,
|
||||
approvals: el.policyApprovals,
|
||||
secretPath: el.policySecretPath
|
||||
},
|
||||
committerUser: {
|
||||
userId: el.committerUserId,
|
||||
email: el.committerUserEmail,
|
||||
firstName: el.committerUserFirstName,
|
||||
lastName: el.committerUserLastName,
|
||||
username: el.committerUserUsername
|
||||
}
|
||||
}),
|
||||
childrenMapper: [
|
||||
{
|
||||
key: "reviewerMemberId",
|
||||
key: "reviewerId",
|
||||
label: "reviewers" as const,
|
||||
mapper: ({ reviewerMemberId: member, reviewerStatus: s }) => (member ? { member, status: s } : undefined)
|
||||
mapper: ({ reviewerUserId, reviewerStatus: s }) =>
|
||||
reviewerUserId ? { userId: reviewerUserId, status: s } : undefined
|
||||
},
|
||||
{
|
||||
key: "approverId",
|
||||
key: "approverUserId",
|
||||
label: "approvers" as const,
|
||||
mapper: ({ approverId }) => approverId
|
||||
mapper: ({ approverUserId }) => approverUserId
|
||||
},
|
||||
{
|
||||
key: "commitId",
|
||||
|
@ -87,7 +87,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
const requestCount = async ({ projectId, actor, actorId, actorOrgId, actorAuthMethod }: TApprovalRequestCountDTO) => {
|
||||
if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });
|
||||
|
||||
const { membership } = await permissionService.getProjectPermission(
|
||||
await permissionService.getProjectPermission(
|
||||
actor as ActorType.USER,
|
||||
actorId,
|
||||
projectId,
|
||||
@ -95,7 +95,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
const count = await secretApprovalRequestDAL.findProjectRequestCount(projectId, membership.id);
|
||||
const count = await secretApprovalRequestDAL.findProjectRequestCount(projectId, actorId);
|
||||
return count;
|
||||
};
|
||||
|
||||
@ -113,19 +113,13 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
}: TListApprovalsDTO) => {
|
||||
if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });
|
||||
|
||||
const { membership } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
await permissionService.getProjectPermission(actor, actorId, projectId, actorAuthMethod, actorOrgId);
|
||||
const approvals = await secretApprovalRequestDAL.findByProjectId({
|
||||
projectId,
|
||||
committer,
|
||||
environment,
|
||||
status,
|
||||
membershipId: membership.id,
|
||||
userId: actorId,
|
||||
limit,
|
||||
offset
|
||||
});
|
||||
@ -145,7 +139,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
if (!secretApprovalRequest) throw new BadRequestError({ message: "Secret approval request not found" });
|
||||
|
||||
const { policy } = secretApprovalRequest;
|
||||
const { membership, hasRole } = await permissionService.getProjectPermission(
|
||||
const { hasRole } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
secretApprovalRequest.projectId,
|
||||
@ -154,8 +148,8 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
);
|
||||
if (
|
||||
!hasRole(ProjectMembershipRole.Admin) &&
|
||||
secretApprovalRequest.committerId !== membership.id &&
|
||||
!policy.approvers.find((approverId) => approverId === membership.id)
|
||||
secretApprovalRequest.committerUserId !== actorId &&
|
||||
!policy.approvers.find(({ userId }) => userId === actorId)
|
||||
) {
|
||||
throw new UnauthorizedError({ message: "User has no access" });
|
||||
}
|
||||
@ -180,7 +174,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
if (actor !== ActorType.USER) throw new BadRequestError({ message: "Must be a user" });
|
||||
|
||||
const { policy } = secretApprovalRequest;
|
||||
const { membership, hasRole } = await permissionService.getProjectPermission(
|
||||
const { hasRole } = await permissionService.getProjectPermission(
|
||||
ActorType.USER,
|
||||
actorId,
|
||||
secretApprovalRequest.projectId,
|
||||
@ -189,8 +183,8 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
);
|
||||
if (
|
||||
!hasRole(ProjectMembershipRole.Admin) &&
|
||||
secretApprovalRequest.committerId !== membership.id &&
|
||||
!policy.approvers.find((approverId) => approverId === membership.id)
|
||||
secretApprovalRequest.committerUserId !== actorId &&
|
||||
!policy.approvers.find(({ userId }) => userId === actorId)
|
||||
) {
|
||||
throw new UnauthorizedError({ message: "User has no access" });
|
||||
}
|
||||
@ -198,7 +192,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
const review = await secretApprovalRequestReviewerDAL.findOne(
|
||||
{
|
||||
requestId: secretApprovalRequest.id,
|
||||
member: membership.id
|
||||
reviewerUserId: actorId
|
||||
},
|
||||
tx
|
||||
);
|
||||
@ -207,7 +201,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
{
|
||||
status,
|
||||
requestId: secretApprovalRequest.id,
|
||||
member: membership.id
|
||||
reviewerUserId: actorId
|
||||
},
|
||||
tx
|
||||
);
|
||||
@ -230,7 +224,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
if (actor !== ActorType.USER) throw new BadRequestError({ message: "Must be a user" });
|
||||
|
||||
const { policy } = secretApprovalRequest;
|
||||
const { membership, hasRole } = await permissionService.getProjectPermission(
|
||||
const { hasRole } = await permissionService.getProjectPermission(
|
||||
ActorType.USER,
|
||||
actorId,
|
||||
secretApprovalRequest.projectId,
|
||||
@ -239,8 +233,8 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
);
|
||||
if (
|
||||
!hasRole(ProjectMembershipRole.Admin) &&
|
||||
secretApprovalRequest.committerId !== membership.id &&
|
||||
!policy.approvers.find((approverId) => approverId === membership.id)
|
||||
secretApprovalRequest.committerUserId !== actorId &&
|
||||
!policy.approvers.find(({ userId }) => userId === actorId)
|
||||
) {
|
||||
throw new UnauthorizedError({ message: "User has no access" });
|
||||
}
|
||||
@ -253,7 +247,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
|
||||
const updatedRequest = await secretApprovalRequestDAL.updateById(secretApprovalRequest.id, {
|
||||
status,
|
||||
statusChangeBy: membership.id
|
||||
statusChangedByUserId: actorId
|
||||
});
|
||||
return { ...secretApprovalRequest, ...updatedRequest };
|
||||
};
|
||||
@ -270,7 +264,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
if (actor !== ActorType.USER) throw new BadRequestError({ message: "Must be a user" });
|
||||
|
||||
const { policy, folderId, projectId } = secretApprovalRequest;
|
||||
const { membership, hasRole } = await permissionService.getProjectPermission(
|
||||
const { hasRole } = await permissionService.getProjectPermission(
|
||||
ActorType.USER,
|
||||
actorId,
|
||||
projectId,
|
||||
@ -280,19 +274,19 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
|
||||
if (
|
||||
!hasRole(ProjectMembershipRole.Admin) &&
|
||||
secretApprovalRequest.committerId !== membership.id &&
|
||||
!policy.approvers.find((approverId) => approverId === membership.id)
|
||||
secretApprovalRequest.committerUserId !== actorId &&
|
||||
!policy.approvers.find(({ userId }) => userId === actorId)
|
||||
) {
|
||||
throw new UnauthorizedError({ message: "User has no access" });
|
||||
}
|
||||
const reviewers = secretApprovalRequest.reviewers.reduce<Record<string, ApprovalStatus>>(
|
||||
(prev, curr) => ({ ...prev, [curr.member.toString()]: curr.status as ApprovalStatus }),
|
||||
(prev, curr) => ({ ...prev, [curr.userId.toString()]: curr.status as ApprovalStatus }),
|
||||
{}
|
||||
);
|
||||
const hasMinApproval =
|
||||
secretApprovalRequest.policy.approvals <=
|
||||
secretApprovalRequest.policy.approvers.filter(
|
||||
(approverId) => reviewers[approverId.toString()] === ApprovalStatus.APPROVED
|
||||
({ userId: approverId }) => reviewers[approverId.toString()] === ApprovalStatus.APPROVED
|
||||
).length;
|
||||
|
||||
if (!hasMinApproval) throw new BadRequestError({ message: "Doesn't have minimum approvals needed" });
|
||||
@ -472,7 +466,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
conflicts: JSON.stringify(conflicts),
|
||||
hasMerged: true,
|
||||
status: RequestState.Closed,
|
||||
statusChangeBy: membership.id
|
||||
statusChangedByUserId: actorId
|
||||
},
|
||||
tx
|
||||
);
|
||||
@ -509,7 +503,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
}: TGenerateSecretApprovalRequestDTO) => {
|
||||
if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });
|
||||
|
||||
const { permission, membership } = await permissionService.getProjectPermission(
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
@ -663,7 +657,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
policyId: policy.id,
|
||||
status: "open",
|
||||
hasMerged: false,
|
||||
committerId: membership.id
|
||||
committerUserId: actorId
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
@ -11,7 +11,6 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { QueueName, TQueueServiceFactory } from "@app/queue";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
|
||||
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
|
||||
import { TSecretDALFactory } from "@app/services/secret/secret-dal";
|
||||
import { fnSecretBulkInsert, fnSecretBulkUpdate } from "@app/services/secret/secret-fns";
|
||||
import { TSecretQueueFactory, uniqueSecretQueueKey } from "@app/services/secret/secret-queue";
|
||||
@ -46,7 +45,6 @@ type TSecretReplicationServiceFactoryDep = {
|
||||
secretBlindIndexDAL: Pick<TSecretBlindIndexDALFactory, "findOne">;
|
||||
secretTagDAL: Pick<TSecretTagDALFactory, "findManyTagsById" | "saveTagsToSecret" | "deleteTagsManySecret" | "find">;
|
||||
secretApprovalRequestDAL: Pick<TSecretApprovalRequestDALFactory, "create" | "transaction">;
|
||||
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findOne">;
|
||||
secretApprovalRequestSecretDAL: Pick<
|
||||
TSecretApprovalRequestSecretDALFactory,
|
||||
"insertMany" | "insertApprovalSecretTags"
|
||||
@ -92,7 +90,6 @@ export const secretReplicationServiceFactory = ({
|
||||
secretApprovalRequestSecretDAL,
|
||||
secretApprovalRequestDAL,
|
||||
secretQueueService,
|
||||
projectMembershipDAL,
|
||||
projectBotService
|
||||
}: TSecretReplicationServiceFactoryDep) => {
|
||||
const getReplicatedSecrets = (
|
||||
@ -297,12 +294,6 @@ export const secretReplicationServiceFactory = ({
|
||||
);
|
||||
// this means it should be a approval request rather than direct replication
|
||||
if (policy && actor === ActorType.USER) {
|
||||
const membership = await projectMembershipDAL.findOne({ projectId, userId: actorId });
|
||||
if (!membership) {
|
||||
logger.error("Project membership not found in %s for user %s", projectId, actorId);
|
||||
return;
|
||||
}
|
||||
|
||||
const localSecretsLatestVersions = destinationLocalSecrets.map(({ id }) => id);
|
||||
const latestSecretVersions = await secretVersionDAL.findLatestVersionMany(
|
||||
destinationReplicationFolderId,
|
||||
@ -316,7 +307,7 @@ export const secretReplicationServiceFactory = ({
|
||||
policyId: policy.id,
|
||||
status: "open",
|
||||
hasMerged: false,
|
||||
committerId: membership.id,
|
||||
committerUserId: actorId,
|
||||
isReplicated: true
|
||||
},
|
||||
tx
|
||||
|
@ -331,7 +331,7 @@ export const secretRotationQueueFactory = ({
|
||||
|
||||
logger.info("Finished rotating: rotation id: ", rotationId);
|
||||
} catch (error) {
|
||||
logger.error(error);
|
||||
logger.error(error, "Failed to execute secret rotation");
|
||||
if (error instanceof DisableRotationErrors) {
|
||||
if (job.id) {
|
||||
await queue.stopRepeatableJobByJobId(QueueName.SecretRotation, job.id);
|
||||
|
@ -133,7 +133,7 @@ export const secretRotationServiceFactory = ({
|
||||
creds: []
|
||||
};
|
||||
const encData = infisicalSymmetricEncypt(JSON.stringify(unencryptedData));
|
||||
const secretRotation = secretRotationDAL.transaction(async (tx) => {
|
||||
const secretRotation = await secretRotationDAL.transaction(async (tx) => {
|
||||
const doc = await secretRotationDAL.create(
|
||||
{
|
||||
provider,
|
||||
@ -148,13 +148,13 @@ export const secretRotationServiceFactory = ({
|
||||
},
|
||||
tx
|
||||
);
|
||||
await secretRotationQueue.addToQueue(doc.id, doc.interval);
|
||||
const outputSecretMapping = await secretRotationDAL.secretOutputInsertMany(
|
||||
Object.entries(outputs).map(([key, secretId]) => ({ key, secretId, rotationId: doc.id })),
|
||||
tx
|
||||
);
|
||||
return { ...doc, outputs: outputSecretMapping, environment: folder.environment };
|
||||
});
|
||||
await secretRotationQueue.addToQueue(secretRotation.id, secretRotation.interval);
|
||||
return secretRotation;
|
||||
};
|
||||
|
||||
@ -212,9 +212,9 @@ export const secretRotationServiceFactory = ({
|
||||
);
|
||||
const deletedDoc = await secretRotationDAL.transaction(async (tx) => {
|
||||
const strat = await secretRotationDAL.deleteById(rotationId, tx);
|
||||
await secretRotationQueue.removeFromQueue(strat.id, strat.interval);
|
||||
return strat;
|
||||
});
|
||||
await secretRotationQueue.removeFromQueue(deletedDoc.id, deletedDoc.interval);
|
||||
return { ...doc, ...deletedDoc };
|
||||
};
|
||||
|
||||
|
@ -142,6 +142,12 @@ export const KUBERNETES_AUTH = {
|
||||
}
|
||||
} as const;
|
||||
|
||||
export const OIDC_AUTH = {
|
||||
REVOKE: {
|
||||
identityId: "The ID of the identity to revoke."
|
||||
}
|
||||
} as const;
|
||||
|
||||
export const ORGANIZATIONS = {
|
||||
LIST_USER_MEMBERSHIPS: {
|
||||
organizationId: "The ID of the organization to get memberships from."
|
||||
|
@ -5,6 +5,9 @@ import { zpStr } from "../zod";
|
||||
|
||||
export const GITLAB_URL = "https://gitlab.com";
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-explicit-any -- If `process.pkg` is set, and it's true, then it means that the app is currently running in a packaged environment (a binary)
|
||||
export const IS_PACKAGED = (process as any)?.pkg !== undefined;
|
||||
|
||||
const zodStrBool = z
|
||||
.enum(["true", "false"])
|
||||
.optional()
|
||||
@ -20,7 +23,7 @@ const databaseReadReplicaSchema = z
|
||||
|
||||
const envSchema = z
|
||||
.object({
|
||||
PORT: z.coerce.number().default(4000),
|
||||
PORT: z.coerce.number().default(IS_PACKAGED ? 8080 : 4000),
|
||||
DISABLE_SECRET_SCANNING: z
|
||||
.enum(["true", "false"])
|
||||
.default("false")
|
||||
@ -131,11 +134,13 @@ const envSchema = z
|
||||
// GENERIC
|
||||
STANDALONE_MODE: z
|
||||
.enum(["true", "false"])
|
||||
.transform((val) => val === "true")
|
||||
.transform((val) => val === "true" || IS_PACKAGED)
|
||||
.optional(),
|
||||
INFISICAL_CLOUD: zodStrBool.default("false"),
|
||||
MAINTENANCE_MODE: zodStrBool.default("false"),
|
||||
CAPTCHA_SECRET: zpStr(z.string().optional())
|
||||
CAPTCHA_SECRET: zpStr(z.string().optional()),
|
||||
PLAIN_API_KEY: zpStr(z.string().optional()),
|
||||
PLAIN_WISH_LABEL_IDS: zpStr(z.string().optional())
|
||||
})
|
||||
.transform((data) => ({
|
||||
...data,
|
||||
@ -146,7 +151,7 @@ const envSchema = z
|
||||
isSmtpConfigured: Boolean(data.SMTP_HOST),
|
||||
isRedisConfigured: Boolean(data.REDIS_URL),
|
||||
isDevelopmentMode: data.NODE_ENV === "development",
|
||||
isProductionMode: data.NODE_ENV === "production",
|
||||
isProductionMode: data.NODE_ENV === "production" || IS_PACKAGED,
|
||||
isSecretScanningConfigured:
|
||||
Boolean(data.SECRET_SCANNING_GIT_APP_ID) &&
|
||||
Boolean(data.SECRET_SCANNING_PRIVATE_KEY) &&
|
||||
|
1
backend/src/lib/fn/argv.ts
Normal file
@ -0,0 +1 @@
|
||||
export const isMigrationMode = () => !!process.argv.slice(2).find((arg) => arg === "migration:latest"); // example -> ./binary migration:latest
|
@ -1,6 +1,7 @@
|
||||
// Some of the functions are taken from https://github.com/rayepps/radash
|
||||
// Full credits goes to https://github.com/rayapps to those functions
|
||||
// Code taken to keep in in house and to adjust somethings for our needs
|
||||
export * from "./argv";
|
||||
export * from "./array";
|
||||
export * from "./dates";
|
||||
export * from "./object";
|
||||
|
@ -1,8 +1,10 @@
|
||||
import dotenv from "dotenv";
|
||||
import path from "path";
|
||||
|
||||
import { initDbConnection } from "./db";
|
||||
import { keyStoreFactory } from "./keystore/keystore";
|
||||
import { formatSmtpConfig, initEnvConfig } from "./lib/config/env";
|
||||
import { formatSmtpConfig, initEnvConfig, IS_PACKAGED } from "./lib/config/env";
|
||||
import { isMigrationMode } from "./lib/fn";
|
||||
import { initLogger } from "./lib/logger";
|
||||
import { queueServiceFactory } from "./queue";
|
||||
import { main } from "./server/app";
|
||||
@ -10,6 +12,7 @@ import { bootstrapCheck } from "./server/boot-strap-check";
|
||||
import { smtpServiceFactory } from "./services/smtp/smtp-service";
|
||||
|
||||
dotenv.config();
|
||||
|
||||
const run = async () => {
|
||||
const logger = await initLogger();
|
||||
const appCfg = initEnvConfig(logger);
|
||||
@ -22,12 +25,30 @@ const run = async () => {
|
||||
}))
|
||||
});
|
||||
|
||||
// Case: App is running in packaged mode (binary), and migration mode is enabled.
|
||||
// Run the migrations and exit the process after completion.
|
||||
if (IS_PACKAGED && isMigrationMode()) {
|
||||
try {
|
||||
logger.info("Running Postgres migrations..");
|
||||
await db.migrate.latest({
|
||||
directory: path.join(__dirname, "./db/migrations")
|
||||
});
|
||||
logger.info("Postgres migrations completed");
|
||||
} catch (err) {
|
||||
logger.error(err, "Failed to run migrations");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
const smtp = smtpServiceFactory(formatSmtpConfig());
|
||||
const queue = queueServiceFactory(appCfg.REDIS_URL);
|
||||
const keyStore = keyStoreFactory(appCfg.REDIS_URL);
|
||||
|
||||
const server = await main({ db, smtp, logger, queue, keyStore });
|
||||
const bootstrap = await bootstrapCheck({ db });
|
||||
|
||||
// eslint-disable-next-line
|
||||
process.on("SIGINT", async () => {
|
||||
await server.close();
|
||||
|
@ -15,7 +15,7 @@ import { Knex } from "knex";
|
||||
import { Logger } from "pino";
|
||||
|
||||
import { TKeyStoreFactory } from "@app/keystore/keystore";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { getConfig, IS_PACKAGED } from "@app/lib/config/env";
|
||||
import { TQueueServiceFactory } from "@app/queue";
|
||||
import { TSmtpService } from "@app/services/smtp/smtp-service";
|
||||
|
||||
@ -80,8 +80,8 @@ export const main = async ({ db, smtp, logger, queue, keyStore }: TMain) => {
|
||||
|
||||
if (appCfg.isProductionMode) {
|
||||
await server.register(registerExternalNextjs, {
|
||||
standaloneMode: appCfg.STANDALONE_MODE,
|
||||
dir: path.join(__dirname, "../../"),
|
||||
standaloneMode: appCfg.STANDALONE_MODE || IS_PACKAGED,
|
||||
dir: path.join(__dirname, IS_PACKAGED ? "../../../" : "../../"),
|
||||
port: appCfg.PORT
|
||||
});
|
||||
}
|
||||
|
@ -82,3 +82,9 @@ export const publicSecretShareCreationLimit: RateLimitOptions = {
|
||||
max: 5,
|
||||
keyGenerator: (req) => req.realIp
|
||||
};
|
||||
|
||||
export const userEngagementLimit: RateLimitOptions = {
|
||||
timeWindow: 60 * 1000,
|
||||
max: 5,
|
||||
keyGenerator: (req) => req.realIp
|
||||
};
|
||||
|
@ -1,9 +1,10 @@
|
||||
// this plugins allows to run infisical in standalone mode
|
||||
// standalone mode = infisical backend and nextjs frontend in one server
|
||||
// this way users don't need to deploy two things
|
||||
|
||||
import path from "node:path";
|
||||
|
||||
import { IS_PACKAGED } from "@app/lib/config/env";
|
||||
|
||||
// to enabled this u need to set standalone mode to true
|
||||
export const registerExternalNextjs = async (
|
||||
server: FastifyZodProvider,
|
||||
@ -18,20 +19,33 @@ export const registerExternalNextjs = async (
|
||||
}
|
||||
) => {
|
||||
if (standaloneMode) {
|
||||
const nextJsBuildPath = path.join(dir, "frontend-build");
|
||||
const frontendName = IS_PACKAGED ? "frontend" : "frontend-build";
|
||||
const nextJsBuildPath = path.join(dir, frontendName);
|
||||
|
||||
const { default: conf } = (await import(
|
||||
path.join(dir, "frontend-build/.next/required-server-files.json"),
|
||||
path.join(dir, `${frontendName}/.next/required-server-files.json`),
|
||||
// @ts-expect-error type
|
||||
{
|
||||
assert: { type: "json" }
|
||||
}
|
||||
)) as { default: { config: string } };
|
||||
|
||||
/* eslint-disable */
|
||||
const { default: NextServer } = (
|
||||
await import(path.join(dir, "frontend-build/node_modules/next/dist/server/next-server.js"))
|
||||
).default;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let NextServer: any;
|
||||
|
||||
if (!IS_PACKAGED) {
|
||||
/* eslint-disable */
|
||||
const { default: nextServer } = (
|
||||
await import(path.join(dir, `${frontendName}/node_modules/next/dist/server/next-server.js`))
|
||||
).default;
|
||||
|
||||
NextServer = nextServer;
|
||||
} else {
|
||||
/* eslint-disable */
|
||||
const nextServer = await import(path.join(dir, `${frontendName}/node_modules/next/dist/server/next-server.js`));
|
||||
|
||||
NextServer = nextServer.default;
|
||||
}
|
||||
|
||||
const nextApp = new NextServer({
|
||||
dev: false,
|
||||
|
@ -102,6 +102,8 @@ import { identityGcpAuthDALFactory } from "@app/services/identity-gcp-auth/ident
|
||||
import { identityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
|
||||
import { identityKubernetesAuthDALFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-dal";
|
||||
import { identityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
|
||||
import { identityOidcAuthDALFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-dal";
|
||||
import { identityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
|
||||
import { identityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
|
||||
import { identityProjectMembershipRoleDALFactory } from "@app/services/identity-project/identity-project-membership-role-dal";
|
||||
import { identityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
|
||||
@ -166,6 +168,7 @@ import { telemetryServiceFactory } from "@app/services/telemetry/telemetry-servi
|
||||
import { userDALFactory } from "@app/services/user/user-dal";
|
||||
import { userServiceFactory } from "@app/services/user/user-service";
|
||||
import { userAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
|
||||
import { userEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
|
||||
import { webhookDALFactory } from "@app/services/webhook/webhook-dal";
|
||||
import { webhookServiceFactory } from "@app/services/webhook/webhook-service";
|
||||
|
||||
@ -241,6 +244,7 @@ export const registerRoutes = async (
|
||||
const identityUaClientSecretDAL = identityUaClientSecretDALFactory(db);
|
||||
const identityAwsAuthDAL = identityAwsAuthDALFactory(db);
|
||||
const identityGcpAuthDAL = identityGcpAuthDALFactory(db);
|
||||
const identityOidcAuthDAL = identityOidcAuthDALFactory(db);
|
||||
const identityAzureAuthDAL = identityAzureAuthDALFactory(db);
|
||||
|
||||
const auditLogDAL = auditLogDALFactory(db);
|
||||
@ -322,7 +326,6 @@ export const registerRoutes = async (
|
||||
auditLogStreamDAL
|
||||
});
|
||||
const secretApprovalPolicyService = secretApprovalPolicyServiceFactory({
|
||||
projectMembershipDAL,
|
||||
projectEnvDAL,
|
||||
secretApprovalPolicyApproverDAL: sapApproverDAL,
|
||||
permissionService,
|
||||
@ -709,7 +712,10 @@ export const registerRoutes = async (
|
||||
secretQueueService,
|
||||
secretImportDAL,
|
||||
projectEnvDAL,
|
||||
projectBotService
|
||||
projectBotService,
|
||||
secretApprovalPolicyService,
|
||||
secretApprovalRequestDAL,
|
||||
secretApprovalRequestSecretDAL
|
||||
});
|
||||
|
||||
const secretSharingService = secretSharingServiceFactory({
|
||||
@ -771,7 +777,6 @@ export const registerRoutes = async (
|
||||
secretApprovalRequestDAL,
|
||||
secretApprovalRequestSecretDAL,
|
||||
secretQueueService,
|
||||
projectMembershipDAL,
|
||||
projectBotService
|
||||
});
|
||||
const secretRotationQueue = secretRotationQueueFactory({
|
||||
@ -817,8 +822,7 @@ export const registerRoutes = async (
|
||||
});
|
||||
const identityAccessTokenService = identityAccessTokenServiceFactory({
|
||||
identityAccessTokenDAL,
|
||||
identityOrgMembershipDAL,
|
||||
permissionService
|
||||
identityOrgMembershipDAL
|
||||
});
|
||||
const identityProjectService = identityProjectServiceFactory({
|
||||
permissionService,
|
||||
@ -887,6 +891,16 @@ export const registerRoutes = async (
|
||||
licenseService
|
||||
});
|
||||
|
||||
const identityOidcAuthService = identityOidcAuthServiceFactory({
|
||||
identityOidcAuthDAL,
|
||||
identityOrgMembershipDAL,
|
||||
identityAccessTokenDAL,
|
||||
identityDAL,
|
||||
permissionService,
|
||||
licenseService,
|
||||
orgBotDAL
|
||||
});
|
||||
|
||||
const dynamicSecretProviders = buildDynamicSecretProviders();
|
||||
const dynamicSecretQueueService = dynamicSecretLeaseQueueServiceFactory({
|
||||
queueService,
|
||||
@ -937,6 +951,10 @@ export const registerRoutes = async (
|
||||
oidcConfigDAL
|
||||
});
|
||||
|
||||
const userEngagementService = userEngagementServiceFactory({
|
||||
userDAL
|
||||
});
|
||||
|
||||
await superAdminService.initServerCfg();
|
||||
//
|
||||
// setup the communication with license key server
|
||||
@ -986,6 +1004,7 @@ export const registerRoutes = async (
|
||||
identityGcpAuth: identityGcpAuthService,
|
||||
identityAwsAuth: identityAwsAuthService,
|
||||
identityAzureAuth: identityAzureAuthService,
|
||||
identityOidcAuth: identityOidcAuthService,
|
||||
accessApprovalPolicy: accessApprovalPolicyService,
|
||||
accessApprovalRequest: accessApprovalRequestService,
|
||||
secretApprovalPolicy: secretApprovalPolicyService,
|
||||
@ -1009,7 +1028,8 @@ export const registerRoutes = async (
|
||||
telemetry: telemetryService,
|
||||
projectUserAdditionalPrivilege: projectUserAdditionalPrivilegeService,
|
||||
identityProjectAdditionalPrivilege: identityProjectAdditionalPrivilegeService,
|
||||
secretSharing: secretSharingService
|
||||
secretSharing: secretSharingService,
|
||||
userEngagement: userEngagementService
|
||||
});
|
||||
|
||||
const cronJobs: CronJob[] = [];
|
||||
|
@ -8,6 +8,7 @@ import { verifySuperAdmin } from "@app/server/plugins/auth/superAdmin";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
|
||||
import { LoginMethod } from "@app/services/super-admin/super-admin-types";
|
||||
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
|
||||
|
||||
export const registerAdminRouter = async (server: FastifyZodProvider) => {
|
||||
@ -54,7 +55,14 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
|
||||
trustSamlEmails: z.boolean().optional(),
|
||||
trustLdapEmails: z.boolean().optional(),
|
||||
trustOidcEmails: z.boolean().optional(),
|
||||
defaultAuthOrgId: z.string().optional().nullable()
|
||||
defaultAuthOrgId: z.string().optional().nullable(),
|
||||
enabledLoginMethods: z
|
||||
.nativeEnum(LoginMethod)
|
||||
.array()
|
||||
.optional()
|
||||
.refine((methods) => !methods || methods.length > 0, {
|
||||
message: "At least one login method should be enabled."
|
||||
})
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@ -70,11 +78,87 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
|
||||
});
|
||||
},
|
||||
handler: async (req) => {
|
||||
const config = await server.services.superAdmin.updateServerCfg(req.body);
|
||||
const config = await server.services.superAdmin.updateServerCfg(req.body, req.permission.id);
|
||||
return { config };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/user-management/users",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
querystring: z.object({
|
||||
searchTerm: z.string().default(""),
|
||||
offset: z.coerce.number().default(0),
|
||||
limit: z.coerce.number().max(100).default(20)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
users: UsersSchema.pick({
|
||||
username: true,
|
||||
firstName: true,
|
||||
lastName: true,
|
||||
email: true,
|
||||
id: true
|
||||
}).array()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: (req, res, done) => {
|
||||
verifyAuth([AuthMode.JWT])(req, res, () => {
|
||||
verifySuperAdmin(req, res, done);
|
||||
});
|
||||
},
|
||||
handler: async (req) => {
|
||||
const users = await server.services.superAdmin.getUsers({
|
||||
...req.query
|
||||
});
|
||||
|
||||
return {
|
||||
users
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/user-management/users/:userId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
userId: z.string()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
users: UsersSchema.pick({
|
||||
username: true,
|
||||
firstName: true,
|
||||
lastName: true,
|
||||
email: true,
|
||||
id: true
|
||||
})
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: (req, res, done) => {
|
||||
verifyAuth([AuthMode.JWT])(req, res, () => {
|
||||
verifySuperAdmin(req, res, done);
|
||||
});
|
||||
},
|
||||
handler: async (req) => {
|
||||
const users = await server.services.superAdmin.deleteUser(req.params.userId);
|
||||
|
||||
return {
|
||||
users
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/signup",
|
||||
|
@ -2,8 +2,6 @@ import { z } from "zod";
|
||||
|
||||
import { UNIVERSAL_AUTH } from "@app/lib/api-docs";
|
||||
import { writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerIdentityAccessTokenRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
@ -63,37 +61,4 @@ export const registerIdentityAccessTokenRouter = async (server: FastifyZodProvid
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/token/revoke-by-id",
|
||||
method: "POST",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Revoke access token by the id of the token",
|
||||
body: z.object({
|
||||
tokenId: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
message: z.string()
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
await server.services.identityAccessToken.revokeAccessTokenById({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
|
||||
return {
|
||||
message: "Successfully revoked access token"
|
||||
};
|
||||
}
|
||||
});
|
||||
};
|
||||
|
350
backend/src/server/routes/v1/identity-oidc-auth-router.ts
Normal file
@ -0,0 +1,350 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { IdentityOidcAuthsSchema } from "@app/db/schemas";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { OIDC_AUTH } from "@app/lib/api-docs";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
|
||||
import {
|
||||
validateOidcAuthAudiencesField,
|
||||
validateOidcBoundClaimsField
|
||||
} from "@app/services/identity-oidc-auth/identity-oidc-auth-validators";
|
||||
|
||||
const IdentityOidcAuthResponseSchema = IdentityOidcAuthsSchema.omit({
|
||||
encryptedCaCert: true,
|
||||
caCertIV: true,
|
||||
caCertTag: true
|
||||
}).extend({
|
||||
caCert: z.string()
|
||||
});
|
||||
|
||||
export const registerIdentityOidcAuthRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/oidc-auth/login",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Login with OIDC Auth",
|
||||
body: z.object({
|
||||
identityId: z.string().trim(),
|
||||
jwt: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
accessToken: z.string(),
|
||||
expiresIn: z.coerce.number(),
|
||||
accessTokenMaxTTL: z.coerce.number(),
|
||||
tokenType: z.literal("Bearer")
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { identityOidcAuth, accessToken, identityAccessToken, identityMembershipOrg } =
|
||||
await server.services.identityOidcAuth.login({
|
||||
identityId: req.body.identityId,
|
||||
jwt: req.body.jwt
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
orgId: identityMembershipOrg?.orgId,
|
||||
event: {
|
||||
type: EventType.LOGIN_IDENTITY_OIDC_AUTH,
|
||||
metadata: {
|
||||
identityId: identityOidcAuth.identityId,
|
||||
identityAccessTokenId: identityAccessToken.id,
|
||||
identityOidcAuthId: identityOidcAuth.id
|
||||
}
|
||||
}
|
||||
});
|
||||
return {
|
||||
accessToken,
|
||||
tokenType: "Bearer" as const,
|
||||
expiresIn: identityOidcAuth.accessTokenTTL,
|
||||
accessTokenMaxTTL: identityOidcAuth.accessTokenMaxTTL
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/oidc-auth/identities/:identityId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Attach OIDC Auth configuration onto identity",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
identityId: z.string().trim()
|
||||
}),
|
||||
body: z.object({
|
||||
accessTokenTrustedIps: z
|
||||
.object({
|
||||
ipAddress: z.string().trim()
|
||||
})
|
||||
.array()
|
||||
.min(1)
|
||||
.default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }]),
|
||||
accessTokenTTL: z
|
||||
.number()
|
||||
.int()
|
||||
.min(1)
|
||||
.refine((value) => value !== 0, {
|
||||
message: "accessTokenTTL must have a non zero number"
|
||||
})
|
||||
.default(2592000),
|
||||
accessTokenMaxTTL: z
|
||||
.number()
|
||||
.int()
|
||||
.refine((value) => value !== 0, {
|
||||
message: "accessTokenMaxTTL must have a non zero number"
|
||||
})
|
||||
.default(2592000),
|
||||
accessTokenNumUsesLimit: z.number().int().min(0).default(0),
|
||||
oidcDiscoveryUrl: z.string().url().min(1),
|
||||
caCert: z.string().trim().default(""),
|
||||
boundIssuer: z.string().min(1),
|
||||
boundAudiences: validateOidcAuthAudiencesField,
|
||||
boundClaims: validateOidcBoundClaimsField,
|
||||
boundSubject: z.string().optional().default("")
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
identityOidcAuth: IdentityOidcAuthResponseSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const identityOidcAuth = await server.services.identityOidcAuth.attachOidcAuth({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body,
|
||||
identityId: req.params.identityId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
orgId: identityOidcAuth.orgId,
|
||||
event: {
|
||||
type: EventType.ADD_IDENTITY_OIDC_AUTH,
|
||||
metadata: {
|
||||
identityId: identityOidcAuth.identityId,
|
||||
oidcDiscoveryUrl: identityOidcAuth.oidcDiscoveryUrl,
|
||||
caCert: identityOidcAuth.caCert,
|
||||
boundIssuer: identityOidcAuth.boundIssuer,
|
||||
boundAudiences: identityOidcAuth.boundAudiences,
|
||||
boundClaims: identityOidcAuth.boundClaims as Record<string, string>,
|
||||
boundSubject: identityOidcAuth.boundSubject as string,
|
||||
accessTokenTTL: identityOidcAuth.accessTokenTTL,
|
||||
accessTokenMaxTTL: identityOidcAuth.accessTokenMaxTTL,
|
||||
accessTokenTrustedIps: identityOidcAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
|
||||
accessTokenNumUsesLimit: identityOidcAuth.accessTokenNumUsesLimit
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
identityOidcAuth
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "PATCH",
|
||||
url: "/oidc-auth/identities/:identityId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Update OIDC Auth configuration on identity",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
identityId: z.string().trim()
|
||||
}),
|
||||
body: z
|
||||
.object({
|
||||
accessTokenTrustedIps: z
|
||||
.object({
|
||||
ipAddress: z.string().trim()
|
||||
})
|
||||
.array()
|
||||
.min(1)
|
||||
.default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }]),
|
||||
accessTokenTTL: z
|
||||
.number()
|
||||
.int()
|
||||
.min(1)
|
||||
.refine((value) => value !== 0, {
|
||||
message: "accessTokenTTL must have a non zero number"
|
||||
})
|
||||
.default(2592000),
|
||||
accessTokenMaxTTL: z
|
||||
.number()
|
||||
.int()
|
||||
.refine((value) => value !== 0, {
|
||||
message: "accessTokenMaxTTL must have a non zero number"
|
||||
})
|
||||
.default(2592000),
|
||||
accessTokenNumUsesLimit: z.number().int().min(0).default(0),
|
||||
oidcDiscoveryUrl: z.string().url().min(1),
|
||||
caCert: z.string().trim().default(""),
|
||||
boundIssuer: z.string().min(1),
|
||||
boundAudiences: validateOidcAuthAudiencesField,
|
||||
boundClaims: validateOidcBoundClaimsField,
|
||||
boundSubject: z.string().optional().default("")
|
||||
})
|
||||
.partial(),
|
||||
response: {
|
||||
200: z.object({
|
||||
identityOidcAuth: IdentityOidcAuthResponseSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const identityOidcAuth = await server.services.identityOidcAuth.updateOidcAuth({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
...req.body,
|
||||
identityId: req.params.identityId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
orgId: identityOidcAuth.orgId,
|
||||
event: {
|
||||
type: EventType.UPDATE_IDENTITY_OIDC_AUTH,
|
||||
metadata: {
|
||||
identityId: identityOidcAuth.identityId,
|
||||
oidcDiscoveryUrl: identityOidcAuth.oidcDiscoveryUrl,
|
||||
caCert: identityOidcAuth.caCert,
|
||||
boundIssuer: identityOidcAuth.boundIssuer,
|
||||
boundAudiences: identityOidcAuth.boundAudiences,
|
||||
boundClaims: identityOidcAuth.boundClaims as Record<string, string>,
|
||||
boundSubject: identityOidcAuth.boundSubject as string,
|
||||
accessTokenTTL: identityOidcAuth.accessTokenTTL,
|
||||
accessTokenMaxTTL: identityOidcAuth.accessTokenMaxTTL,
|
||||
accessTokenTrustedIps: identityOidcAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
|
||||
accessTokenNumUsesLimit: identityOidcAuth.accessTokenNumUsesLimit
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { identityOidcAuth };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/oidc-auth/identities/:identityId",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Retrieve OIDC Auth configuration on identity",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
identityId: z.string()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
identityOidcAuth: IdentityOidcAuthResponseSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const identityOidcAuth = await server.services.identityOidcAuth.getOidcAuth({
|
||||
identityId: req.params.identityId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
orgId: identityOidcAuth.orgId,
|
||||
event: {
|
||||
type: EventType.GET_IDENTITY_OIDC_AUTH,
|
||||
metadata: {
|
||||
identityId: identityOidcAuth.identityId
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { identityOidcAuth };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/oidc-auth/identities/:identityId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Delete OIDC Auth configuration on identity",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
identityId: z.string().describe(OIDC_AUTH.REVOKE.identityId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
identityOidcAuth: IdentityOidcAuthResponseSchema.omit({
|
||||
caCert: true
|
||||
})
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const identityOidcAuth = await server.services.identityOidcAuth.revokeOidcAuth({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
identityId: req.params.identityId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
orgId: identityOidcAuth.orgId,
|
||||
event: {
|
||||
type: EventType.REVOKE_IDENTITY_OIDC_AUTH,
|
||||
metadata: {
|
||||
identityId: identityOidcAuth.identityId
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { identityOidcAuth };
|
||||
}
|
||||
});
|
||||
};
|
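For orientation, a minimal client-side sketch of the flow the OIDC auth routes above implement: attach an OIDC configuration to a machine identity, then exchange an identity-provider JWT for an access token. The base URL, the `/oidc-auth/login` path, and the tokens are placeholders for illustration and are not taken from this changeset.

```ts
// Sketch only — assumed base URL, login path, and credentials.
const BASE_URL = "https://app.example.com/api/v1/auth"; // hypothetical deployment URL

async function attachAndLogin(identityId: string, adminToken: string, oidcJwt: string) {
  // Attach an OIDC Auth configuration onto the identity (POST /oidc-auth/identities/:identityId)
  await fetch(`${BASE_URL}/oidc-auth/identities/${identityId}`, {
    method: "POST",
    headers: { Authorization: `Bearer ${adminToken}`, "Content-Type": "application/json" },
    body: JSON.stringify({
      oidcDiscoveryUrl: "https://token.actions.githubusercontent.com", // example issuer
      boundIssuer: "https://token.actions.githubusercontent.com",
      boundAudiences: "https://app.example.com",
      boundClaims: {},
      boundSubject: ""
    })
  });

  // Exchange the provider-issued JWT for an identity access token (login route shown earlier)
  const res = await fetch(`${BASE_URL}/oidc-auth/login`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ identityId, jwt: oidcJwt })
  });
  const { accessToken, tokenType, expiresIn } = await res.json();
  return { accessToken, tokenType, expiresIn }; // tokenType is "Bearer"
}
```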
@@ -253,6 +253,15 @@ export const registerIdentityTokenAuthRouter = async (server: FastifyZodProvider
    }
  });

  // proposed
  // update token by id: PATCH /token-auth/tokens/:tokenId
  // revoke token by id: POST /token-auth/tokens/:tokenId/revoke

  // current
  // revoke token by id: POST /token/revoke-by-id

  // token-auth/identities/:identityId/tokens

  server.route({
    method: "POST",
    url: "/token-auth/identities/:identityId/tokens",
@@ -284,7 +293,7 @@ export const registerIdentityTokenAuthRouter = async (server: FastifyZodProvider
    },
    handler: async (req) => {
      const { identityTokenAuth, accessToken, identityAccessToken, identityMembershipOrg } =
-       await server.services.identityTokenAuth.createTokenTokenAuth({
+       await server.services.identityTokenAuth.createTokenAuthToken({
          actor: req.permission.type,
          actorId: req.permission.id,
          actorAuthMethod: req.permission.authMethod,
@@ -342,7 +351,7 @@ export const registerIdentityTokenAuthRouter = async (server: FastifyZodProvider
      }
    },
    handler: async (req) => {
-     const { tokens, identityMembershipOrg } = await server.services.identityTokenAuth.getTokensTokenAuth({
+     const { tokens, identityMembershipOrg } = await server.services.identityTokenAuth.getTokenAuthTokens({
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
@@ -368,7 +377,7 @@ export const registerIdentityTokenAuthRouter = async (server: FastifyZodProvider

  server.route({
    method: "PATCH",
-   url: "/token-auth/identities/:identityId/tokens/:tokenId",
+   url: "/token-auth/tokens/:tokenId",
    config: {
      rateLimit: writeLimit
    },
@@ -381,7 +390,6 @@ export const registerIdentityTokenAuthRouter = async (server: FastifyZodProvider
        }
      ],
      params: z.object({
-       identityId: z.string(),
        tokenId: z.string()
      }),
      body: z.object({
@@ -394,12 +402,11 @@ export const registerIdentityTokenAuthRouter = async (server: FastifyZodProvider
      }
    },
    handler: async (req) => {
-     const { token, identityMembershipOrg } = await server.services.identityTokenAuth.updateTokenTokenAuth({
+     const { token, identityMembershipOrg } = await server.services.identityTokenAuth.updateTokenAuthToken({
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
-       identityId: req.params.identityId,
        tokenId: req.params.tokenId,
        ...req.body
      });
@@ -410,7 +417,7 @@ export const registerIdentityTokenAuthRouter = async (server: FastifyZodProvider
        event: {
          type: EventType.UPDATE_TOKEN_IDENTITY_TOKEN_AUTH,
          metadata: {
-           identityId: req.params.identityId,
+           identityId: token.identityId,
            tokenId: token.id,
            name: req.body.name
          }
@@ -420,4 +427,42 @@ export const registerIdentityTokenAuthRouter = async (server: FastifyZodProvider
      return { token };
    }
  });

  server.route({
    method: "POST",
    url: "/token-auth/tokens/:tokenId/revoke",
    config: {
      rateLimit: writeLimit
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    schema: {
      description: "Revoke token for identity with Token Auth configured",
      security: [
        {
          bearerAuth: []
        }
      ],
      params: z.object({
        tokenId: z.string()
      }),
      response: {
        200: z.object({
          message: z.string()
        })
      }
    },
    handler: async (req) => {
      await server.services.identityTokenAuth.revokeTokenAuthToken({
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        tokenId: req.params.tokenId
      });

      return {
        message: "Successfully revoked access token"
      };
    }
  });
};
||||
|
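The "proposed" comments above describe moving token management from identity-scoped paths to token-id paths. A rough client-side sketch of that lifecycle (create, rename, revoke); the base URL and bearer token are placeholders, not values from this diff.

```ts
// Sketch only — AUTH_API and actorToken are assumptions for illustration.
const AUTH_API = "https://app.example.com/api/v1/auth";

async function rotateTokenAuthToken(identityId: string, oldTokenId: string, actorToken: string) {
  const headers = { Authorization: `Bearer ${actorToken}`, "Content-Type": "application/json" };

  // Create a new token for the identity
  const created = await fetch(`${AUTH_API}/token-auth/identities/${identityId}/tokens`, {
    method: "POST",
    headers,
    body: JSON.stringify({ name: "ci-runner" })
  }).then((r) => r.json());

  // Rename an existing token by its id (PATCH /token-auth/tokens/:tokenId)
  await fetch(`${AUTH_API}/token-auth/tokens/${oldTokenId}`, {
    method: "PATCH",
    headers,
    body: JSON.stringify({ name: "ci-runner-old" })
  });

  // Revoke the old token by its id (POST /token-auth/tokens/:tokenId/revoke)
  await fetch(`${AUTH_API}/token-auth/tokens/${oldTokenId}/revoke`, { method: "POST", headers });

  return created.accessToken;
}
```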
@@ -8,6 +8,7 @@ import { registerIdentityAwsAuthRouter } from "./identity-aws-iam-auth-router";
import { registerIdentityAzureAuthRouter } from "./identity-azure-auth-router";
import { registerIdentityGcpAuthRouter } from "./identity-gcp-auth-router";
import { registerIdentityKubernetesRouter } from "./identity-kubernetes-auth-router";
+import { registerIdentityOidcAuthRouter } from "./identity-oidc-auth-router";
import { registerIdentityRouter } from "./identity-router";
import { registerIdentityTokenAuthRouter } from "./identity-token-auth-router";
import { registerIdentityUaRouter } from "./identity-universal-auth-router";
@@ -26,6 +27,7 @@ import { registerSecretSharingRouter } from "./secret-sharing-router";
import { registerSecretTagRouter } from "./secret-tag-router";
import { registerSsoRouter } from "./sso-router";
import { registerUserActionRouter } from "./user-action-router";
+import { registerUserEngagementRouter } from "./user-engagement-router";
import { registerUserRouter } from "./user-router";
import { registerWebhookRouter } from "./webhook-router";

@@ -41,6 +43,7 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
      await authRouter.register(registerIdentityAccessTokenRouter);
      await authRouter.register(registerIdentityAwsAuthRouter);
      await authRouter.register(registerIdentityAzureAuthRouter);
+     await authRouter.register(registerIdentityOidcAuthRouter);
    },
    { prefix: "/auth" }
  );
@@ -79,4 +82,5 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
  await server.register(registerWebhookRouter, { prefix: "/webhooks" });
  await server.register(registerIdentityRouter, { prefix: "/identities" });
  await server.register(registerSecretSharingRouter, { prefix: "/secret-sharing" });
+ await server.register(registerUserEngagementRouter, { prefix: "/user-engagement" });
};
||||
|
backend/src/server/routes/v1/user-engagement-router.ts (new file, 27 lines)
@@ -0,0 +1,27 @@
import { z } from "zod";

import { userEngagementLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

export const registerUserEngagementRouter = async (server: FastifyZodProvider) => {
  server.route({
    method: "POST",
    url: "/me/wish",
    config: {
      rateLimit: userEngagementLimit
    },
    schema: {
      body: z.object({
        text: z.string().min(1)
      }),
      response: {
        200: z.object({})
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      return server.services.userEngagement.createUserWish(req.permission.id, req.body.text);
    }
  });
};
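A minimal sketch of calling the new wish endpoint; it assumes the v1 routes are mounted under `/api/v1` and that a user JWT is available — both assumptions, not shown in this diff.

```ts
// Sketch only — URL prefix and userJwt are placeholders.
async function sendWish(userJwt: string, text: string) {
  const res = await fetch("https://app.example.com/api/v1/user-engagement/me/wish", {
    method: "POST",
    headers: { Authorization: `Bearer ${userJwt}`, "Content-Type": "application/json" },
    body: JSON.stringify({ text }) // must be a non-empty string per the zod schema
  });
  return res.ok; // the route responds with an empty object on success
}
```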
@ -6,13 +6,17 @@ import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { WebhookType } from "@app/services/webhook/webhook-types";
|
||||
|
||||
export const sanitizedWebhookSchema = WebhooksSchema.omit({
|
||||
encryptedSecretKey: true,
|
||||
iv: true,
|
||||
tag: true,
|
||||
algorithm: true,
|
||||
keyEncoding: true
|
||||
keyEncoding: true,
|
||||
urlCipherText: true,
|
||||
urlIV: true,
|
||||
urlTag: true
|
||||
}).merge(
|
||||
z.object({
|
||||
projectId: z.string(),
|
||||
@ -33,13 +37,24 @@ export const registerWebhookRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
body: z.object({
|
||||
workspaceId: z.string().trim(),
|
||||
environment: z.string().trim(),
|
||||
webhookUrl: z.string().url().trim(),
|
||||
webhookSecretKey: z.string().trim().optional(),
|
||||
secretPath: z.string().trim().default("/").transform(removeTrailingSlash)
|
||||
}),
|
||||
body: z
|
||||
.object({
|
||||
type: z.nativeEnum(WebhookType).default(WebhookType.GENERAL),
|
||||
workspaceId: z.string().trim(),
|
||||
environment: z.string().trim(),
|
||||
webhookUrl: z.string().url().trim(),
|
||||
webhookSecretKey: z.string().trim().optional(),
|
||||
secretPath: z.string().trim().default("/").transform(removeTrailingSlash)
|
||||
})
|
||||
.superRefine((data, ctx) => {
|
||||
if (data.type === WebhookType.SLACK && !data.webhookUrl.includes("hooks.slack.com")) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
message: "Incoming Webhook URL is invalid.",
|
||||
path: ["webhookUrl"]
|
||||
});
|
||||
}
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
message: z.string(),
|
||||
@ -66,8 +81,7 @@ export const registerWebhookRouter = async (server: FastifyZodProvider) => {
|
||||
environment: webhook.environment.slug,
|
||||
webhookId: webhook.id,
|
||||
isDisabled: webhook.isDisabled,
|
||||
secretPath: webhook.secretPath,
|
||||
webhookUrl: webhook.url
|
||||
secretPath: webhook.secretPath
|
||||
}
|
||||
}
|
||||
});
|
||||
@ -116,8 +130,7 @@ export const registerWebhookRouter = async (server: FastifyZodProvider) => {
|
||||
environment: webhook.environment.slug,
|
||||
webhookId: webhook.id,
|
||||
isDisabled: webhook.isDisabled,
|
||||
secretPath: webhook.secretPath,
|
||||
webhookUrl: webhook.url
|
||||
secretPath: webhook.secretPath
|
||||
}
|
||||
}
|
||||
});
|
||||
@ -156,8 +169,7 @@ export const registerWebhookRouter = async (server: FastifyZodProvider) => {
|
||||
environment: webhook.environment.slug,
|
||||
webhookId: webhook.id,
|
||||
isDisabled: webhook.isDisabled,
|
||||
secretPath: webhook.secretPath,
|
||||
webhookUrl: webhook.url
|
||||
secretPath: webhook.secretPath
|
||||
}
|
||||
}
|
||||
});
|
||||
|
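The webhook body schema above uses a zod `superRefine` to require a `hooks.slack.com` URL when the webhook type is Slack. A trimmed-down schema mirroring that rule, to show how the guard behaves; the enum values are assumptions for the sketch and not taken from this diff.

```ts
import { z } from "zod";

// Assumed enum values — only for this illustration.
enum WebhookType {
  GENERAL = "general",
  SLACK = "slack"
}

const webhookBody = z
  .object({
    type: z.nativeEnum(WebhookType).default(WebhookType.GENERAL),
    webhookUrl: z.string().url().trim()
  })
  .superRefine((data, ctx) => {
    if (data.type === WebhookType.SLACK && !data.webhookUrl.includes("hooks.slack.com")) {
      ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Incoming Webhook URL is invalid.", path: ["webhookUrl"] });
    }
  });

// Passes: Slack type with a Slack incoming-webhook URL.
webhookBody.parse({ type: WebhookType.SLACK, webhookUrl: "https://hooks.slack.com/services/T000/B000/XXXX" });

// Fails validation: Slack type with a non-Slack URL.
console.log(webhookBody.safeParse({ type: WebhookType.SLACK, webhookUrl: "https://example.com/hook" }).success); // false
```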
@@ -5,6 +5,7 @@ import {
  IdentitiesSchema,
  IdentityProjectMembershipsSchema,
  ProjectMembershipRole,
+ ProjectsSchema,
  ProjectUserMembershipRolesSchema
} from "@app/db/schemas";
import { PROJECT_IDENTITIES } from "@app/lib/api-docs";
@@ -234,7 +235,8 @@
                temporaryAccessEndTime: z.date().nullable().optional()
              })
            ),
-           identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true })
+           identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true }),
+           project: ProjectsSchema.pick({ name: true, id: true })
          })
          .array()
        })
@@ -291,7 +293,8 @@
                temporaryAccessEndTime: z.date().nullable().optional()
              })
            ),
-           identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true })
+           identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true }),
+           project: ProjectsSchema.pick({ name: true, id: true })
          })
        })
      }
||||
|
@@ -297,7 +297,7 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
-     const user = await server.services.user.deleteMe(req.permission.id);
+     const user = await server.services.user.deleteUser(req.permission.id);
      return { user };
    }
  });
||||
|
@ -949,7 +949,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
event: {
|
||||
type: EventType.SECRET_APPROVAL_REQUEST,
|
||||
metadata: {
|
||||
committedBy: approval.committerId,
|
||||
committedBy: approval.committerUserId,
|
||||
secretApprovalRequestId: approval.id,
|
||||
secretApprovalRequestSlug: approval.slug
|
||||
}
|
||||
@ -1133,7 +1133,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
event: {
|
||||
type: EventType.SECRET_APPROVAL_REQUEST,
|
||||
metadata: {
|
||||
committedBy: approval.committerId,
|
||||
committedBy: approval.committerUserId,
|
||||
secretApprovalRequestId: approval.id,
|
||||
secretApprovalRequestSlug: approval.slug
|
||||
}
|
||||
@ -1271,7 +1271,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
event: {
|
||||
type: EventType.SECRET_APPROVAL_REQUEST,
|
||||
metadata: {
|
||||
committedBy: approval.committerId,
|
||||
committedBy: approval.committerUserId,
|
||||
secretApprovalRequestId: approval.id,
|
||||
secretApprovalRequestSlug: approval.slug
|
||||
}
|
||||
@ -1325,6 +1325,61 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/move",
|
||||
config: {
|
||||
rateLimit: secretsLimit
|
||||
},
|
||||
schema: {
|
||||
body: z.object({
|
||||
projectSlug: z.string().trim(),
|
||||
sourceEnvironment: z.string().trim(),
|
||||
sourceSecretPath: z.string().trim().default("/").transform(removeTrailingSlash),
|
||||
destinationEnvironment: z.string().trim(),
|
||||
destinationSecretPath: z.string().trim().default("/").transform(removeTrailingSlash),
|
||||
secretIds: z.string().array(),
|
||||
shouldOverwrite: z.boolean().default(false)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
isSourceUpdated: z.boolean(),
|
||||
isDestinationUpdated: z.boolean()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { projectId, isSourceUpdated, isDestinationUpdated } = await server.services.secret.moveSecrets({
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
projectId,
|
||||
...req.auditLogInfo,
|
||||
event: {
|
||||
type: EventType.MOVE_SECRETS,
|
||||
metadata: {
|
||||
sourceEnvironment: req.body.sourceEnvironment,
|
||||
sourceSecretPath: req.body.sourceSecretPath,
|
||||
destinationEnvironment: req.body.destinationEnvironment,
|
||||
destinationSecretPath: req.body.destinationSecretPath,
|
||||
secretIds: req.body.secretIds
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
isSourceUpdated,
|
||||
isDestinationUpdated
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/batch",
|
||||
@ -1397,7 +1452,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
event: {
|
||||
type: EventType.SECRET_APPROVAL_REQUEST,
|
||||
metadata: {
|
||||
committedBy: approval.committerId,
|
||||
committedBy: approval.committerUserId,
|
||||
secretApprovalRequestId: approval.id,
|
||||
secretApprovalRequestSlug: approval.slug
|
||||
}
|
||||
@ -1524,7 +1579,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
event: {
|
||||
type: EventType.SECRET_APPROVAL_REQUEST,
|
||||
metadata: {
|
||||
committedBy: approval.committerId,
|
||||
committedBy: approval.committerUserId,
|
||||
secretApprovalRequestId: approval.id,
|
||||
secretApprovalRequestSlug: approval.slug
|
||||
}
|
||||
@ -1638,7 +1693,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
event: {
|
||||
type: EventType.SECRET_APPROVAL_REQUEST,
|
||||
metadata: {
|
||||
committedBy: approval.committerId,
|
||||
committedBy: approval.committerUserId,
|
||||
secretApprovalRequestId: approval.id,
|
||||
secretApprovalRequestSlug: approval.slug
|
||||
}
|
||||
|
@ -17,6 +17,7 @@ import { TAuthTokenServiceFactory } from "../auth-token/auth-token-service";
|
||||
import { TokenType } from "../auth-token/auth-token-types";
|
||||
import { TOrgDALFactory } from "../org/org-dal";
|
||||
import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
|
||||
import { LoginMethod } from "../super-admin/super-admin-types";
|
||||
import { TUserDALFactory } from "../user/user-dal";
|
||||
import { enforceUserLockStatus, validateProviderAuthToken } from "./auth-fns";
|
||||
import {
|
||||
@ -158,9 +159,22 @@ export const authLoginServiceFactory = ({
|
||||
const userEnc = await userDAL.findUserEncKeyByUsername({
|
||||
username: email
|
||||
});
|
||||
const serverCfg = await getServerCfg();
|
||||
|
||||
if (
|
||||
serverCfg.enabledLoginMethods &&
|
||||
!serverCfg.enabledLoginMethods.includes(LoginMethod.EMAIL) &&
|
||||
!providerAuthToken
|
||||
) {
|
||||
throw new BadRequestError({
|
||||
message: "Login with email is disabled by administrator."
|
||||
});
|
||||
}
|
||||
|
||||
if (!userEnc || (userEnc && !userEnc.isAccepted)) {
|
||||
throw new Error("Failed to find user");
|
||||
}
|
||||
|
||||
if (!userEnc.authMethods?.includes(AuthMethod.EMAIL)) {
|
||||
validateProviderAuthToken(providerAuthToken as string, email);
|
||||
}
|
||||
@ -507,6 +521,40 @@ export const authLoginServiceFactory = ({
|
||||
let user = await userDAL.findUserByUsername(email);
|
||||
const serverCfg = await getServerCfg();
|
||||
|
||||
if (serverCfg.enabledLoginMethods) {
|
||||
switch (authMethod) {
|
||||
case AuthMethod.GITHUB: {
|
||||
if (!serverCfg.enabledLoginMethods.includes(LoginMethod.GITHUB)) {
|
||||
throw new BadRequestError({
|
||||
message: "Login with Github is disabled by administrator.",
|
||||
name: "Oauth 2 login"
|
||||
});
|
||||
}
|
||||
break;
|
||||
}
|
||||
case AuthMethod.GOOGLE: {
|
||||
if (!serverCfg.enabledLoginMethods.includes(LoginMethod.GOOGLE)) {
|
||||
throw new BadRequestError({
|
||||
message: "Login with Google is disabled by administrator.",
|
||||
name: "Oauth 2 login"
|
||||
});
|
||||
}
|
||||
break;
|
||||
}
|
||||
case AuthMethod.GITLAB: {
|
||||
if (!serverCfg.enabledLoginMethods.includes(LoginMethod.GITLAB)) {
|
||||
throw new BadRequestError({
|
||||
message: "Login with Gitlab is disabled by administrator.",
|
||||
name: "Oauth 2 login"
|
||||
});
|
||||
}
|
||||
break;
|
||||
}
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
const appCfg = getConfig();
|
||||
|
||||
if (!user) {
|
||||
|
@ -364,7 +364,7 @@ export const authSignupServiceFactory = ({
|
||||
tx
|
||||
);
|
||||
const uniqueOrgId = [...new Set(updatedMembersips.map(({ orgId }) => orgId))];
|
||||
await Promise.allSettled(uniqueOrgId.map((orgId) => licenseService.updateSubscriptionOrgMemberCount(orgId)));
|
||||
await Promise.allSettled(uniqueOrgId.map((orgId) => licenseService.updateSubscriptionOrgMemberCount(orgId, tx)));
|
||||
|
||||
await convertPendingGroupAdditionsToGroupMemberships({
|
||||
userIds: [user.id],
|
||||
|
@@ -51,6 +51,12 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
          `${TableName.IdentityKubernetesAuth}.identityId`
        );
      })
+     .leftJoin(TableName.IdentityOidcAuth, (qb) => {
+       qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.OIDC_AUTH])).andOn(
+         `${TableName.Identity}.id`,
+         `${TableName.IdentityOidcAuth}.identityId`
+       );
+     })
      .select(selectAllTableCols(TableName.IdentityAccessToken))
      .select(
        db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityUniversalAuth).as("accessTokenTrustedIpsUa"),
@@ -58,6 +64,7 @@
        db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityAwsAuth).as("accessTokenTrustedIpsAws"),
        db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityAzureAuth).as("accessTokenTrustedIpsAzure"),
        db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityKubernetesAuth).as("accessTokenTrustedIpsK8s"),
+       db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityOidcAuth).as("accessTokenTrustedIpsOidc"),
        db.ref("name").withSchema(TableName.Identity)
      )
      .first();
@@ -71,7 +78,8 @@
        doc.accessTokenTrustedIpsGcp ||
        doc.accessTokenTrustedIpsAws ||
        doc.accessTokenTrustedIpsAzure ||
-       doc.accessTokenTrustedIpsK8s
+       doc.accessTokenTrustedIpsK8s ||
+       doc.accessTokenTrustedIpsOidc
      };
    } catch (error) {
      throw new DatabaseError({ error, name: "IdAccessTokenFindOne" });
||||
|
@ -1,9 +1,6 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import jwt, { JwtPayload } from "jsonwebtoken";
|
||||
|
||||
import { TableName, TIdentityAccessTokens } from "@app/db/schemas";
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { checkIPAgainstBlocklist, TIp } from "@app/lib/ip";
|
||||
@ -11,24 +8,18 @@ import { checkIPAgainstBlocklist, TIp } from "@app/lib/ip";
|
||||
import { AuthTokenType } from "../auth/auth-type";
|
||||
import { TIdentityOrgDALFactory } from "../identity/identity-org-dal";
|
||||
import { TIdentityAccessTokenDALFactory } from "./identity-access-token-dal";
|
||||
import {
|
||||
TIdentityAccessTokenJwtPayload,
|
||||
TRenewAccessTokenDTO,
|
||||
TRevokeAccessTokenByIdDTO
|
||||
} from "./identity-access-token-types";
|
||||
import { TIdentityAccessTokenJwtPayload, TRenewAccessTokenDTO } from "./identity-access-token-types";
|
||||
|
||||
type TIdentityAccessTokenServiceFactoryDep = {
|
||||
identityAccessTokenDAL: TIdentityAccessTokenDALFactory;
|
||||
identityOrgMembershipDAL: TIdentityOrgDALFactory;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
};
|
||||
|
||||
export type TIdentityAccessTokenServiceFactory = ReturnType<typeof identityAccessTokenServiceFactory>;
|
||||
|
||||
export const identityAccessTokenServiceFactory = ({
|
||||
identityAccessTokenDAL,
|
||||
identityOrgMembershipDAL,
|
||||
permissionService
|
||||
identityOrgMembershipDAL
|
||||
}: TIdentityAccessTokenServiceFactoryDep) => {
|
||||
const validateAccessTokenExp = async (identityAccessToken: TIdentityAccessTokens) => {
|
||||
const {
|
||||
@ -147,43 +138,6 @@ export const identityAccessTokenServiceFactory = ({
|
||||
return { revokedToken };
|
||||
};
|
||||
|
||||
const revokeAccessTokenById = async ({
|
||||
tokenId,
|
||||
actorId,
|
||||
actor,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
}: TRevokeAccessTokenByIdDTO) => {
|
||||
const identityAccessToken = await identityAccessTokenDAL.findOne({
|
||||
[`${TableName.IdentityAccessToken}.id` as "id"]: tokenId,
|
||||
isAccessTokenRevoked: false
|
||||
});
|
||||
if (!identityAccessToken) throw new UnauthorizedError();
|
||||
|
||||
const identityOrgMembership = await identityOrgMembershipDAL.findOne({
|
||||
identityId: identityAccessToken.identityId
|
||||
});
|
||||
|
||||
if (!identityOrgMembership) {
|
||||
throw new UnauthorizedError({ message: "Identity does not belong to any organization" });
|
||||
}
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
identityOrgMembership.orgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
|
||||
|
||||
const revokedToken = await identityAccessTokenDAL.updateById(identityAccessToken.id, {
|
||||
isAccessTokenRevoked: true
|
||||
});
|
||||
|
||||
return { revokedToken };
|
||||
};
|
||||
|
||||
const fnValidateIdentityAccessToken = async (token: TIdentityAccessTokenJwtPayload, ipAddress?: string) => {
|
||||
const identityAccessToken = await identityAccessTokenDAL.findOne({
|
||||
[`${TableName.IdentityAccessToken}.id` as "id"]: token.identityAccessTokenId,
|
||||
@ -221,5 +175,5 @@ export const identityAccessTokenServiceFactory = ({
|
||||
return { ...identityAccessToken, orgId: identityOrgMembership.orgId };
|
||||
};
|
||||
|
||||
return { renewAccessToken, revokeAccessToken, revokeAccessTokenById, fnValidateIdentityAccessToken };
|
||||
return { renewAccessToken, revokeAccessToken, fnValidateIdentityAccessToken };
|
||||
};
|
||||
|
@@ -1,5 +1,3 @@
-import { TProjectPermission } from "@app/lib/types";
-
export type TRenewAccessTokenDTO = {
  accessToken: string;
};
@@ -10,7 +8,3 @@ export type TIdentityAccessTokenJwtPayload = {
  identityAccessTokenId: string;
  authTokenType: string;
};
-
-export type TRevokeAccessTokenByIdDTO = {
-  tokenId: string;
-} & Omit<TProjectPermission, "projectId">;
||||
|
@@ -17,6 +17,7 @@ export const validateAzureIdentity = async ({
  const jwksUri = `https://login.microsoftonline.com/${tenantId}/discovery/keys`;

  const decodedJwt = jwt.decode(azureJwt, { complete: true }) as TDecodedAzureAuthJwt;

  const { kid } = decodedJwt.header;

  const { data }: { data: TAzureJwksUriResponse } = await axios.get(jwksUri);
@@ -27,6 +28,13 @@

  const publicKey = `-----BEGIN CERTIFICATE-----\n${signingKey.x5c[0]}\n-----END CERTIFICATE-----`;

+ // Case: This can happen when the user uses a custom resource (such as https://management.azure.com&client_id=value).
+ // In this case, the audience in the decoded JWT will not have a trailing slash, but the resource will.
+ if (!decodedJwt.payload.aud.endsWith("/") && resource.endsWith("/")) {
+   // eslint-disable-next-line no-param-reassign
+   resource = resource.slice(0, -1);
+ }
+
  return jwt.verify(azureJwt, publicKey, {
    audience: resource,
    issuer: `https://sts.windows.net/${tenantId}/`
||||
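A concrete illustration of the trailing-slash normalisation added above (the values are made up): the configured resource ends with a slash, the token's `aud` claim does not, so the resource is trimmed before being passed to `jwt.verify` as the expected audience.

```ts
// Illustration only — example values, not taken from this diff.
let resource = "https://management.azure.com/";
const aud = "https://management.azure.com";

if (!aud.endsWith("/") && resource.endsWith("/")) {
  resource = resource.slice(0, -1); // now matches the audience exactly
}
console.log(resource === aud); // true
```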
|
@@ -0,0 +1,10 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TIdentityOidcAuthDALFactory = ReturnType<typeof identityOidcAuthDALFactory>;

export const identityOidcAuthDALFactory = (db: TDbClient) => {
  const oidcAuthOrm = ormify(db, TableName.IdentityOidcAuth);
  return oidcAuthOrm;
};
@ -0,0 +1,534 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import axios from "axios";
|
||||
import https from "https";
|
||||
import jwt from "jsonwebtoken";
|
||||
import { JwksClient } from "jwks-rsa";
|
||||
|
||||
import { IdentityAuthMethod, SecretKeyEncoding, TIdentityOidcAuthsUpdate } from "@app/db/schemas";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { isAtLeastAsPrivileged } from "@app/lib/casl";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { generateAsymmetricKeyPair } from "@app/lib/crypto";
|
||||
import {
|
||||
decryptSymmetric,
|
||||
encryptSymmetric,
|
||||
generateSymmetricKey,
|
||||
infisicalSymmetricDecrypt,
|
||||
infisicalSymmetricEncypt
|
||||
} from "@app/lib/crypto/encryption";
|
||||
import { BadRequestError, ForbiddenRequestError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
|
||||
|
||||
import { ActorType, AuthTokenType } from "../auth/auth-type";
|
||||
import { TIdentityDALFactory } from "../identity/identity-dal";
|
||||
import { TIdentityOrgDALFactory } from "../identity/identity-org-dal";
|
||||
import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal";
|
||||
import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identity-access-token-types";
|
||||
import { TOrgBotDALFactory } from "../org/org-bot-dal";
|
||||
import { TIdentityOidcAuthDALFactory } from "./identity-oidc-auth-dal";
|
||||
import {
|
||||
TAttachOidcAuthDTO,
|
||||
TGetOidcAuthDTO,
|
||||
TLoginOidcAuthDTO,
|
||||
TRevokeOidcAuthDTO,
|
||||
TUpdateOidcAuthDTO
|
||||
} from "./identity-oidc-auth-types";
|
||||
|
||||
type TIdentityOidcAuthServiceFactoryDep = {
|
||||
identityOidcAuthDAL: TIdentityOidcAuthDALFactory;
|
||||
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
|
||||
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
|
||||
identityDAL: Pick<TIdentityDALFactory, "updateById">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "transaction" | "create">;
|
||||
};
|
||||
|
||||
export type TIdentityOidcAuthServiceFactory = ReturnType<typeof identityOidcAuthServiceFactory>;
|
||||
|
||||
export const identityOidcAuthServiceFactory = ({
|
||||
identityOidcAuthDAL,
|
||||
identityOrgMembershipDAL,
|
||||
identityDAL,
|
||||
permissionService,
|
||||
licenseService,
|
||||
identityAccessTokenDAL,
|
||||
orgBotDAL
|
||||
}: TIdentityOidcAuthServiceFactoryDep) => {
|
||||
const login = async ({ identityId, jwt: oidcJwt }: TLoginOidcAuthDTO) => {
|
||||
const identityOidcAuth = await identityOidcAuthDAL.findOne({ identityId });
|
||||
if (!identityOidcAuth) {
|
||||
throw new UnauthorizedError();
|
||||
}
|
||||
|
||||
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({
|
||||
identityId: identityOidcAuth.identityId
|
||||
});
|
||||
if (!identityMembershipOrg) {
|
||||
throw new BadRequestError({ message: "Failed to find identity" });
|
||||
}
|
||||
|
||||
const orgBot = await orgBotDAL.findOne({ orgId: identityMembershipOrg.orgId });
|
||||
if (!orgBot) {
|
||||
throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" });
|
||||
}
|
||||
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: orgBot.encryptedSymmetricKey,
|
||||
iv: orgBot.symmetricKeyIV,
|
||||
tag: orgBot.symmetricKeyTag,
|
||||
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
});
|
||||
|
||||
const { encryptedCaCert, caCertIV, caCertTag } = identityOidcAuth;
|
||||
|
||||
let caCert = "";
|
||||
if (encryptedCaCert && caCertIV && caCertTag) {
|
||||
caCert = decryptSymmetric({
|
||||
ciphertext: encryptedCaCert,
|
||||
iv: caCertIV,
|
||||
tag: caCertTag,
|
||||
key
|
||||
});
|
||||
}
|
||||
|
||||
const requestAgent = new https.Agent({ ca: caCert, rejectUnauthorized: !!caCert });
|
||||
const { data: discoveryDoc } = await axios.get<{ jwks_uri: string }>(
|
||||
`${identityOidcAuth.oidcDiscoveryUrl}/.well-known/openid-configuration`,
|
||||
{
|
||||
httpsAgent: requestAgent
|
||||
}
|
||||
);
|
||||
const jwksUri = discoveryDoc.jwks_uri;
|
||||
|
||||
const decodedToken = jwt.decode(oidcJwt, { complete: true });
|
||||
if (!decodedToken) {
|
||||
throw new BadRequestError({
|
||||
message: "Invalid JWT"
|
||||
});
|
||||
}
|
||||
|
||||
const client = new JwksClient({
|
||||
jwksUri,
|
||||
requestAgent
|
||||
});
|
||||
|
||||
const { kid } = decodedToken.header;
|
||||
const oidcSigningKey = await client.getSigningKey(kid);
|
||||
|
||||
const tokenData = jwt.verify(oidcJwt, oidcSigningKey.getPublicKey(), {
|
||||
issuer: identityOidcAuth.boundIssuer
|
||||
}) as Record<string, string>;
|
||||
|
||||
if (identityOidcAuth.boundSubject) {
|
||||
if (tokenData.sub !== identityOidcAuth.boundSubject) {
|
||||
throw new UnauthorizedError();
|
||||
}
|
||||
}
|
||||
|
||||
if (identityOidcAuth.boundAudiences) {
|
||||
if (!identityOidcAuth.boundAudiences.split(", ").includes(tokenData.aud)) {
|
||||
throw new UnauthorizedError();
|
||||
}
|
||||
}
|
||||
|
||||
if (identityOidcAuth.boundClaims) {
|
||||
Object.keys(identityOidcAuth.boundClaims).forEach((claimKey) => {
|
||||
const claimValue = (identityOidcAuth.boundClaims as Record<string, string>)[claimKey];
|
||||
// handle both single and multi-valued claims
|
||||
if (!claimValue.split(", ").some((claimEntry) => tokenData[claimKey] === claimEntry)) {
|
||||
throw new UnauthorizedError();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
const identityAccessToken = await identityOidcAuthDAL.transaction(async (tx) => {
|
||||
const newToken = await identityAccessTokenDAL.create(
|
||||
{
|
||||
identityId: identityOidcAuth.identityId,
|
||||
isAccessTokenRevoked: false,
|
||||
accessTokenTTL: identityOidcAuth.accessTokenTTL,
|
||||
accessTokenMaxTTL: identityOidcAuth.accessTokenMaxTTL,
|
||||
accessTokenNumUses: 0,
|
||||
accessTokenNumUsesLimit: identityOidcAuth.accessTokenNumUsesLimit
|
||||
},
|
||||
tx
|
||||
);
|
||||
return newToken;
|
||||
});
|
||||
|
||||
const appCfg = getConfig();
|
||||
const accessToken = jwt.sign(
|
||||
{
|
||||
identityId: identityOidcAuth.identityId,
|
||||
identityAccessTokenId: identityAccessToken.id,
|
||||
authTokenType: AuthTokenType.IDENTITY_ACCESS_TOKEN
|
||||
} as TIdentityAccessTokenJwtPayload,
|
||||
appCfg.AUTH_SECRET,
|
||||
{
|
||||
expiresIn:
|
||||
Number(identityAccessToken.accessTokenMaxTTL) === 0
|
||||
? undefined
|
||||
: Number(identityAccessToken.accessTokenMaxTTL)
|
||||
}
|
||||
);
|
||||
|
||||
return { accessToken, identityOidcAuth, identityAccessToken, identityMembershipOrg };
|
||||
};
|
||||
|
||||
const attachOidcAuth = async ({
|
||||
identityId,
|
||||
oidcDiscoveryUrl,
|
||||
caCert,
|
||||
boundIssuer,
|
||||
boundAudiences,
|
||||
boundClaims,
|
||||
boundSubject,
|
||||
accessTokenTTL,
|
||||
accessTokenMaxTTL,
|
||||
accessTokenNumUsesLimit,
|
||||
accessTokenTrustedIps,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actor,
|
||||
actorOrgId
|
||||
}: TAttachOidcAuthDTO) => {
|
||||
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
|
||||
if (!identityMembershipOrg) {
|
||||
throw new BadRequestError({ message: "Failed to find identity" });
|
||||
}
|
||||
if (identityMembershipOrg.identity.authMethod)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to add OIDC Auth to already configured identity"
|
||||
});
|
||||
|
||||
if (accessTokenMaxTTL > 0 && accessTokenTTL > accessTokenMaxTTL) {
|
||||
throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" });
|
||||
}
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
identityMembershipOrg.orgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Identity);
|
||||
|
||||
const plan = await licenseService.getPlan(identityMembershipOrg.orgId);
|
||||
const reformattedAccessTokenTrustedIps = accessTokenTrustedIps.map((accessTokenTrustedIp) => {
|
||||
if (
|
||||
!plan.ipAllowlisting &&
|
||||
accessTokenTrustedIp.ipAddress !== "0.0.0.0/0" &&
|
||||
accessTokenTrustedIp.ipAddress !== "::/0"
|
||||
)
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to add IP access range to access token due to plan restriction. Upgrade plan to add IP access range."
|
||||
});
|
||||
if (!isValidIpOrCidr(accessTokenTrustedIp.ipAddress))
|
||||
throw new BadRequestError({
|
||||
message: "The IP is not a valid IPv4, IPv6, or CIDR block"
|
||||
});
|
||||
return extractIPDetails(accessTokenTrustedIp.ipAddress);
|
||||
});
|
||||
|
||||
const orgBot = await orgBotDAL.transaction(async (tx) => {
|
||||
const doc = await orgBotDAL.findOne({ orgId: identityMembershipOrg.orgId }, tx);
|
||||
if (doc) return doc;
|
||||
|
||||
const { privateKey, publicKey } = generateAsymmetricKeyPair();
|
||||
const key = generateSymmetricKey();
|
||||
const {
|
||||
ciphertext: encryptedPrivateKey,
|
||||
iv: privateKeyIV,
|
||||
tag: privateKeyTag,
|
||||
encoding: privateKeyKeyEncoding,
|
||||
algorithm: privateKeyAlgorithm
|
||||
} = infisicalSymmetricEncypt(privateKey);
|
||||
const {
|
||||
ciphertext: encryptedSymmetricKey,
|
||||
iv: symmetricKeyIV,
|
||||
tag: symmetricKeyTag,
|
||||
encoding: symmetricKeyKeyEncoding,
|
||||
algorithm: symmetricKeyAlgorithm
|
||||
} = infisicalSymmetricEncypt(key);
|
||||
|
||||
return orgBotDAL.create(
|
||||
{
|
||||
name: "Infisical org bot",
|
||||
publicKey,
|
||||
privateKeyIV,
|
||||
encryptedPrivateKey,
|
||||
symmetricKeyIV,
|
||||
symmetricKeyTag,
|
||||
encryptedSymmetricKey,
|
||||
symmetricKeyAlgorithm,
|
||||
orgId: identityMembershipOrg.orgId,
|
||||
privateKeyTag,
|
||||
privateKeyAlgorithm,
|
||||
privateKeyKeyEncoding,
|
||||
symmetricKeyKeyEncoding
|
||||
},
|
||||
tx
|
||||
);
|
||||
});
|
||||
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: orgBot.encryptedSymmetricKey,
|
||||
iv: orgBot.symmetricKeyIV,
|
||||
tag: orgBot.symmetricKeyTag,
|
||||
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
});
|
||||
|
||||
const { ciphertext: encryptedCaCert, iv: caCertIV, tag: caCertTag } = encryptSymmetric(caCert, key);
|
||||
|
||||
const identityOidcAuth = await identityOidcAuthDAL.transaction(async (tx) => {
|
||||
const doc = await identityOidcAuthDAL.create(
|
||||
{
|
||||
identityId: identityMembershipOrg.identityId,
|
||||
oidcDiscoveryUrl,
|
||||
encryptedCaCert,
|
||||
caCertIV,
|
||||
caCertTag,
|
||||
boundIssuer,
|
||||
boundAudiences,
|
||||
boundClaims,
|
||||
boundSubject,
|
||||
accessTokenMaxTTL,
|
||||
accessTokenTTL,
|
||||
accessTokenNumUsesLimit,
|
||||
accessTokenTrustedIps: JSON.stringify(reformattedAccessTokenTrustedIps)
|
||||
},
|
||||
tx
|
||||
);
|
||||
await identityDAL.updateById(
|
||||
identityMembershipOrg.identityId,
|
||||
{
|
||||
authMethod: IdentityAuthMethod.OIDC_AUTH
|
||||
},
|
||||
tx
|
||||
);
|
||||
return doc;
|
||||
});
|
||||
return { ...identityOidcAuth, orgId: identityMembershipOrg.orgId, caCert };
|
||||
};
|
||||
|
||||
const updateOidcAuth = async ({
|
||||
identityId,
|
||||
oidcDiscoveryUrl,
|
||||
caCert,
|
||||
boundIssuer,
|
||||
boundAudiences,
|
||||
boundClaims,
|
||||
boundSubject,
|
||||
accessTokenTTL,
|
||||
accessTokenMaxTTL,
|
||||
accessTokenNumUsesLimit,
|
||||
accessTokenTrustedIps,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actor,
|
||||
actorOrgId
|
||||
}: TUpdateOidcAuthDTO) => {
|
||||
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
|
||||
if (!identityMembershipOrg) {
|
||||
throw new BadRequestError({ message: "Failed to find identity" });
|
||||
}
|
||||
|
||||
if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.OIDC_AUTH) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to update OIDC Auth"
|
||||
});
|
||||
}
|
||||
|
||||
const identityOidcAuth = await identityOidcAuthDAL.findOne({ identityId });
|
||||
|
||||
if (
|
||||
(accessTokenMaxTTL || identityOidcAuth.accessTokenMaxTTL) > 0 &&
|
||||
(accessTokenTTL || identityOidcAuth.accessTokenMaxTTL) > (accessTokenMaxTTL || identityOidcAuth.accessTokenMaxTTL)
|
||||
) {
|
||||
throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" });
|
||||
}
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
identityMembershipOrg.orgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
|
||||
|
||||
const plan = await licenseService.getPlan(identityMembershipOrg.orgId);
|
||||
const reformattedAccessTokenTrustedIps = accessTokenTrustedIps?.map((accessTokenTrustedIp) => {
|
||||
if (
|
||||
!plan.ipAllowlisting &&
|
||||
accessTokenTrustedIp.ipAddress !== "0.0.0.0/0" &&
|
||||
accessTokenTrustedIp.ipAddress !== "::/0"
|
||||
)
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to add IP access range to access token due to plan restriction. Upgrade plan to add IP access range."
|
||||
});
|
||||
if (!isValidIpOrCidr(accessTokenTrustedIp.ipAddress))
|
||||
throw new BadRequestError({
|
||||
message: "The IP is not a valid IPv4, IPv6, or CIDR block"
|
||||
});
|
||||
return extractIPDetails(accessTokenTrustedIp.ipAddress);
|
||||
});
|
||||
|
||||
const updateQuery: TIdentityOidcAuthsUpdate = {
|
||||
oidcDiscoveryUrl,
|
||||
boundIssuer,
|
||||
boundAudiences,
|
||||
boundClaims,
|
||||
boundSubject,
|
||||
accessTokenMaxTTL,
|
||||
accessTokenTTL,
|
||||
accessTokenNumUsesLimit,
|
||||
accessTokenTrustedIps: reformattedAccessTokenTrustedIps
|
||||
? JSON.stringify(reformattedAccessTokenTrustedIps)
|
||||
: undefined
|
||||
};
|
||||
|
||||
const orgBot = await orgBotDAL.findOne({ orgId: identityMembershipOrg.orgId });
|
||||
if (!orgBot) {
|
||||
throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" });
|
||||
}
|
||||
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: orgBot.encryptedSymmetricKey,
|
||||
iv: orgBot.symmetricKeyIV,
|
||||
tag: orgBot.symmetricKeyTag,
|
||||
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
});
|
||||
|
||||
if (caCert !== undefined) {
|
||||
const { ciphertext: encryptedCACert, iv: caCertIV, tag: caCertTag } = encryptSymmetric(caCert, key);
|
||||
updateQuery.encryptedCaCert = encryptedCACert;
|
||||
updateQuery.caCertIV = caCertIV;
|
||||
updateQuery.caCertTag = caCertTag;
|
||||
}
|
||||
|
||||
const updatedOidcAuth = await identityOidcAuthDAL.updateById(identityOidcAuth.id, updateQuery);
|
||||
const updatedCACert =
|
||||
updatedOidcAuth.encryptedCaCert && updatedOidcAuth.caCertIV && updatedOidcAuth.caCertTag
|
||||
? decryptSymmetric({
|
||||
ciphertext: updatedOidcAuth.encryptedCaCert,
|
||||
iv: updatedOidcAuth.caCertIV,
|
||||
tag: updatedOidcAuth.caCertTag,
|
||||
key
|
||||
})
|
||||
: "";
|
||||
|
||||
return {
|
||||
...updatedOidcAuth,
|
||||
orgId: identityMembershipOrg.orgId,
|
||||
caCert: updatedCACert
|
||||
};
|
||||
};
|
||||
|
||||
const getOidcAuth = async ({ identityId, actorId, actor, actorAuthMethod, actorOrgId }: TGetOidcAuthDTO) => {
|
||||
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
|
||||
if (!identityMembershipOrg) {
|
||||
throw new BadRequestError({ message: "Failed to find identity" });
|
||||
}
|
||||
|
||||
if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.OIDC_AUTH) {
|
||||
throw new BadRequestError({
|
||||
message: "The identity does not have OIDC Auth attached"
|
||||
});
|
||||
}
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
identityMembershipOrg.orgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Identity);
|
||||
|
||||
const identityOidcAuth = await identityOidcAuthDAL.findOne({ identityId });
|
||||
|
||||
const orgBot = await orgBotDAL.findOne({ orgId: identityMembershipOrg.orgId });
|
||||
if (!orgBot) {
|
||||
throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" });
|
||||
}
|
||||
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: orgBot.encryptedSymmetricKey,
|
||||
iv: orgBot.symmetricKeyIV,
|
||||
tag: orgBot.symmetricKeyTag,
|
||||
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
});
|
||||
|
||||
const caCert = decryptSymmetric({
|
||||
ciphertext: identityOidcAuth.encryptedCaCert,
|
||||
iv: identityOidcAuth.caCertIV,
|
||||
tag: identityOidcAuth.caCertTag,
|
||||
key
|
||||
});
|
||||
|
||||
return { ...identityOidcAuth, orgId: identityMembershipOrg.orgId, caCert };
|
||||
};
|
||||
|
||||
const revokeOidcAuth = async ({ identityId, actorId, actor, actorAuthMethod, actorOrgId }: TRevokeOidcAuthDTO) => {
|
||||
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
|
||||
if (!identityMembershipOrg) {
|
||||
throw new BadRequestError({ message: "Failed to find identity" });
|
||||
}
|
||||
|
||||
if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.OIDC_AUTH) {
|
||||
throw new BadRequestError({
|
||||
message: "The identity does not have OIDC auth"
|
||||
});
|
||||
}
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
identityMembershipOrg.orgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
|
||||
|
||||
const { permission: rolePermission } = await permissionService.getOrgPermission(
|
||||
ActorType.IDENTITY,
|
||||
identityMembershipOrg.identityId,
|
||||
identityMembershipOrg.orgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
const hasPriviledge = isAtLeastAsPrivileged(permission, rolePermission);
|
||||
if (!hasPriviledge) {
|
||||
throw new ForbiddenRequestError({
|
||||
message: "Failed to revoke OIDC auth of identity with more privileged role"
|
||||
});
|
||||
}
|
||||
|
||||
const revokedIdentityOidcAuth = await identityOidcAuthDAL.transaction(async (tx) => {
|
||||
const deletedOidcAuth = await identityOidcAuthDAL.delete({ identityId }, tx);
|
||||
await identityDAL.updateById(identityId, { authMethod: null }, tx);
|
||||
return { ...deletedOidcAuth?.[0], orgId: identityMembershipOrg.orgId };
|
||||
});
|
||||
|
||||
return revokedIdentityOidcAuth;
|
||||
};
|
||||
|
||||
return {
|
||||
attachOidcAuth,
|
||||
updateOidcAuth,
|
||||
getOidcAuth,
|
||||
revokeOidcAuth,
|
||||
login
|
||||
};
|
||||
};
|
@@ -0,0 +1,42 @@
import { TProjectPermission } from "@app/lib/types";

export type TAttachOidcAuthDTO = {
  identityId: string;
  oidcDiscoveryUrl: string;
  caCert: string;
  boundIssuer: string;
  boundAudiences: string;
  boundClaims: Record<string, string>;
  boundSubject: string;
  accessTokenTTL: number;
  accessTokenMaxTTL: number;
  accessTokenNumUsesLimit: number;
  accessTokenTrustedIps: { ipAddress: string }[];
} & Omit<TProjectPermission, "projectId">;

export type TUpdateOidcAuthDTO = {
  identityId: string;
  oidcDiscoveryUrl?: string;
  caCert?: string;
  boundIssuer?: string;
  boundAudiences?: string;
  boundClaims?: Record<string, string>;
  boundSubject?: string;
  accessTokenTTL?: number;
  accessTokenMaxTTL?: number;
  accessTokenNumUsesLimit?: number;
  accessTokenTrustedIps?: { ipAddress: string }[];
} & Omit<TProjectPermission, "projectId">;

export type TGetOidcAuthDTO = {
  identityId: string;
} & Omit<TProjectPermission, "projectId">;

export type TLoginOidcAuthDTO = {
  identityId: string;
  jwt: string;
};

export type TRevokeOidcAuthDTO = {
  identityId: string;
} & Omit<TProjectPermission, "projectId">;
@@ -0,0 +1,25 @@
import { z } from "zod";

export const validateOidcAuthAudiencesField = z
  .string()
  .trim()
  .default("")
  .transform((data) => {
    if (data === "") return "";
    return data
      .split(",")
      .map((id) => id.trim())
      .join(", ");
  });

export const validateOidcBoundClaimsField = z.record(z.string()).transform((data) => {
  const formattedClaims: Record<string, string> = {};
  Object.keys(data).forEach((key) => {
    formattedClaims[key] = data[key]
      .split(",")
      .map((id) => id.trim())
      .join(", ");
  });

  return formattedClaims;
});
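For clarity, what the two validators above produce when parsed: both normalise comma-separated input to a `", "`-joined list with trimmed entries. The import path below is assumed for illustration; the example values are made up.

```ts
// Usage sketch — module path is an assumption, not shown in this diff.
import { validateOidcAuthAudiencesField, validateOidcBoundClaimsField } from "./identity-oidc-auth-validators";

validateOidcAuthAudiencesField.parse("api://infisical, spiffe://cluster ");
// -> "api://infisical, spiffe://cluster"

validateOidcAuthAudiencesField.parse(undefined);
// -> ""  (defaults to the empty string)

validateOidcBoundClaimsField.parse({ groups: "devs,ops", env: "prod" });
// -> { groups: "devs, ops", env: "prod" }  (each claim value normalised the same way)
```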
@@ -111,6 +111,7 @@ export const identityProjectDALFactory = (db: TDbClient) => {
    try {
      const docs = await (tx || db.replicaNode())(TableName.IdentityProjectMembership)
        .where(`${TableName.IdentityProjectMembership}.projectId`, projectId)
+       .join(TableName.Project, `${TableName.IdentityProjectMembership}.projectId`, `${TableName.Project}.id`)
        .join(TableName.Identity, `${TableName.IdentityProjectMembership}.identityId`, `${TableName.Identity}.id`)
        .where((qb) => {
          if (filter.identityId) {
@@ -149,12 +150,13 @@
          db.ref("isTemporary").withSchema(TableName.IdentityProjectMembershipRole),
          db.ref("temporaryRange").withSchema(TableName.IdentityProjectMembershipRole),
          db.ref("temporaryAccessStartTime").withSchema(TableName.IdentityProjectMembershipRole),
-         db.ref("temporaryAccessEndTime").withSchema(TableName.IdentityProjectMembershipRole)
+         db.ref("temporaryAccessEndTime").withSchema(TableName.IdentityProjectMembershipRole),
+         db.ref("name").as("projectName").withSchema(TableName.Project)
        );

      const members = sqlNestRelationships({
        data: docs,
-       parentMapper: ({ identityId, identityName, identityAuthMethod, id, createdAt, updatedAt }) => ({
+       parentMapper: ({ identityId, identityName, identityAuthMethod, id, createdAt, updatedAt, projectName }) => ({
          id,
          identityId,
          createdAt,
@@ -163,6 +165,10 @@
            id: identityId,
            name: identityName,
            authMethod: identityAuthMethod
          },
+         project: {
+           id: projectId,
+           name: projectName
+         }
        }),
        key: "id",
||||
|
@ -1,13 +1,13 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import jwt from "jsonwebtoken";
|
||||
|
||||
import { IdentityAuthMethod } from "@app/db/schemas";
|
||||
import { IdentityAuthMethod, TableName } from "@app/db/schemas";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { isAtLeastAsPrivileged } from "@app/lib/casl";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, ForbiddenRequestError } from "@app/lib/errors";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
|
||||
|
||||
import { ActorType, AuthTokenType } from "../auth/auth-type";
|
||||
@ -18,12 +18,13 @@ import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identit
|
||||
import { TIdentityTokenAuthDALFactory } from "./identity-token-auth-dal";
|
||||
import {
|
||||
TAttachTokenAuthDTO,
|
||||
TCreateTokenTokenAuthDTO,
|
||||
TCreateTokenAuthTokenDTO,
|
||||
TGetTokenAuthDTO,
|
||||
TGetTokensTokenAuthDTO,
|
||||
TGetTokenAuthTokensDTO,
|
||||
TRevokeTokenAuthDTO,
|
||||
TRevokeTokenAuthTokenDTO,
|
||||
TUpdateTokenAuthDTO,
|
||||
TUpdateTokenTokenAuthDTO
|
||||
TUpdateTokenAuthTokenDTO
|
||||
} from "./identity-token-auth-types";
|
||||
|
||||
type TIdentityTokenAuthServiceFactoryDep = {
|
||||
@ -33,7 +34,10 @@ type TIdentityTokenAuthServiceFactoryDep = {
|
||||
>;
|
||||
identityDAL: Pick<TIdentityDALFactory, "updateById">;
|
||||
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
|
||||
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "find" | "update">;
|
||||
identityAccessTokenDAL: Pick<
|
||||
TIdentityAccessTokenDALFactory,
|
||||
"create" | "find" | "update" | "findById" | "findOne" | "updateById"
|
||||
>;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
};
|
||||
@ -243,7 +247,7 @@ export const identityTokenAuthServiceFactory = ({
|
||||
);
|
||||
const hasPriviledge = isAtLeastAsPrivileged(permission, rolePermission);
|
||||
if (!hasPriviledge)
|
||||
throw new ForbiddenRequestError({
|
||||
throw new UnauthorizedError({
|
||||
message: "Failed to revoke Token Auth of identity with more privileged role"
|
||||
});
|
||||
|
||||
@ -255,14 +259,14 @@ export const identityTokenAuthServiceFactory = ({
|
||||
return revokedIdentityTokenAuth;
|
||||
};
|
||||
|
||||
const createTokenTokenAuth = async ({
|
||||
const createTokenAuthToken = async ({
|
||||
identityId,
|
||||
actorId,
|
||||
actor,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
name
|
||||
}: TCreateTokenTokenAuthDTO) => {
|
||||
}: TCreateTokenAuthTokenDTO) => {
|
||||
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
|
||||
if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" });
|
||||
if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.TOKEN_AUTH)
|
||||
@ -328,7 +332,7 @@ export const identityTokenAuthServiceFactory = ({
|
||||
return { accessToken, identityTokenAuth, identityAccessToken, identityMembershipOrg };
|
||||
};
|
||||
|
||||
const getTokensTokenAuth = async ({
|
||||
const getTokenAuthTokens = async ({
|
||||
identityId,
|
||||
offset = 0,
|
||||
limit = 20,
|
||||
@ -336,7 +340,7 @@ export const identityTokenAuthServiceFactory = ({
|
||||
actor,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
}: TGetTokensTokenAuthDTO) => {
|
||||
}: TGetTokenAuthTokensDTO) => {
|
||||
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
|
||||
if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" });
|
||||
if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.TOKEN_AUTH)
|
||||
@ -350,20 +354,7 @@ export const identityTokenAuthServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
|
||||
|
||||
const { permission: rolePermission } = await permissionService.getOrgPermission(
|
||||
ActorType.IDENTITY,
|
||||
identityMembershipOrg.identityId,
|
||||
identityMembershipOrg.orgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
const hasPriviledge = isAtLeastAsPrivileged(permission, rolePermission);
|
||||
if (!hasPriviledge)
|
||||
throw new ForbiddenRequestError({
|
||||
message: "Failed to get tokens for identity with more privileged role"
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Identity);
|
||||
|
||||
const tokens = await identityAccessTokenDAL.find(
|
||||
{
|
||||
@ -375,16 +366,17 @@ export const identityTokenAuthServiceFactory = ({
|
||||
return { tokens, identityMembershipOrg };
|
||||
};
|
||||
|
||||
const updateTokenTokenAuth = async ({
|
||||
identityId,
|
||||
const updateTokenAuthToken = async ({
|
||||
tokenId,
|
||||
name,
|
||||
actorId,
|
||||
actor,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
}: TUpdateTokenTokenAuthDTO) => {
|
||||
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
|
||||
}: TUpdateTokenAuthTokenDTO) => {
|
||||
const foundToken = await identityAccessTokenDAL.findById(tokenId);
|
||||
if (!foundToken) throw new NotFoundError({ message: "Failed to find token" });
|
||||
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId: foundToken.identityId });
|
||||
if (!identityMembershipOrg) throw new BadRequestError({ message: "Failed to find identity" });
|
||||
if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.TOKEN_AUTH)
|
||||
throw new BadRequestError({
|
||||
@ -414,7 +406,7 @@ export const identityTokenAuthServiceFactory = ({
|
||||
|
||||
const [token] = await identityAccessTokenDAL.update(
|
||||
{
|
||||
identityId,
|
||||
identityId: foundToken.identityId,
|
||||
id: tokenId
|
||||
},
|
||||
{
|
||||
@ -425,13 +417,54 @@ export const identityTokenAuthServiceFactory = ({
|
||||
return { token, identityMembershipOrg };
|
||||
};
|
||||
|
||||
const revokeTokenAuthToken = async ({
|
||||
tokenId,
|
||||
actorId,
|
||||
actor,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
}: TRevokeTokenAuthTokenDTO) => {
|
||||
const identityAccessToken = await identityAccessTokenDAL.findOne({
|
||||
[`${TableName.IdentityAccessToken}.id` as "id"]: tokenId,
|
||||
isAccessTokenRevoked: false
|
||||
});
|
||||
if (!identityAccessToken)
|
||||
throw new NotFoundError({
|
||||
message: "Failed to find token"
|
||||
});
|
||||
|
||||
const identityOrgMembership = await identityOrgMembershipDAL.findOne({
|
||||
identityId: identityAccessToken.identityId
|
||||
});
|
||||
|
||||
if (!identityOrgMembership) {
|
||||
throw new UnauthorizedError({ message: "Identity does not belong to any organization" });
|
||||
}
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
identityOrgMembership.orgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
|
||||
|
||||
const revokedToken = await identityAccessTokenDAL.updateById(identityAccessToken.id, {
|
||||
isAccessTokenRevoked: true
|
||||
});
|
||||
|
||||
return { revokedToken };
|
||||
};
|
||||
|
||||
return {
|
||||
attachTokenAuth,
|
||||
updateTokenAuth,
|
||||
getTokenAuth,
|
||||
revokeIdentityTokenAuth,
|
||||
createTokenTokenAuth,
|
||||
getTokensTokenAuth,
|
||||
updateTokenTokenAuth
|
||||
createTokenAuthToken,
|
||||
getTokenAuthTokens,
|
||||
updateTokenAuthToken,
|
||||
revokeTokenAuthToken
|
||||
};
|
||||
};
|
||||
|
@ -24,19 +24,22 @@ export type TRevokeTokenAuthDTO = {
|
||||
identityId: string;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TCreateTokenTokenAuthDTO = {
|
||||
export type TCreateTokenAuthTokenDTO = {
|
||||
identityId: string;
|
||||
name?: string;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TGetTokensTokenAuthDTO = {
|
||||
export type TGetTokenAuthTokensDTO = {
|
||||
identityId: string;
|
||||
offset: number;
|
||||
limit: number;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TUpdateTokenTokenAuthDTO = {
|
||||
identityId: string;
|
||||
export type TUpdateTokenAuthTokenDTO = {
|
||||
tokenId: string;
|
||||
name?: string;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TRevokeTokenAuthTokenDTO = {
|
||||
tokenId: string;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
@ -5,7 +5,7 @@ import { TLicenseServiceFactory } from "@app/ee/services/license/license-service
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { isAtLeastAsPrivileged } from "@app/lib/casl";
|
||||
import { BadRequestError, ForbiddenRequestError } from "@app/lib/errors";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { TOrgPermission } from "@app/lib/types";
|
||||
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
|
||||
|
||||
@ -213,7 +213,7 @@ export const identityServiceFactory = ({
|
||||
actorOrgId
|
||||
}: TListProjectIdentitiesByIdentityIdDTO) => {
|
||||
const identityOrgMembership = await identityOrgMembershipDAL.findOne({ identityId });
|
||||
if (!identityOrgMembership) throw new BadRequestError({ message: `Failed to find identity with id ${identityId}` });
|
||||
if (!identityOrgMembership) throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` });
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
|
@ -11,6 +11,9 @@ import {
|
||||
} from "@app/db/schemas";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
|
||||
import { TSecretApprovalRequestDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-dal";
|
||||
import { TSecretApprovalRequestSecretDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-secret-dal";
|
||||
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import {
|
||||
@ -18,9 +21,10 @@ import {
|
||||
decryptSymmetric128BitHexKeyUTF8,
|
||||
encryptSymmetric128BitHexKeyUTF8
|
||||
} from "@app/lib/crypto";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { groupBy, pick } from "@app/lib/fn";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { ActorType } from "../auth/auth-type";
|
||||
import { TProjectDALFactory } from "../project/project-dal";
|
||||
@ -44,6 +48,7 @@ import {
|
||||
} from "./secret-fns";
|
||||
import { TSecretQueueFactory } from "./secret-queue";
|
||||
import {
|
||||
SecretOperations,
|
||||
TAttachSecretTagsDTO,
|
||||
TBackFillSecretReferencesDTO,
|
||||
TCreateBulkSecretDTO,
|
||||
@ -59,6 +64,7 @@ import {
|
||||
TGetSecretsDTO,
|
||||
TGetSecretsRawDTO,
|
||||
TGetSecretVersionsDTO,
|
||||
TMoveSecretsDTO,
|
||||
TUpdateBulkSecretDTO,
|
||||
TUpdateManySecretRawDTO,
|
||||
TUpdateSecretDTO,
|
||||
@ -84,6 +90,12 @@ type TSecretServiceFactoryDep = {
|
||||
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
|
||||
secretImportDAL: Pick<TSecretImportDALFactory, "find" | "findByFolderIds">;
|
||||
secretVersionTagDAL: Pick<TSecretVersionTagDALFactory, "insertMany">;
|
||||
secretApprovalPolicyService: Pick<TSecretApprovalPolicyServiceFactory, "getSecretApprovalPolicy">;
|
||||
secretApprovalRequestDAL: Pick<TSecretApprovalRequestDALFactory, "create" | "transaction">;
|
||||
secretApprovalRequestSecretDAL: Pick<
|
||||
TSecretApprovalRequestSecretDALFactory,
|
||||
"insertMany" | "insertApprovalSecretTags"
|
||||
>;
|
||||
};
|
||||
|
||||
export type TSecretServiceFactory = ReturnType<typeof secretServiceFactory>;
|
||||
@ -100,7 +112,10 @@ export const secretServiceFactory = ({
|
||||
projectDAL,
|
||||
projectBotService,
|
||||
secretImportDAL,
|
||||
secretVersionTagDAL
|
||||
secretVersionTagDAL,
|
||||
secretApprovalPolicyService,
|
||||
secretApprovalRequestDAL,
|
||||
secretApprovalRequestSecretDAL
|
||||
}: TSecretServiceFactoryDep) => {
|
||||
const getSecretReference = async (projectId: string) => {
|
||||
// if the bot key is missing, it means end-to-end encryption (E2EE) is still enabled for the project
|
||||
@ -1683,6 +1698,393 @@ export const secretServiceFactory = ({
|
||||
return { message: "Successfully backfilled secret references" };
|
||||
};
|
||||
|
||||
const moveSecrets = async ({
|
||||
sourceEnvironment,
|
||||
sourceSecretPath,
|
||||
destinationEnvironment,
|
||||
destinationSecretPath,
|
||||
secretIds,
|
||||
projectSlug,
|
||||
shouldOverwrite,
|
||||
actor,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
}: TMoveSecretsDTO) => {
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) {
|
||||
throw new NotFoundError({
|
||||
message: "Project not found."
|
||||
});
|
||||
}
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
project.id,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Delete,
|
||||
subject(ProjectPermissionSub.Secrets, { environment: sourceEnvironment, secretPath: sourceSecretPath })
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Create,
|
||||
subject(ProjectPermissionSub.Secrets, { environment: destinationEnvironment, secretPath: destinationSecretPath })
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
subject(ProjectPermissionSub.Secrets, { environment: destinationEnvironment, secretPath: destinationSecretPath })
|
||||
);
|
||||
|
||||
const botKey = await projectBotService.getBotKey(project.id);
|
||||
if (!botKey) {
|
||||
throw new BadRequestError({ message: "Project bot not found", name: "bot_not_found_error" });
|
||||
}
|
||||
|
||||
const sourceFolder = await folderDAL.findBySecretPath(project.id, sourceEnvironment, sourceSecretPath);
|
||||
if (!sourceFolder) {
|
||||
throw new NotFoundError({
|
||||
message: "Source path does not exist."
|
||||
});
|
||||
}
|
||||
|
||||
const destinationFolder = await folderDAL.findBySecretPath(
|
||||
project.id,
|
||||
destinationEnvironment,
|
||||
destinationSecretPath
|
||||
);
|
||||
|
||||
if (!destinationFolder) {
|
||||
throw new NotFoundError({
|
||||
message: "Destination path does not exist."
|
||||
});
|
||||
}
|
||||
|
||||
const sourceSecrets = await secretDAL.find({
|
||||
type: SecretType.Shared,
|
||||
$in: {
|
||||
id: secretIds
|
||||
}
|
||||
});
|
||||
|
||||
if (sourceSecrets.length !== secretIds.length) {
|
||||
throw new BadRequestError({
|
||||
message: "Invalid secrets"
|
||||
});
|
||||
}
|
||||
|
||||
const decryptedSourceSecrets = sourceSecrets.map((secret) => ({
|
||||
...secret,
|
||||
secretKey: decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretKeyCiphertext,
|
||||
iv: secret.secretKeyIV,
|
||||
tag: secret.secretKeyTag,
|
||||
key: botKey
|
||||
}),
|
||||
secretValue: decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretValueCiphertext,
|
||||
iv: secret.secretValueIV,
|
||||
tag: secret.secretValueTag,
|
||||
key: botKey
|
||||
})
|
||||
}));
|
||||
|
||||
let isSourceUpdated = false;
|
||||
let isDestinationUpdated = false;
|
||||
|
||||
// Moving secrets is a two-step process.
|
||||
await secretDAL.transaction(async (tx) => {
|
||||
// First step is to create/update the secret in the destination:
|
||||
const destinationSecretsFromDB = await secretDAL.find(
|
||||
{
|
||||
folderId: destinationFolder.id
|
||||
},
|
||||
{ tx }
|
||||
);
|
||||
|
||||
const decryptedDestinationSecrets = destinationSecretsFromDB.map((secret) => {
|
||||
return {
|
||||
...secret,
|
||||
secretKey: decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretKeyCiphertext,
|
||||
iv: secret.secretKeyIV,
|
||||
tag: secret.secretKeyTag,
|
||||
key: botKey
|
||||
}),
|
||||
secretValue: decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretValueCiphertext,
|
||||
iv: secret.secretValueIV,
|
||||
tag: secret.secretValueTag,
|
||||
key: botKey
|
||||
})
|
||||
};
|
||||
});
|
||||
|
||||
const destinationSecretsGroupedByBlindIndex = groupBy(
|
||||
decryptedDestinationSecrets.filter(({ secretBlindIndex }) => Boolean(secretBlindIndex)),
|
||||
(i) => i.secretBlindIndex as string
|
||||
);
|
||||
|
||||
const locallyCreatedSecrets = decryptedSourceSecrets
|
||||
.filter(({ secretBlindIndex }) => !destinationSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0])
|
||||
.map((el) => ({ ...el, operation: SecretOperations.Create }));
|
||||
|
||||
const locallyUpdatedSecrets = decryptedSourceSecrets
|
||||
.filter(
|
||||
({ secretBlindIndex, secretKey, secretValue }) =>
|
||||
destinationSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0] &&
|
||||
// if key or value changed
|
||||
(destinationSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0]?.secretKey !== secretKey ||
|
||||
destinationSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0]?.secretValue !== secretValue)
|
||||
)
|
||||
.map((el) => ({ ...el, operation: SecretOperations.Update }));
|
||||
|
||||
if (locallyUpdatedSecrets.length > 0 && !shouldOverwrite) {
|
||||
const existingKeys = locallyUpdatedSecrets.map((s) => s.secretKey);
|
||||
|
||||
throw new BadRequestError({
|
||||
message: `Failed to move secrets. The following secrets already exist in the destination: ${existingKeys.join(
|
||||
","
|
||||
)}`
|
||||
});
|
||||
}
|
||||
|
||||
const isEmpty = locallyCreatedSecrets.length + locallyUpdatedSecrets.length === 0;
|
||||
|
||||
if (isEmpty) {
|
||||
throw new BadRequestError({
|
||||
message: "Selected secrets already exist in the destination."
|
||||
});
|
||||
}
|
||||
const destinationFolderPolicy = await secretApprovalPolicyService.getSecretApprovalPolicy(
|
||||
project.id,
|
||||
destinationFolder.environment.slug,
|
||||
destinationFolder.path
|
||||
);
|
||||
|
||||
if (destinationFolderPolicy && actor === ActorType.USER) {
|
||||
// if secret approval policy exists for destination, we create the secret approval request
|
||||
const localSecretsIds = decryptedDestinationSecrets.map(({ id }) => id);
|
||||
const latestSecretVersions = await secretVersionDAL.findLatestVersionMany(
|
||||
destinationFolder.id,
|
||||
localSecretsIds,
|
||||
tx
|
||||
);
|
||||
|
||||
const approvalRequestDoc = await secretApprovalRequestDAL.create(
|
||||
{
|
||||
folderId: destinationFolder.id,
|
||||
slug: alphaNumericNanoId(),
|
||||
policyId: destinationFolderPolicy.id,
|
||||
status: "open",
|
||||
hasMerged: false,
|
||||
committerUserId: actorId
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
const commits = locallyCreatedSecrets.concat(locallyUpdatedSecrets).map((doc) => {
|
||||
const { operation } = doc;
|
||||
const localSecret = destinationSecretsGroupedByBlindIndex[doc.secretBlindIndex as string]?.[0];
|
||||
|
||||
return {
|
||||
op: operation,
|
||||
keyEncoding: doc.keyEncoding,
|
||||
algorithm: doc.algorithm,
|
||||
requestId: approvalRequestDoc.id,
|
||||
metadata: doc.metadata,
|
||||
secretKeyIV: doc.secretKeyIV,
|
||||
secretKeyTag: doc.secretKeyTag,
|
||||
secretKeyCiphertext: doc.secretKeyCiphertext,
|
||||
secretValueIV: doc.secretValueIV,
|
||||
secretValueTag: doc.secretValueTag,
|
||||
secretValueCiphertext: doc.secretValueCiphertext,
|
||||
secretBlindIndex: doc.secretBlindIndex,
|
||||
secretCommentIV: doc.secretCommentIV,
|
||||
secretCommentTag: doc.secretCommentTag,
|
||||
secretCommentCiphertext: doc.secretCommentCiphertext,
|
||||
skipMultilineEncoding: doc.skipMultilineEncoding,
|
||||
// except for the create operation, the other two operations need the secret id and version id
|
||||
...(operation !== SecretOperations.Create
|
||||
? { secretId: localSecret.id, secretVersion: latestSecretVersions[localSecret.id].id }
|
||||
: {})
|
||||
};
|
||||
});
|
||||
await secretApprovalRequestSecretDAL.insertMany(commits, tx);
|
||||
} else {
|
||||
// apply changes directly
|
||||
if (locallyCreatedSecrets.length) {
|
||||
await fnSecretBulkInsert({
|
||||
folderId: destinationFolder.id,
|
||||
secretVersionDAL,
|
||||
secretDAL,
|
||||
tx,
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL,
|
||||
inputSecrets: locallyCreatedSecrets.map((doc) => {
|
||||
return {
|
||||
keyEncoding: doc.keyEncoding,
|
||||
algorithm: doc.algorithm,
|
||||
type: doc.type,
|
||||
metadata: doc.metadata,
|
||||
secretKeyIV: doc.secretKeyIV,
|
||||
secretKeyTag: doc.secretKeyTag,
|
||||
secretKeyCiphertext: doc.secretKeyCiphertext,
|
||||
secretValueIV: doc.secretValueIV,
|
||||
secretValueTag: doc.secretValueTag,
|
||||
secretValueCiphertext: doc.secretValueCiphertext,
|
||||
secretBlindIndex: doc.secretBlindIndex,
|
||||
secretCommentIV: doc.secretCommentIV,
|
||||
secretCommentTag: doc.secretCommentTag,
|
||||
secretCommentCiphertext: doc.secretCommentCiphertext,
|
||||
skipMultilineEncoding: doc.skipMultilineEncoding
|
||||
};
|
||||
})
|
||||
});
|
||||
}
|
||||
if (locallyUpdatedSecrets.length) {
|
||||
await fnSecretBulkUpdate({
|
||||
projectId: project.id,
|
||||
folderId: destinationFolder.id,
|
||||
secretVersionDAL,
|
||||
secretDAL,
|
||||
tx,
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL,
|
||||
inputSecrets: locallyUpdatedSecrets.map((doc) => {
|
||||
return {
|
||||
filter: {
|
||||
folderId: destinationFolder.id,
|
||||
id: destinationSecretsGroupedByBlindIndex[doc.secretBlindIndex as string][0].id
|
||||
},
|
||||
data: {
|
||||
keyEncoding: doc.keyEncoding,
|
||||
algorithm: doc.algorithm,
|
||||
type: doc.type,
|
||||
metadata: doc.metadata,
|
||||
secretKeyIV: doc.secretKeyIV,
|
||||
secretKeyTag: doc.secretKeyTag,
|
||||
secretKeyCiphertext: doc.secretKeyCiphertext,
|
||||
secretValueIV: doc.secretValueIV,
|
||||
secretValueTag: doc.secretValueTag,
|
||||
secretValueCiphertext: doc.secretValueCiphertext,
|
||||
secretBlindIndex: doc.secretBlindIndex,
|
||||
secretCommentIV: doc.secretCommentIV,
|
||||
secretCommentTag: doc.secretCommentTag,
|
||||
secretCommentCiphertext: doc.secretCommentCiphertext,
|
||||
skipMultilineEncoding: doc.skipMultilineEncoding
|
||||
}
|
||||
};
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
isDestinationUpdated = true;
|
||||
}
|
||||
|
||||
// Next step is to delete the secrets from the source folder:
|
||||
const sourceSecretsGroupByBlindIndex = groupBy(sourceSecrets, (i) => i.secretBlindIndex as string);
|
||||
const locallyDeletedSecrets = decryptedSourceSecrets.map((el) => ({ ...el, operation: SecretOperations.Delete }));
|
||||
|
||||
const sourceFolderPolicy = await secretApprovalPolicyService.getSecretApprovalPolicy(
|
||||
project.id,
|
||||
sourceFolder.environment.slug,
|
||||
sourceFolder.path
|
||||
);
|
||||
|
||||
if (sourceFolderPolicy && actor === ActorType.USER) {
|
||||
// if secret approval policy exists for source, we create the secret approval request
|
||||
const localSecretsIds = decryptedSourceSecrets.map(({ id }) => id);
|
||||
const latestSecretVersions = await secretVersionDAL.findLatestVersionMany(sourceFolder.id, localSecretsIds, tx);
|
||||
const approvalRequestDoc = await secretApprovalRequestDAL.create(
|
||||
{
|
||||
folderId: sourceFolder.id,
|
||||
slug: alphaNumericNanoId(),
|
||||
policyId: sourceFolderPolicy.id,
|
||||
status: "open",
|
||||
hasMerged: false,
|
||||
committerUserId: actorId
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
const commits = locallyDeletedSecrets.map((doc) => {
|
||||
const { operation } = doc;
|
||||
const localSecret = sourceSecretsGroupByBlindIndex[doc.secretBlindIndex as string]?.[0];
|
||||
|
||||
return {
|
||||
op: operation,
|
||||
keyEncoding: doc.keyEncoding,
|
||||
algorithm: doc.algorithm,
|
||||
requestId: approvalRequestDoc.id,
|
||||
metadata: doc.metadata,
|
||||
secretKeyIV: doc.secretKeyIV,
|
||||
secretKeyTag: doc.secretKeyTag,
|
||||
secretKeyCiphertext: doc.secretKeyCiphertext,
|
||||
secretValueIV: doc.secretValueIV,
|
||||
secretValueTag: doc.secretValueTag,
|
||||
secretValueCiphertext: doc.secretValueCiphertext,
|
||||
secretBlindIndex: doc.secretBlindIndex,
|
||||
secretCommentIV: doc.secretCommentIV,
|
||||
secretCommentTag: doc.secretCommentTag,
|
||||
secretCommentCiphertext: doc.secretCommentCiphertext,
|
||||
skipMultilineEncoding: doc.skipMultilineEncoding,
|
||||
secretId: localSecret.id,
|
||||
secretVersion: latestSecretVersions[localSecret.id].id
|
||||
};
|
||||
});
|
||||
|
||||
await secretApprovalRequestSecretDAL.insertMany(commits, tx);
|
||||
} else {
|
||||
// if no secret approval policy is present, we delete directly.
|
||||
await secretDAL.delete(
|
||||
{
|
||||
$in: {
|
||||
id: locallyDeletedSecrets.map(({ id }) => id)
|
||||
},
|
||||
folderId: sourceFolder.id
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
isSourceUpdated = true;
|
||||
}
|
||||
});
|
||||
|
||||
if (isDestinationUpdated) {
|
||||
await snapshotService.performSnapshot(destinationFolder.id);
|
||||
await secretQueueService.syncSecrets({
|
||||
projectId: project.id,
|
||||
secretPath: destinationFolder.path,
|
||||
environmentSlug: destinationFolder.environment.slug,
|
||||
actorId,
|
||||
actor
|
||||
});
|
||||
}
|
||||
|
||||
if (isSourceUpdated) {
|
||||
await snapshotService.performSnapshot(sourceFolder.id);
|
||||
await secretQueueService.syncSecrets({
|
||||
projectId: project.id,
|
||||
secretPath: sourceFolder.path,
|
||||
environmentSlug: sourceFolder.environment.slug,
|
||||
actorId,
|
||||
actor
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
projectId: project.id,
|
||||
isSourceUpdated,
|
||||
isDestinationUpdated
|
||||
};
|
||||
};
|
||||
|
||||
return {
|
||||
attachTags,
|
||||
detachTags,
|
||||
@ -1703,6 +2105,7 @@ export const secretServiceFactory = ({
|
||||
updateManySecretsRaw,
|
||||
deleteManySecretsRaw,
|
||||
getSecretVersions,
|
||||
backfillSecretReferences
|
||||
backfillSecretReferences,
|
||||
moveSecrets
|
||||
};
|
||||
};
|
||||
|
@ -397,3 +397,13 @@ export type TSyncSecretsDTO<T extends boolean = false> = {
|
||||
// used for import creation to trigger replication
|
||||
pickOnlyImportIds?: string[];
|
||||
});
|
||||
|
||||
export type TMoveSecretsDTO = {
|
||||
projectSlug: string;
|
||||
sourceEnvironment: string;
|
||||
sourceSecretPath: string;
|
||||
destinationEnvironment: string;
|
||||
destinationSecretPath: string;
|
||||
secretIds: string[];
|
||||
shouldOverwrite: boolean;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
@ -12,7 +12,7 @@ import { AuthMethod } from "../auth/auth-type";
|
||||
import { TOrgServiceFactory } from "../org/org-service";
|
||||
import { TUserDALFactory } from "../user/user-dal";
|
||||
import { TSuperAdminDALFactory } from "./super-admin-dal";
|
||||
import { TAdminSignUpDTO } from "./super-admin-types";
|
||||
import { LoginMethod, TAdminGetUsersDTO, TAdminSignUpDTO } from "./super-admin-types";
|
||||
|
||||
type TSuperAdminServiceFactoryDep = {
|
||||
serverCfgDAL: TSuperAdminDALFactory;
|
||||
@ -79,7 +79,37 @@ export const superAdminServiceFactory = ({
|
||||
return newCfg;
|
||||
};
|
||||
|
||||
const updateServerCfg = async (data: TSuperAdminUpdate) => {
|
||||
const updateServerCfg = async (data: TSuperAdminUpdate, userId: string) => {
|
||||
if (data.enabledLoginMethods) {
|
||||
const superAdminUser = await userDAL.findById(userId);
|
||||
const loginMethodToAuthMethod = {
|
||||
[LoginMethod.EMAIL]: [AuthMethod.EMAIL],
|
||||
[LoginMethod.GOOGLE]: [AuthMethod.GOOGLE],
|
||||
[LoginMethod.GITLAB]: [AuthMethod.GITLAB],
|
||||
[LoginMethod.GITHUB]: [AuthMethod.GITHUB],
|
||||
[LoginMethod.LDAP]: [AuthMethod.LDAP],
|
||||
[LoginMethod.OIDC]: [AuthMethod.OIDC],
|
||||
[LoginMethod.SAML]: [
|
||||
AuthMethod.AZURE_SAML,
|
||||
AuthMethod.GOOGLE_SAML,
|
||||
AuthMethod.JUMPCLOUD_SAML,
|
||||
AuthMethod.KEYCLOAK_SAML,
|
||||
AuthMethod.OKTA_SAML
|
||||
]
|
||||
};
|
||||
|
||||
if (
|
||||
!data.enabledLoginMethods.some((loginMethod) =>
|
||||
loginMethodToAuthMethod[loginMethod as LoginMethod].some(
|
||||
(authMethod) => superAdminUser.authMethods?.includes(authMethod)
|
||||
)
|
||||
)
|
||||
) {
|
||||
throw new BadRequestError({
|
||||
message: "You must configure at least one auth method to prevent account lockout"
|
||||
});
|
||||
}
|
||||
}
|
||||
const updatedServerCfg = await serverCfgDAL.updateById(ADMIN_CONFIG_DB_UUID, data);
|
||||
|
||||
await keyStore.setItemWithExpiry(ADMIN_CONFIG_KEY, ADMIN_CONFIG_KEY_EXP, JSON.stringify(updatedServerCfg));
|
||||
@ -167,7 +197,7 @@ export const superAdminServiceFactory = ({
|
||||
orgName: initialOrganizationName
|
||||
});
|
||||
|
||||
await updateServerCfg({ initialized: true });
|
||||
await updateServerCfg({ initialized: true }, userInfo.user.id);
|
||||
const token = await authService.generateUserTokens({
|
||||
user: userInfo.user,
|
||||
authMethod: AuthMethod.EMAIL,
|
||||
@ -179,9 +209,25 @@ export const superAdminServiceFactory = ({
|
||||
return { token, user: userInfo, organization };
|
||||
};
|
||||
|
||||
const getUsers = ({ offset, limit, searchTerm }: TAdminGetUsersDTO) => {
|
||||
return userDAL.getUsersByFilter({
|
||||
limit,
|
||||
offset,
|
||||
searchTerm,
|
||||
sortBy: "username"
|
||||
});
|
||||
};
|
||||
|
||||
const deleteUser = async (userId: string) => {
|
||||
const user = await userDAL.deleteById(userId);
|
||||
return user;
|
||||
};
|
||||
|
||||
return {
|
||||
initServerCfg,
|
||||
updateServerCfg,
|
||||
adminSignUp
|
||||
adminSignUp,
|
||||
getUsers,
|
||||
deleteUser
|
||||
};
|
||||
};
|
||||
|
@ -15,3 +15,19 @@ export type TAdminSignUpDTO = {
|
||||
ip: string;
|
||||
userAgent: string;
|
||||
};
|
||||
|
||||
export type TAdminGetUsersDTO = {
|
||||
offset: number;
|
||||
limit: number;
|
||||
searchTerm: string;
|
||||
};
|
||||
|
||||
export enum LoginMethod {
|
||||
EMAIL = "email",
|
||||
GOOGLE = "google",
|
||||
GITHUB = "github",
|
||||
GITLAB = "gitlab",
|
||||
SAML = "saml",
|
||||
LDAP = "ldap",
|
||||
OIDC = "oidc"
|
||||
}
|
||||
|
@ -0,0 +1,89 @@
|
||||
import { PlainClient } from "@team-plain/typescript-sdk";
|
||||
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { InternalServerError } from "@app/lib/errors";
|
||||
|
||||
import { TUserDALFactory } from "../user/user-dal";
|
||||
|
||||
type TUserEngagementServiceFactoryDep = {
|
||||
userDAL: Pick<TUserDALFactory, "findById">;
|
||||
};
|
||||
|
||||
export type TUserEngagementServiceFactory = ReturnType<typeof userEngagementServiceFactory>;
|
||||
|
||||
export const userEngagementServiceFactory = ({ userDAL }: TUserEngagementServiceFactoryDep) => {
|
||||
const createUserWish = async (userId: string, text: string) => {
|
||||
const user = await userDAL.findById(userId);
|
||||
const appCfg = getConfig();
|
||||
|
||||
if (!appCfg.PLAIN_API_KEY) {
|
||||
throw new InternalServerError({
|
||||
message: "Plain is not configured."
|
||||
});
|
||||
}
|
||||
|
||||
const client = new PlainClient({
|
||||
apiKey: appCfg.PLAIN_API_KEY
|
||||
});
|
||||
|
||||
const customerUpsertRes = await client.upsertCustomer({
|
||||
identifier: {
|
||||
emailAddress: user.email
|
||||
},
|
||||
onCreate: {
|
||||
fullName: `${user.firstName} ${user.lastName}`,
|
||||
shortName: user.firstName,
|
||||
email: {
|
||||
email: user.email as string,
|
||||
isVerified: user.isEmailVerified as boolean
|
||||
},
|
||||
|
||||
externalId: user.id
|
||||
},
|
||||
|
||||
onUpdate: {
|
||||
fullName: {
|
||||
value: `${user.firstName} ${user.lastName}`
|
||||
},
|
||||
shortName: {
|
||||
value: user.firstName
|
||||
},
|
||||
email: {
|
||||
email: user.email as string,
|
||||
isVerified: user.isEmailVerified as boolean
|
||||
},
|
||||
externalId: {
|
||||
value: user.id
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (customerUpsertRes.error) {
|
||||
throw new InternalServerError({ message: customerUpsertRes.error.message });
|
||||
}
|
||||
|
||||
const createThreadRes = await client.createThread({
|
||||
title: "Wish",
|
||||
customerIdentifier: {
|
||||
externalId: customerUpsertRes.data.customer.externalId
|
||||
},
|
||||
components: [
|
||||
{
|
||||
componentText: {
|
||||
text
|
||||
}
|
||||
}
|
||||
],
|
||||
labelTypeIds: appCfg.PLAIN_WISH_LABEL_IDS?.split(",")
|
||||
});
|
||||
|
||||
if (createThreadRes.error) {
|
||||
throw new InternalServerError({
|
||||
message: createThreadRes.error.message
|
||||
});
|
||||
}
|
||||
};
|
||||
return {
|
||||
createUserWish
|
||||
};
|
||||
};
|
@ -7,10 +7,11 @@ import {
|
||||
TUserActionsUpdate,
|
||||
TUserEncryptionKeys,
|
||||
TUserEncryptionKeysInsert,
|
||||
TUserEncryptionKeysUpdate
|
||||
TUserEncryptionKeysUpdate,
|
||||
TUsers
|
||||
} from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import { ormify } from "@app/lib/knex";
|
||||
import { ormify, selectAllTableCols } from "@app/lib/knex";
|
||||
|
||||
export type TUserDALFactory = ReturnType<typeof userDALFactory>;
|
||||
|
||||
@ -18,6 +19,39 @@ export const userDALFactory = (db: TDbClient) => {
|
||||
const userOrm = ormify(db, TableName.Users);
|
||||
const findUserByUsername = async (username: string, tx?: Knex) => userOrm.findOne({ username }, tx);
|
||||
|
||||
const getUsersByFilter = async ({
|
||||
limit,
|
||||
offset,
|
||||
searchTerm,
|
||||
sortBy
|
||||
}: {
|
||||
limit: number;
|
||||
offset: number;
|
||||
searchTerm: string;
|
||||
sortBy?: keyof TUsers;
|
||||
}) => {
|
||||
try {
|
||||
let query = db.replicaNode()(TableName.Users).where("isGhost", "=", false);
|
||||
if (searchTerm) {
|
||||
query = query.where((qb) => {
|
||||
void qb
|
||||
.whereILike("email", `%${searchTerm}%`)
|
||||
.orWhereILike("firstName", `%${searchTerm}%`)
|
||||
.orWhereILike("lastName", `%${searchTerm}%`)
|
||||
.orWhereLike("username", `%${searchTerm}%`);
|
||||
});
|
||||
}
|
||||
|
||||
if (sortBy) {
|
||||
query = query.orderBy(sortBy);
|
||||
}
|
||||
|
||||
return await query.limit(limit).offset(offset).select(selectAllTableCols(TableName.Users));
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Get users by filter" });
|
||||
}
|
||||
};
|
||||
|
||||
// USER ENCRYPTION FUNCTIONS
|
||||
// -------------------------
|
||||
const findUserEncKeyByUsername = async ({ username }: { username: string }) => {
|
||||
@ -159,6 +193,7 @@ export const userDALFactory = (db: TDbClient) => {
|
||||
upsertUserEncryptionKey,
|
||||
createUserEncryption,
|
||||
findOneUserAction,
|
||||
createUserAction
|
||||
createUserAction,
|
||||
getUsersByFilter
|
||||
};
|
||||
};
|
||||
|
@ -201,7 +201,7 @@ export const userServiceFactory = ({
|
||||
return user;
|
||||
};
|
||||
|
||||
const deleteMe = async (userId: string) => {
|
||||
const deleteUser = async (userId: string) => {
|
||||
const user = await userDAL.deleteById(userId);
|
||||
return user;
|
||||
};
|
||||
@ -301,7 +301,7 @@ export const userServiceFactory = ({
|
||||
toggleUserMfa,
|
||||
updateUserName,
|
||||
updateAuthMethods,
|
||||
deleteMe,
|
||||
deleteUser,
|
||||
getMe,
|
||||
createUserAction,
|
||||
getUserAction,
|
||||
|
@ -4,55 +4,63 @@ import { AxiosError } from "axios";
|
||||
import picomatch from "picomatch";
|
||||
|
||||
import { SecretKeyEncoding, TWebhooks } from "@app/db/schemas";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { request } from "@app/lib/config/request";
|
||||
import { decryptSymmetric, decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
|
||||
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { logger } from "@app/lib/logger";
|
||||
|
||||
import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
|
||||
import { TWebhookDALFactory } from "./webhook-dal";
|
||||
import { WebhookType } from "./webhook-types";
|
||||
|
||||
const WEBHOOK_TRIGGER_TIMEOUT = 15 * 1000;
|
||||
export const triggerWebhookRequest = async (
|
||||
{ url, encryptedSecretKey, iv, tag, keyEncoding }: TWebhooks,
|
||||
data: Record<string, unknown>
|
||||
) => {
|
||||
const headers: Record<string, string> = {};
|
||||
const payload = { ...data, timestamp: Date.now() };
|
||||
const appCfg = getConfig();
|
||||
|
||||
export const decryptWebhookDetails = (webhook: TWebhooks) => {
|
||||
const { keyEncoding, iv, encryptedSecretKey, tag, urlCipherText, urlIV, urlTag, url } = webhook;
|
||||
|
||||
let decryptedSecretKey = "";
|
||||
let decryptedUrl = url;
|
||||
|
||||
if (encryptedSecretKey) {
|
||||
const encryptionKey = appCfg.ENCRYPTION_KEY;
|
||||
const rootEncryptionKey = appCfg.ROOT_ENCRYPTION_KEY;
|
||||
let secretKey;
|
||||
if (rootEncryptionKey && keyEncoding === SecretKeyEncoding.BASE64) {
|
||||
// case: encoding scheme is base64
|
||||
secretKey = decryptSymmetric({
|
||||
ciphertext: encryptedSecretKey,
|
||||
iv: iv as string,
|
||||
tag: tag as string,
|
||||
key: rootEncryptionKey
|
||||
});
|
||||
} else if (encryptionKey && keyEncoding === SecretKeyEncoding.UTF8) {
|
||||
// case: encoding scheme is utf8
|
||||
secretKey = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: encryptedSecretKey,
|
||||
iv: iv as string,
|
||||
tag: tag as string,
|
||||
key: encryptionKey
|
||||
});
|
||||
}
|
||||
if (secretKey) {
|
||||
const webhookSign = crypto.createHmac("sha256", secretKey).update(JSON.stringify(payload)).digest("hex");
|
||||
headers["x-infisical-signature"] = `t=${payload.timestamp};${webhookSign}`;
|
||||
}
|
||||
decryptedSecretKey = infisicalSymmetricDecrypt({
|
||||
keyEncoding: keyEncoding as SecretKeyEncoding,
|
||||
ciphertext: encryptedSecretKey,
|
||||
iv: iv as string,
|
||||
tag: tag as string
|
||||
});
|
||||
}
|
||||
|
||||
if (urlCipherText) {
|
||||
decryptedUrl = infisicalSymmetricDecrypt({
|
||||
keyEncoding: keyEncoding as SecretKeyEncoding,
|
||||
ciphertext: urlCipherText,
|
||||
iv: urlIV as string,
|
||||
tag: urlTag as string
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
secretKey: decryptedSecretKey,
|
||||
url: decryptedUrl
|
||||
};
|
||||
};
|
||||
|
||||
export const triggerWebhookRequest = async (webhook: TWebhooks, data: Record<string, unknown>) => {
|
||||
const headers: Record<string, string> = {};
|
||||
const payload = { ...data, timestamp: Date.now() };
|
||||
const { secretKey, url } = decryptWebhookDetails(webhook);
|
||||
|
||||
if (secretKey) {
|
||||
const webhookSign = crypto.createHmac("sha256", secretKey).update(JSON.stringify(payload)).digest("hex");
|
||||
headers["x-infisical-signature"] = `t=${payload.timestamp};${webhookSign}`;
|
||||
}
|
||||
|
||||
const req = await request.post(url, payload, {
|
||||
headers,
|
||||
timeout: WEBHOOK_TRIGGER_TIMEOUT,
|
||||
signal: AbortSignal.timeout(WEBHOOK_TRIGGER_TIMEOUT)
|
||||
});
|
||||
|
||||
return req;
|
||||
};
|
||||
|
||||
@ -60,15 +68,48 @@ export const getWebhookPayload = (
|
||||
eventName: string,
|
||||
workspaceId: string,
|
||||
environment: string,
|
||||
secretPath?: string
|
||||
) => ({
|
||||
event: eventName,
|
||||
project: {
|
||||
workspaceId,
|
||||
environment,
|
||||
secretPath
|
||||
secretPath?: string,
|
||||
type?: string | null
|
||||
) => {
|
||||
switch (type) {
|
||||
case WebhookType.SLACK:
|
||||
return {
|
||||
text: "A secret value has been added or modified.",
|
||||
attachments: [
|
||||
{
|
||||
color: "#E7F256",
|
||||
fields: [
|
||||
{
|
||||
title: "Workspace ID",
|
||||
value: workspaceId,
|
||||
short: false
|
||||
},
|
||||
{
|
||||
title: "Environment",
|
||||
value: environment,
|
||||
short: false
|
||||
},
|
||||
{
|
||||
title: "Secret Path",
|
||||
value: secretPath,
|
||||
short: false
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
};
|
||||
case WebhookType.GENERAL:
|
||||
default:
|
||||
return {
|
||||
event: eventName,
|
||||
project: {
|
||||
workspaceId,
|
||||
environment,
|
||||
secretPath
|
||||
}
|
||||
};
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
export type TFnTriggerWebhookDTO = {
|
||||
projectId: string;
|
||||
@ -95,9 +136,10 @@ export const fnTriggerWebhook = async ({
|
||||
logger.info("Secret webhook job started", { environment, secretPath, projectId });
|
||||
const webhooksTriggered = await Promise.allSettled(
|
||||
toBeTriggeredHooks.map((hook) =>
|
||||
triggerWebhookRequest(hook, getWebhookPayload("secrets.modified", projectId, environment, secretPath))
|
||||
triggerWebhookRequest(hook, getWebhookPayload("secrets.modified", projectId, environment, secretPath, hook.type))
|
||||
)
|
||||
);
|
||||
|
||||
// filter hooks by status
|
||||
const successWebhooks = webhooksTriggered
|
||||
.filter(({ status }) => status === "fulfilled")
|
||||
|
@ -1,15 +1,14 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
|
||||
import { SecretEncryptionAlgo, SecretKeyEncoding, TWebhooksInsert } from "@app/db/schemas";
|
||||
import { TWebhooksInsert } from "@app/db/schemas";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { encryptSymmetric, encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
|
||||
import { infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
|
||||
import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
|
||||
import { TWebhookDALFactory } from "./webhook-dal";
|
||||
import { getWebhookPayload, triggerWebhookRequest } from "./webhook-fns";
|
||||
import { decryptWebhookDetails, getWebhookPayload, triggerWebhookRequest } from "./webhook-fns";
|
||||
import {
|
||||
TCreateWebhookDTO,
|
||||
TDeleteWebhookDTO,
|
||||
@ -36,7 +35,8 @@ export const webhookServiceFactory = ({ webhookDAL, projectEnvDAL, permissionSer
|
||||
webhookUrl,
|
||||
environment,
|
||||
secretPath,
|
||||
webhookSecretKey
|
||||
webhookSecretKey,
|
||||
type
|
||||
}: TCreateWebhookDTO) => {
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
@ -50,30 +50,29 @@ export const webhookServiceFactory = ({ webhookDAL, projectEnvDAL, permissionSer
|
||||
if (!env) throw new BadRequestError({ message: "Env not found" });
|
||||
|
||||
const insertDoc: TWebhooksInsert = {
|
||||
url: webhookUrl,
|
||||
url: "", // deprecated - we are moving away from plaintext URLs
|
||||
envId: env.id,
|
||||
isDisabled: false,
|
||||
secretPath: secretPath || "/"
|
||||
secretPath: secretPath || "/",
|
||||
type
|
||||
};
|
||||
|
||||
if (webhookSecretKey) {
|
||||
const appCfg = getConfig();
|
||||
const encryptionKey = appCfg.ENCRYPTION_KEY;
|
||||
const rootEncryptionKey = appCfg.ROOT_ENCRYPTION_KEY;
|
||||
if (rootEncryptionKey) {
|
||||
const { ciphertext, iv, tag } = encryptSymmetric(webhookSecretKey, rootEncryptionKey);
|
||||
insertDoc.encryptedSecretKey = ciphertext;
|
||||
insertDoc.iv = iv;
|
||||
insertDoc.tag = tag;
|
||||
insertDoc.algorithm = SecretEncryptionAlgo.AES_256_GCM;
|
||||
insertDoc.keyEncoding = SecretKeyEncoding.BASE64;
|
||||
} else if (encryptionKey) {
|
||||
const { ciphertext, iv, tag } = encryptSymmetric128BitHexKeyUTF8(webhookSecretKey, encryptionKey);
|
||||
insertDoc.encryptedSecretKey = ciphertext;
|
||||
insertDoc.iv = iv;
|
||||
insertDoc.tag = tag;
|
||||
insertDoc.algorithm = SecretEncryptionAlgo.AES_256_GCM;
|
||||
insertDoc.keyEncoding = SecretKeyEncoding.UTF8;
|
||||
}
|
||||
const { ciphertext, iv, tag, algorithm, encoding } = infisicalSymmetricEncypt(webhookSecretKey);
|
||||
insertDoc.encryptedSecretKey = ciphertext;
|
||||
insertDoc.iv = iv;
|
||||
insertDoc.tag = tag;
|
||||
insertDoc.algorithm = algorithm;
|
||||
insertDoc.keyEncoding = encoding;
|
||||
}
|
||||
|
||||
if (webhookUrl) {
|
||||
const { ciphertext, iv, tag, algorithm, encoding } = infisicalSymmetricEncypt(webhookUrl);
|
||||
insertDoc.urlCipherText = ciphertext;
|
||||
insertDoc.urlIV = iv;
|
||||
insertDoc.urlTag = tag;
|
||||
insertDoc.algorithm = algorithm;
|
||||
insertDoc.keyEncoding = encoding;
|
||||
}
|
||||
|
||||
const webhook = await webhookDAL.create(insertDoc);
|
||||
@ -131,7 +130,7 @@ export const webhookServiceFactory = ({ webhookDAL, projectEnvDAL, permissionSer
|
||||
try {
|
||||
await triggerWebhookRequest(
|
||||
webhook,
|
||||
getWebhookPayload("test", webhook.projectId, webhook.environment.slug, webhook.secretPath)
|
||||
getWebhookPayload("test", webhook.projectId, webhook.environment.slug, webhook.secretPath, webhook.type)
|
||||
);
|
||||
} catch (err) {
|
||||
webhookError = (err as Error).message;
|
||||
@ -162,7 +161,14 @@ export const webhookServiceFactory = ({ webhookDAL, projectEnvDAL, permissionSer
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Webhooks);
|
||||
|
||||
return webhookDAL.findAllWebhooks(projectId, environment, secretPath);
|
||||
const webhooks = await webhookDAL.findAllWebhooks(projectId, environment, secretPath);
|
||||
return webhooks.map((w) => {
|
||||
const { url } = decryptWebhookDetails(w);
|
||||
return {
|
||||
...w,
|
||||
url
|
||||
};
|
||||
});
|
||||
};
|
||||
|
||||
return {
|
||||
|
@ -5,6 +5,7 @@ export type TCreateWebhookDTO = {
|
||||
secretPath?: string;
|
||||
webhookUrl: string;
|
||||
webhookSecretKey?: string;
|
||||
type: string;
|
||||
} & TProjectPermission;
|
||||
|
||||
export type TUpdateWebhookDTO = {
|
||||
@ -24,3 +25,8 @@ export type TListWebhookDTO = {
|
||||
environment?: string;
|
||||
secretPath?: string;
|
||||
} & TProjectPermission;
|
||||
|
||||
export enum WebhookType {
|
||||
GENERAL = "general",
|
||||
SLACK = "slack"
|
||||
}
|
||||
|
@ -49,8 +49,8 @@ Server-related logic is handled in `/src/server`. To connect the service layer t
|
||||
|
||||
## Writing API Routes
|
||||
|
||||
1. To create a route component, run `npm generate:component`.
|
||||
1. To create a route component, run `npm run generate:component`.
|
||||
2. Select option 3, type the router name in dash-case, and provide the version number. This will generate a router file in `src/server/routes/v<version-number>/<router component name>`
|
||||
1. Implement your logic to connect with the service layer as needed.
|
||||
2. Import the router component in the version folder's index.ts. For instance, if it's in v1, import it in `v1/index.ts`.
|
||||
3. Finally, register it under the appropriate prefix for access.
|
||||
3. Finally, register it under the appropriate prefix for access.
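
For orientation, a registration might look roughly like the sketch below. This assumes the Fastify-style setup used by the generated route files; the names (`registerSampleRouter`, `registerV1Routes`, the `/sample` prefix) are illustrative placeholders, not the repository's actual identifiers — follow whatever the generator scaffolds for you.

```ts
// src/server/routes/v1/sample-router.ts (hypothetical path and names)
import type { FastifyInstance } from "fastify";

export const registerSampleRouter = async (server: FastifyInstance) => {
  server.route({
    method: "GET",
    url: "/",
    handler: async () => {
      // call into the service layer here and return its result
      return { status: "ok" };
    }
  });
};

// src/server/routes/v1/index.ts (hypothetical)
import { registerSampleRouter } from "./sample-router";

export const registerV1Routes = async (server: FastifyInstance) => {
  // expose the router under its own prefix, e.g. GET /api/v1/sample
  await server.register(registerSampleRouter, { prefix: "/sample" });
};
```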
|
||||
|
118
docs/documentation/platform/dynamic-secrets/mssql.mdx
Normal file
@ -0,0 +1,118 @@
|
||||
---
|
||||
title: "MS SQL"
|
||||
description: "How to dynamically generate MS SQL database users."
|
||||
---
|
||||
|
||||
The Infisical MS SQL dynamic secret allows you to generate Microsoft SQL Server database credentials on demand, based on a configured role.
|
||||
|
||||
## Prerequisite
|
||||
|
||||
Create a user with the required permission in your SQL instance. This user will be used to create new accounts on-demand.
|
||||
|
||||
|
||||
## Set up Dynamic Secrets with MS SQL
|
||||
|
||||
<Steps>
|
||||
<Step title="Open Secret Overview Dashboard">
|
||||
Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
|
||||
</Step>
|
||||
<Step title="Click on the 'Add Dynamic Secret' button">
|
||||

|
||||
</Step>
|
||||
<Step title="Select `SQL Database`">
|
||||

|
||||
</Step>
|
||||
<Step title="Provide the inputs for dynamic secret parameters">
|
||||
<ParamField path="Secret Name" type="string" required>
|
||||
Name by which you want the secret to be referenced
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Default TTL" type="string" required>
|
||||
Default time-to-live for a generated secret (it is possible to modify this value when a secret is generated)
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Max TTL" type="string" required>
|
||||
Maximum time-to-live for a generated secret
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Service" type="string" required>
|
||||
Choose the service you want to generate dynamic secrets for. Select **MS SQL** here.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Host" type="string" required>
|
||||
Database host
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Port" type="number" required>
|
||||
Database port
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="User" type="string" required>
|
||||
Username that will be used to create dynamic secrets
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Password" type="string" required>
|
||||
Password that will be used to create dynamic secrets
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Database Name" type="string" required>
|
||||
Name of the database for which you want to create dynamic secrets
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="CA(SSL)" type="string">
|
||||
A CA may be required if your DB requires it for incoming connections. AWS RDS instances with default settings will require a CA, which can be downloaded [here](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/UsingWithRDS.SSL.html#UsingWithRDS.SSL.CertificatesAllRegions).
|
||||
</ParamField>
|
||||
|
||||

|
||||
|
||||
</Step>
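
To make the fields above concrete, an example configuration might look like the following. Every value here is hypothetical; substitute your own host, credentials, and database.

```ts
// Hypothetical example values for the dynamic secret form above
const exampleMsSqlDynamicSecret = {
  secretName: "mssql-readonly", // how the dynamic secret will be referenced
  defaultTTL: "1h",             // lifetime of a generated credential
  maxTTL: "24h",                // hard upper bound for any lease
  service: "MS SQL",
  host: "db.example.internal",
  port: 1433,                   // default MS SQL port
  user: "infisical_admin",      // account used to create the on-demand users
  password: "<admin-password>",
  databaseName: "app_db"
  // ca: "-----BEGIN CERTIFICATE----- ..." (only if your DB enforces SSL with a private CA)
};
```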
|
||||
<Step title="(Optional) Modify SQL Statements">
|
||||
If you want to provide specific privileges for the generated dynamic credentials, you can modify the SQL statement to your needs. This is useful if you want to grant access only to specific table(s); a sketch of such statements follows this step.
|
||||
|
||||

|
||||
</Step>
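
As a rough illustration of the kind of statements you might end up with, the sketch below narrows the grant to a single table. The table name and the `{{username}}`/`{{password}}` placeholders are assumptions about the template variables; check the defaults shown in the modal before editing.

```ts
// Hypothetical creation/revocation templates scoped to a single table
const creationStatement = `
  CREATE LOGIN [{{username}}] WITH PASSWORD = '{{password}}';
  CREATE USER [{{username}}] FOR LOGIN [{{username}}];
  GRANT SELECT ON dbo.Orders TO [{{username}}];
`;

const revocationStatement = `
  DROP USER IF EXISTS [{{username}}];
  DROP LOGIN [{{username}}];
`;
```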
|
||||
<Step title="Click 'Submit'">
|
||||
After submitting the form, you will see a dynamic secret created in the dashboard.
|
||||
|
||||
<Note>
|
||||
If this step fails, you may have to add the CA certificate.
|
||||
</Note>
|
||||
|
||||

|
||||
</Step>
|
||||
<Step title="Generate dynamic secrets">
|
||||
Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
|
||||
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
|
||||
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
|
||||
|
||||

|
||||

|
||||
|
||||
When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
|
||||
|
||||

|
||||
|
||||
<Tip>
|
||||
Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret.
|
||||
</Tip>
|
||||
|
||||
|
||||
Once you click the `Submit` button, a new secret lease will be generated and the credentials for it will be shown to you.
|
||||
|
||||

|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
## Audit or Revoke Leases
|
||||
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
|
||||
This will allow you to see the expiration time of the lease or delete the lease before its time to live elapses.
|
||||
|
||||

|
||||
|
||||
## Renew Leases
|
||||
To extend the life of a generated dynamic secret lease past its initial time to live, simply click on **Renew** as illustrated below.
|
||||

|
||||
|
||||
<Warning>
|
||||
Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret
|
||||
</Warning>
|
@ -75,7 +75,14 @@ access the Infisical API using the AWS Auth authentication method.
|
||||
- Name (required): A friendly name for the identity.
|
||||
- Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to.
|
||||
|
||||
Once you've created an identity, you'll be prompted to configure the authentication method for it. Here, select **AWS Auth**.
|
||||
Once you've created an identity, you'll be redirected to a page where you can manage the identity.
|
||||
|
||||

|
||||
|
||||
Since the identity has been configured with Universal Auth by default, you should re-configure it to use AWS Auth instead. To do this, press to edit the **Authentication** section,
|
||||
remove the existing Universal Auth configuration, and add a new AWS Auth configuration onto the identity.
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
|
@ -75,7 +75,14 @@ access the Infisical API using the Azure Auth authentication method.
|
||||
- Name (required): A friendly name for the identity.
|
||||
- Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to.
|
||||
|
||||
Once you've created an identity, you'll be prompted to configure the authentication method for it. Here, select **Azure Auth**.
|
||||
Once you've created an identity, you'll be redirected to a page where you can manage the identity.
|
||||
|
||||

|
||||
|
||||
Since the identity has been configured with Universal Auth by default, you should re-configure it to use Azure Auth instead. To do this, press to edit the **Authentication** section,
|
||||
remove the existing Universal Auth configuration, and add a new Azure Auth configuration onto the identity.
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
|
@ -81,7 +81,14 @@ access the Infisical API using the GCP ID Token authentication method.
|
||||
- Name (required): A friendly name for the identity.
|
||||
- Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to.
|
||||
|
||||
Once you've created an identity, you'll be prompted to configure the authentication method for it. Here, select **GCP Auth** and set the **Type** to **GCP ID Token Auth**.
|
||||
Once you've created an identity, you'll be redirected to a page where you can manage the identity.
|
||||
|
||||

|
||||
|
||||
Since the identity has been configured with Universal Auth by default, you should re-configure it to use GCP Auth instead. To do this, press to edit the **Authentication** section,
|
||||
remove the existing Universal Auth configuration, and add a new GCP Auth configuration onto the identity; set the **Type** field to **GCP ID Token Auth**.
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
@ -243,9 +250,16 @@ access the Infisical API using the GCP IAM authentication method.
|
||||
- Name (required): A friendly name for the identity.
|
||||
- Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to.
|
||||
|
||||
Once you've created an identity, you'll be prompted to configure the authentication method for it. Here, select **GCP IAM Auth** and set the **Type** to **GCP IAM Auth**.
|
||||
Once you've created an identity, you'll be redirected to a page where you can manage the identity.
|
||||
|
||||

|
||||

|
||||
|
||||
Since the identity has been configured with Universal Auth by default, you should re-configure it to use GCP Auth instead. To do this, press to edit the **Authentication** section,
|
||||
remove the existing Universal Auth configuration, and add a new GCP Auth configuration onto the identity; set the **Type** field to **GCP IAM Auth**.
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
Here's some more guidance on each field:
|
||||
|
||||
|
@ -42,9 +42,9 @@ To be more specific:
|
||||
4. If all is well, Infisical returns a short-lived access token that the application can use to make authenticated requests to the Infisical API.
|
||||
|
||||
<Note>
|
||||
We recommend using one of Infisical's clients like SDKs or the Infisical Agent
|
||||
to authenticate with Infisical using Kubernetes Auth as they handle the
|
||||
authentication process including service account credential retrieval for you.
|
||||
We recommend using one of Infisical's clients like SDKs or the Infisical Agent
|
||||
to authenticate with Infisical using Kubernetes Auth as they handle the
|
||||
authentication process including service account credential retrieval for you.
|
||||
</Note>
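
To ground the flow above, here is a minimal sketch of the token exchange an application would perform itself. The endpoint path and field names are assumptions for illustration; the SDKs and the Infisical Agent wrap this exchange for you.

```ts
import { readFileSync } from "fs";

// Read the pod's projected service account token (standard Kubernetes path)
const serviceAccountJwt = readFileSync(
  "/var/run/secrets/kubernetes.io/serviceaccount/token",
  "utf8"
);

// Exchange the service account JWT for a short-lived Infisical access token
const res = await fetch("https://app.infisical.com/api/v1/auth/kubernetes-auth/login", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ identityId: "<machine-identity-id>", jwt: serviceAccountJwt })
});

const { accessToken } = await res.json();
// accessToken is short-lived; send it as a Bearer token on subsequent API calls
```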
|
||||
|
||||
## Guide
|
||||
@ -137,9 +137,16 @@ In the following steps, we explore how to create and use identities for your app
- Name (required): A friendly name for the identity.
- Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to.

Once you've created an identity, you'll be prompted to configure the authentication method for it. Here, select **Kubernetes Auth**.
Once you've created an identity, you'll be redirected to a page where you can manage the identity.




Since the identity has been configured with Universal Auth by default, you should re-configure it to use Kubernetes Auth instead. To do this, press to edit the **Authentication** section,
remove the existing Universal Auth configuration, and add a new Kubernetes Auth configuration onto the identity.




Here's some more guidance on each field:

@ -240,8 +247,8 @@ In the following steps, we explore how to create and use identities for your app

In certain cases, you may want to extend the lifespan of an access token; to do so, you must set a max TTL parameter.

A token can be renewed any number of time and each call to renew it will extend the toke life by increments of access token TTL.
Regardless of how frequently an access token is renewed, its lifespan remains bound to the maximum TTL determined at its creation
A token can be renewed any number of times where each call to renew it can extend the token's lifetime by increments of the access token's TTL.
Regardless of how frequently an access token is renewed, its lifespan remains bound to the maximum TTL determined at its creation.

</Accordion>
</AccordionGroup>

@ -7,7 +7,7 @@ description: "Learn how to use Machine Identities to programmatically interact w

An Infisical machine identity is an entity that represents a workload or application that requires access to various resources in Infisical. This is conceptually similar to an IAM user in AWS or service account in Google Cloud Platform (GCP).

Each identity must authenticate with the Infisical API using a supported authentication method like [Universal Auth](/documentation/platform/identities/universal-auth), [Kubernetes Auth](/documentation/platform/identities/kubernetes-auth), [AWS Auth](/documentation/platform/identities/aws-auth), [Azure Auth](/documentation/platform/identities/azure-auth), or [GCP Auth](/documentation/platform/identities/gcp-auth) to get back a short-lived access token to be used in subsequent requests.
Each identity must authenticate with the Infisical API using a supported authentication method like [Token Auth](/documentation/platform/identities/token-auth), [Universal Auth](/documentation/platform/identities/universal-auth), [Kubernetes Auth](/documentation/platform/identities/kubernetes-auth), [AWS Auth](/documentation/platform/identities/aws-auth), [Azure Auth](/documentation/platform/identities/azure-auth), or [GCP Auth](/documentation/platform/identities/gcp-auth) to get back a short-lived access token to be used in subsequent requests.



@ -28,13 +28,15 @@ A typical workflow for using identities consists of four steps:

## Authentication Methods

To interact with various resources in Infisical, Machine Identities are able to authenticate using:
To interact with various resources in Infisical, Machine Identities can authenticate with the Infisical API using:

- [Universal Auth](/documentation/platform/identities/universal-auth): A platform-agnostic authentication method that can be configured on an identity suitable to authenticate from any platform/environment.
- [Kubernetes Auth](/documentation/platform/identities/kubernetes-auth): A Kubernetes-native authentication method for applications (e.g. pods) to authenticate with Infisical.
- [AWS Auth](/documentation/platform/identities/aws-auth): An AWS-native authentication method for AWS services (e.g. EC2, Lambda functions, etc.) to authenticate with Infisical.
- [Azure Auth](/documentation/platform/identities/azure-auth): An Azure-native authentication method for Azure resources (e.g. Azure VMs, Azure App Services, Azure Functions, Azure Kubernetes Service, etc.) to authenticate with Infisical.
- [GCP Auth](/documentation/platform/identities/gcp-auth): A GCP-native authentication method for GCP resources (e.g. Compute Engine, App Engine, Cloud Run, Google Kubernetes Engine, IAM service accounts, etc.) to authenticate with Infisical.
- [Token Auth](/documentation/platform/identities/token-auth): A platform-agnostic, simple authentication method suitable to authenticate with Infisical using a token.
- [Universal Auth](/documentation/platform/identities/universal-auth): A platform-agnostic authentication method suitable to authenticate with Infisical using a Client ID and Client Secret.
- [Kubernetes Auth](/documentation/platform/identities/kubernetes-auth): A Kubernetes-native authentication method for applications (e.g. pods).
- [AWS Auth](/documentation/platform/identities/aws-auth): An AWS-native authentication method for AWS services (e.g. EC2, Lambda functions, etc.).
- [Azure Auth](/documentation/platform/identities/azure-auth): An Azure-native authentication method for Azure resources (e.g. Azure VMs, Azure App Services, Azure Functions, Azure Kubernetes Service, etc.).
- [GCP Auth](/documentation/platform/identities/gcp-auth): A GCP-native authentication method for GCP resources (e.g. Compute Engine, App Engine, Cloud Run, Google Kubernetes Engine, IAM service accounts, etc.).
- [OIDC Auth](/documentation/platform/identities/oidc-auth): A platform-agnostic, JWT-based authentication method for workloads using an OpenID Connect identity provider.

## FAQ

@ -52,6 +54,8 @@ You can learn more about how to do this in the CLI quickstart [here](/cli/usage)

Amongst many differences, identities provide broader access over the Infisical API, utilize the same
permission system as user identities, and come with a significantly larger number of configurable authentication and security features.

If you're looking for a simple authentication method, similar to service tokens, that can be bound onto an identity, we recommend checking out [Token Auth](/documentation/platform/identities/token-auth).
</Accordion>
<Accordion title="Why can I not create, read, update, or delete an identity?">
There are a few reasons for why this might happen:

165
docs/documentation/platform/identities/oidc-auth.mdx
Normal file
@ -0,0 +1,165 @@
---
title: OIDC Auth
description: "Learn how to authenticate with Infisical from any platform or environment using OpenID Connect (OIDC)."
---

**OIDC Auth** is a platform-agnostic JWT-based authentication method that can be used to authenticate from any platform or environment using an identity provider with OpenID Connect.

## Diagram

The following sequence diagram illustrates the OIDC Auth workflow for authenticating clients with Infisical.

```mermaid
sequenceDiagram
  participant Client as Client
  participant Idp as Identity Provider
  participant Infis as Infisical

  Client->>Idp: Step 1: Request identity token
  Idp-->>Client: Return JWT with verifiable claims

  Note over Client,Infis: Step 2: Login Operation
  Client->>Infis: Send signed JWT to /api/v1/auth/oidc-auth/login

  Note over Infis,Idp: Step 3: Query verification
  Infis->>Idp: Request JWT public key using OIDC Discovery
  Idp-->>Infis: Return public key

  Note over Infis: Step 4: JWT validation
  Infis->>Client: Return short-lived access token

  Note over Client,Infis: Step 5: Access Infisical API with Token
  Client->>Infis: Make authenticated requests using the short-lived access token
```

## Concept

At a high-level, Infisical authenticates a client by verifying the JWT and checking that it meets specific requirements (e.g. it is issued by a trusted identity provider) at the `/api/v1/auth/oidc-auth/login` endpoint. If successful,
then Infisical returns a short-lived access token that can be used to make authenticated requests to the Infisical API.

To be more specific:

1. The client requests an identity token from its identity provider.
2. The client sends the identity token to Infisical at the `/api/v1/auth/oidc-auth/login` endpoint.
3. Infisical fetches the public key that was used to sign the identity token from the identity provider using OIDC Discovery.
4. Infisical validates the JWT using the public key provided by the identity provider and checks that the subject, audience, and claims of the token match the set criteria.
5. If all is well, Infisical returns a short-lived access token that the client can use to make authenticated requests to the Infisical API.

<Note>
Infisical needs network-level access to the identity provider configuration
endpoints.
</Note>

## Guide

In the following steps, we explore how to create and use identities to access the Infisical API using the OIDC Auth authentication method.

<Steps>
<Step title="Creating an identity">
To create an identity, head to your Organization Settings > Access Control > Machine Identities and press **Create identity**.



When creating an identity, you specify an organization level [role](/documentation/platform/role-based-access-controls) for it to assume; you can configure roles in Organization Settings > Access Control > Organization Roles.



Now input a few details for your new identity. Here's some guidance for each field:

- Name (required): A friendly name for the identity.
- Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to.

Once you've created an identity, you'll be redirected to a page where you can manage the identity.



Since the identity has been configured with Universal Auth by default, you should re-configure it to use OIDC Auth instead. To do this, press to edit the **Authentication** section,
remove the existing Universal Auth configuration, and add a new OIDC Auth configuration onto the identity.





<Tip>Restrict access by configuring the Subject, Audiences, and Claims fields</Tip>

Here's some more guidance on each field:
- OIDC Discovery URL: The URL used to retrieve the OpenID Connect configuration information from the identity provider. This will be used to fetch the public key needed for verifying the provided JWT.
- Issuer: The unique identifier of the identity provider issuing the JWT. This value is used to verify the iss (issuer) claim in the JWT to ensure the token is issued by a trusted provider.
- CA Certificate: The PEM-encoded CA cert for establishing secure communication with the Identity Provider endpoints.
- Subject: The expected principal that is the subject of the JWT. The `sub` (subject) claim in the JWT should match this value.
- Audiences: A list of intended recipients. This value is checked against the aud (audience) claim in the token. The token's aud claim should match at least one of the audiences for it to be valid.
- Claims: Additional information or attributes that should be present in the JWT for it to be valid (see the example token payload after this list).
- Access Token TTL (default is `2592000` equivalent to 30 days): The lifetime for an access token in seconds. This value will be referenced at renewal time.
- Access Token Max TTL (default is `2592000` equivalent to 30 days): The maximum lifetime for an access token in seconds. This value will be referenced at renewal time.
- Access Token Max Number of Uses (default is `0`): The maximum number of times that an access token can be used; a value of `0` implies infinite number of uses.
- Access Token Trusted IPs: The IPs or CIDR ranges that access tokens can be used from. By default, each token is given the `0.0.0.0/0`, allowing usage from any network address.
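
To make the Subject, Audiences, and Claims fields concrete, here is a rough, illustrative sketch of what a decoded identity token issued by GitHub Actions (the provider used in the sample later in this guide) might contain; all values are examples, and the exact claims depend on your provider and workflow.

```javascript
// Illustrative decoded ID token payload from GitHub Actions (values are examples only).
const exampleIdTokenClaims = {
  iss: "https://token.actions.githubusercontent.com", // matched against the Issuer field
  sub: "repo:acme/api:ref:refs/heads/main",           // matched against the Subject field
  aud: "infisical",                                    // must match one of the configured Audiences
  repository: "acme/api",                              // extra claims like these can be pinned via the Claims field
  ref: "refs/heads/main",
};
```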
</Step>
<Step title="Adding an identity to a project">
To enable the identity to access project-level resources such as secrets within a specific project, you should add it to that project.

To do this, head over to the project you want to add the identity to and go to Project Settings > Access Control > Machine Identities and press **Add identity**.

Next, select the identity you want to add to the project and the project level role you want to allow it to assume. The project role assigned will determine what project level resources this identity can have access to.



</Step>
<Step title="Accessing the Infisical API with the identity">
To access the Infisical API as the identity, you need to fetch an identity token from an identity provider and make a request to the `/api/v1/auth/oidc-auth/login` endpoint in exchange for an access token.

We provide an example below of how authentication is done with Infisical using OIDC. It is a snippet from the [official GitHub secrets action](https://github.com/Infisical/secrets-action).

#### Sample usage
```javascript
// Relies on the action's surrounding imports: `core` (@actions/core),
// `querystring` (Node.js), and `axios`.
export const oidcLogin = async ({ identityId, domain, oidcAudience }) => {
  const idToken = await core.getIDToken(oidcAudience);

  const loginData = querystring.stringify({
    identityId,
    jwt: idToken,
  });

  try {
    const response = await axios({
      method: "post",
      url: `${domain}/api/v1/auth/oidc-auth/login`,
      headers: {
        "Content-Type": "application/x-www-form-urlencoded",
      },
      data: loginData,
    });

    return response.data.accessToken;
  } catch (err) {
    core.error("Error:", err.message);
    throw err;
  }
};
```

#### Sample OIDC login response

```bash Response
{
  "accessToken": "...",
  "expiresIn": 7200,
  "accessTokenMaxTTL": 43244,
  "tokenType": "Bearer"
}
```
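
To show where the returned `accessToken` goes, here is a brief sketch continuing from the sample above: the token is sent as a Bearer token in the `Authorization` header of subsequent API calls. The endpoint path is a placeholder rather than a documented route, so consult the [Infisical API](/api-reference/overview/introduction) reference for the routes you need.

```javascript
// Continues from the sample above (re-uses its `axios` import and the `oidcLogin` helper).
// The endpoint path below is a placeholder; see the Infisical API reference for real routes.
const run = async () => {
  const accessToken = await oidcLogin({
    identityId: process.env.INFISICAL_IDENTITY_ID, // the machine identity's ID
    domain: "https://app.infisical.com",
    oidcAudience: "infisical",
  });

  // The login response's tokenType is "Bearer", so the access token is passed
  // as a standard Bearer token on every authenticated request.
  const { data } = await axios.get("https://app.infisical.com/api/v1/<some-endpoint>", {
    headers: { Authorization: `Bearer ${accessToken}` },
  });

  return data;
};
```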

<Tip>
We recommend using one of Infisical's clients like SDKs or the Infisical Agent to authenticate with Infisical using OIDC Auth as they handle the authentication process including the fetching of identity tokens for you.
</Tip>

<Note>
Each identity access token has a time-to-live (TTL) which you can infer from the response of the login operation;
the default TTL is `7200` seconds which can be adjusted.

If an identity access token expires, it can no longer authenticate with the Infisical API. In this case,
a new access token should be obtained by performing another login operation.
</Note>
</Step>

</Steps>
138
docs/documentation/platform/identities/token-auth.mdx
Normal file
@ -0,0 +1,138 @@
---
title: Token Auth
description: "Learn how to authenticate to Infisical from any platform or environment using an access token."
---

**Token Auth** is a platform-agnostic, simple authentication method that can be configured for a [machine identity](/documentation/platform/identities/machine-identities) to authenticate from any platform/environment using a token.

## Diagram

The following sequence diagram illustrates the Token Auth workflow for authenticating clients with Infisical.

```mermaid
sequenceDiagram
  participant Client as Client
  participant Infis as Infisical

  Note over Client,Infis: Access Infisical API with Token
  Client->>Infis: Make authenticated requests using the token
```

## Concept

Token Auth is the simplest authentication method that a client can use to authenticate with Infisical.

Unlike other authentication methods where a client must exchange credential(s) for a short-lived access token to access the Infisical API,
Token Auth allows a client to make authenticated requests to the Infisical API directly using a token. Conceptually, this is similar to using an API Key.

To be more specific:

1. An operator creates an access token in the Infisical UI.
2. The operator shares the access token with the client which it can then use to make authenticated requests to the Infisical API.

## Guide

In the following steps, we explore how to create and use identities for your workloads and applications to access the Infisical API
using the Token Auth authentication method.

<Steps>
<Step title="Creating an identity">
To create an identity, head to your Organization Settings > Access Control > Machine Identities and press **Create identity**.



When creating an identity, you specify an organization level [role](/documentation/platform/role-based-access-controls) for it to assume; you can configure roles in Organization Settings > Access Control > Organization Roles.



Now input a few details for your new identity. Here's some guidance for each field:

- Name (required): A friendly name for the identity.
- Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to.

Once you've created an identity, you'll be redirected to a page where you can manage the identity.



Since the identity has been configured with Universal Auth by default, you should re-configure it to use Token Auth instead. To do this, press to edit the **Authentication** section,
remove the existing Universal Auth configuration, and add a new Token Auth configuration onto the identity.





Here's some more guidance on each field:

- Access Token TTL (default is `2592000` equivalent to 30 days): The lifetime for an access token in seconds. This value will be referenced at renewal time.
- Access Token Max TTL (default is `2592000` equivalent to 30 days): The maximum lifetime for an access token in seconds. This value will be referenced at renewal time.
- Access Token Max Number of Uses (default is `0`): The maximum number of times that an access token can be used; a value of `0` implies infinite number of uses.
- Access Token Trusted IPs: The IPs or CIDR ranges that access tokens can be used from. By default, each token is given the `0.0.0.0/0`, allowing usage from any network address.

<Warning>
Restricting access token usage to specific trusted IPs is a paid feature.

If you’re using Infisical Cloud, then it is available under the Pro Tier. If you’re self-hosting Infisical, then you should contact sales@infisical.com to purchase an enterprise license to use it.
</Warning>

</Step>
<Step title="Creating a Token">
In order to use the identity with Token Auth, you'll need to create an (access) token; you can think of this token akin
to an API Key used to authenticate with the Infisical API. With that, press **Create Token**.







Copy the token and keep it handy as you'll need it to authenticate with the Infisical API.

</Step>
<Step title="Adding an identity to a project">
To enable the identity to access project-level resources such as secrets within a specific project, you should add it to that project.

To do this, head over to the project you want to add the identity to and go to Project Settings > Access Control > Machine Identities and press **Add identity**.

Next, select the identity you want to add to the project and the project level role you want to allow it to assume. The project role assigned will determine what project level resources this identity can have access to.



</Step>
<Step title="Accessing the Infisical API with the identity">
To access the Infisical API as the identity, you can use the generated access token from step 2
to authenticate with the [Infisical API](/api-reference/overview/introduction).
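
As a minimal sketch of what that looks like in practice, the token is passed as a Bearer token in the `Authorization` header; the endpoint and parameters below are illustrative assumptions, so check the API reference for the exact routes you need.

```javascript
import axios from "axios";

// The access token created in step 2, e.g. provided via an environment variable.
const token = process.env.INFISICAL_TOKEN;

async function listSecrets() {
  // Endpoint and parameters are illustrative; see the Infisical API reference for exact routes.
  const res = await axios.get("https://app.infisical.com/api/v3/secrets/raw", {
    headers: { Authorization: `Bearer ${token}` }, // token sent as a Bearer token
    params: { workspaceId: "<project-id>", environment: "dev" },
  });
  return res.data;
}
```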
<Note>
Each identity access token has a time-to-live (TTL) which you can infer from the response of the login operation;
the default TTL is `7200` seconds which can be adjusted in the Token Auth configuration.

If an identity access token expires, it can no longer authenticate with the Infisical API. In this case,
a new access token should be obtained.
</Note>

</Step>
</Steps>

**FAQ**

<AccordionGroup>
<Accordion title="Why is the Infisical API rejecting my access token?">
There are a few reasons for why this might happen:

- The access token has expired. If this is the case, you should obtain a new access token or consider extending the token's TTL.
- The identity is insufficiently permissioned to interact with the resources you wish to access.
- The access token is being used from an untrusted IP.
</Accordion>
<Accordion title="What is access token renewal and TTL/Max TTL?">
An identity access token can have a time-to-live (TTL) or incremental lifetime after which it expires.

In certain cases, you may want to extend the lifespan of an access token; to do so, you must set a max TTL parameter.

A token can be renewed any number of times where each call to renew it can extend the token's lifetime by increments of the access token's TTL.
Regardless of how frequently an access token is renewed, its lifespan remains bound to the maximum TTL determined at its creation.
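
As a rough illustration of the rule described above (a sketch of the bookkeeping, not Infisical's internal implementation):

```javascript
// Sketch of the renewal rule described above; all values are in seconds.
// Each renewal pushes the expiry forward by one TTL, but never past createdAt + maxTTL.
const nextExpiry = ({ createdAt, maxTTL, ttl, now }) =>
  Math.min(now + ttl, createdAt + maxTTL);

// Example: with a TTL of 7200 (2 hours) and a Max TTL of 86400 (24 hours),
// hourly renewals keep adding up to 2 hours of life, but the token can never
// outlive the 24-hour cap set at creation.
```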
</Accordion>
</AccordionGroup>
@ -3,7 +3,7 @@ title: Universal Auth
description: "Learn how to authenticate to Infisical from any platform or environment."
---

**Universal Auth** is a platform-agnostic authentication method that can be configured for a [machine identity](/documentation/platform/identities/machine-identities) suitable to authenticate from any platform/environment.
**Universal Auth** is a platform-agnostic authentication method that can be configured for a [machine identity](/documentation/platform/identities/machine-identities) to authenticate from any platform/environment using a Client ID and Client Secret.

## Diagram

@ -55,9 +55,16 @@ using the Universal Auth authentication method.
- Name (required): A friendly name for the identity.
- Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to.

Once you've created an identity, you'll be redirected to a page where you can manage the identity.



Once you've created an identity, you'll be prompted to configure the **Universal Auth** authentication method for it.


By default, the identity has been configured with Universal Auth. If you wish, you can edit the Universal Auth configuration
details by pressing to edit the **Authentication** section.



Here's some more guidance on each field:

@ -77,12 +84,12 @@ using the Universal Auth authentication method.
<Step title="Creating a Client Secret">
In order to use the identity, you'll need the non-sensitive **Client ID**
of the identity and a **Client Secret** for it; you can think of these credentials akin to a username
and password used to authenticate with the Infisical API. With that, press on the key icon on the identity to generate a **Client Secret**
for it.
and password used to authenticate with the Infisical API.
With that, press **Create Client Secret**.







Feel free to input any (optional) details for the **Client Secret** configuration:

@ -131,7 +138,7 @@ using the Universal Auth authentication method.

<Note>
Each identity access token has a time-to-live (TTL) which you can infer from the response of the login operation;
the default TTL is `7200` seconds which can be adjusted.
the default TTL is `7200` seconds which can be adjusted in the Universal Auth configuration.

If an identity access token expires, it can no longer authenticate with the Infisical API. In this case,
a new access token should be obtained by performing another login operation.
@ -148,7 +155,6 @@ using the Universal Auth authentication method.

- The client secret or access token has expired.
- The identity is insufficiently permissioned to interact with the resources you wish to access.
- You are attempting to access a `/raw` secrets endpoint that requires your project to disable E2EE.
- The client secret/access token is being used from an untrusted IP.
</Accordion>
<Accordion title="What is access token renewal and TTL/Max TTL?">
@ -156,8 +162,8 @@ using the Universal Auth authentication method.

In certain cases, you may want to extend the lifespan of an access token; to do so, you must set a max TTL parameter.

A token can be renewed any number of time and each call to renew it will extend the toke life by increments of access token TTL.
Regardless of how frequently an access token is renewed, its lifespan remains bound to the maximum TTL determined at its creation
A token can be renewed any number of times where each call to renew it can extend the token's lifetime by increments of the access token's TTL.
Regardless of how frequently an access token is renewed, its lifespan remains bound to the maximum TTL determined at its creation.

</Accordion>
</AccordionGroup>

@ -5,7 +5,7 @@ description: "Learn how to share time & view-count bound secrets securely with a
---

Developers frequently need to share secrets with team members, contractors, or other third parties, which can be risky due to potential leaks or misuse.
Infisical offers a secure solution for sharing secrets over the internet in a time and view count bound manner.
Infisical offers a secure solution for sharing secrets over the internet in a time and view count bound manner. It is possible to share secrets without signing up via [share.infisical.com](https://share.infisical.com) or via the Infisical Dashboard (which has more advanced functionality).

With its zero-knowledge architecture, secrets shared via Infisical remain unreadable even to Infisical itself.

@ -9,7 +9,9 @@ Webhooks can be used to trigger changes to your integrations when secrets are mo

To create a webhook for a particular project, go to `Project Settings > Webhooks`.

When creating a webhook, you can specify an environment and folder path (using glob patterns) to trigger only specific integrations.
Infisical supports two webhook types - General and Slack. If you need to integrate with Slack, use the Slack type with an [Incoming Webhook](https://api.slack.com/messaging/webhooks). When creating a webhook, you can specify an environment and folder path to trigger only specific integrations.



## Secret Key Verification

@ -27,7 +29,7 @@ If the signature in the header matches the signature that you generated, then yo
{
  "event": "secret.modified",
  "project": {
    "workspaceId":"the workspace id",
    "workspaceId": "the workspace id",
    "environment": "project environment",
    "secretPath": "project folder path"
  },
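
As a hedged sketch of the verification described under Secret Key Verification above: it assumes the signature arrives as an HMAC-SHA256 hex digest of the raw request body in a request header (shown here as `x-infisical-signature`), which you should confirm against your webhook configuration before relying on it.

```javascript
// Sketch only: the header name and digest format are assumptions — verify them
// against your webhook settings and the webhook docs before using this.
import crypto from "crypto";

export const isValidWebhook = (rawBody, receivedSignature, secretKey) => {
  // Recompute the HMAC-SHA256 of the raw request body with the webhook secret key...
  const expected = crypto
    .createHmac("sha256", secretKey)
    .update(rawBody)
    .digest("hex");

  // ...and compare it to the received signature (e.g. from `x-infisical-signature`)
  // in constant time to avoid leaking timing information.
  const a = Buffer.from(expected);
  const b = Buffer.from(receivedSignature || "");
  return a.length === b.length && crypto.timingSafeEqual(a, b);
};
```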
After Width: | Height: | Size: 156 KiB |
After Width: | Height: | Size: 152 KiB |
Before Width: | Height: | Size: 538 KiB After Width: | Height: | Size: 510 KiB |
Before Width: | Height: | Size: 513 KiB After Width: | Height: | Size: 491 KiB |
Before Width: | Height: | Size: 529 KiB After Width: | Height: | Size: 549 KiB |
Before Width: | Height: | Size: 517 KiB After Width: | Height: | Size: 526 KiB |