Mirror of https://github.com/Infisical/infisical.git — synced 2025-07-28 02:53:22 +00:00
Compare commits
85 Commits
daniel/del
...
infisical/
Commits (SHA1):
36973b1b5c, 1ca578ee03, 8a7f7ac9fd, 049fd8e769, 2c825616a6, febbd4ade5, 874dc01692, b44b8bf647, 258e561b84, 5802638fc4,
e2e7004583, 7826324435, 2bfc1caec5, 4b9e3e44e2, b2a680ebd7, b269bb81fe, 5ca7ff4f2d, ec12d57862, 2d16f5f258, 93912da528,
ffc5e61faa, 70e68f4441, a004934a28, 0811192eed, 1e09487572, 86202caa95, 285fca4ded, 30fb60b441, e531390922, e88ce49463,
9214c93ece, 7a3bfa9e4c, 7aa0e8572c, 296efa975c, b3e72c338f, 8c4c969bc2, 0d424f332a, f0b6382f92, 72780c61b4, c4da0305ba,
4fdfdc1a39, d2cf296250, 30284e3458, 6f90d3befd, f44888afa2, 9877b0e5c4, b1e35d4b27, 25f6947de5, ff8f1d3bfb, b1b4cb1823,
78f9ae7fab, 20bdff0094, ccf19fbcd4, 41c526371d, 4685132409, 3380d3e828, 74d01c37de, 4de8888843, b644829bb9, 6845ac0f5e,
6b3b13e40a, 3ec14ca33a, 732484d332, 2f0b353c4e, ddc819dda1, 1b15cb4c35, f4e19f8a2e, 501022752b, 46821ca2ee, 9602b864d4,
8204e970a8, 6671c42d0f, b9dee1e6e8, 648fde8f37, 9eed67c21b, 1e4164e1c2, cbbafcfa42, cc28ebd387, 5f6870fda8, 3e5a58eec4,
f27d483be0, 9ee9d1c0e7, d9a0cf8dd5, 00e69e6632, cedb22a39a
262  .github/workflows/deployment-pipeline.yml  (vendored)
@@ -1,262 +0,0 @@
name: Deployment pipeline
on: [workflow_dispatch]

permissions:
  id-token: write
  contents: read

concurrency:
  group: "infisical-core-deployment"
  cancel-in-progress: true

jobs:
  infisical-tests:
    name: Integration tests
    # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
    uses: ./.github/workflows/run-backend-tests.yml

  infisical-image:
    name: Build
    runs-on: ubuntu-latest
    needs: [infisical-tests]
    steps:
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
      - name: 📦 Install dependencies to test all dependencies
        run: npm ci --only-production
        working-directory: backend
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 🏗️ Build backend and push to docker hub
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          push: true
          context: .
          file: Dockerfile.standalone-infisical
          tags: |
            infisical/staging_infisical:${{ steps.commit.outputs.short }}
            infisical/staging_infisical:latest
          platforms: linux/amd64,linux/arm64
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
            INFISICAL_PLATFORM_VERSION=${{ steps.commit.outputs.short }}

  gamma-deployment:
    name: Deploy to gamma
    runs-on: ubuntu-latest
    needs: [infisical-image]
    environment:
      name: Gamma
    steps:
      - uses: twingate/github-action@v1
        with:
          # The Twingate Service Key used to connect Twingate to the proper service
          # Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
          #
          # Required
          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Setup Node.js environment
        uses: actions/setup-node@v2
        with:
          node-version: "20"
      - name: Change directory to backend and install dependencies
        env:
          DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
        run: |
          cd backend
          npm install
          npm run migration:latest
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          audience: sts.amazonaws.com
          aws-region: us-east-1
          role-to-assume: arn:aws:iam::905418227878:role/deploy-new-ecs-img
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: Download task definition
        run: |
          aws ecs describe-task-definition --task-definition infisical-core-gamma-stage --query taskDefinition > task-definition.json
      - name: Render Amazon ECS task definition
        id: render-web-container
        uses: aws-actions/amazon-ecs-render-task-definition@v1
        with:
          task-definition: task-definition.json
          container-name: infisical-core
          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
          environment-variables: "LOG_LEVEL=info"
      - name: Deploy to Amazon ECS service
        uses: aws-actions/amazon-ecs-deploy-task-definition@v2
        with:
          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
          service: infisical-core-gamma-stage
          cluster: infisical-gamma-stage
          wait-for-service-stability: true

  production-us:
    name: US production deploy
    runs-on: ubuntu-latest
    needs: [gamma-deployment]
    environment:
      name: Production
    steps:
      - uses: twingate/github-action@v1
        with:
          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Setup Node.js environment
        uses: actions/setup-node@v2
        with:
          node-version: "20"
      - name: Change directory to backend and install dependencies
        env:
          DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
          AUDIT_LOGS_DB_CONNECTION_URI: ${{ secrets.AUDIT_LOGS_DB_CONNECTION_URI }}
        run: |
          cd backend
          npm install
          npm run migration:latest
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          audience: sts.amazonaws.com
          aws-region: us-east-1
          role-to-assume: arn:aws:iam::381492033652:role/gha-make-prod-deployment
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: Download task definition
        run: |
          aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
      - name: Render Amazon ECS task definition
        id: render-web-container
        uses: aws-actions/amazon-ecs-render-task-definition@v1
        with:
          task-definition: task-definition.json
          container-name: infisical-core-platform
          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
          environment-variables: "LOG_LEVEL=info"
      - name: Deploy to Amazon ECS service
        uses: aws-actions/amazon-ecs-deploy-task-definition@v2
        with:
          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
          service: infisical-core-platform
          cluster: infisical-core-platform
          wait-for-service-stability: true
      - name: Post slack message
        uses: slackapi/slack-github-action@v2.0.0
        with:
          webhook: ${{ secrets.SLACK_DEPLOYMENT_WEBHOOK_URL }}
          webhook-type: incoming-webhook
          payload: |
            text: "*Deployment Status Update*: ${{ job.status }}"
            blocks:
              - type: "section"
                text:
                  type: "mrkdwn"
                  text: "*Deployment Status Update*: ${{ job.status }}"
              - type: "section"
                fields:
                  - type: "mrkdwn"
                    text: "*Application:*\nInfisical Core"
                  - type: "mrkdwn"
                    text: "*Instance Type:*\nShared Infisical Cloud"
              - type: "section"
                fields:
                  - type: "mrkdwn"
                    text: "*Region:*\nUS"
                  - type: "mrkdwn"
                    text: "*Git Tag:*\n<https://github.com/Infisical/infisical/commit/${{ steps.commit.outputs.short }}>"

  production-eu:
    name: EU production deploy
    runs-on: ubuntu-latest
    needs: [production-us]
    environment:
      name: production-eu
    steps:
      - uses: twingate/github-action@v1
        with:
          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          audience: sts.amazonaws.com
          aws-region: eu-central-1
          role-to-assume: arn:aws:iam::345594589636:role/gha-make-prod-deployment
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Setup Node.js environment
        uses: actions/setup-node@v2
        with:
          node-version: "20"
      - name: Change directory to backend and install dependencies
        env:
          DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
        run: |
          cd backend
          npm install
          npm run migration:latest
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: Download task definition
        run: |
          aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
      - name: Render Amazon ECS task definition
        id: render-web-container
        uses: aws-actions/amazon-ecs-render-task-definition@v1
        with:
          task-definition: task-definition.json
          container-name: infisical-core-platform
          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
          environment-variables: "LOG_LEVEL=info"
      - name: Deploy to Amazon ECS service
        uses: aws-actions/amazon-ecs-deploy-task-definition@v2
        with:
          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
          service: infisical-core-platform
          cluster: infisical-core-platform
          wait-for-service-stability: true
      - name: Post slack message
        uses: slackapi/slack-github-action@v2.0.0
        with:
          webhook: ${{ secrets.SLACK_DEPLOYMENT_WEBHOOK_URL }}
          webhook-type: incoming-webhook
          payload: |
            text: "*Deployment Status Update*: ${{ job.status }}"
            blocks:
              - type: "section"
                text:
                  type: "mrkdwn"
                  text: "*Deployment Status Update*: ${{ job.status }}"
              - type: "section"
                fields:
                  - type: "mrkdwn"
                    text: "*Application:*\nInfisical Core"
                  - type: "mrkdwn"
                    text: "*Instance Type:*\nShared Infisical Cloud"
              - type: "section"
                fields:
                  - type: "mrkdwn"
                    text: "*Region:*\nEU"
                  - type: "mrkdwn"
                    text: "*Git Tag:*\n<https://github.com/Infisical/infisical/commit/${{ steps.commit.outputs.short }}>"
@@ -535,6 +535,107 @@ describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }]
    );
  });

  test.each(secretTestCases)("Bulk upsert secrets in path $path", async ({ secret, path }) => {
    const updateSharedSecRes = await testServer.inject({
      method: "PATCH",
      url: `/api/v3/secrets/batch/raw`,
      headers: {
        authorization: `Bearer ${authToken}`
      },
      body: {
        workspaceId: seedData1.projectV3.id,
        environment: seedData1.environment.slug,
        secretPath: path,
        mode: "upsert",
        secrets: Array.from(Array(5)).map((_e, i) => ({
          secretKey: `BULK-${secret.key}-${i + 1}`,
          secretValue: "update-value",
          secretComment: secret.comment
        }))
      }
    });
    expect(updateSharedSecRes.statusCode).toBe(200);
    const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
    expect(updateSharedSecPayload).toHaveProperty("secrets");

    // bulk ones should exist
    const secrets = await getSecrets(seedData1.environment.slug, path);
    expect(secrets).toEqual(
      expect.arrayContaining(
        Array.from(Array(5)).map((_e, i) =>
          expect.objectContaining({
            secretKey: `BULK-${secret.key}-${i + 1}`,
            secretValue: "update-value",
            type: SecretType.Shared
          })
        )
      )
    );
    await Promise.all(
      Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
    );
  });

  test("Bulk upsert secrets in path multiple paths", async () => {
    const firstBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
      secretKey: `BULK-KEY-${secretTestCases[0].secret.key}-${i + 1}`,
      secretValue: "update-value",
      secretComment: "comment",
      secretPath: secretTestCases[0].path
    }));
    const secondBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
      secretKey: `BULK-KEY-${secretTestCases[1].secret.key}-${i + 1}`,
      secretValue: "update-value",
      secretComment: "comment",
      secretPath: secretTestCases[1].path
    }));
    const testSecrets = [...firstBatchSecrets, ...secondBatchSecrets];

    const updateSharedSecRes = await testServer.inject({
      method: "PATCH",
      url: `/api/v3/secrets/batch/raw`,
      headers: {
        authorization: `Bearer ${authToken}`
      },
      body: {
        workspaceId: seedData1.projectV3.id,
        environment: seedData1.environment.slug,
        mode: "upsert",
        secrets: testSecrets
      }
    });
    expect(updateSharedSecRes.statusCode).toBe(200);
    const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
    expect(updateSharedSecPayload).toHaveProperty("secrets");

    // bulk ones should exist
    const firstBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[0].path);
    expect(firstBatchSecretsOnInfisical).toEqual(
      expect.arrayContaining(
        firstBatchSecrets.map((el) =>
          expect.objectContaining({
            secretKey: el.secretKey,
            secretValue: "update-value",
            type: SecretType.Shared
          })
        )
      )
    );
    const secondBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[1].path);
    expect(secondBatchSecretsOnInfisical).toEqual(
      expect.arrayContaining(
        secondBatchSecrets.map((el) =>
          expect.objectContaining({
            secretKey: el.secretKey,
            secretValue: "update-value",
            type: SecretType.Shared
          })
        )
      )
    );
    await Promise.all(testSecrets.map((el) => deleteSecret({ path: el.secretPath, key: el.secretKey })));
  });

  test.each(secretTestCases)("Bulk delete secrets in path $path", async ({ secret, path }) => {
    await Promise.all(
      Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path }))
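The tests above drive the batch route through `testServer.inject`; the same request shape applies to any client of a running instance. Below is a minimal sketch using `fetch`, assuming a reachable instance at `INFISICAL_URL` and a bearer token in `INFISICAL_TOKEN` — both placeholders introduced here, not values from this diff.

```typescript
// Minimal sketch of calling the bulk upsert endpoint exercised by the tests above.
// INFISICAL_URL, INFISICAL_TOKEN and the workspace/environment arguments are placeholders.
const INFISICAL_URL = process.env.INFISICAL_URL ?? "http://localhost:8080";
const INFISICAL_TOKEN = process.env.INFISICAL_TOKEN ?? "";

async function bulkUpsertSecrets(workspaceId: string, environment: string, secretPath: string) {
  const res = await fetch(`${INFISICAL_URL}/api/v3/secrets/batch/raw`, {
    method: "PATCH",
    headers: {
      authorization: `Bearer ${INFISICAL_TOKEN}`,
      "content-type": "application/json"
    },
    body: JSON.stringify({
      workspaceId,
      environment,
      secretPath,
      mode: "upsert", // same mode the tests above exercise
      secrets: [{ secretKey: "BULK-EXAMPLE-1", secretValue: "update-value", secretComment: "comment" }]
    })
  });
  if (!res.ok) throw new Error(`Bulk upsert failed: ${res.status}`);
  return (await res.json()) as { secrets: unknown[] };
}
```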
@@ -23,14 +23,14 @@ export default {
   name: "knex-env",
   transformMode: "ssr",
   async setup() {
-    const logger = await initLogger();
-    const cfg = initEnvConfig(logger);
+    const logger = initLogger();
+    const envConfig = initEnvConfig(logger);
     const db = initDbConnection({
-      dbConnectionUri: cfg.DB_CONNECTION_URI,
-      dbRootCert: cfg.DB_ROOT_CERT
+      dbConnectionUri: envConfig.DB_CONNECTION_URI,
+      dbRootCert: envConfig.DB_ROOT_CERT
     });

-    const redis = new Redis(cfg.REDIS_URL);
+    const redis = new Redis(envConfig.REDIS_URL);
     await redis.flushdb("SYNC");

     try {
@@ -42,6 +42,7 @@ export default {
         },
         true
       );
+
       await db.migrate.latest({
         directory: path.join(__dirname, "../src/db/migrations"),
         extension: "ts",
@@ -52,14 +53,24 @@ export default {
         directory: path.join(__dirname, "../src/db/seeds"),
         extension: "ts"
       });
-      const smtp = mockSmtpServer();
-      const queue = queueServiceFactory(cfg.REDIS_URL, { dbConnectionUrl: cfg.DB_CONNECTION_URI });
-      const keyStore = keyStoreFactory(cfg.REDIS_URL);
-
-      const hsmModule = initializeHsmModule();
+      const smtp = mockSmtpServer();
+      const queue = queueServiceFactory(envConfig.REDIS_URL, { dbConnectionUrl: envConfig.DB_CONNECTION_URI });
+      const keyStore = keyStoreFactory(envConfig.REDIS_URL);
+
+      const hsmModule = initializeHsmModule(envConfig);
       hsmModule.initialize();

-      const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule(), redis });
+      const server = await main({
+        db,
+        smtp,
+        logger,
+        queue,
+        keyStore,
+        hsmModule: hsmModule.getModule(),
+        redis,
+        envConfig
+      });

       // @ts-expect-error type
       globalThis.testServer = server;
@@ -73,8 +84,8 @@ export default {
           organizationId: seedData1.organization.id,
           accessVersion: 1
         },
-        cfg.AUTH_SECRET,
-        { expiresIn: cfg.JWT_AUTH_LIFETIME }
+        envConfig.AUTH_SECRET,
+        { expiresIn: envConfig.JWT_AUTH_LIFETIME }
       );
     } catch (error) {
       // eslint-disable-next-line
@@ -109,3 +120,4 @@ export default {
     };
   }
 };
+
103
backend/package-lock.json
generated
103
backend/package-lock.json
generated
@@ -21,7 +21,7 @@
|
||||
"@fastify/etag": "^5.1.0",
|
||||
"@fastify/formbody": "^7.4.0",
|
||||
"@fastify/helmet": "^11.1.1",
|
||||
"@fastify/multipart": "8.3.0",
|
||||
"@fastify/multipart": "^8.3.1",
|
||||
"@fastify/passport": "^2.4.0",
|
||||
"@fastify/rate-limit": "^9.0.0",
|
||||
"@fastify/request-context": "^5.1.0",
|
||||
@@ -48,8 +48,8 @@
|
||||
"@peculiar/x509": "^1.12.1",
|
||||
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
|
||||
"@sindresorhus/slugify": "1.1.0",
|
||||
"@slack/oauth": "^3.0.1",
|
||||
"@slack/web-api": "^7.3.4",
|
||||
"@slack/oauth": "^3.0.2",
|
||||
"@slack/web-api": "^7.8.0",
|
||||
"@ucast/mongo2js": "^1.3.4",
|
||||
"ajv": "^8.12.0",
|
||||
"argon2": "^0.31.2",
|
||||
@@ -81,7 +81,7 @@
|
||||
"mongodb": "^6.8.1",
|
||||
"ms": "^2.1.3",
|
||||
"mysql2": "^3.9.8",
|
||||
"nanoid": "^3.3.4",
|
||||
"nanoid": "^3.3.8",
|
||||
"nodemailer": "^6.9.9",
|
||||
"odbc": "^2.4.9",
|
||||
"openid-client": "^5.6.5",
|
||||
@@ -5423,13 +5423,10 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@fastify/busboy": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz",
|
||||
"integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
}
|
||||
"version": "3.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-3.1.1.tgz",
|
||||
"integrity": "sha512-5DGmA8FTdB2XbDeEwc/5ZXBl6UbBAyBOOLlPuBnZ/N1SwdH9Ii+cOX3tBROlDgcTXxjOYnLMVoKk9+FXAw0CJw==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@fastify/cookie": {
|
||||
"version": "9.3.1",
|
||||
@@ -5502,19 +5499,41 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@fastify/multipart": {
|
||||
"version": "8.3.0",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/multipart/-/multipart-8.3.0.tgz",
|
||||
"integrity": "sha512-A8h80TTyqUzaMVH0Cr9Qcm6RxSkVqmhK/MVBYHYeRRSUbUYv08WecjWKSlG2aSnD4aGI841pVxAjC+G1GafUeQ==",
|
||||
"version": "8.3.1",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/multipart/-/multipart-8.3.1.tgz",
|
||||
"integrity": "sha512-pncbnG28S6MIskFSVRtzTKE9dK+GrKAJl0NbaQ/CG8ded80okWFsYKzSlP9haaLNQhNRDOoHqmGQNvgbiPVpWQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@fastify/busboy": "^2.1.0",
|
||||
"@fastify/deepmerge": "^1.0.0",
|
||||
"@fastify/error": "^3.0.0",
|
||||
"@fastify/busboy": "^3.0.0",
|
||||
"@fastify/deepmerge": "^2.0.0",
|
||||
"@fastify/error": "^4.0.0",
|
||||
"fastify-plugin": "^4.0.0",
|
||||
"secure-json-parse": "^2.4.0",
|
||||
"stream-wormhole": "^1.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@fastify/multipart/node_modules/@fastify/deepmerge": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/deepmerge/-/deepmerge-2.0.1.tgz",
|
||||
"integrity": "sha512-hx+wJQr9Ph1hY/dyzY0SxqjumMyqZDlIF6oe71dpRKDHUg7dFQfjG94qqwQ274XRjmUrwKiYadex8XplNHx3CA==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fastify"
|
||||
},
|
||||
{
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/fastify"
|
||||
}
|
||||
],
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@fastify/multipart/node_modules/@fastify/error": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/error/-/error-4.0.0.tgz",
|
||||
"integrity": "sha512-OO/SA8As24JtT1usTUTKgGH7uLvhfwZPwlptRi2Dp5P4KKmJI3gvsZ8MIHnNwDs4sLf/aai5LzTyl66xr7qMxA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@fastify/passport": {
|
||||
"version": "2.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/passport/-/passport-2.4.0.tgz",
|
||||
@@ -9049,6 +9068,7 @@
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@slack/logger/-/logger-4.0.0.tgz",
|
||||
"integrity": "sha512-Wz7QYfPAlG/DR+DfABddUZeNgoeY7d1J39OCR2jR+v7VBsB8ezulDK5szTnDDPDwLH5IWhLvXIHlCFZV7MSKgA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/node": ">=18.0.0"
|
||||
},
|
||||
@@ -9058,12 +9078,13 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@slack/oauth": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@slack/oauth/-/oauth-3.0.1.tgz",
|
||||
"integrity": "sha512-TuR9PI6bYKX6qHC7FQI4keMnhj45TNfSNQtTU3mtnHUX4XLM2dYLvRkUNADyiLTle2qu2rsOQtCIsZJw6H0sDA==",
|
||||
"version": "3.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@slack/oauth/-/oauth-3.0.2.tgz",
|
||||
"integrity": "sha512-MdPS8AP9n3u/hBeqRFu+waArJLD/q+wOSZ48ktMTwxQLc6HJyaWPf8soqAyS/b0D6IlvI5TxAdyRyyv3wQ5IVw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@slack/logger": "^4",
|
||||
"@slack/web-api": "^7.3.4",
|
||||
"@slack/web-api": "^7.8.0",
|
||||
"@types/jsonwebtoken": "^9",
|
||||
"@types/node": ">=18",
|
||||
"jsonwebtoken": "^9",
|
||||
@@ -9075,24 +9096,26 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@slack/types": {
|
||||
"version": "2.12.0",
|
||||
"resolved": "https://registry.npmjs.org/@slack/types/-/types-2.12.0.tgz",
|
||||
"integrity": "sha512-yFewzUomYZ2BYaGJidPuIgjoYj5wqPDmi7DLSaGIkf+rCi4YZ2Z3DaiYIbz7qb/PL2NmamWjCvB7e9ArI5HkKg==",
|
||||
"version": "2.14.0",
|
||||
"resolved": "https://registry.npmjs.org/@slack/types/-/types-2.14.0.tgz",
|
||||
"integrity": "sha512-n0EGm7ENQRxlXbgKSrQZL69grzg1gHLAVd+GlRVQJ1NSORo0FrApR7wql/gaKdu2n4TO83Sq/AmeUOqD60aXUA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 12.13.0",
|
||||
"npm": ">= 6.12.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@slack/web-api": {
|
||||
"version": "7.3.4",
|
||||
"resolved": "https://registry.npmjs.org/@slack/web-api/-/web-api-7.3.4.tgz",
|
||||
"integrity": "sha512-KwLK8dlz2lhr3NO7kbYQ7zgPTXPKrhq1JfQc0etJ0K8LSJhYYnf8GbVznvgDT/Uz1/pBXfFQnoXjrQIOKAdSuw==",
|
||||
"version": "7.8.0",
|
||||
"resolved": "https://registry.npmjs.org/@slack/web-api/-/web-api-7.8.0.tgz",
|
||||
"integrity": "sha512-d4SdG+6UmGdzWw38a4sN3lF/nTEzsDxhzU13wm10ejOpPehtmRoqBKnPztQUfFiWbNvSb4czkWYJD4kt+5+Fuw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@slack/logger": "^4.0.0",
|
||||
"@slack/types": "^2.9.0",
|
||||
"@types/node": ">=18.0.0",
|
||||
"@types/retry": "0.12.0",
|
||||
"axios": "^1.7.4",
|
||||
"axios": "^1.7.8",
|
||||
"eventemitter3": "^5.0.1",
|
||||
"form-data": "^4.0.0",
|
||||
"is-electron": "2.2.2",
|
||||
@@ -9110,6 +9133,7 @@
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
|
||||
"integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
},
|
||||
@@ -10526,7 +10550,8 @@
|
||||
"node_modules/@types/retry": {
|
||||
"version": "0.12.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/retry/-/retry-0.12.0.tgz",
|
||||
"integrity": "sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA=="
|
||||
"integrity": "sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/safe-regex": {
|
||||
"version": "1.1.6",
|
||||
@@ -11969,9 +11994,10 @@
|
||||
}
|
||||
},
|
||||
"node_modules/axios": {
|
||||
"version": "1.7.4",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.4.tgz",
|
||||
"integrity": "sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw==",
|
||||
"version": "1.7.9",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz",
|
||||
"integrity": "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"follow-redirects": "^1.15.6",
|
||||
"form-data": "^4.0.0",
|
||||
@@ -13926,7 +13952,8 @@
|
||||
"node_modules/eventemitter3": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz",
|
||||
"integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA=="
|
||||
"integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/events": {
|
||||
"version": "3.3.0",
|
||||
@@ -15942,7 +15969,8 @@
|
||||
"node_modules/is-electron": {
|
||||
"version": "2.2.2",
|
||||
"resolved": "https://registry.npmjs.org/is-electron/-/is-electron-2.2.2.tgz",
|
||||
"integrity": "sha512-FO/Rhvz5tuw4MCWkpMzHFKWD2LsfHzIb7i6MdPYZ/KW7AlxawyLkqdy+jPZP1WubqEADE3O4FUENlJHDfQASRg=="
|
||||
"integrity": "sha512-FO/Rhvz5tuw4MCWkpMzHFKWD2LsfHzIb7i6MdPYZ/KW7AlxawyLkqdy+jPZP1WubqEADE3O4FUENlJHDfQASRg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/is-extglob": {
|
||||
"version": "2.1.1",
|
||||
@@ -18182,6 +18210,7 @@
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz",
|
||||
"integrity": "sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
@@ -18228,6 +18257,7 @@
|
||||
"version": "6.6.2",
|
||||
"resolved": "https://registry.npmjs.org/p-queue/-/p-queue-6.6.2.tgz",
|
||||
"integrity": "sha512-RwFpb72c/BhQLEXIZ5K2e+AhgNVmIejGlTgiB9MzZ0e93GRvqZ7uSi0dvRF7/XIXDeNkra2fNHBxTyPDGySpjQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"eventemitter3": "^4.0.4",
|
||||
"p-timeout": "^3.2.0"
|
||||
@@ -18242,12 +18272,14 @@
|
||||
"node_modules/p-queue/node_modules/eventemitter3": {
|
||||
"version": "4.0.7",
|
||||
"resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz",
|
||||
"integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw=="
|
||||
"integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/p-retry": {
|
||||
"version": "4.6.2",
|
||||
"resolved": "https://registry.npmjs.org/p-retry/-/p-retry-4.6.2.tgz",
|
||||
"integrity": "sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/retry": "0.12.0",
|
||||
"retry": "^0.13.1"
|
||||
@@ -18271,6 +18303,7 @@
|
||||
"version": "3.2.0",
|
||||
"resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-3.2.0.tgz",
|
||||
"integrity": "sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"p-finally": "^1.0.0"
|
||||
},
|
||||
|
@@ -45,24 +45,24 @@
     "test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
     "generate:component": "tsx ./scripts/create-backend-file.ts",
     "generate:schema": "tsx ./scripts/generate-schema-types.ts && eslint --fix --ext ts ./src/db/schemas",
-    "auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest",
-    "auditlog-migration:up": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:up",
-    "auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
-    "auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
-    "auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
-    "auditlog-migration:unlock": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:unlock",
-    "auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
+    "auditlog-migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:latest",
+    "auditlog-migration:up": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:up",
+    "auditlog-migration:down": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:down",
+    "auditlog-migration:list": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:list",
+    "auditlog-migration:status": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:status",
+    "auditlog-migration:unlock": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:unlock",
+    "auditlog-migration:rollback": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:rollback",
     "migration:new": "tsx ./scripts/create-migration.ts",
-    "migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
-    "migration:down": "npm run auditlog-migration:down && knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
-    "migration:list": "npm run auditlog-migration:list && knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
-    "migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
-    "migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
-    "migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
-    "migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./src/db/knexfile.ts migrate:unlock",
+    "migration:up": "npm run auditlog-migration:up && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:up",
+    "migration:down": "npm run auditlog-migration:down && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:down",
+    "migration:list": "npm run auditlog-migration:list && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:list",
+    "migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && npm run auditlog-migration:latest && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:latest",
+    "migration:status": "npm run auditlog-migration:status && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:status",
+    "migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./dist/db/knexfile.mjs migrate:rollback",
+    "migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./dist/db/knexfile.mjs migrate:unlock",
     "migrate:org": "tsx ./scripts/migrate-organization.ts",
     "seed:new": "tsx ./scripts/create-seed-file.ts",
-    "seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
+    "seed": "knex --knexfile ./dist/db/knexfile.ts --client pg seed:run",
     "db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
   },
   "keywords": [],
@@ -129,7 +129,7 @@
     "@fastify/etag": "^5.1.0",
     "@fastify/formbody": "^7.4.0",
     "@fastify/helmet": "^11.1.1",
-    "@fastify/multipart": "8.3.0",
+    "@fastify/multipart": "8.3.1",
     "@fastify/passport": "^2.4.0",
     "@fastify/rate-limit": "^9.0.0",
     "@fastify/request-context": "^5.1.0",
@@ -156,8 +156,8 @@
     "@peculiar/x509": "^1.12.1",
     "@serdnam/pino-cloudwatch-transport": "^1.0.4",
     "@sindresorhus/slugify": "1.1.0",
-    "@slack/oauth": "^3.0.1",
-    "@slack/web-api": "^7.3.4",
+    "@slack/oauth": "^3.0.2",
+    "@slack/web-api": "^7.8.0",
     "@ucast/mongo2js": "^1.3.4",
     "ajv": "^8.12.0",
     "argon2": "^0.31.2",
@@ -189,7 +189,7 @@
     "mongodb": "^6.8.1",
     "ms": "^2.1.3",
     "mysql2": "^3.9.8",
-    "nanoid": "^3.3.4",
+    "nanoid": "^3.3.8",
     "nodemailer": "^6.9.9",
     "odbc": "^2.4.9",
     "openid-client": "^5.6.5",
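The updated scripts run knex against compiled `./dist/db/*.mjs` files and invoke `node ./dist/db/rename-migrations-to-mjs.mjs` before `migrate:latest`. That rename script is not included in this diff, so the sketch below is a hypothetical guess at what such a build-output rename step could do (give compiled migration files an `.mjs` extension so knex's `loadExtensions: [".mjs"]` picks them up); the directory path and `.js` → `.mjs` mapping are assumptions.

```typescript
// Hypothetical sketch only — not the actual contents of rename-migrations-to-mjs.mjs.
import { readdirSync, renameSync } from "node:fs";
import path from "node:path";

const migrationDir = path.join(process.cwd(), "dist/db/migrations"); // assumed location

for (const file of readdirSync(migrationDir)) {
  if (file.endsWith(".js")) {
    // Rename compiled migrations so knex (configured with loadExtensions [".mjs"]) finds them.
    renameSync(path.join(migrationDir, file), path.join(migrationDir, file.replace(/\.js$/, ".mjs")));
  }
}
```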
6  backend/src/@types/fastify.d.ts  (vendored)
@@ -93,6 +93,12 @@ import { TUserEngagementServiceFactory } from "@app/services/user-engagement/use
 import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
 import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";

+declare module "@fastify/request-context" {
+  interface RequestContextData {
+    reqId: string;
+  }
+}
+
 declare module "fastify" {
   interface Session {
     callbackPort: string;
105  backend/src/auto-start-migrations.ts  (Normal file)
@@ -0,0 +1,105 @@
import path from "node:path";

import dotenv from "dotenv";
import { Knex } from "knex";
import { Logger } from "pino";

import { PgSqlLock } from "./keystore/keystore";

dotenv.config();

type TArgs = {
  auditLogDb?: Knex;
  applicationDb: Knex;
  logger: Logger;
};

const isProduction = process.env.NODE_ENV === "production";
const migrationConfig = {
  directory: path.join(__dirname, "./db/migrations"),
  loadExtensions: [".mjs", ".ts"],
  tableName: "infisical_migrations"
};

const migrationStatusCheckErrorHandler = (err: Error) => {
  // happens for first time in which the migration table itself is not created yet
  // error: select * from "infisical_migrations" - relation "infisical_migrations" does not exist
  if (err?.message?.includes("does not exist")) {
    return true;
  }
  throw err;
};

export const runMigrations = async ({ applicationDb, auditLogDb, logger }: TArgs) => {
  try {
    // akhilmhdh(Feb 10 2025): 2 years from now remove this
    if (isProduction) {
      const migrationTable = migrationConfig.tableName;
      const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
      if (hasMigrationTable) {
        const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
        if (firstFile?.name?.includes(".ts")) {
          await applicationDb(migrationTable).update({
            name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
          });
        }
      }
      if (auditLogDb) {
        const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
        if (hasMigrationTableInAuditLog) {
          const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
          if (firstFile?.name?.includes(".ts")) {
            await auditLogDb(migrationTable).update({
              name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
            });
          }
        }
      }
    }

    const shouldRunMigration = Boolean(
      await applicationDb.migrate.status(migrationConfig).catch(migrationStatusCheckErrorHandler)
    ); // db.length - code.length
    if (!shouldRunMigration) {
      logger.info("No migrations pending: Skipping migration process.");
      return;
    }

    if (auditLogDb) {
      await auditLogDb.transaction(async (tx) => {
        await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
        logger.info("Running audit log migrations.");

        const didPreviousInstanceRunMigration = !(await auditLogDb.migrate
          .status(migrationConfig)
          .catch(migrationStatusCheckErrorHandler));
        if (didPreviousInstanceRunMigration) {
          logger.info("No audit log migrations pending: Applied by previous instance. Skipping migration process.");
          return;
        }

        await auditLogDb.migrate.latest(migrationConfig);
        logger.info("Finished audit log migrations.");
      });
    }

    await applicationDb.transaction(async (tx) => {
      await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
      logger.info("Running application migrations.");

      const didPreviousInstanceRunMigration = !(await applicationDb.migrate
        .status(migrationConfig)
        .catch(migrationStatusCheckErrorHandler));
      if (didPreviousInstanceRunMigration) {
        logger.info("No application migrations pending: Applied by previous instance. Skipping migration process.");
        return;
      }

      await applicationDb.migrate.latest(migrationConfig);
      logger.info("Finished application migrations.");
    });
  } catch (err) {
    logger.error(err, "Boot up migration failed");
    process.exit(1);
  }
};
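The boot-time runner above serializes concurrent instances with a transaction-scoped Postgres advisory lock: whichever instance grabs `pg_advisory_xact_lock` first applies the migrations, and instances that block on the lock re-check the pending status afterwards and skip. A minimal sketch of just that pattern, assuming a knex instance and a placeholder lock id (the real id lives in `PgSqlLock`):

```typescript
// Minimal sketch of the advisory-lock pattern used by runMigrations above.
import { Knex } from "knex";

const BOOT_UP_MIGRATION_LOCK = 2023; // placeholder id; the real value lives in PgSqlLock

export const runOnce = async (db: Knex, migrate: () => Promise<void>) => {
  await db.transaction(async (tx) => {
    // Blocks until no other transaction holds the same advisory lock.
    await tx.raw("SELECT pg_advisory_xact_lock(?)", [BOOT_UP_MIGRATION_LOCK]);
    // Re-check pending work here before migrating, exactly as runMigrations does,
    // so an instance that waited on the lock does not re-apply anything.
    await migrate();
  }); // the lock is released automatically when the transaction ends
};
```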
@@ -49,6 +49,9 @@ export const initDbConnection = ({
          ca: Buffer.from(dbRootCert, "base64").toString("ascii")
        }
      : false
   },
+  migrations: {
+    tableName: "infisical_migrations"
+  }
 });

@@ -64,6 +67,9 @@ export const initDbConnection = ({
            ca: Buffer.from(replicaDbCertificate, "base64").toString("ascii")
          }
        : false
     },
+    migrations: {
+      tableName: "infisical_migrations"
+    }
   });
 });
@@ -98,6 +104,9 @@ export const initAuditLogDbConnection = ({
          ca: Buffer.from(dbRootCert, "base64").toString("ascii")
        }
      : false
   },
+  migrations: {
+    tableName: "infisical_migrations"
+  }
 });

@@ -38,7 +38,8 @@ export default {
       directory: "./seeds"
     },
     migrations: {
-      tableName: "infisical_migrations"
+      tableName: "infisical_migrations",
+      loadExtensions: [".mjs"]
     }
   },
   production: {
@@ -62,7 +63,8 @@ export default {
       max: 10
     },
     migrations: {
-      tableName: "infisical_migrations"
+      tableName: "infisical_migrations",
+      loadExtensions: [".mjs"]
     }
   }
 } as Knex.Config;
130  backend/src/db/migrations/20250210101840_webhook-to-kms.ts  (Normal file)
@@ -0,0 +1,130 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
  const hasEncryptedKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedPassKey");
  const hasEncryptedUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");
  const hasUrl = await knex.schema.hasColumn(TableName.Webhook, "url");

  const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook);
  if (hasWebhookTable) {
    await knex.schema.alterTable(TableName.Webhook, (t) => {
      if (!hasEncryptedKey) t.binary("encryptedPassKey");
      if (!hasEncryptedUrl) t.binary("encryptedUrl");
      if (hasUrl) t.string("url").nullable().alter();
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const projectEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
  const webhooks = await knex(TableName.Webhook)
    .where({})
    .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.Webhook}.envId`)
    .select(
      "url",
      "encryptedSecretKey",
      "iv",
      "tag",
      "keyEncoding",
      "urlCipherText",
      "urlIV",
      "urlTag",
      knex.ref("id").withSchema(TableName.Webhook),
      "envId"
    )
    .select(knex.ref("projectId").withSchema(TableName.Environment))
    .orderBy(`${TableName.Environment}.projectId` as "projectId");

  const updatedWebhooks = await Promise.all(
    webhooks.map(async (el) => {
      let projectKmsService = projectEncryptionRingBuffer.getItem(el.projectId);
      if (!projectKmsService) {
        projectKmsService = await kmsService.createCipherPairWithDataKey(
          {
            type: KmsDataKey.SecretManager,
            projectId: el.projectId
          },
          knex
        );
        projectEncryptionRingBuffer.push(el.projectId, projectKmsService);
      }

      let encryptedSecretKey = null;
      if (el.encryptedSecretKey && el.iv && el.tag && el.keyEncoding) {
        const decyptedSecretKey = infisicalSymmetricDecrypt({
          keyEncoding: el.keyEncoding as SecretKeyEncoding,
          iv: el.iv,
          tag: el.tag,
          ciphertext: el.encryptedSecretKey
        });
        encryptedSecretKey = projectKmsService.encryptor({
          plainText: Buffer.from(decyptedSecretKey, "utf8")
        }).cipherTextBlob;
      }

      const decryptedUrl =
        el.urlIV && el.urlTag && el.urlCipherText && el.keyEncoding
          ? infisicalSymmetricDecrypt({
              keyEncoding: el.keyEncoding as SecretKeyEncoding,
              iv: el.urlIV,
              tag: el.urlTag,
              ciphertext: el.urlCipherText
            })
          : null;

      const encryptedUrl = projectKmsService.encryptor({
        plainText: Buffer.from(decryptedUrl || el.url || "")
      }).cipherTextBlob;
      return { id: el.id, encryptedUrl, encryptedSecretKey, envId: el.envId };
    })
  );

  for (let i = 0; i < updatedWebhooks.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.Webhook)
      .insert(
        updatedWebhooks.slice(i, i + BATCH_SIZE).map((el) => ({
          id: el.id,
          envId: el.envId,
          url: "",
          encryptedUrl: el.encryptedUrl,
          encryptedPassKey: el.encryptedSecretKey
        }))
      )
      .onConflict("id")
      .merge();
  }

  if (hasWebhookTable) {
    await knex.schema.alterTable(TableName.Webhook, (t) => {
      if (!hasEncryptedUrl) t.binary("encryptedUrl").notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptedKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedPassKey");
  const hasEncryptedUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");

  const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook);
  if (hasWebhookTable) {
    await knex.schema.alterTable(TableName.Webhook, (t) => {
      if (hasEncryptedKey) t.dropColumn("encryptedPassKey");
      if (hasEncryptedUrl) t.dropColumn("encryptedUrl");
    });
  }
}
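This migration (and the ones that follow) caches per-project KMS cipher pairs in `createCircularCache(25)` so rows ordered by `projectId` reuse the same data key instead of re-deriving it per row. The helper itself is not part of this diff; the sketch below is a hedged guess at a bounded cache with the same `getItem`/`push` shape, and the real implementation may differ.

```typescript
// Hedged sketch of a bounded key→value cache with the getItem/push interface used above.
export const createCircularCache = <T>(capacity = 25) => {
  const items: { key: string; value: T }[] = [];

  const getItem = (key: string) => items.find((el) => el.key === key)?.value;

  const push = (key: string, value: T) => {
    if (items.length >= capacity) items.shift(); // evict the oldest entry once full
    items.push({ key, value });
  };

  return { getItem, push };
};
```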
@@ -0,0 +1,111 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
  const hasEncryptedInputColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedInput");
  const hasInputCiphertextColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputCiphertext");
  const hasInputIVColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputIV");
  const hasInputTagColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputTag");

  const hasDynamicSecretTable = await knex.schema.hasTable(TableName.DynamicSecret);
  if (hasDynamicSecretTable) {
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      if (!hasEncryptedInputColumn) t.binary("encryptedInput");
      if (hasInputCiphertextColumn) t.text("inputCiphertext").nullable().alter();
      if (hasInputIVColumn) t.string("inputIV").nullable().alter();
      if (hasInputTagColumn) t.string("inputTag").nullable().alter();
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const projectEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const dynamicSecretRootCredentials = await knex(TableName.DynamicSecret)
    .join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.DynamicSecret}.folderId`)
    .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
    .select(selectAllTableCols(TableName.DynamicSecret))
    .select(knex.ref("projectId").withSchema(TableName.Environment))
    .orderBy(`${TableName.Environment}.projectId` as "projectId");

  const updatedDynamicSecrets = await Promise.all(
    dynamicSecretRootCredentials.map(async ({ projectId, ...el }) => {
      let projectKmsService = projectEncryptionRingBuffer.getItem(projectId);
      if (!projectKmsService) {
        projectKmsService = await kmsService.createCipherPairWithDataKey(
          {
            type: KmsDataKey.SecretManager,
            projectId
          },
          knex
        );
        projectEncryptionRingBuffer.push(projectId, projectKmsService);
      }

      const decryptedInputData =
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore This will be removed in next cycle so ignore the ts missing error
        el.inputIV && el.inputTag && el.inputCiphertext && el.keyEncoding
          ? infisicalSymmetricDecrypt({
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              keyEncoding: el.keyEncoding as SecretKeyEncoding,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              iv: el.inputIV,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              tag: el.inputTag,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              ciphertext: el.inputCiphertext
            })
          : "";

      const encryptedInput = projectKmsService.encryptor({
        plainText: Buffer.from(decryptedInputData)
      }).cipherTextBlob;

      return { ...el, encryptedInput };
    })
  );

  for (let i = 0; i < updatedDynamicSecrets.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.DynamicSecret)
      .insert(updatedDynamicSecrets.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }

  if (hasDynamicSecretTable) {
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      if (!hasEncryptedInputColumn) t.binary("encryptedInput").notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptedInputColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedInput");

  const hasDynamicSecretTable = await knex.schema.hasTable(TableName.DynamicSecret);
  if (hasDynamicSecretTable) {
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      if (hasEncryptedInputColumn) t.dropColumn("encryptedInput");
    });
  }
}
@@ -0,0 +1,103 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
  const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData");

  const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation);
  if (hasRotationTable) {
    await knex.schema.alterTable(TableName.SecretRotation, (t) => {
      if (!hasEncryptedRotationData) t.binary("encryptedRotationData");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const projectEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const secretRotations = await knex(TableName.SecretRotation)
    .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretRotation}.envId`)
    .select(selectAllTableCols(TableName.SecretRotation))
    .select(knex.ref("projectId").withSchema(TableName.Environment))
    .orderBy(`${TableName.Environment}.projectId` as "projectId");

  const updatedRotationData = await Promise.all(
    secretRotations.map(async ({ projectId, ...el }) => {
      let projectKmsService = projectEncryptionRingBuffer.getItem(projectId);
      if (!projectKmsService) {
        projectKmsService = await kmsService.createCipherPairWithDataKey(
          {
            type: KmsDataKey.SecretManager,
            projectId
          },
          knex
        );
        projectEncryptionRingBuffer.push(projectId, projectKmsService);
      }

      const decryptedRotationData =
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore This will be removed in next cycle so ignore the ts missing error
        el.encryptedDataTag && el.encryptedDataIV && el.encryptedData && el.keyEncoding
          ? infisicalSymmetricDecrypt({
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              keyEncoding: el.keyEncoding as SecretKeyEncoding,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              iv: el.encryptedDataIV,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              tag: el.encryptedDataTag,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              ciphertext: el.encryptedData
            })
          : "";

      const encryptedRotationData = projectKmsService.encryptor({
        plainText: Buffer.from(decryptedRotationData)
      }).cipherTextBlob;
      return { ...el, encryptedRotationData };
    })
  );

  for (let i = 0; i < updatedRotationData.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.SecretRotation)
      .insert(updatedRotationData.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }

  if (hasRotationTable) {
    await knex.schema.alterTable(TableName.SecretRotation, (t) => {
      if (!hasEncryptedRotationData) t.binary("encryptedRotationData").notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData");

  const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation);
  if (hasRotationTable) {
    await knex.schema.alterTable(TableName.SecretRotation, (t) => {
      if (hasEncryptedRotationData) t.dropColumn("encryptedRotationData");
    });
  }
}
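The re-encryption migrations above all finish the same way: rows are written back in chunks of `BATCH_SIZE` with `insert(...).onConflict("id").merge()`, so existing rows are updated in place rather than duplicated. A generic sketch of that pattern, with the table name, row type, and helper name introduced here for illustration only:

```typescript
// Generic form of the chunked upsert used by these migrations (sketch, not repo code).
import { Knex } from "knex";

export const batchUpsertById = async <T extends { id: string }>(
  knex: Knex,
  table: string,
  rows: T[],
  batchSize = 500
) => {
  for (let i = 0; i < rows.length; i += batchSize) {
    // eslint-disable-next-line no-await-in-loop
    await knex(table)
      .insert(rows.slice(i, i + batchSize))
      .onConflict("id") // merge on the primary key so existing rows are updated in place
      .merge();
  }
};
```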
@@ -0,0 +1,200 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
const reencryptIdentityK8sAuth = async (knex: Knex) => {
  const hasEncryptedKubernetesTokenReviewerJwt = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedKubernetesTokenReviewerJwt"
  );
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedKubernetesCaCertificate"
  );
  const hasidentityKubernetesAuthTable = await knex.schema.hasTable(TableName.IdentityKubernetesAuth);

  const hasEncryptedCaCertColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "encryptedCaCert");
  const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "caCertIV");
  const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "caCertTag");
  const hasEncryptedTokenReviewerJwtColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedTokenReviewerJwt"
  );
  const hasTokenReviewerJwtIVColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "tokenReviewerJwtIV"
  );
  const hasTokenReviewerJwtTagColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "tokenReviewerJwtTag"
  );

  if (hasidentityKubernetesAuthTable) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
      if (hasEncryptedCaCertColumn) t.text("encryptedCaCert").nullable().alter();
      if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
      if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
      if (hasEncryptedTokenReviewerJwtColumn) t.text("encryptedTokenReviewerJwt").nullable().alter();
      if (hasTokenReviewerJwtIVColumn) t.string("tokenReviewerJwtIV").nullable().alter();
      if (hasTokenReviewerJwtTagColumn) t.string("tokenReviewerJwtTag").nullable().alter();

      if (!hasEncryptedKubernetesTokenReviewerJwt) t.binary("encryptedKubernetesTokenReviewerJwt");
      if (!hasEncryptedCertificateColumn) t.binary("encryptedKubernetesCaCertificate");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
  const identityKubernetesConfigs = await knex(TableName.IdentityKubernetesAuth)
    .join(
      TableName.IdentityOrgMembership,
      `${TableName.IdentityOrgMembership}.identityId`,
      `${TableName.IdentityKubernetesAuth}.identityId`
    )
    .join<TOrgBots>(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.IdentityOrgMembership}.orgId`)
    .select(selectAllTableCols(TableName.IdentityKubernetesAuth))
    .select(
      knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot),
      knex.ref("orgId").withSchema(TableName.OrgBot)
    )
    .orderBy(`${TableName.OrgBot}.orgId` as "orgId");

  const updatedIdentityKubernetesConfigs = [];

  for await (const {
    encryptedSymmetricKey,
    symmetricKeyKeyEncoding,
    symmetricKeyTag,
    symmetricKeyIV,
    orgId,
    ...el
  } of identityKubernetesConfigs) {
    let orgKmsService = orgEncryptionRingBuffer.getItem(orgId);

    if (!orgKmsService) {
      orgKmsService = await kmsService.createCipherPairWithDataKey(
        {
          type: KmsDataKey.Organization,
          orgId
        },
        knex
      );
      orgEncryptionRingBuffer.push(orgId, orgKmsService);
    }

    const key = infisicalSymmetricDecrypt({
      ciphertext: encryptedSymmetricKey,
      iv: symmetricKeyIV,
      tag: symmetricKeyTag,
      keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
    });

    const decryptedTokenReviewerJwt =
      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
      // @ts-ignore This will be removed in next cycle so ignore the ts missing error
      el.encryptedTokenReviewerJwt && el.tokenReviewerJwtIV && el.tokenReviewerJwtTag
        ? decryptSymmetric({
            key,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
iv: el.tokenReviewerJwtIV,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
tag: el.tokenReviewerJwtTag,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
ciphertext: el.encryptedTokenReviewerJwt
|
||||
})
|
||||
: "";
|
||||
|
||||
const decryptedCertificate =
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
el.encryptedCaCert && el.caCertIV && el.caCertTag
|
||||
? decryptSymmetric({
|
||||
key,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
iv: el.caCertIV,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
tag: el.caCertTag,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
ciphertext: el.encryptedCaCert
|
||||
})
|
||||
: "";
|
||||
|
||||
const encryptedKubernetesTokenReviewerJwt = orgKmsService.encryptor({
|
||||
plainText: Buffer.from(decryptedTokenReviewerJwt)
|
||||
}).cipherTextBlob;
|
||||
const encryptedKubernetesCaCertificate = orgKmsService.encryptor({
|
||||
plainText: Buffer.from(decryptedCertificate)
|
||||
}).cipherTextBlob;
|
||||
|
||||
updatedIdentityKubernetesConfigs.push({
|
||||
...el,
|
||||
accessTokenTrustedIps: JSON.stringify(el.accessTokenTrustedIps),
|
||||
encryptedKubernetesCaCertificate,
|
||||
encryptedKubernetesTokenReviewerJwt
|
||||
});
|
||||
}
|
||||
|
||||
for (let i = 0; i < updatedIdentityKubernetesConfigs.length; i += BATCH_SIZE) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await knex(TableName.IdentityKubernetesAuth)
|
||||
.insert(updatedIdentityKubernetesConfigs.slice(i, i + BATCH_SIZE))
|
||||
.onConflict("id")
|
||||
.merge();
|
||||
}
|
||||
if (hasidentityKubernetesAuthTable) {
|
||||
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
|
||||
if (!hasEncryptedKubernetesTokenReviewerJwt)
|
||||
t.binary("encryptedKubernetesTokenReviewerJwt").notNullable().alter();
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
await reencryptIdentityK8sAuth(knex);
|
||||
}
|
||||
|
||||
const dropIdentityK8sColumns = async (knex: Knex) => {
|
||||
const hasEncryptedKubernetesTokenReviewerJwt = await knex.schema.hasColumn(
|
||||
TableName.IdentityKubernetesAuth,
|
||||
"encryptedKubernetesTokenReviewerJwt"
|
||||
);
|
||||
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
|
||||
TableName.IdentityKubernetesAuth,
|
||||
"encryptedKubernetesCaCertificate"
|
||||
);
|
||||
const hasidentityKubernetesAuthTable = await knex.schema.hasTable(TableName.IdentityKubernetesAuth);
|
||||
|
||||
if (hasidentityKubernetesAuthTable) {
|
||||
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
|
||||
if (hasEncryptedKubernetesTokenReviewerJwt) t.dropColumn("encryptedKubernetesTokenReviewerJwt");
|
||||
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedKubernetesCaCertificate");
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await dropIdentityK8sColumns(knex);
|
||||
}
|
@@ -0,0 +1,141 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { inMemoryKeyStore } from "@app/keystore/memory";
|
||||
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
|
||||
import { selectAllTableCols } from "@app/lib/knex";
|
||||
import { initLogger } from "@app/lib/logger";
|
||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
||||
|
||||
import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
|
||||
import { getMigrationEnvConfig } from "./utils/env-config";
|
||||
import { createCircularCache } from "./utils/ring-buffer";
|
||||
import { getMigrationEncryptionServices } from "./utils/services";
|
||||
|
||||
const BATCH_SIZE = 500;
|
||||
const reencryptIdentityOidcAuth = async (knex: Knex) => {
|
||||
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
|
||||
TableName.IdentityOidcAuth,
|
||||
"encryptedCaCertificate"
|
||||
);
|
||||
const hasidentityOidcAuthTable = await knex.schema.hasTable(TableName.IdentityOidcAuth);
|
||||
|
||||
const hasEncryptedCaCertColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "encryptedCaCert");
|
||||
const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "caCertIV");
|
||||
const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "caCertTag");
|
||||
|
||||
if (hasidentityOidcAuthTable) {
|
||||
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
|
||||
if (hasEncryptedCaCertColumn) t.text("encryptedCaCert").nullable().alter();
|
||||
if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
|
||||
if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
|
||||
|
||||
if (!hasEncryptedCertificateColumn) t.binary("encryptedCaCertificate");
|
||||
});
|
||||
}
|
||||
|
||||
initLogger();
|
||||
const envConfig = getMigrationEnvConfig();
|
||||
const keyStore = inMemoryKeyStore();
|
||||
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
|
||||
const orgEncryptionRingBuffer =
|
||||
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
|
||||
|
||||
const identityOidcConfig = await knex(TableName.IdentityOidcAuth)
|
||||
.join(
|
||||
TableName.IdentityOrgMembership,
|
||||
`${TableName.IdentityOrgMembership}.identityId`,
|
||||
`${TableName.IdentityOidcAuth}.identityId`
|
||||
)
|
||||
.join<TOrgBots>(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.IdentityOrgMembership}.orgId`)
|
||||
.select(selectAllTableCols(TableName.IdentityOidcAuth))
|
||||
.select(
|
||||
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
|
||||
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
|
||||
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
|
||||
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot),
|
||||
knex.ref("orgId").withSchema(TableName.OrgBot)
|
||||
)
|
||||
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
|
||||
|
||||
const updatedIdentityOidcConfigs = await Promise.all(
|
||||
identityOidcConfig.map(
|
||||
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, orgId, ...el }) => {
|
||||
let orgKmsService = orgEncryptionRingBuffer.getItem(orgId);
|
||||
if (!orgKmsService) {
|
||||
orgKmsService = await kmsService.createCipherPairWithDataKey(
|
||||
{
|
||||
type: KmsDataKey.Organization,
|
||||
orgId
|
||||
},
|
||||
knex
|
||||
);
|
||||
orgEncryptionRingBuffer.push(orgId, orgKmsService);
|
||||
}
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: encryptedSymmetricKey,
|
||||
iv: symmetricKeyIV,
|
||||
tag: symmetricKeyTag,
|
||||
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
});
|
||||
|
||||
const decryptedCertificate =
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
el.encryptedCaCert && el.caCertIV && el.caCertTag
|
||||
? decryptSymmetric({
|
||||
key,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
iv: el.caCertIV,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
tag: el.caCertTag,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
ciphertext: el.encryptedCaCert
|
||||
})
|
||||
: "";
|
||||
|
||||
const encryptedCaCertificate = orgKmsService.encryptor({
|
||||
plainText: Buffer.from(decryptedCertificate)
|
||||
}).cipherTextBlob;
|
||||
|
||||
return {
|
||||
...el,
|
||||
accessTokenTrustedIps: JSON.stringify(el.accessTokenTrustedIps),
|
||||
encryptedCaCertificate
|
||||
};
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
for (let i = 0; i < updatedIdentityOidcConfigs.length; i += BATCH_SIZE) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await knex(TableName.IdentityOidcAuth)
|
||||
.insert(updatedIdentityOidcConfigs.slice(i, i + BATCH_SIZE))
|
||||
.onConflict("id")
|
||||
.merge();
|
||||
}
|
||||
};
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
await reencryptIdentityOidcAuth(knex);
|
||||
}
|
||||
|
||||
const dropIdentityOidcColumns = async (knex: Knex) => {
|
||||
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
|
||||
TableName.IdentityOidcAuth,
|
||||
"encryptedCaCertificate"
|
||||
);
|
||||
const hasidentityOidcTable = await knex.schema.hasTable(TableName.IdentityOidcAuth);
|
||||
|
||||
if (hasidentityOidcTable) {
|
||||
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
|
||||
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedCaCertificate");
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await dropIdentityOidcColumns(knex);
|
||||
}
|
@@ -0,0 +1,493 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { inMemoryKeyStore } from "@app/keystore/memory";
|
||||
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
|
||||
import { selectAllTableCols } from "@app/lib/knex";
|
||||
import { initLogger } from "@app/lib/logger";
|
||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
||||
|
||||
import { SecretKeyEncoding, TableName } from "../schemas";
|
||||
import { getMigrationEnvConfig } from "./utils/env-config";
|
||||
import { createCircularCache } from "./utils/ring-buffer";
|
||||
import { getMigrationEncryptionServices } from "./utils/services";
|
||||
|
||||
const BATCH_SIZE = 500;
|
||||
const reencryptSamlConfig = async (knex: Knex) => {
|
||||
const hasEncryptedEntrypointColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlEntryPoint");
|
||||
const hasEncryptedIssuerColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlIssuer");
|
||||
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlCertificate");
|
||||
const hasSamlConfigTable = await knex.schema.hasTable(TableName.SamlConfig);
|
||||
|
||||
if (hasSamlConfigTable) {
|
||||
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
|
||||
if (!hasEncryptedEntrypointColumn) t.binary("encryptedSamlEntryPoint");
|
||||
if (!hasEncryptedIssuerColumn) t.binary("encryptedSamlIssuer");
|
||||
if (!hasEncryptedCertificateColumn) t.binary("encryptedSamlCertificate");
|
||||
});
|
||||
}
|
||||
|
||||
initLogger();
|
||||
const envConfig = getMigrationEnvConfig();
|
||||
const keyStore = inMemoryKeyStore();
|
||||
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
|
||||
const orgEncryptionRingBuffer =
|
||||
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
|
||||
|
||||
const samlConfigs = await knex(TableName.SamlConfig)
|
||||
.join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.SamlConfig}.orgId`)
|
||||
.select(selectAllTableCols(TableName.SamlConfig))
|
||||
.select(
|
||||
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
|
||||
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
|
||||
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
|
||||
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
|
||||
)
|
||||
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
|
||||
|
||||
const updatedSamlConfigs = await Promise.all(
|
||||
samlConfigs.map(
|
||||
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
|
||||
let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
|
||||
if (!orgKmsService) {
|
||||
orgKmsService = await kmsService.createCipherPairWithDataKey(
|
||||
{
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: el.orgId
|
||||
},
|
||||
knex
|
||||
);
|
||||
orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
|
||||
}
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: encryptedSymmetricKey,
|
||||
iv: symmetricKeyIV,
|
||||
tag: symmetricKeyTag,
|
||||
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
});
|
||||
|
||||
const decryptedEntryPoint =
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
el.encryptedEntryPoint && el.entryPointIV && el.entryPointTag
|
||||
? decryptSymmetric({
|
||||
key,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
iv: el.entryPointIV,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
tag: el.entryPointTag,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
ciphertext: el.encryptedEntryPoint
|
||||
})
|
||||
: "";
|
||||
|
||||
const decryptedIssuer =
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
el.encryptedIssuer && el.issuerIV && el.issuerTag
|
||||
? decryptSymmetric({
|
||||
key,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
iv: el.issuerIV,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
tag: el.issuerTag,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
ciphertext: el.encryptedIssuer
|
||||
})
|
||||
: "";
|
||||
|
||||
const decryptedCertificate =
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
el.encryptedCert && el.certIV && el.certTag
|
||||
? decryptSymmetric({
|
||||
key,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
iv: el.certIV,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
tag: el.certTag,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
ciphertext: el.encryptedCert
|
||||
})
|
||||
: "";
|
||||
|
||||
const encryptedSamlIssuer = orgKmsService.encryptor({
|
||||
plainText: Buffer.from(decryptedIssuer)
|
||||
}).cipherTextBlob;
|
||||
const encryptedSamlCertificate = orgKmsService.encryptor({
|
||||
plainText: Buffer.from(decryptedCertificate)
|
||||
}).cipherTextBlob;
|
||||
const encryptedSamlEntryPoint = orgKmsService.encryptor({
|
||||
plainText: Buffer.from(decryptedEntryPoint)
|
||||
}).cipherTextBlob;
|
||||
return { ...el, encryptedSamlCertificate, encryptedSamlEntryPoint, encryptedSamlIssuer };
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
for (let i = 0; i < updatedSamlConfigs.length; i += BATCH_SIZE) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await knex(TableName.SamlConfig)
|
||||
.insert(updatedSamlConfigs.slice(i, i + BATCH_SIZE))
|
||||
.onConflict("id")
|
||||
.merge();
|
||||
}
|
||||
|
||||
if (hasSamlConfigTable) {
|
||||
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
|
||||
if (!hasEncryptedEntrypointColumn) t.binary("encryptedSamlEntryPoint").notNullable().alter();
|
||||
if (!hasEncryptedIssuerColumn) t.binary("encryptedSamlIssuer").notNullable().alter();
|
||||
if (!hasEncryptedCertificateColumn) t.binary("encryptedSamlCertificate").notNullable().alter();
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const reencryptLdapConfig = async (knex: Knex) => {
|
||||
const hasEncryptedLdapBindDNColum = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindDN");
|
||||
const hasEncryptedLdapBindPassColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindPass");
|
||||
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapCaCertificate");
|
||||
const hasLdapConfigTable = await knex.schema.hasTable(TableName.LdapConfig);
|
||||
|
||||
const hasEncryptedCACertColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedCACert");
|
||||
const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "caCertIV");
|
||||
const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "caCertTag");
|
||||
const hasEncryptedBindPassColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedBindPass");
|
||||
const hasBindPassIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindPassIV");
|
||||
const hasBindPassTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindPassTag");
|
||||
const hasEncryptedBindDNColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedBindDN");
|
||||
const hasBindDNIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindDNIV");
|
||||
const hasBindDNTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindDNTag");
|
||||
|
||||
if (hasLdapConfigTable) {
|
||||
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
|
||||
if (hasEncryptedCACertColumn) t.text("encryptedCACert").nullable().alter();
|
||||
if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
|
||||
if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
|
||||
if (hasEncryptedBindPassColumn) t.string("encryptedBindPass").nullable().alter();
|
||||
if (hasBindPassIVColumn) t.string("bindPassIV").nullable().alter();
|
||||
if (hasBindPassTagColumn) t.string("bindPassTag").nullable().alter();
|
||||
if (hasEncryptedBindDNColumn) t.string("encryptedBindDN").nullable().alter();
|
||||
if (hasBindDNIVColumn) t.string("bindDNIV").nullable().alter();
|
||||
if (hasBindDNTagColumn) t.string("bindDNTag").nullable().alter();
|
||||
|
||||
if (!hasEncryptedLdapBindDNColum) t.binary("encryptedLdapBindDN");
|
||||
if (!hasEncryptedLdapBindPassColumn) t.binary("encryptedLdapBindPass");
|
||||
if (!hasEncryptedCertificateColumn) t.binary("encryptedLdapCaCertificate");
|
||||
});
|
||||
}
|
||||
|
||||
initLogger();
|
||||
const envConfig = getMigrationEnvConfig();
|
||||
const keyStore = inMemoryKeyStore();
|
||||
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
|
||||
const orgEncryptionRingBuffer =
|
||||
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
|
||||
|
||||
const ldapConfigs = await knex(TableName.LdapConfig)
|
||||
.join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.LdapConfig}.orgId`)
|
||||
.select(selectAllTableCols(TableName.LdapConfig))
|
||||
.select(
|
||||
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
|
||||
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
|
||||
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
|
||||
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
|
||||
)
|
||||
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
|
||||
|
||||
const updatedLdapConfigs = await Promise.all(
|
||||
ldapConfigs.map(
|
||||
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
|
||||
let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
|
||||
if (!orgKmsService) {
|
||||
orgKmsService = await kmsService.createCipherPairWithDataKey(
|
||||
{
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: el.orgId
|
||||
},
|
||||
knex
|
||||
);
|
||||
orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
|
||||
}
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: encryptedSymmetricKey,
|
||||
iv: symmetricKeyIV,
|
||||
tag: symmetricKeyTag,
|
||||
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
});
|
||||
|
||||
const decryptedBindDN =
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
el.encryptedBindDN && el.bindDNIV && el.bindDNTag
|
||||
? decryptSymmetric({
|
||||
key,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
iv: el.bindDNIV,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
tag: el.bindDNTag,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
ciphertext: el.encryptedBindDN
|
||||
})
|
||||
: "";
|
||||
|
||||
const decryptedBindPass =
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
el.encryptedBindPass && el.bindPassIV && el.bindPassTag
|
||||
? decryptSymmetric({
|
||||
key,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
iv: el.bindPassIV,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
tag: el.bindPassTag,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
ciphertext: el.encryptedBindPass
|
||||
})
|
||||
: "";
|
||||
|
||||
const decryptedCertificate =
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
el.encryptedCACert && el.caCertIV && el.caCertTag
|
||||
? decryptSymmetric({
|
||||
key,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
iv: el.caCertIV,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
tag: el.caCertTag,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
ciphertext: el.encryptedCACert
|
||||
})
|
||||
: "";
|
||||
|
||||
const encryptedLdapBindDN = orgKmsService.encryptor({
|
||||
plainText: Buffer.from(decryptedBindDN)
|
||||
}).cipherTextBlob;
|
||||
const encryptedLdapBindPass = orgKmsService.encryptor({
|
||||
plainText: Buffer.from(decryptedBindPass)
|
||||
}).cipherTextBlob;
|
||||
const encryptedLdapCaCertificate = orgKmsService.encryptor({
|
||||
plainText: Buffer.from(decryptedCertificate)
|
||||
}).cipherTextBlob;
|
||||
return { ...el, encryptedLdapBindPass, encryptedLdapBindDN, encryptedLdapCaCertificate };
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
for (let i = 0; i < updatedLdapConfigs.length; i += BATCH_SIZE) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await knex(TableName.LdapConfig)
|
||||
.insert(updatedLdapConfigs.slice(i, i + BATCH_SIZE))
|
||||
.onConflict("id")
|
||||
.merge();
|
||||
}
|
||||
if (hasLdapConfigTable) {
|
||||
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
|
||||
if (!hasEncryptedLdapBindPassColumn) t.binary("encryptedLdapBindPass").notNullable().alter();
|
||||
if (!hasEncryptedLdapBindDNColum) t.binary("encryptedLdapBindDN").notNullable().alter();
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const reencryptOidcConfig = async (knex: Knex) => {
|
||||
const hasEncryptedOidcClientIdColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientId");
|
||||
const hasEncryptedOidcClientSecretColumn = await knex.schema.hasColumn(
|
||||
TableName.OidcConfig,
|
||||
"encryptedOidcClientSecret"
|
||||
);
|
||||
|
||||
const hasEncryptedClientIdColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedClientId");
|
||||
const hasClientIdIVColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientIdIV");
|
||||
const hasClientIdTagColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientIdTag");
|
||||
const hasEncryptedClientSecretColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedClientSecret");
|
||||
const hasClientSecretIVColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientSecretIV");
|
||||
const hasClientSecretTagColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientSecretTag");
|
||||
|
||||
const hasOidcConfigTable = await knex.schema.hasTable(TableName.OidcConfig);
|
||||
|
||||
if (hasOidcConfigTable) {
|
||||
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
|
||||
if (hasEncryptedClientIdColumn) t.text("encryptedClientId").nullable().alter();
|
||||
if (hasClientIdIVColumn) t.string("clientIdIV").nullable().alter();
|
||||
if (hasClientIdTagColumn) t.string("clientIdTag").nullable().alter();
|
||||
if (hasEncryptedClientSecretColumn) t.text("encryptedClientSecret").nullable().alter();
|
||||
if (hasClientSecretIVColumn) t.string("clientSecretIV").nullable().alter();
|
||||
if (hasClientSecretTagColumn) t.string("clientSecretTag").nullable().alter();
|
||||
|
||||
if (!hasEncryptedOidcClientIdColumn) t.binary("encryptedOidcClientId");
|
||||
if (!hasEncryptedOidcClientSecretColumn) t.binary("encryptedOidcClientSecret");
|
||||
});
|
||||
}
|
||||
|
||||
initLogger();
|
||||
const envConfig = getMigrationEnvConfig();
|
||||
const keyStore = inMemoryKeyStore();
|
||||
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
|
||||
const orgEncryptionRingBuffer =
|
||||
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
|
||||
|
||||
const oidcConfigs = await knex(TableName.OidcConfig)
|
||||
.join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.OidcConfig}.orgId`)
|
||||
.select(selectAllTableCols(TableName.OidcConfig))
|
||||
.select(
|
||||
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
|
||||
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
|
||||
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
|
||||
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
|
||||
)
|
||||
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
|
||||
|
||||
const updatedOidcConfigs = await Promise.all(
|
||||
oidcConfigs.map(
|
||||
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
|
||||
let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
|
||||
if (!orgKmsService) {
|
||||
orgKmsService = await kmsService.createCipherPairWithDataKey(
|
||||
{
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: el.orgId
|
||||
},
|
||||
knex
|
||||
);
|
||||
orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
|
||||
}
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: encryptedSymmetricKey,
|
||||
iv: symmetricKeyIV,
|
||||
tag: symmetricKeyTag,
|
||||
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
});
|
||||
|
||||
const decryptedClientId =
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
el.encryptedClientId && el.clientIdIV && el.clientIdTag
|
||||
? decryptSymmetric({
|
||||
key,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
iv: el.clientIdIV,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
tag: el.clientIdTag,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
ciphertext: el.encryptedClientId
|
||||
})
|
||||
: "";
|
||||
|
||||
const decryptedClientSecret =
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
el.encryptedClientSecret && el.clientSecretIV && el.clientSecretTag
|
||||
? decryptSymmetric({
|
||||
key,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
iv: el.clientSecretIV,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
tag: el.clientSecretTag,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
||||
ciphertext: el.encryptedClientSecret
|
||||
})
|
||||
: "";
|
||||
|
||||
const encryptedOidcClientId = orgKmsService.encryptor({
|
||||
plainText: Buffer.from(decryptedClientId)
|
||||
}).cipherTextBlob;
|
||||
const encryptedOidcClientSecret = orgKmsService.encryptor({
|
||||
plainText: Buffer.from(decryptedClientSecret)
|
||||
}).cipherTextBlob;
|
||||
return { ...el, encryptedOidcClientId, encryptedOidcClientSecret };
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
for (let i = 0; i < updatedOidcConfigs.length; i += BATCH_SIZE) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await knex(TableName.OidcConfig)
|
||||
.insert(updatedOidcConfigs.slice(i, i + BATCH_SIZE))
|
||||
.onConflict("id")
|
||||
.merge();
|
||||
}
|
||||
if (hasOidcConfigTable) {
|
||||
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
|
||||
if (!hasEncryptedOidcClientIdColumn) t.binary("encryptedOidcClientId").notNullable().alter();
|
||||
if (!hasEncryptedOidcClientSecretColumn) t.binary("encryptedOidcClientSecret").notNullable().alter();
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
await reencryptSamlConfig(knex);
|
||||
await reencryptLdapConfig(knex);
|
||||
await reencryptOidcConfig(knex);
|
||||
}
|
||||
|
||||
const dropSamlConfigColumns = async (knex: Knex) => {
|
||||
const hasEncryptedEntrypointColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlEntryPoint");
|
||||
const hasEncryptedIssuerColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlIssuer");
|
||||
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlCertificate");
|
||||
const hasSamlConfigTable = await knex.schema.hasTable(TableName.SamlConfig);
|
||||
|
||||
if (hasSamlConfigTable) {
|
||||
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
|
||||
if (hasEncryptedEntrypointColumn) t.dropColumn("encryptedSamlEntryPoint");
|
||||
if (hasEncryptedIssuerColumn) t.dropColumn("encryptedSamlIssuer");
|
||||
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedSamlCertificate");
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const dropLdapConfigColumns = async (knex: Knex) => {
|
||||
const hasEncryptedBindDN = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindDN");
|
||||
const hasEncryptedBindPass = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindPass");
|
||||
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapCaCertificate");
|
||||
const hasLdapConfigTable = await knex.schema.hasTable(TableName.LdapConfig);
|
||||
|
||||
if (hasLdapConfigTable) {
|
||||
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
|
||||
if (hasEncryptedBindDN) t.dropColumn("encryptedLdapBindDN");
|
||||
if (hasEncryptedBindPass) t.dropColumn("encryptedLdapBindPass");
|
||||
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedLdapCaCertificate");
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const dropOidcConfigColumns = async (knex: Knex) => {
|
||||
const hasEncryptedClientId = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientId");
|
||||
const hasEncryptedClientSecret = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientSecret");
|
||||
const hasOidcConfigTable = await knex.schema.hasTable(TableName.OidcConfig);
|
||||
|
||||
if (hasOidcConfigTable) {
|
||||
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
|
||||
if (hasEncryptedClientId) t.dropColumn("encryptedOidcClientId");
|
||||
if (hasEncryptedClientSecret) t.dropColumn("encryptedOidcClientSecret");
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await dropSamlConfigColumns(knex);
|
||||
await dropLdapConfigColumns(knex);
|
||||
await dropOidcConfigColumns(knex);
|
||||
}
|
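Each re-encryption migration above repeats the same per-field flow: decrypt the legacy ciphertext with the org bot's symmetric key, then re-encrypt the plaintext under the org-scoped KMS data key. A minimal sketch of that flow, using placeholder column names (encryptedCert/certIV/certTag) and the decryptSymmetric helper these migrations already import:

import { decryptSymmetric } from "@app/lib/crypto/encryption";

type TLegacyField = { encryptedCert?: string | null; certIV?: string | null; certTag?: string | null };
type TEncryptor = (arg: { plainText: Buffer }) => { cipherTextBlob: Buffer };

const reencryptField = (row: TLegacyField, key: string, encryptor: TEncryptor): Buffer => {
  // 1. Decrypt the legacy value with the org bot symmetric key; fall back to "" when unset.
  const plainText =
    row.encryptedCert && row.certIV && row.certTag
      ? decryptSymmetric({ key, iv: row.certIV, tag: row.certTag, ciphertext: row.encryptedCert })
      : "";
  // 2. Re-encrypt under the org KMS data key; only the resulting blob is written back.
  return encryptor({ plainText: Buffer.from(plainText) }).cipherTextBlob;
};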
backend/src/db/migrations/utils/env-config.ts (new file, 53 lines)
@@ -0,0 +1,53 @@
import { z } from "zod";

import { zpStr } from "@app/lib/zod";

const envSchema = z
  .object({
    DB_CONNECTION_URI: zpStr(z.string().describe("Postgres database connection string")).default(
      `postgresql://${process.env.DB_USER}:${process.env.DB_PASSWORD}@${process.env.DB_HOST}:${process.env.DB_PORT}/${process.env.DB_NAME}`
    ),
    DB_ROOT_CERT: zpStr(z.string().describe("Postgres database base64-encoded CA cert").optional()),
    DB_HOST: zpStr(z.string().describe("Postgres database host").optional()),
    DB_PORT: zpStr(z.string().describe("Postgres database port").optional()).default("5432"),
    DB_USER: zpStr(z.string().describe("Postgres database username").optional()),
    DB_PASSWORD: zpStr(z.string().describe("Postgres database password").optional()),
    DB_NAME: zpStr(z.string().describe("Postgres database name").optional()),
    // TODO(akhilmhdh): will be changed to one
    ENCRYPTION_KEY: zpStr(z.string().optional()),
    ROOT_ENCRYPTION_KEY: zpStr(z.string().optional()),
    // HSM
    HSM_LIB_PATH: zpStr(z.string().optional()),
    HSM_PIN: zpStr(z.string().optional()),
    HSM_KEY_LABEL: zpStr(z.string().optional()),
    HSM_SLOT: z.coerce.number().optional().default(0)
  })
  // To ensure that basic encryption is always possible.
  .refine(
    (data) => Boolean(data.ENCRYPTION_KEY) || Boolean(data.ROOT_ENCRYPTION_KEY),
    "Either ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY must be defined."
  )
  .transform((data) => ({
    ...data,
    isHsmConfigured:
      Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined
  }));

export type TMigrationEnvConfig = z.infer<typeof envSchema>;

export const getMigrationEnvConfig = () => {
  const parsedEnv = envSchema.safeParse(process.env);
  if (!parsedEnv.success) {
    // eslint-disable-next-line no-console
    console.error("Invalid environment variables. Check the error below");
    // eslint-disable-next-line no-console
    console.error(
      "Infisical now automatically runs database migrations during boot up, so you no longer need to run them separately."
    );
    // eslint-disable-next-line no-console
    console.error(parsedEnv.error.issues);
    process.exit(-1);
  }

  return Object.freeze(parsedEnv.data);
};
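A short usage sketch, mirroring how the migrations above call this helper (the surrounding migration body is omitted and hypothetical):

import { getMigrationEnvConfig } from "./utils/env-config";

// Validates process.env and exits the process when neither ENCRYPTION_KEY nor
// ROOT_ENCRYPTION_KEY is set, so callers can rely on a usable, frozen config object.
const envConfig = getMigrationEnvConfig();

if (envConfig.isHsmConfigured) {
  // HSM_LIB_PATH, HSM_PIN and HSM_KEY_LABEL are all present; the HSM-backed
  // root key can be used instead of the plain encryption keys.
}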
@@ -1,105 +0,0 @@
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "@app/db/schemas";
|
||||
import { randomSecureBytes } from "@app/lib/crypto";
|
||||
import { symmetricCipherService, SymmetricEncryption } from "@app/lib/crypto/cipher";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
const getInstanceRootKey = async (knex: Knex) => {
|
||||
const encryptionKey = process.env.ENCRYPTION_KEY || process.env.ROOT_ENCRYPTION_KEY;
|
||||
// when the root encryption key is used, it is base64 encoded
|
||||
const isBase64 = !process.env.ENCRYPTION_KEY;
|
||||
if (!encryptionKey) throw new Error("ENCRYPTION_KEY variable needed for migration");
|
||||
const encryptionKeyBuffer = Buffer.from(encryptionKey, isBase64 ? "base64" : "utf8");
|
||||
|
||||
const KMS_ROOT_CONFIG_UUID = "00000000-0000-0000-0000-000000000000";
|
||||
const kmsRootConfig = await knex(TableName.KmsServerRootConfig).where({ id: KMS_ROOT_CONFIG_UUID }).first();
|
||||
const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
|
||||
if (kmsRootConfig) {
|
||||
const decryptedRootKey = cipher.decrypt(kmsRootConfig.encryptedRootKey, encryptionKeyBuffer);
|
||||
// set the flag so that other instance nodes can start
|
||||
return decryptedRootKey;
|
||||
}
|
||||
|
||||
const newRootKey = randomSecureBytes(32);
|
||||
const encryptedRootKey = cipher.encrypt(newRootKey, encryptionKeyBuffer);
|
||||
await knex(TableName.KmsServerRootConfig).insert({
|
||||
encryptedRootKey,
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore id is kept as fixed for idempotence and to avoid race condition
|
||||
id: KMS_ROOT_CONFIG_UUID
|
||||
});
|
||||
return encryptedRootKey;
|
||||
};
|
||||
|
||||
export const getSecretManagerDataKey = async (knex: Knex, projectId: string) => {
|
||||
const KMS_VERSION = "v01";
|
||||
const KMS_VERSION_BLOB_LENGTH = 3;
|
||||
const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
|
||||
const project = await knex(TableName.Project).where({ id: projectId }).first();
|
||||
if (!project) throw new Error("Missing project id");
|
||||
|
||||
const ROOT_ENCRYPTION_KEY = await getInstanceRootKey(knex);
|
||||
|
||||
let secretManagerKmsKey;
|
||||
const projectSecretManagerKmsId = project?.kmsSecretManagerKeyId;
|
||||
if (projectSecretManagerKmsId) {
|
||||
const kmsDoc = await knex(TableName.KmsKey)
|
||||
.leftJoin(TableName.InternalKms, `${TableName.KmsKey}.id`, `${TableName.InternalKms}.kmsKeyId`)
|
||||
.where({ [`${TableName.KmsKey}.id` as "id"]: projectSecretManagerKmsId })
|
||||
.first();
|
||||
if (!kmsDoc) throw new Error("missing kms");
|
||||
secretManagerKmsKey = cipher.decrypt(kmsDoc.encryptedKey, ROOT_ENCRYPTION_KEY);
|
||||
} else {
|
||||
const [kmsDoc] = await knex(TableName.KmsKey)
|
||||
.insert({
|
||||
name: slugify(alphaNumericNanoId(8).toLowerCase()),
|
||||
orgId: project.orgId,
|
||||
isReserved: false
|
||||
})
|
||||
.returning("*");
|
||||
|
||||
secretManagerKmsKey = randomSecureBytes(32);
|
||||
const encryptedKeyMaterial = cipher.encrypt(secretManagerKmsKey, ROOT_ENCRYPTION_KEY);
|
||||
await knex(TableName.InternalKms).insert({
|
||||
version: 1,
|
||||
encryptedKey: encryptedKeyMaterial,
|
||||
encryptionAlgorithm: SymmetricEncryption.AES_GCM_256,
|
||||
kmsKeyId: kmsDoc.id
|
||||
});
|
||||
}
|
||||
|
||||
const encryptedSecretManagerDataKey = project?.kmsSecretManagerEncryptedDataKey;
|
||||
let dataKey: Buffer;
|
||||
if (!encryptedSecretManagerDataKey) {
|
||||
dataKey = randomSecureBytes();
|
||||
// the versioning below is handled automatically in the kms service
|
||||
const unversionedDataKey = cipher.encrypt(dataKey, secretManagerKmsKey);
|
||||
const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
|
||||
await knex(TableName.Project)
|
||||
.where({ id: projectId })
|
||||
.update({
|
||||
kmsSecretManagerEncryptedDataKey: Buffer.concat([unversionedDataKey, versionBlob])
|
||||
});
|
||||
} else {
|
||||
const cipherTextBlob = encryptedSecretManagerDataKey.subarray(0, -KMS_VERSION_BLOB_LENGTH);
|
||||
dataKey = cipher.decrypt(cipherTextBlob, secretManagerKmsKey);
|
||||
}
|
||||
|
||||
return {
|
||||
encryptor: ({ plainText }: { plainText: Buffer }) => {
|
||||
const encryptedPlainTextBlob = cipher.encrypt(plainText, dataKey);
|
||||
|
||||
// Buffer#1 encrypted text + Buffer#2 version number
|
||||
const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
|
||||
const cipherTextBlob = Buffer.concat([encryptedPlainTextBlob, versionBlob]);
|
||||
return { cipherTextBlob };
|
||||
},
|
||||
decryptor: ({ cipherTextBlob: versionedCipherTextBlob }: { cipherTextBlob: Buffer }) => {
|
||||
const cipherTextBlob = versionedCipherTextBlob.subarray(0, -KMS_VERSION_BLOB_LENGTH);
|
||||
const decryptedBlob = cipher.decrypt(cipherTextBlob, dataKey);
|
||||
return decryptedBlob;
|
||||
}
|
||||
};
|
||||
};
|
backend/src/db/migrations/utils/ring-buffer.ts (new file, 19 lines)
@@ -0,0 +1,19 @@
export const createCircularCache = <T>(bufferSize = 10) => {
  const bufferItems: { id: string; item: T }[] = [];
  let bufferIndex = 0;

  const push = (id: string, item: T) => {
    if (bufferItems.length < bufferSize) {
      bufferItems.push({ id, item });
    } else {
      bufferItems[bufferIndex] = { id, item };
    }
    bufferIndex = (bufferIndex + 1) % bufferSize;
  };

  const getItem = (id: string) => {
    return bufferItems.find((i) => i.id === id)?.item;
  };

  return { push, getItem };
};
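A short usage sketch of the cache above; the cached value type is a placeholder:

const cache = createCircularCache<{ dataKey: string }>(3);

cache.push("org-a", { dataKey: "a" });
cache.push("org-b", { dataKey: "b" });

cache.getItem("org-a"); // { dataKey: "a" } (still cached)
cache.getItem("org-z"); // undefined (never pushed)

// Once three entries have been pushed, further pushes overwrite the oldest slot,
// keeping memory bounded while the migrations iterate over many orgs.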
backend/src/db/migrations/utils/services.ts (new file, 52 lines)
@@ -0,0 +1,52 @@
import { Knex } from "knex";

import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { internalKmsDALFactory } from "@app/services/kms/internal-kms-dal";
import { kmskeyDALFactory } from "@app/services/kms/kms-key-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { orgDALFactory } from "@app/services/org/org-dal";
import { projectDALFactory } from "@app/services/project/project-dal";

import { TMigrationEnvConfig } from "./env-config";

type TDependencies = {
  envConfig: TMigrationEnvConfig;
  db: Knex;
  keyStore: TKeyStoreFactory;
};

export const getMigrationEncryptionServices = async ({ envConfig, db, keyStore }: TDependencies) => {
  // eslint-disable-next-line no-param-reassign
  const hsmModule = initializeHsmModule(envConfig);
  hsmModule.initialize();

  const hsmService = hsmServiceFactory({
    hsmModule: hsmModule.getModule(),
    envConfig
  });

  const orgDAL = orgDALFactory(db);
  const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
  const kmsDAL = kmskeyDALFactory(db);
  const internalKmsDAL = internalKmsDALFactory(db);
  const projectDAL = projectDALFactory(db);

  const kmsService = kmsServiceFactory({
    kmsRootConfigDAL,
    keyStore,
    kmsDAL,
    internalKmsDAL,
    orgDAL,
    projectDAL,
    hsmService,
    envConfig
  });

  await hsmService.startService();
  await kmsService.startService();

  return { kmsService };
};
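A minimal sketch of how the re-encryption migrations above wire these services up inside up(); it mirrors the calls already present in those migrations:

import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { initLogger } from "@app/lib/logger";

import { getMigrationEnvConfig } from "./utils/env-config";
import { getMigrationEncryptionServices } from "./utils/services";

export async function up(knex: Knex): Promise<void> {
  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  // The KMS service is constructed against the migration's own knex handle, so key
  // lookups run on the same database connection the migration itself uses.
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  // kmsService.createCipherPairWithDataKey({ type, orgId }, knex) then yields the
  // per-org encryptor/decryptor used by the re-encryption steps.
}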
backend/src/db/rename-migrations-to-mjs.ts (new file, 56 lines)
@@ -0,0 +1,56 @@
import path from "node:path";

import dotenv from "dotenv";

import { initAuditLogDbConnection, initDbConnection } from "./instance";

const isProduction = process.env.NODE_ENV === "production";

// Update with your config settings.
dotenv.config({
  path: path.join(__dirname, "../../../.env.migration")
});
dotenv.config({
  path: path.join(__dirname, "../../../.env")
});

const runRename = async () => {
  if (!isProduction) return;
  const migrationTable = "infisical_migrations";
  const applicationDb = initDbConnection({
    dbConnectionUri: process.env.DB_CONNECTION_URI as string,
    dbRootCert: process.env.DB_ROOT_CERT
  });

  const auditLogDb = process.env.AUDIT_LOGS_DB_CONNECTION_URI
    ? initAuditLogDbConnection({
        dbConnectionUri: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
        dbRootCert: process.env.AUDIT_LOGS_DB_ROOT_CERT
      })
    : undefined;

  const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
  if (hasMigrationTable) {
    const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
    if (firstFile?.name?.includes(".ts")) {
      await applicationDb(migrationTable).update({
        name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
      });
    }
  }
  if (auditLogDb) {
    const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
    if (hasMigrationTableInAuditLog) {
      const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
      if (firstFile?.name?.includes(".ts")) {
        await auditLogDb(migrationTable).update({
          name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
        });
      }
    }
  }
  await applicationDb.destroy();
  await auditLogDb?.destroy();
};

void runRename();
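For reference, the knex.raw REPLACE above only rewrites the recorded extension of each migration entry; the file name below is a hypothetical example:

// before: { name: "20240101000000_example-migration.ts" }
// after:  { name: "20240101000000_example-migration.mjs" }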
@@ -5,6 +5,8 @@
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const DynamicSecretsSchema = z.object({
|
||||
@@ -14,16 +16,17 @@ export const DynamicSecretsSchema = z.object({
|
||||
type: z.string(),
|
||||
defaultTTL: z.string(),
|
||||
maxTTL: z.string().nullable().optional(),
|
||||
inputIV: z.string(),
|
||||
inputCiphertext: z.string(),
|
||||
inputTag: z.string(),
|
||||
inputIV: z.string().nullable().optional(),
|
||||
inputCiphertext: z.string().nullable().optional(),
|
||||
inputTag: z.string().nullable().optional(),
|
||||
algorithm: z.string().default("aes-256-gcm"),
|
||||
keyEncoding: z.string().default("utf8"),
|
||||
folderId: z.string().uuid(),
|
||||
status: z.string().nullable().optional(),
|
||||
statusDetails: z.string().nullable().optional(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
updatedAt: z.date(),
|
||||
encryptedInput: zodBuffer
|
||||
});
|
||||
|
||||
export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;
|
||||
|
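These schema diffs rely on a zodBuffer helper imported from @app/lib/zod whose implementation is not shown here. A hedged sketch of what such a helper could look like (the real one may also coerce other binary inputs):

import { z } from "zod";

// Assumption: zodBuffer validates that the value is a Node Buffer; the actual helper
// in @app/lib/zod may differ (e.g. also accepting Uint8Array or base64 strings).
export const zodBuffer = z.custom<Buffer>((val) => Buffer.isBuffer(val), {
  message: "Expected a Buffer"
});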
@@ -5,6 +5,8 @@
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const IdentityKubernetesAuthsSchema = z.object({
|
||||
@@ -17,15 +19,17 @@ export const IdentityKubernetesAuthsSchema = z.object({
|
||||
updatedAt: z.date(),
|
||||
identityId: z.string().uuid(),
|
||||
kubernetesHost: z.string(),
|
||||
encryptedCaCert: z.string(),
|
||||
caCertIV: z.string(),
|
||||
caCertTag: z.string(),
|
||||
encryptedTokenReviewerJwt: z.string(),
|
||||
tokenReviewerJwtIV: z.string(),
|
||||
tokenReviewerJwtTag: z.string(),
|
||||
encryptedCaCert: z.string().nullable().optional(),
|
||||
caCertIV: z.string().nullable().optional(),
|
||||
caCertTag: z.string().nullable().optional(),
|
||||
encryptedTokenReviewerJwt: z.string().nullable().optional(),
|
||||
tokenReviewerJwtIV: z.string().nullable().optional(),
|
||||
tokenReviewerJwtTag: z.string().nullable().optional(),
|
||||
allowedNamespaces: z.string(),
|
||||
allowedNames: z.string(),
|
||||
allowedAudience: z.string()
|
||||
allowedAudience: z.string(),
|
||||
encryptedKubernetesTokenReviewerJwt: zodBuffer,
|
||||
encryptedKubernetesCaCertificate: zodBuffer.nullable().optional()
|
||||
});
|
||||
|
||||
export type TIdentityKubernetesAuths = z.infer<typeof IdentityKubernetesAuthsSchema>;
|
||||
|
@@ -5,6 +5,8 @@
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const IdentityOidcAuthsSchema = z.object({
|
||||
@@ -15,15 +17,16 @@ export const IdentityOidcAuthsSchema = z.object({
|
||||
accessTokenTrustedIps: z.unknown(),
|
||||
identityId: z.string().uuid(),
|
||||
oidcDiscoveryUrl: z.string(),
|
||||
encryptedCaCert: z.string(),
|
||||
caCertIV: z.string(),
|
||||
caCertTag: z.string(),
|
||||
encryptedCaCert: z.string().nullable().optional(),
|
||||
caCertIV: z.string().nullable().optional(),
|
||||
caCertTag: z.string().nullable().optional(),
|
||||
boundIssuer: z.string(),
|
||||
boundAudiences: z.string(),
|
||||
boundClaims: z.unknown(),
|
||||
boundSubject: z.string().nullable().optional(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
updatedAt: z.date(),
|
||||
encryptedCaCertificate: zodBuffer.nullable().optional()
|
||||
});
|
||||
|
||||
export type TIdentityOidcAuths = z.infer<typeof IdentityOidcAuthsSchema>;
|
||||
|
@@ -5,6 +5,8 @@
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const LdapConfigsSchema = z.object({
|
||||
@@ -12,22 +14,25 @@ export const LdapConfigsSchema = z.object({
|
||||
orgId: z.string().uuid(),
|
||||
isActive: z.boolean(),
|
||||
url: z.string(),
|
||||
encryptedBindDN: z.string(),
|
||||
bindDNIV: z.string(),
|
||||
bindDNTag: z.string(),
|
||||
encryptedBindPass: z.string(),
|
||||
bindPassIV: z.string(),
|
||||
bindPassTag: z.string(),
|
||||
encryptedBindDN: z.string().nullable().optional(),
|
||||
bindDNIV: z.string().nullable().optional(),
|
||||
bindDNTag: z.string().nullable().optional(),
|
||||
encryptedBindPass: z.string().nullable().optional(),
|
||||
bindPassIV: z.string().nullable().optional(),
|
||||
bindPassTag: z.string().nullable().optional(),
|
||||
searchBase: z.string(),
|
||||
encryptedCACert: z.string(),
|
||||
caCertIV: z.string(),
|
||||
caCertTag: z.string(),
|
||||
encryptedCACert: z.string().nullable().optional(),
|
||||
caCertIV: z.string().nullable().optional(),
|
||||
caCertTag: z.string().nullable().optional(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
groupSearchBase: z.string().default(""),
|
||||
groupSearchFilter: z.string().default(""),
|
||||
searchFilter: z.string().default(""),
|
||||
uniqueUserAttribute: z.string().default("")
|
||||
uniqueUserAttribute: z.string().default(""),
|
||||
encryptedLdapBindDN: zodBuffer,
|
||||
encryptedLdapBindPass: zodBuffer,
|
||||
encryptedLdapCaCertificate: zodBuffer.nullable().optional()
|
||||
});
|
||||
|
||||
export type TLdapConfigs = z.infer<typeof LdapConfigsSchema>;
|
||||
|
@@ -5,6 +5,8 @@
import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const OidcConfigsSchema = z.object({
@@ -15,20 +17,22 @@ export const OidcConfigsSchema = z.object({
jwksUri: z.string().nullable().optional(),
tokenEndpoint: z.string().nullable().optional(),
userinfoEndpoint: z.string().nullable().optional(),
encryptedClientId: z.string(),
encryptedClientId: z.string().nullable().optional(),
configurationType: z.string(),
clientIdIV: z.string(),
clientIdTag: z.string(),
encryptedClientSecret: z.string(),
clientSecretIV: z.string(),
clientSecretTag: z.string(),
clientIdIV: z.string().nullable().optional(),
clientIdTag: z.string().nullable().optional(),
encryptedClientSecret: z.string().nullable().optional(),
clientSecretIV: z.string().nullable().optional(),
clientSecretTag: z.string().nullable().optional(),
allowedEmailDomains: z.string().nullable().optional(),
isActive: z.boolean(),
createdAt: z.date(),
updatedAt: z.date(),
orgId: z.string().uuid(),
lastUsed: z.date().nullable().optional(),
manageGroupMemberships: z.boolean().default(false)
manageGroupMemberships: z.boolean().default(false),
encryptedOidcClientId: zodBuffer,
encryptedOidcClientSecret: zodBuffer
});

export type TOidcConfigs = z.infer<typeof OidcConfigsSchema>;
@@ -5,6 +5,8 @@
import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const SamlConfigsSchema = z.object({
@@ -23,7 +25,10 @@ export const SamlConfigsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
orgId: z.string().uuid(),
lastUsed: z.date().nullable().optional()
lastUsed: z.date().nullable().optional(),
encryptedSamlEntryPoint: zodBuffer,
encryptedSamlIssuer: zodBuffer,
encryptedSamlCertificate: zodBuffer
});

export type TSamlConfigs = z.infer<typeof SamlConfigsSchema>;
@@ -5,6 +5,8 @@
import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const SecretRotationsSchema = z.object({
@@ -22,7 +24,8 @@ export const SecretRotationsSchema = z.object({
keyEncoding: z.string().nullable().optional(),
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
encryptedRotationData: zodBuffer
});

export type TSecretRotations = z.infer<typeof SecretRotationsSchema>;
@@ -5,12 +5,14 @@
import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const WebhooksSchema = z.object({
id: z.string().uuid(),
secretPath: z.string().default("/"),
url: z.string(),
url: z.string().nullable().optional(),
lastStatus: z.string().nullable().optional(),
lastRunErrorMessage: z.string().nullable().optional(),
isDisabled: z.boolean().default(false),
@@ -25,7 +27,9 @@ export const WebhooksSchema = z.object({
urlCipherText: z.string().nullable().optional(),
urlIV: z.string().nullable().optional(),
urlTag: z.string().nullable().optional(),
type: z.string().default("general").nullable().optional()
type: z.string().default("general").nullable().optional(),
encryptedPassKey: zodBuffer.nullable().optional(),
encryptedUrl: zodBuffer
});

export type TWebhooks = z.infer<typeof WebhooksSchema>;
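Note on the schema changes above: the new zodBuffer fields mark columns that now hold raw ciphertext blobs rather than encoded strings. A minimal illustrative sketch, assuming zodBuffer is roughly a Buffer-validating Zod helper (the real helper in @app/lib/zod may differ):

import { z } from "zod";

// Hypothetical stand-in for the zodBuffer helper referenced above.
const zodBuffer = z.custom<Buffer>((val) => Buffer.isBuffer(val), { message: "Expected a Buffer" });

const ExampleSchema = z.object({
  encryptedCaCertificate: zodBuffer.nullable().optional()
});

// Accepts a raw ciphertext blob read straight from the database row.
ExampleSchema.parse({ encryptedCaCertificate: Buffer.from("...") });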
@@ -14,7 +14,7 @@ import { FastifyRequest } from "fastify";
import LdapStrategy from "passport-ldapauth";
import { z } from "zod";

import { LdapConfigsSchema, LdapGroupMapsSchema } from "@app/db/schemas";
import { LdapGroupMapsSchema } from "@app/db/schemas";
import { TLDAPConfig } from "@app/ee/services/ldap-config/ldap-config-types";
import { isValidLdapFilter, searchGroups } from "@app/ee/services/ldap-config/ldap-fns";
import { getConfig } from "@app/lib/config/env";
@@ -22,6 +22,7 @@ import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedLdapConfigSchema } from "@app/server/routes/sanitizedSchema/directory-config";
import { AuthMode } from "@app/services/auth/auth-type";

export const registerLdapRouter = async (server: FastifyZodProvider) => {
@@ -187,7 +188,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
caCert: z.string().trim().default("")
}),
response: {
200: LdapConfigsSchema
200: SanitizedLdapConfigSchema
}
},
handler: async (req) => {
@@ -228,7 +229,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
.partial()
.merge(z.object({ organizationId: z.string() })),
response: {
200: LdapConfigsSchema
200: SanitizedLdapConfigSchema
}
},
handler: async (req) => {
@@ -11,13 +11,28 @@ import fastifySession from "@fastify/session";
import RedisStore from "connect-redis";
import { z } from "zod";

import { OidcConfigsSchema } from "@app/db/schemas/oidc-configs";
import { OidcConfigsSchema } from "@app/db/schemas";
import { OIDCConfigurationType } from "@app/ee/services/oidc/oidc-config-types";
import { getConfig } from "@app/lib/config/env";
import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

const SanitizedOidcConfigSchema = OidcConfigsSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
configurationType: true,
discoveryURL: true,
jwksUri: true,
tokenEndpoint: true,
userinfoEndpoint: true,
orgId: true,
isActive: true,
allowedEmailDomains: true,
manageGroupMemberships: true
});

export const registerOidcRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
const passport = new Authenticator({ key: "oidc", userProperty: "passportUser" });
@@ -142,7 +157,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
orgSlug: z.string().trim()
}),
response: {
200: OidcConfigsSchema.pick({
200: SanitizedOidcConfigSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
@@ -214,7 +229,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
.partial()
.merge(z.object({ orgSlug: z.string() })),
response: {
200: OidcConfigsSchema.pick({
200: SanitizedOidcConfigSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
@@ -327,20 +342,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
}
}),
response: {
200: OidcConfigsSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
configurationType: true,
discoveryURL: true,
jwksUri: true,
tokenEndpoint: true,
userinfoEndpoint: true,
orgId: true,
isActive: true,
allowedEmailDomains: true,
manageGroupMemberships: true
})
200: SanitizedOidcConfigSchema
}
},
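The routers above replace full table schemas in their response definitions with sanitized schemas. A hedged sketch of the underlying Zod pattern (the schema names here are illustrative stand-ins, not the actual Infisical schemas): pick only the safe columns so encrypted material can never be serialized into an API response.

import { z } from "zod";

const FullConfigSchema = z.object({
  id: z.string().uuid(),
  issuer: z.string(),
  encryptedOidcClientSecret: z.instanceof(Buffer) // sensitive, must never leave the server
});

// Whitelist approach: only explicitly picked keys survive into the response schema.
const SanitizedConfigSchema = FullConfigSchema.pick({ id: true, issuer: true });

type SanitizedConfig = z.infer<typeof SanitizedConfigSchema>; // { id: string; issuer: string }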
@@ -9,7 +9,7 @@ import { ProjectTemplates } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { AuthMode } from "@app/services/auth/auth-type";

const MAX_JSON_SIZE_LIMIT_IN_BYTES = 32_768;
@@ -12,13 +12,13 @@ import { MultiSamlStrategy } from "@node-saml/passport-saml";
import { FastifyRequest } from "fastify";
import { z } from "zod";

import { SamlConfigsSchema } from "@app/db/schemas";
import { SamlProviders, TGetSamlCfgDTO } from "@app/ee/services/saml-config/saml-config-types";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedSamlConfigSchema } from "@app/server/routes/sanitizedSchema/directory-config";
import { AuthMode } from "@app/services/auth/auth-type";

type TSAMLConfig = {
@@ -298,7 +298,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
cert: z.string()
}),
response: {
200: SamlConfigsSchema
200: SanitizedSamlConfigSchema
}
},
handler: async (req) => {
@@ -333,7 +333,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
.partial()
.merge(z.object({ organizationId: z.string() })),
response: {
200: SamlConfigsSchema
200: SanitizedSamlConfigSchema
}
},
handler: async (req) => {
@@ -9,7 +9,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedUserProjectAdditionalPrivilegeSchema } from "@app/server/routes/santizedSchemas/user-additional-privilege";
import { SanitizedUserProjectAdditionalPrivilegeSchema } from "@app/server/routes/sanitizedSchema/user-additional-privilege";
import { AuthMode } from "@app/services/auth/auth-type";

export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => {
@@ -9,7 +9,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedIdentityPrivilegeSchema } from "@app/server/routes/santizedSchemas/identitiy-additional-privilege";
import { SanitizedIdentityPrivilegeSchema } from "@app/server/routes/sanitizedSchema/identitiy-additional-privilege";
import { AuthMode } from "@app/services/auth/auth-type";

export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => {
@@ -352,6 +352,7 @@ interface CreateSecretBatchEvent {
secrets: Array<{
secretId: string;
secretKey: string;
secretPath?: string;
secretVersion: number;
secretMetadata?: TSecretMetadata;
}>;
@@ -374,8 +375,14 @@ interface UpdateSecretBatchEvent {
type: EventType.UPDATE_SECRETS;
metadata: {
environment: string;
secretPath: string;
secrets: Array<{ secretId: string; secretKey: string; secretVersion: number; secretMetadata?: TSecretMetadata }>;
secretPath?: string;
secrets: Array<{
secretId: string;
secretKey: string;
secretVersion: number;
secretMetadata?: TSecretMetadata;
secretPath?: string;
}>;
};
}
@@ -37,11 +37,7 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
db.ref("type").withSchema(TableName.DynamicSecret).as("dynType"),
db.ref("defaultTTL").withSchema(TableName.DynamicSecret).as("dynDefaultTTL"),
db.ref("maxTTL").withSchema(TableName.DynamicSecret).as("dynMaxTTL"),
db.ref("inputIV").withSchema(TableName.DynamicSecret).as("dynInputIV"),
db.ref("inputTag").withSchema(TableName.DynamicSecret).as("dynInputTag"),
db.ref("inputCiphertext").withSchema(TableName.DynamicSecret).as("dynInputCiphertext"),
db.ref("algorithm").withSchema(TableName.DynamicSecret).as("dynAlgorithm"),
db.ref("keyEncoding").withSchema(TableName.DynamicSecret).as("dynKeyEncoding"),
db.ref("encryptedInput").withSchema(TableName.DynamicSecret).as("dynEncryptedInput"),
db.ref("folderId").withSchema(TableName.DynamicSecret).as("dynFolderId"),
db.ref("status").withSchema(TableName.DynamicSecret).as("dynStatus"),
db.ref("statusDetails").withSchema(TableName.DynamicSecret).as("dynStatusDetails"),
@@ -59,11 +55,7 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
type: doc.dynType,
defaultTTL: doc.dynDefaultTTL,
maxTTL: doc.dynMaxTTL,
inputIV: doc.dynInputIV,
inputTag: doc.dynInputTag,
inputCiphertext: doc.dynInputCiphertext,
algorithm: doc.dynAlgorithm,
keyEncoding: doc.dynKeyEncoding,
encryptedInput: doc.dynEncryptedInput,
folderId: doc.dynFolderId,
status: doc.dynStatus,
statusDetails: doc.dynStatusDetails,
@@ -1,8 +1,10 @@
import { SecretKeyEncoding } from "@app/db/schemas";
import { DisableRotationErrors } from "@app/ee/services/secret-rotation/secret-rotation-queue";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";

import { TDynamicSecretDALFactory } from "../dynamic-secret/dynamic-secret-dal";
import { DynamicSecretStatus } from "../dynamic-secret/dynamic-secret-types";
@@ -14,6 +16,8 @@ type TDynamicSecretLeaseQueueServiceFactoryDep = {
dynamicSecretLeaseDAL: Pick<TDynamicSecretLeaseDALFactory, "findById" | "deleteById" | "find" | "updateById">;
dynamicSecretDAL: Pick<TDynamicSecretDALFactory, "findById" | "deleteById" | "updateById">;
dynamicSecretProviders: Record<DynamicSecretProviders, TDynamicProviderFns>;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
folderDAL: Pick<TSecretFolderDALFactory, "findById">;
};

export type TDynamicSecretLeaseQueueServiceFactory = ReturnType<typeof dynamicSecretLeaseQueueServiceFactory>;
@@ -22,7 +26,9 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
queueService,
dynamicSecretDAL,
dynamicSecretProviders,
dynamicSecretLeaseDAL
dynamicSecretLeaseDAL,
kmsService,
folderDAL
}: TDynamicSecretLeaseQueueServiceFactoryDep) => {
const pruneDynamicSecret = async (dynamicSecretCfgId: string) => {
await queueService.queue(
@@ -76,15 +82,21 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
if (!dynamicSecretLease) throw new DisableRotationErrors({ message: "Dynamic secret lease not found" });

const folder = await folderDAL.findById(dynamicSecretLease.dynamicSecret.folderId);
if (!folder)
throw new NotFoundError({
message: `Failed to find folder with ${dynamicSecretLease.dynamicSecret.folderId}`
});

const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: folder.projectId
});

const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;

await selectedProvider.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId);
@@ -100,16 +112,22 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
if ((dynamicSecretCfg.status as DynamicSecretStatus) !== DynamicSecretStatus.Deleting)
throw new DisableRotationErrors({ message: "Document not deleted" });

const folder = await folderDAL.findById(dynamicSecretCfg.folderId);
if (!folder)
throw new NotFoundError({
message: `Failed to find folder with ${dynamicSecretCfg.folderId}`
});

const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: folder.projectId
});

const dynamicSecretLeases = await dynamicSecretLeaseDAL.find({ dynamicSecretId: dynamicSecretCfgId });
if (dynamicSecretLeases.length) {
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;

await Promise.all(dynamicSecretLeases.map(({ id }) => unsetLeaseRevocation(id)));
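The hunks above, and the service changes that follow, apply the same migration: dynamic-secret provider inputs move from the legacy inputIV/inputTag/inputCiphertext columns decrypted with infisicalSymmetricDecrypt to a single encryptedInput blob handled by a per-project KMS data key. A hedged round-trip sketch using the call shapes visible in the diff (the inline kmsService type exists only to keep the example self-contained; the string "secret-manager" stands in for KmsDataKey.SecretManager):

const storeAndLoadProviderInput = async (
  kmsService: {
    createCipherPairWithDataKey: (arg: { type: string; projectId: string }) => Promise<{
      encryptor: (arg: { plainText: Buffer }) => { cipherTextBlob: Buffer };
      decryptor: (arg: { cipherTextBlob: Buffer }) => Buffer;
    }>;
  },
  projectId: string,
  inputs: object
) => {
  // One cipher pair per project-scoped data key.
  const { encryptor, decryptor } = await kmsService.createCipherPairWithDataKey({
    type: "secret-manager",
    projectId
  });

  // A single ciphertext blob replaces the old IV / tag / ciphertext triple.
  const encryptedInput = encryptor({ plainText: Buffer.from(JSON.stringify(inputs)) }).cipherTextBlob;

  // Reading it back is one decryptor call followed by JSON.parse.
  return JSON.parse(decryptor({ cipherTextBlob: encryptedInput }).toString()) as object;
};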
@@ -1,7 +1,7 @@
import { ForbiddenError, subject } from "@casl/ability";
import ms from "ms";

import { ActionProjectType, SecretKeyEncoding } from "@app/db/schemas";
import { ActionProjectType } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import {
@@ -9,9 +9,10 @@ import {
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { getConfig } from "@app/lib/config/env";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";

@@ -37,6 +38,7 @@ type TDynamicSecretLeaseServiceFactoryDep = {
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};

export type TDynamicSecretLeaseServiceFactory = ReturnType<typeof dynamicSecretLeaseServiceFactory>;
@@ -49,7 +51,8 @@ export const dynamicSecretLeaseServiceFactory = ({
permissionService,
dynamicSecretQueueService,
projectDAL,
licenseService
licenseService,
kmsService
}: TDynamicSecretLeaseServiceFactoryDep) => {
const create = async ({
environmentSlug,
@@ -104,13 +107,14 @@ export const dynamicSecretLeaseServiceFactory = ({
throw new BadRequestError({ message: `Max lease limit reached. Limit: ${appCfg.MAX_LEASE_LIMIT}` });

const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];

const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});

const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: Buffer.from(dynamicSecretCfg.encryptedInput) }).toString()
) as object;

const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
@@ -160,6 +164,11 @@ export const dynamicSecretLeaseServiceFactory = ({
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
);

const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});

const plan = await licenseService.getPlan(actorOrgId);
if (!plan?.dynamicSecret) {
throw new BadRequestError({
@@ -181,12 +190,7 @@ export const dynamicSecretLeaseServiceFactory = ({
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: Buffer.from(dynamicSecretCfg.encryptedInput) }).toString()
) as object;

const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
@@ -240,6 +244,11 @@ export const dynamicSecretLeaseServiceFactory = ({
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
);

const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});

const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder)
throw new NotFoundError({
@@ -253,12 +262,7 @@ export const dynamicSecretLeaseServiceFactory = ({
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: Buffer.from(dynamicSecretCfg.encryptedInput) }).toString()
) as object;

const revokeResponse = await selectedProvider
@@ -1,15 +1,16 @@
import { ForbiddenError, subject } from "@casl/ability";

import { ActionProjectType, SecretKeyEncoding } from "@app/db/schemas";
import { ActionProjectType } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import {
ProjectPermissionDynamicSecretActions,
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { OrderByDirection, OrgServiceActor } from "@app/lib/types";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";

@@ -42,6 +43,7 @@ type TDynamicSecretServiceFactoryDep = {
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findBySecretPathMultiEnv">;
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};

export type TDynamicSecretServiceFactory = ReturnType<typeof dynamicSecretServiceFactory>;
@@ -54,7 +56,8 @@ export const dynamicSecretServiceFactory = ({
dynamicSecretProviders,
permissionService,
dynamicSecretQueueService,
projectDAL
projectDAL,
kmsService
}: TDynamicSecretServiceFactoryDep) => {
const create = async ({
path,
@@ -108,16 +111,15 @@ export const dynamicSecretServiceFactory = ({
const isConnected = await selectedProvider.validateConnection(provider.inputs);
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });

const encryptedInput = infisicalSymmetricEncypt(JSON.stringify(inputs));
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});

const dynamicSecretCfg = await dynamicSecretDAL.create({
type: provider.type,
version: 1,
inputIV: encryptedInput.iv,
inputTag: encryptedInput.tag,
inputCiphertext: encryptedInput.ciphertext,
algorithm: encryptedInput.algorithm,
keyEncoding: encryptedInput.encoding,
encryptedInput: secretManagerEncryptor({ plainText: Buffer.from(JSON.stringify(inputs)) }).cipherTextBlob,
maxTTL,
defaultTTL,
folderId: folder.id,
@@ -180,15 +182,15 @@ export const dynamicSecretServiceFactory = ({
if (existingDynamicSecret)
throw new BadRequestError({ message: "Provided dynamic secret already exist under the folder" });
}
const { encryptor: secretManagerEncryptor, decryptor: secretManagerDecryptor } =
await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});

const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
const newInput = { ...decryptedStoredInput, ...(inputs || {}) };
const updatedInput = await selectedProvider.validateProviderInputs(newInput);
@@ -196,13 +198,8 @@ export const dynamicSecretServiceFactory = ({
const isConnected = await selectedProvider.validateConnection(newInput);
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });

const encryptedInput = infisicalSymmetricEncypt(JSON.stringify(updatedInput));
const updatedDynamicCfg = await dynamicSecretDAL.updateById(dynamicSecretCfg.id, {
inputIV: encryptedInput.iv,
inputTag: encryptedInput.tag,
inputCiphertext: encryptedInput.ciphertext,
algorithm: encryptedInput.algorithm,
keyEncoding: encryptedInput.encoding,
encryptedInput: secretManagerEncryptor({ plainText: Buffer.from(JSON.stringify(updatedInput)) }).cipherTextBlob,
maxTTL,
defaultTTL,
name: newName ?? name,
@@ -315,13 +312,13 @@ export const dynamicSecretServiceFactory = ({
if (!dynamicSecretCfg) {
throw new NotFoundError({ message: `Dynamic secret with name '${name} in folder '${path}' not found` });
}
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});

const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const providerInputs = (await selectedProvider.validateProviderInputs(decryptedStoredInput)) as object;
@@ -111,7 +111,7 @@ export const groupDALFactory = (db: TDbClient) => {
}

if (search) {
void query.andWhereRaw(`CONCAT_WS(' ', "firstName", "lastName", "username") ilike '%${search}%'`);
void query.andWhereRaw(`CONCAT_WS(' ', "firstName", "lastName", "username") ilike ?`, [`%${search}%`]);
} else if (username) {
void query.andWhere(`${TableName.Users}.username`, "ilike", `%${username}%`);
}
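The group-dal change above is a classic injection fix: the search term is no longer interpolated into the raw SQL string but passed as a bound parameter. A small sketch of the same idea with a standalone Knex query builder (table and column names are illustrative, not the project's actual schema):

import knex from "knex";

const db = knex({ client: "pg", connection: process.env.DATABASE_URL });

const searchUsers = (search: string) =>
  db("users")
    // Unsafe: `ilike '%${search}%'` would splice user input directly into SQL.
    // Safe: the "?" placeholder lets the driver bind the value instead.
    .whereRaw(`CONCAT_WS(' ', "firstName", "lastName", "username") ilike ?`, [`%${search}%`])
    .select("id", "username");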
@@ -1,25 +1,23 @@
import * as pkcs11js from "pkcs11js";

import { getConfig } from "@app/lib/config/env";
import { TEnvConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";

import { HsmModule } from "./hsm-types";

export const initializeHsmModule = () => {
const appCfg = getConfig();

export const initializeHsmModule = (envConfig: Pick<TEnvConfig, "isHsmConfigured" | "HSM_LIB_PATH">) => {
// Create a new instance of PKCS11 module
const pkcs11 = new pkcs11js.PKCS11();
let isInitialized = false;

const initialize = () => {
if (!appCfg.isHsmConfigured) {
if (!envConfig.isHsmConfigured) {
return;
}

try {
// Load the PKCS#11 module
pkcs11.load(appCfg.HSM_LIB_PATH!);
pkcs11.load(envConfig.HSM_LIB_PATH!);

// Initialize the module
pkcs11.C_Initialize();
@@ -1,12 +1,13 @@
import pkcs11js from "pkcs11js";

import { getConfig } from "@app/lib/config/env";
import { TEnvConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";

import { HsmKeyType, HsmModule } from "./hsm-types";

type THsmServiceFactoryDep = {
hsmModule: HsmModule;
envConfig: Pick<TEnvConfig, "HSM_PIN" | "HSM_SLOT" | "HSM_LIB_PATH" | "HSM_KEY_LABEL" | "isHsmConfigured">;
};

export type THsmServiceFactory = ReturnType<typeof hsmServiceFactory>;
@@ -15,9 +16,7 @@ type SyncOrAsync<T> = T | Promise<T>;
type SessionCallback<T> = (session: pkcs11js.Handle) => SyncOrAsync<T>;

// eslint-disable-next-line no-empty-pattern
export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsmServiceFactoryDep) => {
const appCfg = getConfig();

export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 }, envConfig }: THsmServiceFactoryDep) => {
// Constants for buffer structures
const IV_LENGTH = 16; // Luna HSM typically expects 16-byte IV for cbc
const BLOCK_SIZE = 16;
@@ -63,11 +62,11 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
throw new Error("No slots available");
}

if (appCfg.HSM_SLOT >= slots.length) {
throw new Error(`HSM slot ${appCfg.HSM_SLOT} not found or not initialized`);
if (envConfig.HSM_SLOT >= slots.length) {
throw new Error(`HSM slot ${envConfig.HSM_SLOT} not found or not initialized`);
}

const slotId = slots[appCfg.HSM_SLOT];
const slotId = slots[envConfig.HSM_SLOT];

const startTime = Date.now();
while (Date.now() - startTime < MAX_TIMEOUT) {
@@ -78,7 +77,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm

// Login
try {
pkcs11.C_Login(sessionHandle, pkcs11js.CKU_USER, appCfg.HSM_PIN);
pkcs11.C_Login(sessionHandle, pkcs11js.CKU_USER, envConfig.HSM_PIN);
logger.info("HSM: Successfully authenticated");
break;
} catch (error) {
@@ -86,7 +85,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
if (error instanceof pkcs11js.Pkcs11Error) {
if (error.code === pkcs11js.CKR_PIN_INCORRECT) {
// We throw instantly here to prevent further attempts, because if too many attempts are made, the HSM will potentially wipe all key material
logger.error(error, `HSM: Incorrect PIN detected for HSM slot ${appCfg.HSM_SLOT}`);
logger.error(error, `HSM: Incorrect PIN detected for HSM slot ${envConfig.HSM_SLOT}`);
throw new Error("HSM: Incorrect HSM Pin detected. Please check the HSM configuration.");
}
if (error.code === pkcs11js.CKR_USER_ALREADY_LOGGED_IN) {
@@ -133,7 +132,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
};

const $findKey = (sessionHandle: pkcs11js.Handle, type: HsmKeyType) => {
const label = type === HsmKeyType.HMAC ? `${appCfg.HSM_KEY_LABEL}_HMAC` : appCfg.HSM_KEY_LABEL;
const label = type === HsmKeyType.HMAC ? `${envConfig.HSM_KEY_LABEL}_HMAC` : envConfig.HSM_KEY_LABEL;
const keyType = type === HsmKeyType.HMAC ? pkcs11js.CKK_GENERIC_SECRET : pkcs11js.CKK_AES;

const template = [
@@ -360,7 +359,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
};

const isActive = async () => {
if (!isInitialized || !appCfg.isHsmConfigured) {
if (!isInitialized || !envConfig.isHsmConfigured) {
return false;
}

@@ -372,11 +371,11 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
logger.error(err, "HSM: Error testing PKCS#11 module");
}

return appCfg.isHsmConfigured && isInitialized && pkcs11TestPassed;
return envConfig.isHsmConfigured && isInitialized && pkcs11TestPassed;
};

const startService = async () => {
if (!appCfg.isHsmConfigured || !pkcs11 || !isInitialized) return;
if (!envConfig.isHsmConfigured || !pkcs11 || !isInitialized) return;

try {
await $withSession(async (sessionHandle) => {
@@ -395,7 +394,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
{ type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY },
{ type: pkcs11js.CKA_KEY_TYPE, value: pkcs11js.CKK_AES },
{ type: pkcs11js.CKA_VALUE_LEN, value: AES_KEY_SIZE / 8 },
{ type: pkcs11js.CKA_LABEL, value: appCfg.HSM_KEY_LABEL! },
{ type: pkcs11js.CKA_LABEL, value: envConfig.HSM_KEY_LABEL! },
{ type: pkcs11js.CKA_ENCRYPT, value: true }, // Allow encryption
{ type: pkcs11js.CKA_DECRYPT, value: true }, // Allow decryption
...genericAttributes
@@ -410,7 +409,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
keyTemplate
);

logger.info(`HSM: Master key created successfully with label: ${appCfg.HSM_KEY_LABEL}`);
logger.info(`HSM: Master key created successfully with label: ${envConfig.HSM_KEY_LABEL}`);
}

// Check if HMAC key exists, create if not
@@ -419,7 +418,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
{ type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY },
{ type: pkcs11js.CKA_KEY_TYPE, value: pkcs11js.CKK_GENERIC_SECRET },
{ type: pkcs11js.CKA_VALUE_LEN, value: HMAC_KEY_SIZE / 8 }, // 256-bit key
{ type: pkcs11js.CKA_LABEL, value: `${appCfg.HSM_KEY_LABEL!}_HMAC` },
{ type: pkcs11js.CKA_LABEL, value: `${envConfig.HSM_KEY_LABEL!}_HMAC` },
{ type: pkcs11js.CKA_SIGN, value: true }, // Allow signing
{ type: pkcs11js.CKA_VERIFY, value: true }, // Allow verification
...genericAttributes
@@ -434,7 +433,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
hmacKeyTemplate
);

logger.info(`HSM: HMAC key created successfully with label: ${appCfg.HSM_KEY_LABEL}_HMAC`);
logger.info(`HSM: HMAC key created successfully with label: ${envConfig.HSM_KEY_LABEL}_HMAC`);
}

// Get slot info to check supported mechanisms
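The HSM changes above swap a module-level getConfig() call for an injected envConfig slice, so the factory no longer reads global state. A minimal sketch of that pattern (THsmConfig and makeHsmLoader are illustrative, not the actual service):

type THsmConfig = { isHsmConfigured: boolean; HSM_LIB_PATH?: string };

export const makeHsmLoader = (envConfig: Pick<THsmConfig, "isHsmConfigured" | "HSM_LIB_PATH">) => ({
  initialize: () => {
    if (!envConfig.isHsmConfigured) return false; // only the injected slice is consulted
    // the real module would call pkcs11.load(envConfig.HSM_LIB_PATH!) here
    return true;
  }
});

// In tests the dependency is trivial to fake, no process.env required:
const loader = makeHsmLoader({ isHsmConfigured: false });
loader.initialize(); // false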
@@ -5,7 +5,7 @@ import ms from "ms";
import { ActionProjectType, TableName } from "@app/db/schemas";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { unpackPermissions } from "@app/server/routes/santizedSchemas/permission";
import { unpackPermissions } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
@@ -5,7 +5,7 @@ import ms from "ms";
import { ActionProjectType } from "@app/db/schemas";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
@@ -1,25 +1,18 @@
import { ForbiddenError } from "@casl/ability";
import jwt from "jsonwebtoken";

import { OrgMembershipStatus, SecretKeyEncoding, TableName, TLdapConfigsUpdate, TUsers } from "@app/db/schemas";
import { OrgMembershipStatus, TableName, TLdapConfigsUpdate, TUsers } from "@app/db/schemas";
import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
import { addUsersToGroupByUserIds, removeUsersFromGroupByUserIds } from "@app/ee/services/group/group-fns";
import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
import { getConfig } from "@app/lib/config/env";
import {
decryptSymmetric,
encryptSymmetric,
generateAsymmetricKeyPair,
generateSymmetricKey,
infisicalSymmetricDecrypt,
infisicalSymmetricEncypt
} from "@app/lib/crypto/encryption";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TokenType } from "@app/services/auth-token/auth-token-types";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { getDefaultOrgMembershipRole } from "@app/services/org/org-role-fns";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
@@ -59,7 +52,6 @@ type TLdapConfigServiceFactoryDep = {
TOrgDALFactory,
"createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById"
>;
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
groupDAL: Pick<TGroupDALFactory, "find" | "findOne">;
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany" | "delete">;
@@ -84,6 +76,7 @@ type TLdapConfigServiceFactoryDep = {
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
smtpService: Pick<TSmtpService, "sendMail">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};

export type TLdapConfigServiceFactory = ReturnType<typeof ldapConfigServiceFactory>;
@@ -93,7 +86,6 @@ export const ldapConfigServiceFactory = ({
ldapGroupMapDAL,
orgDAL,
orgMembershipDAL,
orgBotDAL,
groupDAL,
groupProjectDAL,
projectKeyDAL,
@@ -105,7 +97,8 @@ export const ldapConfigServiceFactory = ({
permissionService,
licenseService,
tokenService,
smtpService
smtpService,
kmsService
}: TLdapConfigServiceFactoryDep) => {
const createLdapCfg = async ({
actor,
@@ -133,77 +126,23 @@ export const ldapConfigServiceFactory = ({
message:
"Failed to create LDAP configuration due to plan restriction. Upgrade plan to create LDAP configuration."
});

const orgBot = await orgBotDAL.transaction(async (tx) => {
const doc = await orgBotDAL.findOne({ orgId }, tx);
if (doc) return doc;

const { privateKey, publicKey } = generateAsymmetricKeyPair();
const key = generateSymmetricKey();
const {
ciphertext: encryptedPrivateKey,
iv: privateKeyIV,
tag: privateKeyTag,
encoding: privateKeyKeyEncoding,
algorithm: privateKeyAlgorithm
} = infisicalSymmetricEncypt(privateKey);
const {
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
encoding: symmetricKeyKeyEncoding,
algorithm: symmetricKeyAlgorithm
} = infisicalSymmetricEncypt(key);

return orgBotDAL.create(
{
name: "Infisical org bot",
publicKey,
privateKeyIV,
encryptedPrivateKey,
symmetricKeyIV,
symmetricKeyTag,
encryptedSymmetricKey,
symmetricKeyAlgorithm,
orgId,
privateKeyTag,
privateKeyAlgorithm,
privateKeyKeyEncoding,
symmetricKeyKeyEncoding
},
tx
);
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId
});

const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
});

const { ciphertext: encryptedBindDN, iv: bindDNIV, tag: bindDNTag } = encryptSymmetric(bindDN, key);
const { ciphertext: encryptedBindPass, iv: bindPassIV, tag: bindPassTag } = encryptSymmetric(bindPass, key);
const { ciphertext: encryptedCACert, iv: caCertIV, tag: caCertTag } = encryptSymmetric(caCert, key);

const ldapConfig = await ldapConfigDAL.create({
orgId,
isActive,
url,
encryptedBindDN,
bindDNIV,
bindDNTag,
encryptedBindPass,
bindPassIV,
bindPassTag,
uniqueUserAttribute,
searchBase,
searchFilter,
groupSearchBase,
groupSearchFilter,
encryptedCACert,
caCertIV,
caCertTag
encryptedLdapCaCertificate: encryptor({ plainText: Buffer.from(caCert) }).cipherTextBlob,
encryptedLdapBindDN: encryptor({ plainText: Buffer.from(bindDN) }).cipherTextBlob,
encryptedLdapBindPass: encryptor({ plainText: Buffer.from(bindPass) }).cipherTextBlob
});

return ldapConfig;
@@ -246,38 +185,21 @@ export const ldapConfigServiceFactory = ({
uniqueUserAttribute
};

const orgBot = await orgBotDAL.findOne({ orgId });
if (!orgBot)
throw new NotFoundError({
message: `Organization bot in organization with ID '${orgId}' not found`,
name: "OrgBotNotFound"
});
const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId
});

if (bindDN !== undefined) {
const { ciphertext: encryptedBindDN, iv: bindDNIV, tag: bindDNTag } = encryptSymmetric(bindDN, key);
updateQuery.encryptedBindDN = encryptedBindDN;
updateQuery.bindDNIV = bindDNIV;
updateQuery.bindDNTag = bindDNTag;
updateQuery.encryptedLdapBindDN = encryptor({ plainText: Buffer.from(bindDN) }).cipherTextBlob;
}

if (bindPass !== undefined) {
const { ciphertext: encryptedBindPass, iv: bindPassIV, tag: bindPassTag } = encryptSymmetric(bindPass, key);
updateQuery.encryptedBindPass = encryptedBindPass;
updateQuery.bindPassIV = bindPassIV;
updateQuery.bindPassTag = bindPassTag;
updateQuery.encryptedLdapBindPass = encryptor({ plainText: Buffer.from(bindPass) }).cipherTextBlob;
}

if (caCert !== undefined) {
const { ciphertext: encryptedCACert, iv: caCertIV, tag: caCertTag } = encryptSymmetric(caCert, key);
updateQuery.encryptedCACert = encryptedCACert;
updateQuery.caCertIV = caCertIV;
updateQuery.caCertTag = caCertTag;
updateQuery.encryptedLdapCaCertificate = encryptor({ plainText: Buffer.from(caCert) }).cipherTextBlob;
}

const [ldapConfig] = await ldapConfigDAL.update({ orgId }, updateQuery);
@@ -293,61 +215,24 @@ export const ldapConfigServiceFactory = ({
});
}

const orgBot = await orgBotDAL.findOne({ orgId: ldapConfig.orgId });
if (!orgBot) {
throw new NotFoundError({
message: `Organization bot not found in organization with ID ${ldapConfig.orgId}`,
name: "OrgBotNotFound"
});
}

const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: ldapConfig.orgId
});

const {
encryptedBindDN,
bindDNIV,
bindDNTag,
encryptedBindPass,
bindPassIV,
bindPassTag,
encryptedCACert,
caCertIV,
caCertTag
} = ldapConfig;

let bindDN = "";
if (encryptedBindDN && bindDNIV && bindDNTag) {
bindDN = decryptSymmetric({
ciphertext: encryptedBindDN,
key,
tag: bindDNTag,
iv: bindDNIV
});
if (ldapConfig.encryptedLdapBindDN) {
bindDN = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapBindDN }).toString();
}

let bindPass = "";
if (encryptedBindPass && bindPassIV && bindPassTag) {
bindPass = decryptSymmetric({
ciphertext: encryptedBindPass,
key,
tag: bindPassTag,
iv: bindPassIV
});
if (ldapConfig.encryptedLdapBindPass) {
bindPass = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapBindPass }).toString();
}

let caCert = "";
if (encryptedCACert && caCertIV && caCertTag) {
caCert = decryptSymmetric({
ciphertext: encryptedCACert,
key,
tag: caCertTag,
iv: caCertIV
});
if (ldapConfig.encryptedLdapCaCertificate) {
caCert = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapCaCertificate }).toString();
}

return {
@@ -3,7 +3,7 @@ import { ForbiddenError } from "@casl/ability";
import jwt from "jsonwebtoken";
import { Issuer, Issuer as OpenIdIssuer, Strategy as OpenIdStrategy, TokenSet } from "openid-client";

import { OrgMembershipStatus, SecretKeyEncoding, TableName, TUsers } from "@app/db/schemas";
import { OrgMembershipStatus, TableName, TUsers } from "@app/db/schemas";
import { TOidcConfigsUpdate } from "@app/db/schemas/oidc-configs";
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
@@ -14,21 +14,14 @@ import { TLicenseServiceFactory } from "@app/ee/services/license/license-service
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { getConfig } from "@app/lib/config/env";
import {
decryptSymmetric,
encryptSymmetric,
generateAsymmetricKeyPair,
generateSymmetricKey,
infisicalSymmetricDecrypt,
infisicalSymmetricEncypt
} from "@app/lib/crypto/encryption";
import { BadRequestError, ForbiddenRequestError, NotFoundError, OidcAuthError } from "@app/lib/errors";
import { OrgServiceActor } from "@app/lib/types";
import { ActorType, AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TokenType } from "@app/services/auth-token/auth-token-types";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { getDefaultOrgMembershipRole } from "@app/services/org/org-role-fns";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
@@ -70,7 +63,6 @@ type TOidcConfigServiceFactoryDep = {
"createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById"
>;
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "create">;
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
smtpService: Pick<TSmtpService, "sendMail" | "verify">;
@@ -91,6 +83,7 @@ type TOidcConfigServiceFactoryDep = {
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};

export type TOidcConfigServiceFactory = ReturnType<typeof oidcConfigServiceFactory>;
@@ -103,7 +96,6 @@ export const oidcConfigServiceFactory = ({
licenseService,
permissionService,
tokenService,
orgBotDAL,
smtpService,
oidcConfigDAL,
userGroupMembershipDAL,
@@ -112,7 +104,8 @@ export const oidcConfigServiceFactory = ({
projectKeyDAL,
projectDAL,
projectBotDAL,
auditLogService
auditLogService,
kmsService
}: TOidcConfigServiceFactoryDep) => {
const getOidc = async (dto: TGetOidcCfgDTO) => {
const org = await orgDAL.findOne({ slug: dto.orgSlug });
@@ -143,43 +136,19 @@ export const oidcConfigServiceFactory = ({
});
}

// decrypt and return cfg
const orgBot = await orgBotDAL.findOne({ orgId: oidcCfg.orgId });
if (!orgBot) {
throw new NotFoundError({
message: `Organization bot for organization with ID '${oidcCfg.orgId}' not found`,
name: "OrgBotNotFound"
});
}

const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: oidcCfg.orgId
});

const { encryptedClientId, clientIdIV, clientIdTag, encryptedClientSecret, clientSecretIV, clientSecretTag } =
oidcCfg;

let clientId = "";
if (encryptedClientId && clientIdIV && clientIdTag) {
clientId = decryptSymmetric({
ciphertext: encryptedClientId,
key,
tag: clientIdTag,
iv: clientIdIV
});
if (oidcCfg.encryptedOidcClientId) {
clientId = decryptor({ cipherTextBlob: oidcCfg.encryptedOidcClientId }).toString();
}

let clientSecret = "";
if (encryptedClientSecret && clientSecretIV && clientSecretTag) {
clientSecret = decryptSymmetric({
key,
tag: clientSecretTag,
iv: clientSecretIV,
ciphertext: encryptedClientSecret
});
if (oidcCfg.encryptedOidcClientSecret) {
clientSecret = decryptor({ cipherTextBlob: oidcCfg.encryptedOidcClientSecret }).toString();
}

return {
@@ -540,12 +509,10 @@ export const oidcConfigServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Sso);

const orgBot = await orgBotDAL.findOne({ orgId: org.id });
if (!orgBot)
throw new NotFoundError({
message: `Organization bot for organization with ID '${org.id}' not found`,
name: "OrgBotNotFound"
});
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: org.id
});

const serverCfg = await getServerCfg();
if (isActive && !serverCfg.trustOidcEmails) {
@@ -558,13 +525,6 @@ export const oidcConfigServiceFactory = ({
}
}

const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
});

const updateQuery: TOidcConfigsUpdate = {
allowedEmailDomains,
configurationType,
@@ -580,22 +540,11 @@ export const oidcConfigServiceFactory = ({
};

if (clientId !== undefined) {
const { ciphertext: encryptedClientId, iv: clientIdIV, tag: clientIdTag } = encryptSymmetric(clientId, key);
updateQuery.encryptedClientId = encryptedClientId;
updateQuery.clientIdIV = clientIdIV;
updateQuery.clientIdTag = clientIdTag;
updateQuery.encryptedOidcClientId = encryptor({ plainText: Buffer.from(clientId) }).cipherTextBlob;
}

if (clientSecret !== undefined) {
const {
ciphertext: encryptedClientSecret,
iv: clientSecretIV,
tag: clientSecretTag
} = encryptSymmetric(clientSecret, key);

updateQuery.encryptedClientSecret = encryptedClientSecret;
updateQuery.clientSecretIV = clientSecretIV;
updateQuery.clientSecretTag = clientSecretTag;
updateQuery.encryptedOidcClientSecret = encryptor({ plainText: Buffer.from(clientSecret) }).cipherTextBlob;
}

const [ssoConfig] = await oidcConfigDAL.update({ orgId: org.id }, updateQuery);
@@ -647,61 +596,11 @@ export const oidcConfigServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Sso);

const orgBot = await orgBotDAL.transaction(async (tx) => {
const doc = await orgBotDAL.findOne({ orgId: org.id }, tx);
if (doc) return doc;

const { privateKey, publicKey } = generateAsymmetricKeyPair();
const key = generateSymmetricKey();
const {
ciphertext: encryptedPrivateKey,
iv: privateKeyIV,
tag: privateKeyTag,
encoding: privateKeyKeyEncoding,
algorithm: privateKeyAlgorithm
} = infisicalSymmetricEncypt(privateKey);
const {
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
encoding: symmetricKeyKeyEncoding,
algorithm: symmetricKeyAlgorithm
} = infisicalSymmetricEncypt(key);

return orgBotDAL.create(
{
name: "Infisical org bot",
publicKey,
privateKeyIV,
encryptedPrivateKey,
symmetricKeyIV,
symmetricKeyTag,
encryptedSymmetricKey,
symmetricKeyAlgorithm,
orgId: org.id,
privateKeyTag,
privateKeyAlgorithm,
privateKeyKeyEncoding,
symmetricKeyKeyEncoding
},
tx
);
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: org.id
});

const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
});

const { ciphertext: encryptedClientId, iv: clientIdIV, tag: clientIdTag } = encryptSymmetric(clientId, key);
const {
ciphertext: encryptedClientSecret,
iv: clientSecretIV,
tag: clientSecretTag
} = encryptSymmetric(clientSecret, key);

const oidcCfg = await oidcConfigDAL.create({
issuer,
isActive,
@@ -713,13 +612,9 @@ export const oidcConfigServiceFactory = ({
tokenEndpoint,
userinfoEndpoint,
orgId: org.id,
encryptedClientId,
clientIdIV,
clientIdTag,
encryptedClientSecret,
clientSecretIV,
clientSecretTag,
manageGroupMemberships
manageGroupMemberships,
encryptedOidcClientId: encryptor({ plainText: Buffer.from(clientId) }).cipherTextBlob,
encryptedOidcClientSecret: encryptor({ plainText: Buffer.from(clientSecret) }).cipherTextBlob
});

return oidcCfg;
@@ -6,7 +6,7 @@ import {
|
||||
CASL_ACTION_SCHEMA_NATIVE_ENUM
|
||||
} from "@app/ee/services/permission/permission-schemas";
|
||||
import { conditionsMatcher, PermissionConditionOperators } from "@app/lib/casl";
|
||||
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
|
||||
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
|
||||
|
||||
import { PermissionConditionSchema } from "./permission-types";
|
||||
|
||||
|
@@ -15,7 +15,7 @@ import {
|
||||
} from "@app/ee/services/project-template/project-template-types";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { OrgServiceActor } from "@app/lib/types";
|
||||
import { unpackPermissions } from "@app/server/routes/santizedSchemas/permission";
|
||||
import { unpackPermissions } from "@app/server/routes/sanitizedSchema/permission";
|
||||
import { getPredefinedRoles } from "@app/services/project-role/project-role-fns";
|
||||
|
||||
import { TProjectTemplateDALFactory } from "./project-template-dal";
|
||||
|
@@ -2,7 +2,7 @@ import { z } from "zod";
|
||||
|
||||
import { TProjectEnvironments } from "@app/db/schemas";
|
||||
import { TProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
|
||||
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
|
||||
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
|
||||
|
||||
export type TProjectTemplateEnvironment = Pick<TProjectEnvironments, "name" | "slug" | "position">;
|
||||
|
||||
|
@@ -5,7 +5,7 @@ import ms from "ms";
|
||||
import { ActionProjectType, TableName } from "@app/db/schemas";
|
||||
import { isAtLeastAsPrivileged } from "@app/lib/casl";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
|
||||
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
|
||||
|
||||
|
@@ -1,29 +1,15 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import jwt from "jsonwebtoken";
|
||||
|
||||
import {
|
||||
OrgMembershipStatus,
|
||||
SecretKeyEncoding,
|
||||
TableName,
|
||||
TSamlConfigs,
|
||||
TSamlConfigsUpdate,
|
||||
TUsers
|
||||
} from "@app/db/schemas";
|
||||
import { OrgMembershipStatus, TableName, TSamlConfigs, TSamlConfigsUpdate, TUsers } from "@app/db/schemas";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import {
|
||||
decryptSymmetric,
|
||||
encryptSymmetric,
|
||||
generateAsymmetricKeyPair,
|
||||
generateSymmetricKey,
|
||||
infisicalSymmetricDecrypt,
|
||||
infisicalSymmetricEncypt
|
||||
} from "@app/lib/crypto/encryption";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { AuthTokenType } from "@app/services/auth/auth-type";
|
||||
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
|
||||
import { TokenType } from "@app/services/auth-token/auth-token-types";
|
||||
import { TIdentityMetadataDALFactory } from "@app/services/identity/identity-metadata-dal";
|
||||
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
||||
import { TOrgDALFactory } from "@app/services/org/org-dal";
|
||||
import { getDefaultOrgMembershipRole } from "@app/services/org/org-role-fns";
|
||||
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
|
||||
@@ -52,21 +38,19 @@ type TSamlConfigServiceFactoryDep = {
|
||||
TOrgDALFactory,
|
||||
"createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById"
|
||||
>;
|
||||
|
||||
identityMetadataDAL: Pick<TIdentityMetadataDALFactory, "delete" | "insertMany" | "transaction">;
|
||||
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "create">;
|
||||
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
|
||||
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
|
||||
smtpService: Pick<TSmtpService, "sendMail">;
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
|
||||
};
|
||||
|
||||
export type TSamlConfigServiceFactory = ReturnType<typeof samlConfigServiceFactory>;
|
||||
|
||||
export const samlConfigServiceFactory = ({
|
||||
samlConfigDAL,
|
||||
orgBotDAL,
|
||||
orgDAL,
|
||||
orgMembershipDAL,
|
||||
userDAL,
|
||||
@@ -75,7 +59,8 @@ export const samlConfigServiceFactory = ({
|
||||
licenseService,
|
||||
tokenService,
|
||||
smtpService,
|
||||
identityMetadataDAL
|
||||
identityMetadataDAL,
|
||||
kmsService
|
||||
}: TSamlConfigServiceFactoryDep) => {
|
||||
const createSamlCfg = async ({
|
||||
cert,
|
||||
@@ -99,70 +84,18 @@ export const samlConfigServiceFactory = ({
|
||||
"Failed to create SAML SSO configuration due to plan restriction. Upgrade plan to create SSO configuration."
|
||||
});
|
||||
|
||||
const orgBot = await orgBotDAL.transaction(async (tx) => {
|
||||
const doc = await orgBotDAL.findOne({ orgId }, tx);
|
||||
if (doc) return doc;
|
||||
|
||||
const { privateKey, publicKey } = generateAsymmetricKeyPair();
|
||||
const key = generateSymmetricKey();
|
||||
const {
|
||||
ciphertext: encryptedPrivateKey,
|
||||
iv: privateKeyIV,
|
||||
tag: privateKeyTag,
|
||||
encoding: privateKeyKeyEncoding,
|
||||
algorithm: privateKeyAlgorithm
|
||||
} = infisicalSymmetricEncypt(privateKey);
|
||||
const {
|
||||
ciphertext: encryptedSymmetricKey,
|
||||
iv: symmetricKeyIV,
|
||||
tag: symmetricKeyTag,
|
||||
encoding: symmetricKeyKeyEncoding,
|
||||
algorithm: symmetricKeyAlgorithm
|
||||
} = infisicalSymmetricEncypt(key);
|
||||
|
||||
return orgBotDAL.create(
|
||||
{
|
||||
name: "Infisical org bot",
|
||||
publicKey,
|
||||
privateKeyIV,
|
||||
encryptedPrivateKey,
|
||||
symmetricKeyIV,
|
||||
symmetricKeyTag,
|
||||
encryptedSymmetricKey,
|
||||
symmetricKeyAlgorithm,
|
||||
orgId,
|
||||
privateKeyTag,
|
||||
privateKeyAlgorithm,
|
||||
privateKeyKeyEncoding,
|
||||
symmetricKeyKeyEncoding
|
||||
},
|
||||
tx
|
||||
);
|
||||
const { encryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId
|
||||
});
|
||||
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: orgBot.encryptedSymmetricKey,
|
||||
iv: orgBot.symmetricKeyIV,
|
||||
tag: orgBot.symmetricKeyTag,
|
||||
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
});
|
||||
|
||||
const { ciphertext: encryptedEntryPoint, iv: entryPointIV, tag: entryPointTag } = encryptSymmetric(entryPoint, key);
|
||||
const { ciphertext: encryptedIssuer, iv: issuerIV, tag: issuerTag } = encryptSymmetric(issuer, key);
|
||||
const { ciphertext: encryptedCert, iv: certIV, tag: certTag } = encryptSymmetric(cert, key);
|
||||
const samlConfig = await samlConfigDAL.create({
|
||||
orgId,
|
||||
authProvider,
|
||||
isActive,
|
||||
encryptedEntryPoint,
|
||||
entryPointIV,
|
||||
entryPointTag,
|
||||
encryptedIssuer,
|
||||
issuerIV,
|
||||
issuerTag,
|
||||
encryptedCert,
|
||||
certIV,
|
||||
certTag
|
||||
encryptedSamlIssuer: encryptor({ plainText: Buffer.from(issuer) }).cipherTextBlob,
|
||||
encryptedSamlEntryPoint: encryptor({ plainText: Buffer.from(entryPoint) }).cipherTextBlob,
|
||||
encryptedSamlCertificate: encryptor({ plainText: Buffer.from(cert) }).cipherTextBlob
|
||||
});
|
||||
|
||||
return samlConfig;
|
||||
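Across these hunks the org-bot symmetric keys (infisicalSymmetricEncypt / infisicalSymmetricDecrypt plus per-field IV/tag columns) are replaced with KMS data-key cipher pairs. A minimal sketch of that pattern, using only the calls visible in the diff (createCipherPairWithDataKey, encryptor, decryptor); the surrounding function and variable names are illustrative assumptions:

import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";

// Sketch: encrypt and decrypt an org-scoped value with a KMS data key.
// The single cipherTextBlob replaces the old ciphertext + IV + tag column triple.
const roundTripOrgSecret = async (
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">,
  orgId: string,
  secretValue: string
) => {
  const { encryptor, decryptor } = await kmsService.createCipherPairWithDataKey({
    type: KmsDataKey.Organization,
    orgId
  });
  const { cipherTextBlob } = encryptor({ plainText: Buffer.from(secretValue) });
  return decryptor({ cipherTextBlob }).toString();
};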
@@ -190,40 +123,21 @@ export const samlConfigServiceFactory = ({
|
||||
});
|
||||
|
||||
const updateQuery: TSamlConfigsUpdate = { authProvider, isActive, lastUsed: null };
|
||||
const orgBot = await orgBotDAL.findOne({ orgId });
|
||||
if (!orgBot)
|
||||
throw new NotFoundError({
|
||||
message: `Organization bot not found for organization with ID '${orgId}'`,
|
||||
name: "OrgBotNotFound"
|
||||
});
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: orgBot.encryptedSymmetricKey,
|
||||
iv: orgBot.symmetricKeyIV,
|
||||
tag: orgBot.symmetricKeyTag,
|
||||
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
const { encryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId
|
||||
});
|
||||
|
||||
if (entryPoint !== undefined) {
|
||||
const {
|
||||
ciphertext: encryptedEntryPoint,
|
||||
iv: entryPointIV,
|
||||
tag: entryPointTag
|
||||
} = encryptSymmetric(entryPoint, key);
|
||||
updateQuery.encryptedEntryPoint = encryptedEntryPoint;
|
||||
updateQuery.entryPointIV = entryPointIV;
|
||||
updateQuery.entryPointTag = entryPointTag;
|
||||
updateQuery.encryptedSamlEntryPoint = encryptor({ plainText: Buffer.from(entryPoint) }).cipherTextBlob;
|
||||
}
|
||||
|
||||
if (issuer !== undefined) {
|
||||
const { ciphertext: encryptedIssuer, iv: issuerIV, tag: issuerTag } = encryptSymmetric(issuer, key);
|
||||
updateQuery.encryptedIssuer = encryptedIssuer;
|
||||
updateQuery.issuerIV = issuerIV;
|
||||
updateQuery.issuerTag = issuerTag;
|
||||
updateQuery.encryptedSamlIssuer = encryptor({ plainText: Buffer.from(issuer) }).cipherTextBlob;
|
||||
}
|
||||
|
||||
if (cert !== undefined) {
|
||||
const { ciphertext: encryptedCert, iv: certIV, tag: certTag } = encryptSymmetric(cert, key);
|
||||
updateQuery.encryptedCert = encryptedCert;
|
||||
updateQuery.certIV = certIV;
|
||||
updateQuery.certTag = certTag;
|
||||
updateQuery.encryptedSamlCertificate = encryptor({ plainText: Buffer.from(cert) }).cipherTextBlob;
|
||||
}
|
||||
|
||||
const [ssoConfig] = await samlConfigDAL.update({ orgId }, updateQuery);
|
||||
@@ -233,14 +147,14 @@ export const samlConfigServiceFactory = ({
|
||||
};
|
||||
|
||||
const getSaml = async (dto: TGetSamlCfgDTO) => {
|
||||
let ssoConfig: TSamlConfigs | undefined;
|
||||
let samlConfig: TSamlConfigs | undefined;
|
||||
if (dto.type === "org") {
|
||||
ssoConfig = await samlConfigDAL.findOne({ orgId: dto.orgId });
|
||||
if (!ssoConfig) return;
|
||||
samlConfig = await samlConfigDAL.findOne({ orgId: dto.orgId });
|
||||
if (!samlConfig) return;
|
||||
} else if (dto.type === "orgSlug") {
|
||||
const org = await orgDAL.findOne({ slug: dto.orgSlug });
|
||||
if (!org) return;
|
||||
ssoConfig = await samlConfigDAL.findOne({ orgId: org.id });
|
||||
samlConfig = await samlConfigDAL.findOne({ orgId: org.id });
|
||||
} else if (dto.type === "ssoId") {
|
||||
// TODO:
|
||||
// We made this change because saml config ids were not moved over during the migration
|
||||
@@ -259,81 +173,51 @@ export const samlConfigServiceFactory = ({
|
||||
|
||||
const id = UUIDToMongoId[dto.id] ?? dto.id;
|
||||
|
||||
ssoConfig = await samlConfigDAL.findById(id);
|
||||
samlConfig = await samlConfigDAL.findById(id);
|
||||
}
|
||||
if (!ssoConfig) throw new NotFoundError({ message: `Failed to find SSO data` });
|
||||
if (!samlConfig) throw new NotFoundError({ message: `Failed to find SSO data` });
|
||||
|
||||
// when dto is type id means it's internally used
|
||||
if (dto.type === "org") {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
dto.actor,
|
||||
dto.actorId,
|
||||
ssoConfig.orgId,
|
||||
samlConfig.orgId,
|
||||
dto.actorAuthMethod,
|
||||
dto.actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Sso);
|
||||
}
|
||||
const {
|
||||
entryPointTag,
|
||||
entryPointIV,
|
||||
encryptedEntryPoint,
|
||||
certTag,
|
||||
certIV,
|
||||
encryptedCert,
|
||||
issuerTag,
|
||||
issuerIV,
|
||||
encryptedIssuer
|
||||
} = ssoConfig;
|
||||
|
||||
const orgBot = await orgBotDAL.findOne({ orgId: ssoConfig.orgId });
|
||||
if (!orgBot)
|
||||
throw new NotFoundError({
|
||||
message: `Organization bot not found in organization with ID '${ssoConfig.orgId}'`,
|
||||
name: "OrgBotNotFound"
|
||||
});
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: orgBot.encryptedSymmetricKey,
|
||||
iv: orgBot.symmetricKeyIV,
|
||||
tag: orgBot.symmetricKeyTag,
|
||||
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
const { decryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: samlConfig.orgId
|
||||
});
|
||||
|
||||
let entryPoint = "";
|
||||
if (encryptedEntryPoint && entryPointIV && entryPointTag) {
|
||||
entryPoint = decryptSymmetric({
|
||||
ciphertext: encryptedEntryPoint,
|
||||
key,
|
||||
tag: entryPointTag,
|
||||
iv: entryPointIV
|
||||
});
|
||||
if (samlConfig.encryptedSamlEntryPoint) {
|
||||
entryPoint = decryptor({ cipherTextBlob: samlConfig.encryptedSamlEntryPoint }).toString();
|
||||
}
|
||||
|
||||
let issuer = "";
|
||||
if (encryptedIssuer && issuerTag && issuerIV) {
|
||||
issuer = decryptSymmetric({
|
||||
key,
|
||||
tag: issuerTag,
|
||||
iv: issuerIV,
|
||||
ciphertext: encryptedIssuer
|
||||
});
|
||||
if (samlConfig.encryptedSamlIssuer) {
|
||||
issuer = decryptor({ cipherTextBlob: samlConfig.encryptedSamlIssuer }).toString();
|
||||
}
|
||||
|
||||
let cert = "";
|
||||
if (encryptedCert && certTag && certIV) {
|
||||
cert = decryptSymmetric({ key, tag: certTag, iv: certIV, ciphertext: encryptedCert });
|
||||
if (samlConfig.encryptedSamlCertificate) {
|
||||
cert = decryptor({ cipherTextBlob: samlConfig.encryptedSamlCertificate }).toString();
|
||||
}
|
||||
|
||||
return {
|
||||
id: ssoConfig.id,
|
||||
organization: ssoConfig.orgId,
|
||||
orgId: ssoConfig.orgId,
|
||||
authProvider: ssoConfig.authProvider,
|
||||
isActive: ssoConfig.isActive,
|
||||
id: samlConfig.id,
|
||||
organization: samlConfig.orgId,
|
||||
orgId: samlConfig.orgId,
|
||||
authProvider: samlConfig.authProvider,
|
||||
isActive: samlConfig.isActive,
|
||||
entryPoint,
|
||||
issuer,
|
||||
cert,
|
||||
lastUsed: ssoConfig.lastUsed
|
||||
lastUsed: samlConfig.lastUsed
|
||||
};
|
||||
};
|
||||
|
||||
|
@@ -5,13 +5,9 @@ import {
|
||||
IAMClient
|
||||
} from "@aws-sdk/client-iam";
|
||||
|
||||
import { SecretKeyEncoding, SecretType } from "@app/db/schemas";
|
||||
import { SecretType } from "@app/db/schemas";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import {
|
||||
encryptSymmetric128BitHexKeyUTF8,
|
||||
infisicalSymmetricDecrypt,
|
||||
infisicalSymmetricEncypt
|
||||
} from "@app/lib/crypto/encryption";
|
||||
import { encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto/encryption";
|
||||
import { daysToMillisecond, secondsToMillis } from "@app/lib/dates";
|
||||
import { NotFoundError } from "@app/lib/errors";
|
||||
import { logger } from "@app/lib/logger";
|
||||
@@ -135,20 +131,15 @@ export const secretRotationQueueFactory = ({
|
||||
|
||||
// deep copy
|
||||
const provider = JSON.parse(JSON.stringify(rotationProvider)) as TSecretRotationProviderTemplate;
|
||||
const { encryptor: secretManagerEncryptor, decryptor: secretManagerDecryptor } =
|
||||
await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.SecretManager,
|
||||
projectId: secretRotation.projectId
|
||||
});
|
||||
|
||||
// now get the encrypted variable values
|
||||
// in includes the inputs, the previous outputs
|
||||
// internal mapping variables etc
|
||||
const { encryptedDataTag, encryptedDataIV, encryptedData, keyEncoding } = secretRotation;
|
||||
if (!encryptedDataTag || !encryptedDataIV || !encryptedData || !keyEncoding) {
|
||||
throw new DisableRotationErrors({ message: "No inputs found" });
|
||||
}
|
||||
const decryptedData = infisicalSymmetricDecrypt({
|
||||
keyEncoding: keyEncoding as SecretKeyEncoding,
|
||||
ciphertext: encryptedData,
|
||||
iv: encryptedDataIV,
|
||||
tag: encryptedDataTag
|
||||
});
|
||||
const decryptedData = secretManagerDecryptor({
|
||||
cipherTextBlob: secretRotation.encryptedRotationData
|
||||
}).toString();
|
||||
|
||||
const variables = JSON.parse(decryptedData) as TSecretRotationEncData;
|
||||
// rotation set cycle
|
||||
@@ -303,11 +294,9 @@ export const secretRotationQueueFactory = ({
|
||||
outputs: newCredential.outputs,
|
||||
internal: newCredential.internal
|
||||
});
|
||||
const encVarData = infisicalSymmetricEncypt(JSON.stringify(variables));
|
||||
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.SecretManager,
|
||||
projectId: secretRotation.projectId
|
||||
});
|
||||
const encryptedRotationData = secretManagerEncryptor({
|
||||
plainText: Buffer.from(JSON.stringify(variables))
|
||||
}).cipherTextBlob;
|
||||
|
||||
const numberOfSecretsRotated = rotationOutputs.length;
|
||||
if (shouldUseSecretV2Bridge) {
|
||||
@@ -323,11 +312,7 @@ export const secretRotationQueueFactory = ({
|
||||
await secretRotationDAL.updateById(
|
||||
rotationId,
|
||||
{
|
||||
encryptedData: encVarData.ciphertext,
|
||||
encryptedDataIV: encVarData.iv,
|
||||
encryptedDataTag: encVarData.tag,
|
||||
keyEncoding: encVarData.encoding,
|
||||
algorithm: encVarData.algorithm,
|
||||
encryptedRotationData,
|
||||
lastRotatedAt: new Date(),
|
||||
statusMessage: "Rotated successfull",
|
||||
status: "success"
|
||||
@@ -371,11 +356,7 @@ export const secretRotationQueueFactory = ({
|
||||
await secretRotationDAL.updateById(
|
||||
rotationId,
|
||||
{
|
||||
encryptedData: encVarData.ciphertext,
|
||||
encryptedDataIV: encVarData.iv,
|
||||
encryptedDataTag: encVarData.tag,
|
||||
keyEncoding: encVarData.encoding,
|
||||
algorithm: encVarData.algorithm,
|
||||
encryptedRotationData,
|
||||
lastRotatedAt: new Date(),
|
||||
statusMessage: "Rotated successfull",
|
||||
status: "success"
|
||||
|
@@ -2,9 +2,11 @@ import { ForbiddenError, subject } from "@casl/ability";
|
||||
import Ajv from "ajv";
|
||||
|
||||
import { ActionProjectType, ProjectVersion, TableName } from "@app/db/schemas";
|
||||
import { decryptSymmetric128BitHexKeyUTF8, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
|
||||
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto/encryption";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { TProjectPermission } from "@app/lib/types";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
|
||||
import { TSecretDALFactory } from "@app/services/secret/secret-dal";
|
||||
@@ -30,6 +32,7 @@ type TSecretRotationServiceFactoryDep = {
|
||||
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
|
||||
secretRotationQueue: TSecretRotationQueueFactory;
|
||||
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
|
||||
};
|
||||
|
||||
export type TSecretRotationServiceFactory = ReturnType<typeof secretRotationServiceFactory>;
|
||||
@@ -44,7 +47,8 @@ export const secretRotationServiceFactory = ({
|
||||
folderDAL,
|
||||
secretDAL,
|
||||
projectBotService,
|
||||
secretV2BridgeDAL
|
||||
secretV2BridgeDAL,
|
||||
kmsService
|
||||
}: TSecretRotationServiceFactoryDep) => {
|
||||
const getProviderTemplates = async ({
|
||||
actor,
|
||||
@@ -156,7 +160,11 @@ export const secretRotationServiceFactory = ({
|
||||
inputs: formattedInputs,
|
||||
creds: []
|
||||
};
|
||||
const encData = infisicalSymmetricEncypt(JSON.stringify(unencryptedData));
|
||||
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.SecretManager,
|
||||
projectId
|
||||
});
|
||||
|
||||
const secretRotation = await secretRotationDAL.transaction(async (tx) => {
|
||||
const doc = await secretRotationDAL.create(
|
||||
{
|
||||
@@ -164,11 +172,8 @@ export const secretRotationServiceFactory = ({
|
||||
secretPath,
|
||||
interval,
|
||||
envId: folder.envId,
|
||||
encryptedDataTag: encData.tag,
|
||||
encryptedDataIV: encData.iv,
|
||||
encryptedData: encData.ciphertext,
|
||||
algorithm: encData.algorithm,
|
||||
keyEncoding: encData.encoding
|
||||
encryptedRotationData: secretManagerEncryptor({ plainText: Buffer.from(JSON.stringify(unencryptedData)) })
|
||||
.cipherTextBlob
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
@@ -1,3 +1,5 @@
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-assignment,@typescript-eslint/no-unsafe-member-access,@typescript-eslint/no-unsafe-argument */
|
||||
// akhilmhdh: I did this, quite strange bug with eslint. Everything do have a type stil has this error
|
||||
import { ForbiddenError, subject } from "@casl/ability";
|
||||
|
||||
import { ActionProjectType, TableName, TSecretTagJunctionInsert, TSecretV2TagJunctionInsert } from "@app/db/schemas";
|
||||
|
@@ -1,4 +1,4 @@
|
||||
/* eslint-disable no-await-in-loop */
|
||||
/* eslint-disable no-await-in-loop,@typescript-eslint/no-unsafe-assignment,@typescript-eslint/no-unsafe-member-access,@typescript-eslint/no-unsafe-argument */
|
||||
import { Knex } from "knex";
|
||||
import { z } from "zod";
|
||||
|
||||
|
@@ -2,6 +2,12 @@ import { Redis } from "ioredis";

import { Redlock, Settings } from "@app/lib/red-lock";

export enum PgSqlLock {
  BootUpMigration = 2023,
  SuperAdminInit = 2024,
  KmsRootKeyInit = 2025
}

export type TKeyStoreFactory = ReturnType<typeof keyStoreFactory>;

// all the key prefixes used must be set here to avoid conflict
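The new KmsRootKeyInit lock id extends the advisory-lock enum used to serialize one-off bootstrap work across app replicas. As a hedged sketch (the actual call site is not part of this diff), such an id is typically taken as a Postgres transaction-level advisory lock via Knex:

import { Knex } from "knex";

import { PgSqlLock } from "@app/keystore/keystore";

// Sketch only: blocks concurrent replicas until the lock holder commits or rolls back,
// so the KMS root key is initialized exactly once.
const withKmsRootKeyLock = async <T>(db: Knex, fn: (tx: Knex.Transaction) => Promise<T>) =>
  db.transaction(async (tx) => {
    await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.KmsRootKeyInit]);
    return fn(tx);
  });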
backend/src/keystore/memory.ts (new file, 38 lines)
@@ -0,0 +1,38 @@
import { Lock } from "@app/lib/red-lock";

import { TKeyStoreFactory } from "./keystore";

export const inMemoryKeyStore = (): TKeyStoreFactory => {
  const store: Record<string, string | number | Buffer> = {};

  return {
    setItem: async (key, value) => {
      store[key] = value;
      return "OK";
    },
    setItemWithExpiry: async (key, value) => {
      store[key] = value;
      return "OK";
    },
    deleteItem: async (key) => {
      delete store[key];
      return 1;
    },
    getItem: async (key) => {
      const value = store[key];
      if (typeof value === "string") {
        return value;
      }
      return null;
    },
    incrementBy: async () => {
      return 1;
    },
    acquireLock: () => {
      return Promise.resolve({
        release: () => {}
      }) as Promise<Lock>;
    },
    waitTillReady: async () => {}
  };
};
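For context, a short hypothetical usage of the new in-memory store (e.g. in a unit test), where it stands in for the Redis-backed keystore without any external dependency:

import { inMemoryKeyStore } from "@app/keystore/memory";

// The store satisfies TKeyStoreFactory, so services can be constructed against it in tests.
const demo = async () => {
  const keyStore = inMemoryKeyStore();
  await keyStore.setItem("sample-key", "sample-value");
  const value = await keyStore.getItem("sample-key"); // "sample-value"
  await keyStore.deleteItem("sample-key");
  return value;
};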
@@ -721,7 +721,8 @@ export const RAW_SECRETS = {
|
||||
secretName: "The name of the secret to update.",
|
||||
secretComment: "Update comment to the secret.",
|
||||
environment: "The slug of the environment where the secret is located.",
|
||||
secretPath: "The path of the secret to update.",
|
||||
mode: "Defines how the system should handle missing secrets during an update.",
|
||||
secretPath: "The default path for secrets to update or upsert, if not provided in the secret details.",
|
||||
secretValue: "The new value of the secret.",
|
||||
skipMultilineEncoding: "Skip multiline encoding for the secret value.",
|
||||
type: "The type of the secret to update.",
|
||||
@@ -1718,36 +1719,40 @@ export const SecretSyncs = {
|
||||
SYNC_OPTIONS: (destination: SecretSync) => {
|
||||
const destinationName = SECRET_SYNC_NAME_MAP[destination];
|
||||
return {
|
||||
INITIAL_SYNC_BEHAVIOR: `Specify how Infisical should resolve the initial sync to the ${destinationName} destination.`,
|
||||
PREPEND_PREFIX: `Optionally prepend a prefix to your secrets' keys when syncing to ${destinationName}.`,
|
||||
APPEND_SUFFIX: `Optionally append a suffix to your secrets' keys when syncing to ${destinationName}.`
|
||||
initialSyncBehavior: `Specify how Infisical should resolve the initial sync to the ${destinationName} destination.`
|
||||
};
|
||||
},
|
||||
DESTINATION_CONFIG: {
|
||||
AWS_PARAMETER_STORE: {
|
||||
REGION: "The AWS region to sync secrets to.",
|
||||
PATH: "The Parameter Store path to sync secrets to."
|
||||
region: "The AWS region to sync secrets to.",
|
||||
path: "The Parameter Store path to sync secrets to."
|
||||
},
|
||||
AWS_SECRETS_MANAGER: {
|
||||
REGION: "The AWS region to sync secrets to.",
|
||||
MAPPING_BEHAVIOR:
|
||||
"How secrets from Infisical should be mapped to AWS Secrets Manager; one-to-one or many-to-one.",
|
||||
SECRET_NAME: "The secret name in AWS Secrets Manager to sync to when using mapping behavior many-to-one."
|
||||
region: "The AWS region to sync secrets to.",
|
||||
mappingBehavior: "How secrets from Infisical should be mapped to AWS Secrets Manager; one-to-one or many-to-one.",
|
||||
secretName: "The secret name in AWS Secrets Manager to sync to when using mapping behavior many-to-one."
|
||||
},
|
||||
GITHUB: {
|
||||
ORG: "The name of the GitHub organization.",
|
||||
OWNER: "The name of the GitHub account owner of the repository.",
|
||||
REPO: "The name of the GitHub repository.",
|
||||
ENV: "The name of the GitHub environment."
|
||||
scope: "The GitHub scope that secrets should be synced to",
|
||||
org: "The name of the GitHub organization.",
|
||||
owner: "The name of the GitHub account owner of the repository.",
|
||||
repo: "The name of the GitHub repository.",
|
||||
env: "The name of the GitHub environment."
|
||||
},
|
||||
AZURE_KEY_VAULT: {
|
||||
VAULT_BASE_URL:
|
||||
"The base URL of the Azure Key Vault to sync secrets to. Example: https://example.vault.azure.net/"
|
||||
vaultBaseUrl: "The base URL of the Azure Key Vault to sync secrets to. Example: https://example.vault.azure.net/"
|
||||
},
|
||||
AZURE_APP_CONFIGURATION: {
|
||||
CONFIGURATION_URL:
|
||||
configurationUrl:
|
||||
"The URL of the Azure App Configuration to sync secrets to. Example: https://example.azconfig.io/",
|
||||
LABEL: "An optional label to assign to secrets created in Azure App Configuration."
|
||||
label: "An optional label to assign to secrets created in Azure App Configuration."
|
||||
},
|
||||
GCP: {
|
||||
scope: "The Google project scope that secrets should be synced to.",
|
||||
projectId: "The ID of the Google project secrets should be synced to."
|
||||
},
|
||||
DATABRICKS: {
|
||||
scope: "The Databricks secret scope that secrets should be synced to."
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@@ -258,7 +258,8 @@ const envSchema = z
|
||||
SECRET_SCANNING_ORG_WHITELIST: data.SECRET_SCANNING_ORG_WHITELIST?.split(",")
|
||||
}));
|
||||
|
||||
let envCfg: Readonly<z.infer<typeof envSchema>>;
|
||||
export type TEnvConfig = Readonly<z.infer<typeof envSchema>>;
|
||||
let envCfg: TEnvConfig;
|
||||
|
||||
export const getConfig = () => envCfg;
|
||||
// cannot import singleton logger directly as it needs config to load various transport
|
||||
|
@@ -98,7 +98,7 @@ const extractReqId = () => {
|
||||
}
|
||||
};
|
||||
|
||||
export const initLogger = async () => {
|
||||
export const initLogger = () => {
|
||||
const cfg = loggerConfig.parse(process.env);
|
||||
const targets: pino.TransportMultiOptions["targets"][number][] = [
|
||||
{
|
||||
|
@@ -2,14 +2,13 @@ import "./lib/telemetry/instrumentation";
|
||||
|
||||
import dotenv from "dotenv";
|
||||
import { Redis } from "ioredis";
|
||||
import path from "path";
|
||||
|
||||
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
|
||||
|
||||
import { runMigrations } from "./auto-start-migrations";
|
||||
import { initAuditLogDbConnection, initDbConnection } from "./db";
|
||||
import { keyStoreFactory } from "./keystore/keystore";
|
||||
import { formatSmtpConfig, initEnvConfig, IS_PACKAGED } from "./lib/config/env";
|
||||
import { isMigrationMode } from "./lib/fn";
|
||||
import { formatSmtpConfig, initEnvConfig } from "./lib/config/env";
|
||||
import { initLogger } from "./lib/logger";
|
||||
import { queueServiceFactory } from "./queue";
|
||||
import { main } from "./server/app";
|
||||
@@ -19,58 +18,53 @@ import { smtpServiceFactory } from "./services/smtp/smtp-service";
|
||||
dotenv.config();
|
||||
|
||||
const run = async () => {
|
||||
const logger = await initLogger();
|
||||
const appCfg = initEnvConfig(logger);
|
||||
const logger = initLogger();
|
||||
const envConfig = initEnvConfig(logger);
|
||||
|
||||
const db = initDbConnection({
|
||||
dbConnectionUri: appCfg.DB_CONNECTION_URI,
|
||||
dbRootCert: appCfg.DB_ROOT_CERT,
|
||||
readReplicas: appCfg.DB_READ_REPLICAS?.map((el) => ({
|
||||
dbConnectionUri: envConfig.DB_CONNECTION_URI,
|
||||
dbRootCert: envConfig.DB_ROOT_CERT,
|
||||
readReplicas: envConfig.DB_READ_REPLICAS?.map((el) => ({
|
||||
dbRootCert: el.DB_ROOT_CERT,
|
||||
dbConnectionUri: el.DB_CONNECTION_URI
|
||||
}))
|
||||
});
|
||||
|
||||
const auditLogDb = appCfg.AUDIT_LOGS_DB_CONNECTION_URI
|
||||
const auditLogDb = envConfig.AUDIT_LOGS_DB_CONNECTION_URI
|
||||
? initAuditLogDbConnection({
|
||||
dbConnectionUri: appCfg.AUDIT_LOGS_DB_CONNECTION_URI,
|
||||
dbRootCert: appCfg.AUDIT_LOGS_DB_ROOT_CERT
|
||||
dbConnectionUri: envConfig.AUDIT_LOGS_DB_CONNECTION_URI,
|
||||
dbRootCert: envConfig.AUDIT_LOGS_DB_ROOT_CERT
|
||||
})
|
||||
: undefined;
|
||||
|
||||
// Case: App is running in packaged mode (binary), and migration mode is enabled.
|
||||
// Run the migrations and exit the process after completion.
|
||||
if (IS_PACKAGED && isMigrationMode()) {
|
||||
try {
|
||||
logger.info("Running Postgres migrations..");
|
||||
await db.migrate.latest({
|
||||
directory: path.join(__dirname, "./db/migrations")
|
||||
});
|
||||
logger.info("Postgres migrations completed");
|
||||
} catch (err) {
|
||||
logger.error(err, "Failed to run migrations");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
process.exit(0);
|
||||
}
|
||||
await runMigrations({ applicationDb: db, auditLogDb, logger });
|
||||
|
||||
const smtp = smtpServiceFactory(formatSmtpConfig());
|
||||
|
||||
const queue = queueServiceFactory(appCfg.REDIS_URL, {
|
||||
dbConnectionUrl: appCfg.DB_CONNECTION_URI,
|
||||
dbRootCert: appCfg.DB_ROOT_CERT
|
||||
const queue = queueServiceFactory(envConfig.REDIS_URL, {
|
||||
dbConnectionUrl: envConfig.DB_CONNECTION_URI,
|
||||
dbRootCert: envConfig.DB_ROOT_CERT
|
||||
});
|
||||
|
||||
await queue.initialize();
|
||||
|
||||
const keyStore = keyStoreFactory(appCfg.REDIS_URL);
|
||||
const redis = new Redis(appCfg.REDIS_URL);
|
||||
const keyStore = keyStoreFactory(envConfig.REDIS_URL);
|
||||
const redis = new Redis(envConfig.REDIS_URL);
|
||||
|
||||
const hsmModule = initializeHsmModule();
|
||||
const hsmModule = initializeHsmModule(envConfig);
|
||||
hsmModule.initialize();
|
||||
|
||||
const server = await main({ db, auditLogDb, hsmModule: hsmModule.getModule(), smtp, logger, queue, keyStore, redis });
|
||||
const server = await main({
|
||||
db,
|
||||
auditLogDb,
|
||||
hsmModule: hsmModule.getModule(),
|
||||
smtp,
|
||||
logger,
|
||||
queue,
|
||||
keyStore,
|
||||
redis,
|
||||
envConfig
|
||||
});
|
||||
const bootstrap = await bootstrapCheck({ db });
|
||||
|
||||
// eslint-disable-next-line
|
||||
@@ -90,8 +84,8 @@ const run = async () => {
|
||||
});
|
||||
|
||||
await server.listen({
|
||||
port: appCfg.PORT,
|
||||
host: appCfg.HOST,
|
||||
port: envConfig.PORT,
|
||||
host: envConfig.HOST,
|
||||
listenTextResolver: (address) => {
|
||||
void bootstrap();
|
||||
return address;
|
||||
|
@@ -17,7 +17,7 @@ import { Knex } from "knex";
|
||||
|
||||
import { HsmModule } from "@app/ee/services/hsm/hsm-types";
|
||||
import { TKeyStoreFactory } from "@app/keystore/keystore";
|
||||
import { getConfig, IS_PACKAGED } from "@app/lib/config/env";
|
||||
import { getConfig, IS_PACKAGED, TEnvConfig } from "@app/lib/config/env";
|
||||
import { CustomLogger } from "@app/lib/logger/logger";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { TQueueServiceFactory } from "@app/queue";
|
||||
@@ -43,10 +43,11 @@ type TMain = {
|
||||
keyStore: TKeyStoreFactory;
|
||||
hsmModule: HsmModule;
|
||||
redis: Redis;
|
||||
envConfig: TEnvConfig;
|
||||
};
|
||||
|
||||
// Run the server!
|
||||
export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, keyStore, redis }: TMain) => {
|
||||
export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, keyStore, redis, envConfig }: TMain) => {
|
||||
const appCfg = getConfig();
|
||||
|
||||
const server = fastify({
|
||||
@@ -127,7 +128,7 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key
|
||||
})
|
||||
});
|
||||
|
||||
await server.register(registerRoutes, { smtp, queue, db, auditLogDb, keyStore, hsmModule });
|
||||
await server.register(registerRoutes, { smtp, queue, db, auditLogDb, keyStore, hsmModule, envConfig });
|
||||
|
||||
await server.register(registerServeUI, {
|
||||
standaloneMode: appCfg.STANDALONE_MODE || IS_PACKAGED,
|
||||
|
@@ -85,7 +85,7 @@ import { sshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certi
|
||||
import { trustedIpDALFactory } from "@app/ee/services/trusted-ip/trusted-ip-dal";
|
||||
import { trustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
|
||||
import { TKeyStoreFactory } from "@app/keystore/keystore";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { getConfig, TEnvConfig } from "@app/lib/config/env";
|
||||
import { TQueueServiceFactory } from "@app/queue";
|
||||
import { readLimit } from "@app/server/config/rateLimiter";
|
||||
import { accessTokenQueueServiceFactory } from "@app/services/access-token-queue/access-token-queue";
|
||||
@@ -244,7 +244,8 @@ export const registerRoutes = async (
|
||||
hsmModule,
|
||||
smtp: smtpService,
|
||||
queue: queueService,
|
||||
keyStore
|
||||
keyStore,
|
||||
envConfig
|
||||
}: {
|
||||
auditLogDb?: Knex;
|
||||
db: Knex;
|
||||
@@ -252,6 +253,7 @@ export const registerRoutes = async (
|
||||
smtp: TSmtpService;
|
||||
queue: TQueueServiceFactory;
|
||||
keyStore: TKeyStoreFactory;
|
||||
envConfig: TEnvConfig;
|
||||
}
|
||||
) => {
|
||||
const appCfg = getConfig();
|
||||
@@ -391,7 +393,8 @@ export const registerRoutes = async (
|
||||
const licenseService = licenseServiceFactory({ permissionService, orgDAL, licenseDAL, keyStore });
|
||||
|
||||
const hsmService = hsmServiceFactory({
|
||||
hsmModule
|
||||
hsmModule,
|
||||
envConfig
|
||||
});
|
||||
|
||||
const kmsService = kmsServiceFactory({
|
||||
@@ -401,7 +404,8 @@ export const registerRoutes = async (
|
||||
internalKmsDAL,
|
||||
orgDAL,
|
||||
projectDAL,
|
||||
hsmService
|
||||
hsmService,
|
||||
envConfig
|
||||
});
|
||||
|
||||
const externalKmsService = externalKmsServiceFactory({
|
||||
@@ -447,7 +451,6 @@ export const registerRoutes = async (
|
||||
const samlService = samlConfigServiceFactory({
|
||||
identityMetadataDAL,
|
||||
permissionService,
|
||||
orgBotDAL,
|
||||
orgDAL,
|
||||
orgMembershipDAL,
|
||||
userDAL,
|
||||
@@ -455,7 +458,8 @@ export const registerRoutes = async (
|
||||
samlConfigDAL,
|
||||
licenseService,
|
||||
tokenService,
|
||||
smtpService
|
||||
smtpService,
|
||||
kmsService
|
||||
});
|
||||
const groupService = groupServiceFactory({
|
||||
userDAL,
|
||||
@@ -506,7 +510,6 @@ export const registerRoutes = async (
|
||||
ldapGroupMapDAL,
|
||||
orgDAL,
|
||||
orgMembershipDAL,
|
||||
orgBotDAL,
|
||||
groupDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL,
|
||||
@@ -518,7 +521,8 @@ export const registerRoutes = async (
|
||||
permissionService,
|
||||
licenseService,
|
||||
tokenService,
|
||||
smtpService
|
||||
smtpService,
|
||||
kmsService
|
||||
});
|
||||
|
||||
const telemetryService = telemetryServiceFactory({
|
||||
@@ -969,7 +973,8 @@ export const registerRoutes = async (
|
||||
permissionService,
|
||||
webhookDAL,
|
||||
projectEnvDAL,
|
||||
projectDAL
|
||||
projectDAL,
|
||||
kmsService
|
||||
});
|
||||
|
||||
const secretTagService = secretTagServiceFactory({ secretTagDAL, permissionService });
|
||||
@@ -1149,7 +1154,8 @@ export const registerRoutes = async (
|
||||
secretDAL,
|
||||
folderDAL,
|
||||
projectBotService,
|
||||
secretV2BridgeDAL
|
||||
secretV2BridgeDAL,
|
||||
kmsService
|
||||
});
|
||||
|
||||
const integrationService = integrationServiceFactory({
|
||||
@@ -1238,9 +1244,9 @@ export const registerRoutes = async (
|
||||
identityKubernetesAuthDAL,
|
||||
identityOrgMembershipDAL,
|
||||
identityAccessTokenDAL,
|
||||
orgBotDAL,
|
||||
permissionService,
|
||||
licenseService
|
||||
licenseService,
|
||||
kmsService
|
||||
});
|
||||
const identityGcpAuthService = identityGcpAuthServiceFactory({
|
||||
identityGcpAuthDAL,
|
||||
@@ -1272,7 +1278,7 @@ export const registerRoutes = async (
|
||||
identityAccessTokenDAL,
|
||||
permissionService,
|
||||
licenseService,
|
||||
orgBotDAL
|
||||
kmsService
|
||||
});
|
||||
|
||||
const identityJwtAuthService = identityJwtAuthServiceFactory({
|
||||
@@ -1289,7 +1295,9 @@ export const registerRoutes = async (
|
||||
queueService,
|
||||
dynamicSecretLeaseDAL,
|
||||
dynamicSecretProviders,
|
||||
dynamicSecretDAL
|
||||
dynamicSecretDAL,
|
||||
folderDAL,
|
||||
kmsService
|
||||
});
|
||||
const dynamicSecretService = dynamicSecretServiceFactory({
|
||||
projectDAL,
|
||||
@@ -1299,7 +1307,8 @@ export const registerRoutes = async (
|
||||
dynamicSecretProviders,
|
||||
folderDAL,
|
||||
permissionService,
|
||||
licenseService
|
||||
licenseService,
|
||||
kmsService
|
||||
});
|
||||
const dynamicSecretLeaseService = dynamicSecretLeaseServiceFactory({
|
||||
projectDAL,
|
||||
@@ -1309,7 +1318,8 @@ export const registerRoutes = async (
|
||||
dynamicSecretLeaseDAL,
|
||||
dynamicSecretProviders,
|
||||
folderDAL,
|
||||
licenseService
|
||||
licenseService,
|
||||
kmsService
|
||||
});
|
||||
const dailyResourceCleanUp = dailyResourceCleanUpQueueServiceFactory({
|
||||
auditLogDAL,
|
||||
@@ -1337,7 +1347,7 @@ export const registerRoutes = async (
|
||||
licenseService,
|
||||
tokenService,
|
||||
smtpService,
|
||||
orgBotDAL,
|
||||
kmsService,
|
||||
permissionService,
|
||||
oidcConfigDAL,
|
||||
projectBotDAL,
|
||||
|
@@ -0,0 +1,42 @@
import { LdapConfigsSchema, OidcConfigsSchema, SamlConfigsSchema } from "@app/db/schemas";

export const SanitizedSamlConfigSchema = SamlConfigsSchema.pick({
  id: true,
  orgId: true,
  isActive: true,
  lastUsed: true,
  createdAt: true,
  updatedAt: true,
  authProvider: true
});

export const SanitizedLdapConfigSchema = LdapConfigsSchema.pick({
  updatedAt: true,
  createdAt: true,
  isActive: true,
  orgId: true,
  id: true,
  url: true,
  searchBase: true,
  searchFilter: true,
  groupSearchBase: true,
  uniqueUserAttribute: true,
  groupSearchFilter: true
});

export const SanitizedOidcConfigSchema = OidcConfigsSchema.pick({
  id: true,
  orgId: true,
  isActive: true,
  createdAt: true,
  updatedAt: true,
  lastUsed: true,
  issuer: true,
  jwksUri: true,
  discoveryURL: true,
  tokenEndpoint: true,
  userinfoEndpoint: true,
  configurationType: true,
  allowedEmailDomains: true,
  authorizationEndpoint: true
});
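These sanitized schemas follow the codebase's pick-over-omit convention: an allow-list keeps any column added later (such as the new encrypted client secret blobs) out of API responses until it is listed explicitly. A small illustrative zod sketch with a hypothetical schema, not taken from this diff:

import { z } from "zod";

// Hypothetical table schema for illustration only.
const ExampleConfigSchema = z.object({
  id: z.string(),
  isActive: z.boolean(),
  encryptedClientSecret: z.string()
});

// pick: allow-list; new sensitive columns stay hidden by default.
const SanitizedExampleSchema = ExampleConfigSchema.pick({ id: true, isActive: true });

// omit: deny-list; a column added later leaks unless someone remembers to omit it.
const UnsafeExampleSchema = ExampleConfigSchema.omit({ encryptedClientSecret: true });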
@@ -11,7 +11,7 @@ import {
|
||||
} from "@app/db/schemas";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
|
||||
import { UnpackedPermissionSchema } from "./santizedSchemas/permission";
|
||||
import { UnpackedPermissionSchema } from "./sanitizedSchema/permission";
|
||||
|
||||
// sometimes the return data must be santizied to avoid leaking important values
|
||||
// always prefer pick over omit in zod
|
||||
@@ -201,10 +201,11 @@ export const SanitizedRoleSchemaV1 = ProjectRolesSchema.extend({
|
||||
});
|
||||
|
||||
export const SanitizedDynamicSecretSchema = DynamicSecretsSchema.omit({
|
||||
encryptedInput: true,
|
||||
keyEncoding: true,
|
||||
inputCiphertext: true,
|
||||
inputIV: true,
|
||||
inputTag: true,
|
||||
inputCiphertext: true,
|
||||
keyEncoding: true,
|
||||
algorithm: true
|
||||
});
|
||||
|
||||
|
@@ -12,6 +12,10 @@ import {
|
||||
AzureKeyVaultConnectionListItemSchema,
|
||||
SanitizedAzureKeyVaultConnectionSchema
|
||||
} from "@app/services/app-connection/azure-key-vault";
|
||||
import {
|
||||
DatabricksConnectionListItemSchema,
|
||||
SanitizedDatabricksConnectionSchema
|
||||
} from "@app/services/app-connection/databricks";
|
||||
import { GcpConnectionListItemSchema, SanitizedGcpConnectionSchema } from "@app/services/app-connection/gcp";
|
||||
import { GitHubConnectionListItemSchema, SanitizedGitHubConnectionSchema } from "@app/services/app-connection/github";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
@@ -22,7 +26,8 @@ const SanitizedAppConnectionSchema = z.union([
|
||||
...SanitizedGitHubConnectionSchema.options,
|
||||
...SanitizedGcpConnectionSchema.options,
|
||||
...SanitizedAzureKeyVaultConnectionSchema.options,
|
||||
...SanitizedAzureAppConfigurationConnectionSchema.options
|
||||
...SanitizedAzureAppConfigurationConnectionSchema.options,
|
||||
...SanitizedDatabricksConnectionSchema.options
|
||||
]);
|
||||
|
||||
const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
|
||||
@@ -30,7 +35,8 @@ const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
|
||||
GitHubConnectionListItemSchema,
|
||||
GcpConnectionListItemSchema,
|
||||
AzureKeyVaultConnectionListItemSchema,
|
||||
AzureAppConfigurationConnectionListItemSchema
|
||||
AzureAppConfigurationConnectionListItemSchema,
|
||||
DatabricksConnectionListItemSchema
|
||||
]);
|
||||
|
||||
export const registerAppConnectionRouter = async (server: FastifyZodProvider) => {
|
||||
|
@@ -0,0 +1,54 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { readLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||
import {
|
||||
CreateDatabricksConnectionSchema,
|
||||
SanitizedDatabricksConnectionSchema,
|
||||
UpdateDatabricksConnectionSchema
|
||||
} from "@app/services/app-connection/databricks";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
|
||||
|
||||
export const registerDatabricksConnectionRouter = async (server: FastifyZodProvider) => {
|
||||
registerAppConnectionEndpoints({
|
||||
app: AppConnection.Databricks,
|
||||
server,
|
||||
sanitizedResponseSchema: SanitizedDatabricksConnectionSchema,
|
||||
createSchema: CreateDatabricksConnectionSchema,
|
||||
updateSchema: UpdateDatabricksConnectionSchema
|
||||
});
|
||||
|
||||
// The below endpoints are not exposed and for Infisical App use
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: `/:connectionId/secret-scopes`,
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
connectionId: z.string().uuid()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
secretScopes: z.object({ name: z.string() }).array()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const { connectionId } = req.params;
|
||||
|
||||
const secretScopes = await server.services.appConnection.databricks.listSecretScopes(
|
||||
connectionId,
|
||||
req.permission
|
||||
);
|
||||
|
||||
return { secretScopes };
|
||||
}
|
||||
});
|
||||
};
|
@@ -41,7 +41,7 @@ export const registerGitHubConnectionRouter = async (server: FastifyZodProvider)
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const { connectionId } = req.params;
|
||||
|
||||
@@ -67,7 +67,7 @@ export const registerGitHubConnectionRouter = async (server: FastifyZodProvider)
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const { connectionId } = req.params;
|
||||
|
||||
@@ -97,7 +97,7 @@ export const registerGitHubConnectionRouter = async (server: FastifyZodProvider)
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const { connectionId } = req.params;
|
||||
const { repo, owner } = req.query;
|
||||
|
@@ -3,6 +3,7 @@ import { AppConnection } from "@app/services/app-connection/app-connection-enums
|
||||
import { registerAwsConnectionRouter } from "./aws-connection-router";
|
||||
import { registerAzureAppConfigurationConnectionRouter } from "./azure-app-configuration-connection-router";
|
||||
import { registerAzureKeyVaultConnectionRouter } from "./azure-key-vault-connection-router";
|
||||
import { registerDatabricksConnectionRouter } from "./databricks-connection-router";
|
||||
import { registerGcpConnectionRouter } from "./gcp-connection-router";
|
||||
import { registerGitHubConnectionRouter } from "./github-connection-router";
|
||||
|
||||
@@ -14,5 +15,6 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
|
||||
[AppConnection.GitHub]: registerGitHubConnectionRouter,
|
||||
[AppConnection.GCP]: registerGcpConnectionRouter,
|
||||
[AppConnection.AzureKeyVault]: registerAzureKeyVaultConnectionRouter,
|
||||
[AppConnection.AzureAppConfiguration]: registerAzureAppConfigurationConnectionRouter
|
||||
[AppConnection.AzureAppConfiguration]: registerAzureAppConfigurationConnectionRouter,
|
||||
[AppConnection.Databricks]: registerDatabricksConnectionRouter
|
||||
};
|
||||
|
@@ -8,13 +8,19 @@ import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
|
||||
|
||||
const IdentityKubernetesAuthResponseSchema = IdentityKubernetesAuthsSchema.omit({
|
||||
encryptedCaCert: true,
|
||||
caCertIV: true,
|
||||
caCertTag: true,
|
||||
encryptedTokenReviewerJwt: true,
|
||||
tokenReviewerJwtIV: true,
|
||||
tokenReviewerJwtTag: true
|
||||
const IdentityKubernetesAuthResponseSchema = IdentityKubernetesAuthsSchema.pick({
|
||||
id: true,
|
||||
accessTokenTTL: true,
|
||||
accessTokenMaxTTL: true,
|
||||
accessTokenNumUsesLimit: true,
|
||||
accessTokenTrustedIps: true,
|
||||
createdAt: true,
|
||||
updatedAt: true,
|
||||
identityId: true,
|
||||
kubernetesHost: true,
|
||||
allowedNamespaces: true,
|
||||
allowedNames: true,
|
||||
allowedAudience: true
|
||||
}).extend({
|
||||
caCert: z.string(),
|
||||
tokenReviewerJwt: z.string()
|
||||
|
@@ -12,10 +12,20 @@ import {
|
||||
validateOidcBoundClaimsField
|
||||
} from "@app/services/identity-oidc-auth/identity-oidc-auth-validators";
|
||||
|
||||
const IdentityOidcAuthResponseSchema = IdentityOidcAuthsSchema.omit({
|
||||
encryptedCaCert: true,
|
||||
caCertIV: true,
|
||||
caCertTag: true
|
||||
const IdentityOidcAuthResponseSchema = IdentityOidcAuthsSchema.pick({
|
||||
id: true,
|
||||
accessTokenTTL: true,
|
||||
accessTokenMaxTTL: true,
|
||||
accessTokenNumUsesLimit: true,
|
||||
accessTokenTrustedIps: true,
|
||||
identityId: true,
|
||||
oidcDiscoveryUrl: true,
|
||||
boundIssuer: true,
|
||||
boundAudiences: true,
|
||||
boundClaims: true,
|
||||
boundSubject: true,
|
||||
createdAt: true,
|
||||
updatedAt: true
|
||||
}).extend({
|
||||
caCert: z.string()
|
||||
});
|
||||
|
@@ -0,0 +1,17 @@
import {
  CreateDatabricksSyncSchema,
  DatabricksSyncSchema,
  UpdateDatabricksSyncSchema
} from "@app/services/secret-sync/databricks";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";

import { registerSyncSecretsEndpoints } from "./secret-sync-endpoints";

export const registerDatabricksSyncRouter = async (server: FastifyZodProvider) =>
  registerSyncSecretsEndpoints({
    destination: SecretSync.Databricks,
    server,
    responseSchema: DatabricksSyncSchema,
    createSchema: CreateDatabricksSyncSchema,
    updateSchema: UpdateDatabricksSyncSchema
  });
@@ -4,6 +4,7 @@ import { registerAwsParameterStoreSyncRouter } from "./aws-parameter-store-sync-
|
||||
import { registerAwsSecretsManagerSyncRouter } from "./aws-secrets-manager-sync-router";
|
||||
import { registerAzureAppConfigurationSyncRouter } from "./azure-app-configuration-sync-router";
|
||||
import { registerAzureKeyVaultSyncRouter } from "./azure-key-vault-sync-router";
|
||||
import { registerDatabricksSyncRouter } from "./databricks-sync-router";
|
||||
import { registerGcpSyncRouter } from "./gcp-sync-router";
|
||||
import { registerGitHubSyncRouter } from "./github-sync-router";
|
||||
|
||||
@@ -15,5 +16,6 @@ export const SECRET_SYNC_REGISTER_ROUTER_MAP: Record<SecretSync, (server: Fastif
|
||||
[SecretSync.GitHub]: registerGitHubSyncRouter,
|
||||
[SecretSync.GCPSecretManager]: registerGcpSyncRouter,
|
||||
[SecretSync.AzureKeyVault]: registerAzureKeyVaultSyncRouter,
|
||||
[SecretSync.AzureAppConfiguration]: registerAzureAppConfigurationSyncRouter
|
||||
[SecretSync.AzureAppConfiguration]: registerAzureAppConfigurationSyncRouter,
|
||||
[SecretSync.Databricks]: registerDatabricksSyncRouter
|
||||
};
|
||||
|
@@ -18,6 +18,7 @@ import {
|
||||
AzureAppConfigurationSyncSchema
|
||||
} from "@app/services/secret-sync/azure-app-configuration";
|
||||
import { AzureKeyVaultSyncListItemSchema, AzureKeyVaultSyncSchema } from "@app/services/secret-sync/azure-key-vault";
|
||||
import { DatabricksSyncListItemSchema, DatabricksSyncSchema } from "@app/services/secret-sync/databricks";
|
||||
import { GcpSyncListItemSchema, GcpSyncSchema } from "@app/services/secret-sync/gcp";
|
||||
import { GitHubSyncListItemSchema, GitHubSyncSchema } from "@app/services/secret-sync/github";
|
||||
|
||||
@@ -27,7 +28,8 @@ const SecretSyncSchema = z.discriminatedUnion("destination", [
|
||||
GitHubSyncSchema,
|
||||
GcpSyncSchema,
|
||||
AzureKeyVaultSyncSchema,
|
||||
AzureAppConfigurationSyncSchema
|
||||
AzureAppConfigurationSyncSchema,
|
||||
DatabricksSyncSchema
|
||||
]);
|
||||
|
||||
const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
|
||||
@@ -36,7 +38,8 @@ const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
|
||||
GitHubSyncListItemSchema,
|
||||
GcpSyncListItemSchema,
|
||||
AzureKeyVaultSyncListItemSchema,
|
||||
AzureAppConfigurationSyncListItemSchema
|
||||
AzureAppConfigurationSyncListItemSchema,
|
||||
DatabricksSyncListItemSchema
|
||||
]);
|
||||
|
||||
export const registerSecretSyncRouter = async (server: FastifyZodProvider) => {
|
||||
|
@@ -20,6 +20,7 @@ import { ActorType, AuthMode } from "@app/services/auth/auth-type";
|
||||
import { ProjectFilterType } from "@app/services/project/project-types";
|
||||
import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";
|
||||
import { SecretOperations, SecretProtectionType } from "@app/services/secret/secret-types";
|
||||
import { SecretUpdateMode } from "@app/services/secret-v2-bridge/secret-v2-bridge-types";
|
||||
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
|
||||
|
||||
import { secretRawSchema } from "../sanitizedSchemas";
|
||||
@@ -36,11 +37,12 @@ const SecretReferenceNodeTree: z.ZodType<TSecretReferenceNode> = SecretReference
|
||||
children: z.lazy(() => SecretReferenceNodeTree.array())
|
||||
});
|
||||
|
||||
const SecretNameSchema = z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.refine((el) => !el.includes(" "), "Secret name cannot contain spaces.");
|
||||
const BaseSecretNameSchema = z.string().trim().min(1);
|
||||
|
||||
const SecretNameSchema = BaseSecretNameSchema.refine(
|
||||
(el) => !el.includes(" "),
|
||||
"Secret name cannot contain spaces."
|
||||
).refine((el) => !el.includes(":"), "Secret name cannot contain colon.");
|
||||
|
||||
export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
@@ -618,7 +620,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
secretName: SecretNameSchema.describe(RAW_SECRETS.UPDATE.secretName)
|
||||
secretName: BaseSecretNameSchema.describe(RAW_SECRETS.UPDATE.secretName)
|
||||
}),
|
||||
body: z.object({
|
||||
workspaceId: z.string().trim().describe(RAW_SECRETS.UPDATE.workspaceId),
|
||||
@@ -2029,6 +2031,11 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
.default("/")
|
||||
.transform(removeTrailingSlash)
|
||||
.describe(RAW_SECRETS.UPDATE.secretPath),
|
||||
mode: z
|
||||
.nativeEnum(SecretUpdateMode)
|
||||
.optional()
|
||||
.default(SecretUpdateMode.FailOnNotFound)
|
||||
.describe(RAW_SECRETS.UPDATE.mode),
|
||||
secrets: z
|
||||
.object({
|
||||
secretKey: SecretNameSchema.describe(RAW_SECRETS.UPDATE.secretName),
|
||||
@@ -2036,6 +2043,12 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
.string()
|
||||
.transform((val) => (val.at(-1) === "\n" ? `${val.trim()}\n` : val.trim()))
|
||||
.describe(RAW_SECRETS.UPDATE.secretValue),
|
||||
secretPath: z
|
||||
.string()
|
||||
.trim()
|
||||
.transform(removeTrailingSlash)
|
||||
.optional()
|
||||
.describe(RAW_SECRETS.UPDATE.secretPath),
|
||||
secretComment: z.string().trim().optional().describe(RAW_SECRETS.UPDATE.secretComment),
|
||||
skipMultilineEncoding: z.boolean().optional().describe(RAW_SECRETS.UPDATE.skipMultilineEncoding),
|
||||
newSecretName: SecretNameSchema.optional().describe(RAW_SECRETS.UPDATE.newSecretName),
|
||||
@@ -2072,7 +2085,8 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
environment,
|
||||
projectSlug,
|
||||
projectId: req.body.workspaceId,
|
||||
secrets: inputSecrets
|
||||
secrets: inputSecrets,
|
||||
mode: req.body.mode
|
||||
});
|
||||
if (secretOperation.type === SecretProtectionType.Approval) {
|
||||
return { approval: secretOperation.approval };
|
||||
@@ -2091,15 +2105,39 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
metadata: {
|
||||
environment: req.body.environment,
|
||||
secretPath: req.body.secretPath,
|
||||
secrets: secrets.map((secret) => ({
|
||||
secretId: secret.id,
|
||||
secretKey: secret.secretKey,
|
||||
secretVersion: secret.version,
|
||||
secretMetadata: secretMetadataMap.get(secret.secretKey)
|
||||
}))
|
||||
secrets: secrets
|
||||
.filter((el) => el.version > 1)
|
||||
.map((secret) => ({
|
||||
secretId: secret.id,
|
||||
secretPath: secret.secretPath,
|
||||
secretKey: secret.secretKey,
|
||||
secretVersion: secret.version,
|
||||
secretMetadata: secretMetadataMap.get(secret.secretKey)
|
||||
}))
|
||||
}
|
||||
}
|
||||
});
|
||||
const createdSecrets = secrets.filter((el) => el.version === 1);
|
||||
if (createdSecrets.length) {
|
||||
await server.services.auditLog.createAuditLog({
|
||||
projectId: secrets[0].workspace,
|
||||
...req.auditLogInfo,
|
||||
event: {
|
||||
type: EventType.CREATE_SECRETS,
|
||||
metadata: {
|
||||
environment: req.body.environment,
|
||||
secretPath: req.body.secretPath,
|
||||
secrets: createdSecrets.map((secret) => ({
|
||||
secretId: secret.id,
|
||||
secretPath: secret.secretPath,
|
||||
secretKey: secret.secretKey,
|
||||
secretVersion: secret.version,
|
||||
secretMetadata: secretMetadataMap.get(secret.secretKey)
|
||||
}))
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
await server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.SecretUpdated,
|
||||
|
@@ -1,6 +1,7 @@
export enum AppConnection {
GitHub = "github",
AWS = "aws",
Databricks = "databricks",
GCP = "gcp",
AzureKeyVault = "azure-key-vault",
AzureAppConfiguration = "azure-app-configuration"
@@ -5,12 +5,17 @@ import { TAppConnectionServiceFactoryDep } from "@app/services/app-connection/ap
import { TAppConnection, TAppConnectionConfig } from "@app/services/app-connection/app-connection-types";
import {
AwsConnectionMethod,
getAwsAppConnectionListItem,
getAwsConnectionListItem,
validateAwsConnectionCredentials
} from "@app/services/app-connection/aws";
import {
DatabricksConnectionMethod,
getDatabricksConnectionListItem,
validateDatabricksConnectionCredentials
} from "@app/services/app-connection/databricks";
import {
GcpConnectionMethod,
getGcpAppConnectionListItem,
getGcpConnectionListItem,
validateGcpConnectionCredentials
} from "@app/services/app-connection/gcp";
import {
@@ -33,11 +38,12 @@ import {

export const listAppConnectionOptions = () => {
return [
getAwsAppConnectionListItem(),
getAwsConnectionListItem(),
getGitHubConnectionListItem(),
getGcpAppConnectionListItem(),
getGcpConnectionListItem(),
getAzureKeyVaultConnectionListItem(),
getAzureAppConfigurationConnectionListItem()
getAzureAppConfigurationConnectionListItem(),
getDatabricksConnectionListItem()
].sort((a, b) => a.name.localeCompare(b.name));
};

@@ -90,6 +96,8 @@ export const validateAppConnectionCredentials = async (
switch (app) {
case AppConnection.AWS:
return validateAwsConnectionCredentials(appConnection);
case AppConnection.Databricks:
return validateDatabricksConnectionCredentials(appConnection);
case AppConnection.GitHub:
return validateGitHubConnectionCredentials(appConnection);
case AppConnection.GCP:
@@ -118,6 +126,8 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) =>
return "Assume Role";
case GcpConnectionMethod.ServiceAccountImpersonation:
return "Service Account Impersonation";
case DatabricksConnectionMethod.ServicePrincipal:
return "Service Principal";
default:
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
throw new Error(`Unhandled App Connection Method: ${method}`);
@@ -5,5 +5,6 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
[AppConnection.GitHub]: "GitHub",
[AppConnection.GCP]: "GCP",
[AppConnection.AzureKeyVault]: "Azure Key Vault",
[AppConnection.AzureAppConfiguration]: "Azure App Configuration"
[AppConnection.AzureAppConfiguration]: "Azure App Configuration",
[AppConnection.Databricks]: "Databricks"
};
@@ -23,6 +23,8 @@ import {
TValidateAppConnectionCredentials
} from "@app/services/app-connection/app-connection-types";
import { ValidateAwsConnectionCredentialsSchema } from "@app/services/app-connection/aws";
import { ValidateDatabricksConnectionCredentialsSchema } from "@app/services/app-connection/databricks";
import { databricksConnectionService } from "@app/services/app-connection/databricks/databricks-connection-service";
import { ValidateGitHubConnectionCredentialsSchema } from "@app/services/app-connection/github";
import { githubConnectionService } from "@app/services/app-connection/github/github-connection-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
@@ -46,7 +48,8 @@ const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAp
[AppConnection.GitHub]: ValidateGitHubConnectionCredentialsSchema,
[AppConnection.GCP]: ValidateGcpConnectionCredentialsSchema,
[AppConnection.AzureKeyVault]: ValidateAzureKeyVaultConnectionCredentialsSchema,
[AppConnection.AzureAppConfiguration]: ValidateAzureAppConfigurationConnectionCredentialsSchema
[AppConnection.AzureAppConfiguration]: ValidateAzureAppConfigurationConnectionCredentialsSchema,
[AppConnection.Databricks]: ValidateDatabricksConnectionCredentialsSchema
};

export const appConnectionServiceFactory = ({
@@ -365,6 +368,7 @@ export const appConnectionServiceFactory = ({
connectAppConnectionById,
listAvailableAppConnectionsForUser,
github: githubConnectionService(connectAppConnectionById),
gcp: gcpConnectionService(connectAppConnectionById)
gcp: gcpConnectionService(connectAppConnectionById),
databricks: databricksConnectionService(connectAppConnectionById, appConnectionDAL, kmsService)
};
};
@@ -4,6 +4,12 @@ import {
TAwsConnectionInput,
TValidateAwsConnectionCredentials
} from "@app/services/app-connection/aws";
import {
TDatabricksConnection,
TDatabricksConnectionConfig,
TDatabricksConnectionInput,
TValidateDatabricksConnectionCredentials
} from "@app/services/app-connection/databricks";
import {
TGitHubConnection,
TGitHubConnectionConfig,
@@ -31,6 +37,7 @@ export type TAppConnection = { id: string } & (
| TGcpConnection
| TAzureKeyVaultConnection
| TAzureAppConfigurationConnection
| TDatabricksConnection
);

export type TAppConnectionInput = { id: string } & (
@@ -39,6 +46,7 @@ export type TAppConnectionInput = { id: string } & (
| TGcpConnectionInput
| TAzureKeyVaultConnectionInput
| TAzureAppConfigurationConnectionInput
| TDatabricksConnectionInput
);

export type TCreateAppConnectionDTO = Pick<
@@ -55,11 +63,13 @@ export type TAppConnectionConfig =
| TGitHubConnectionConfig
| TGcpConnectionConfig
| TAzureKeyVaultConnectionConfig
| TAzureAppConfigurationConnectionConfig;
| TAzureAppConfigurationConnectionConfig
| TDatabricksConnectionConfig;

export type TValidateAppConnectionCredentials =
| TValidateAwsConnectionCredentials
| TValidateGitHubConnectionCredentials
| TValidateGcpConnectionCredentials
| TValidateAzureKeyVaultConnectionCredentials
| TValidateAzureAppConfigurationConnectionCredentials;
| TValidateAzureAppConfigurationConnectionCredentials
| TValidateDatabricksConnectionCredentials;
@@ -9,7 +9,7 @@ import { AppConnection, AWSRegion } from "@app/services/app-connection/app-conne
import { AwsConnectionMethod } from "./aws-connection-enums";
import { TAwsConnectionConfig } from "./aws-connection-types";

export const getAwsAppConnectionListItem = () => {
export const getAwsConnectionListItem = () => {
const { INF_APP_CONNECTION_AWS_ACCESS_KEY_ID } = getConfig();

return {
@@ -0,0 +1,3 @@
export enum DatabricksConnectionMethod {
ServicePrincipal = "service-principal"
}
@@ -0,0 +1,92 @@
import { request } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { encryptAppConnectionCredentials } from "@app/services/app-connection/app-connection-fns";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";

import { DatabricksConnectionMethod } from "./databricks-connection-enums";
import {
TAuthorizeDatabricksConnection,
TDatabricksConnection,
TDatabricksConnectionConfig
} from "./databricks-connection-types";

export const getDatabricksConnectionListItem = () => {
return {
name: "Databricks" as const,
app: AppConnection.Databricks as const,
methods: Object.values(DatabricksConnectionMethod) as [DatabricksConnectionMethod.ServicePrincipal]
};
};

const authorizeDatabricksConnection = async ({
clientId,
clientSecret,
workspaceUrl
}: Pick<TDatabricksConnection["credentials"], "workspaceUrl" | "clientId" | "clientSecret">) => {
const { data } = await request.post<TAuthorizeDatabricksConnection>(
`${removeTrailingSlash(workspaceUrl)}/oidc/v1/token`,
"grant_type=client_credentials&scope=all-apis",
{
auth: {
username: clientId,
password: clientSecret
},
headers: {
"Content-Type": "application/x-www-form-urlencoded"
}
}
);

return { accessToken: data.access_token, expiresAt: data.expires_in * 1000 + Date.now() };
};

export const getDatabricksConnectionAccessToken = async (
{ id, orgId, credentials }: TDatabricksConnection,
appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">,
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
const { clientSecret, clientId, workspaceUrl, accessToken, expiresAt } = credentials;
// get new token if less than 10 minutes from expiry
if (Date.now() < expiresAt - 600_000) {
return accessToken;
}
const authData = await authorizeDatabricksConnection({ clientId, clientSecret, workspaceUrl });

const updatedCredentials: TDatabricksConnection["credentials"] = {
...credentials,
...authData
};

const encryptedCredentials = await encryptAppConnectionCredentials({
credentials: updatedCredentials,
orgId,
kmsService
});

await appConnectionDAL.updateById(id, { encryptedCredentials });

return authData.accessToken;
};

export const validateDatabricksConnectionCredentials = async (appConnection: TDatabricksConnectionConfig) => {
const { credentials } = appConnection;

try {
const { accessToken, expiresAt } = await authorizeDatabricksConnection(appConnection.credentials);

return {
...credentials,
accessToken,
expiresAt
};
} catch (e: unknown) {
throw new BadRequestError({
message: `Unable to validate connection - verify credentials`
});
}
};
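The new helper above implements Databricks' OAuth client-credentials (service principal) flow against the workspace's /oidc/v1/token endpoint. A rough standalone sketch of the same exchange, assuming axios and a made-up workspace URL and credentials:

import axios from "axios";

// Hypothetical values for illustration only.
const workspaceUrl = "https://example-workspace.cloud.databricks.com";
const clientId = "example-service-principal-client-id";
const clientSecret = "example-service-principal-client-secret";

async function fetchDatabricksToken() {
  // Client-credentials grant: the client ID/secret go in HTTP basic auth,
  // the body is form-encoded and requests the "all-apis" scope.
  const { data } = await axios.post<{ access_token: string; expires_in: number }>(
    `${workspaceUrl}/oidc/v1/token`,
    "grant_type=client_credentials&scope=all-apis",
    {
      auth: { username: clientId, password: clientSecret },
      headers: { "Content-Type": "application/x-www-form-urlencoded" }
    }
  );

  // Same bookkeeping as the diff: remember when the token expires so it can be refreshed.
  return { accessToken: data.access_token, expiresAt: Date.now() + data.expires_in * 1000 };
}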
@@ -0,0 +1,77 @@
import { z } from "zod";

import { AppConnections } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
BaseAppConnectionSchema,
GenericCreateAppConnectionFieldsSchema,
GenericUpdateAppConnectionFieldsSchema
} from "@app/services/app-connection/app-connection-schemas";

import { DatabricksConnectionMethod } from "./databricks-connection-enums";

export const DatabricksConnectionServicePrincipalInputCredentialsSchema = z.object({
clientId: z.string().trim().min(1, "Client ID required"),
clientSecret: z.string().trim().min(1, "Client Secret required"),
workspaceUrl: z.string().trim().url().min(1, "Workspace URL required")
});

export const DatabricksConnectionServicePrincipalOutputCredentialsSchema = z
.object({
accessToken: z.string(),
expiresAt: z.number()
})
.merge(DatabricksConnectionServicePrincipalInputCredentialsSchema);

const BaseDatabricksConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.Databricks) });

export const DatabricksConnectionSchema = z.intersection(
BaseDatabricksConnectionSchema,
z.discriminatedUnion("method", [
z.object({
method: z.literal(DatabricksConnectionMethod.ServicePrincipal),
credentials: DatabricksConnectionServicePrincipalOutputCredentialsSchema
})
])
);

export const SanitizedDatabricksConnectionSchema = z.discriminatedUnion("method", [
BaseDatabricksConnectionSchema.extend({
method: z.literal(DatabricksConnectionMethod.ServicePrincipal),
credentials: DatabricksConnectionServicePrincipalOutputCredentialsSchema.pick({
clientId: true,
workspaceUrl: true
})
})
]);

export const ValidateDatabricksConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: z
.literal(DatabricksConnectionMethod.ServicePrincipal)
.describe(AppConnections?.CREATE(AppConnection.Databricks).method),
credentials: DatabricksConnectionServicePrincipalInputCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.Databricks).credentials
)
})
]);

export const CreateDatabricksConnectionSchema = ValidateDatabricksConnectionCredentialsSchema.and(
GenericCreateAppConnectionFieldsSchema(AppConnection.Databricks)
);

export const UpdateDatabricksConnectionSchema = z
.object({
credentials: DatabricksConnectionServicePrincipalInputCredentialsSchema.optional().describe(
AppConnections.UPDATE(AppConnection.Databricks).credentials
)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.Databricks));

export const DatabricksConnectionListItemSchema = z.object({
name: z.literal("Databricks"),
app: z.literal(AppConnection.Databricks),
// the below is preferable but currently breaks with our zod to json schema parser
// methods: z.tuple([z.literal(AwsConnectionMethod.ServicePrincipal), z.literal(AwsConnectionMethod.AccessKey)]),
methods: z.nativeEnum(DatabricksConnectionMethod).array()
});
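For a sense of what the input side of these schemas accepts, here is a small sketch using only ValidateDatabricksConnectionCredentialsSchema from the file above; the IDs and workspace URL are placeholders:

const result = ValidateDatabricksConnectionCredentialsSchema.safeParse({
  method: "service-principal",
  credentials: {
    clientId: "example-client-id",
    clientSecret: "example-client-secret",
    workspaceUrl: "https://example-workspace.cloud.databricks.com"
  }
});

// Fails if the method literal is wrong, a credential field is empty, or workspaceUrl is not a valid URL.
console.log(result.success);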
@@ -0,0 +1,60 @@
import { request } from "@app/lib/config/request";
import { removeTrailingSlash } from "@app/lib/fn";
import { OrgServiceActor } from "@app/lib/types";
import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { getDatabricksConnectionAccessToken } from "@app/services/app-connection/databricks/databricks-connection-fns";
import {
TDatabricksConnection,
TDatabricksListSecretScopesResponse
} from "@app/services/app-connection/databricks/databricks-connection-types";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";

type TGetAppConnectionFunc = (
app: AppConnection,
connectionId: string,
actor: OrgServiceActor
) => Promise<TDatabricksConnection>;

const listDatabricksSecretScopes = async (
appConnection: TDatabricksConnection,
appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">,
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
const {
credentials: { workspaceUrl }
} = appConnection;

const accessToken = await getDatabricksConnectionAccessToken(appConnection, appConnectionDAL, kmsService);

const { data } = await request.get<TDatabricksListSecretScopesResponse>(
`${removeTrailingSlash(workspaceUrl)}/api/2.0/secrets/scopes/list`,
{
headers: {
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
}
}
);

// not present in response if no scopes exists
return data.scopes ?? [];
};

export const databricksConnectionService = (
getAppConnection: TGetAppConnectionFunc,
appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">,
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
const listSecretScopes = async (connectionId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.Databricks, connectionId, actor);

const secretScopes = await listDatabricksSecretScopes(appConnection, appConnectionDAL, kmsService);

return secretScopes;
};

return {
listSecretScopes
};
};
@@ -0,0 +1,36 @@
import { z } from "zod";

import { DiscriminativePick } from "@app/lib/types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

import {
CreateDatabricksConnectionSchema,
DatabricksConnectionSchema,
ValidateDatabricksConnectionCredentialsSchema
} from "./databricks-connection-schemas";

export type TDatabricksConnection = z.infer<typeof DatabricksConnectionSchema>;

export type TDatabricksConnectionInput = z.infer<typeof CreateDatabricksConnectionSchema> & {
app: AppConnection.Databricks;
};

export type TValidateDatabricksConnectionCredentials = typeof ValidateDatabricksConnectionCredentialsSchema;

export type TDatabricksConnectionConfig = DiscriminativePick<
TDatabricksConnection,
"method" | "app" | "credentials"
> & {
orgId: string;
};

export type TAuthorizeDatabricksConnection = {
access_token: string;
scope: string;
token_type: string;
expires_in: number;
};

export type TDatabricksListSecretScopesResponse = {
scopes?: { name: string; backend_type: string; keyvault_metadata: { resource_id: string; dns_name: string } }[];
};
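As a concrete reading of TDatabricksListSecretScopesResponse, the scopes-list call above would return data shaped roughly like this; all values below are invented, and the keyvault_metadata fields only carry meaningful data for Azure Key Vault-backed scopes:

const example: TDatabricksListSecretScopesResponse = {
  scopes: [
    {
      name: "infisical-sync",
      backend_type: "AZURE_KEYVAULT",
      keyvault_metadata: {
        resource_id:
          "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.KeyVault/vaults/example-vault",
        dns_name: "https://example-vault.vault.azure.net/"
      }
    }
  ]
};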
4
backend/src/services/app-connection/databricks/index.ts
Normal file
@@ -0,0 +1,4 @@
export * from "./databricks-connection-enums";
export * from "./databricks-connection-fns";
export * from "./databricks-connection-schemas";
export * from "./databricks-connection-types";
@@ -17,7 +17,7 @@ import {
TGcpConnectionConfig
} from "./gcp-connection-types";

export const getGcpAppConnectionListItem = () => {
export const getGcpConnectionListItem = () => {
return {
name: "GCP" as const,
app: AppConnection.GCP as const,
@@ -1,7 +1,7 @@
import { z } from "zod";

const twelveDigitRegex = /^\d{12}$/;
const arnRegex = /^arn:aws:iam::\d{12}:(user\/[\w+=,.@/-]+|role\/[\w+=,.@/-]+|\*)$/;
const arnRegex = /^arn:aws:iam::\d{12}:(user\/[a-zA-Z0-9_.@+*/-]+|role\/[a-zA-Z0-9_.@+*/-]+|\*)$/;

export const validateAccountIds = z
.string()
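The ARN pattern change above swaps the \w+=,.@/- character class for a-zA-Z0-9_.@+*/-, which drops = and , and newly allows * in the user/role segment. A quick sketch of the behavioural difference (the example ARNs are made up):

const oldArnRegex = /^arn:aws:iam::\d{12}:(user\/[\w+=,.@/-]+|role\/[\w+=,.@/-]+|\*)$/;
const newArnRegex = /^arn:aws:iam::\d{12}:(user\/[a-zA-Z0-9_.@+*/-]+|role\/[a-zA-Z0-9_.@+*/-]+|\*)$/;

// Wildcards in the role/user segment are now accepted...
console.log(newArnRegex.test("arn:aws:iam::123456789012:role/ci-*")); // true
console.log(oldArnRegex.test("arn:aws:iam::123456789012:role/ci-*")); // false

// ...while "=" and "," (rare but valid in IAM names) no longer match.
console.log(newArnRegex.test("arn:aws:iam::123456789012:user/team=dev")); // false
console.log(oldArnRegex.test("arn:aws:iam::123456789012:user/team=dev")); // true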
@@ -3,28 +3,21 @@ import axios, { AxiosError } from "axios";
|
||||
import https from "https";
|
||||
import jwt from "jsonwebtoken";
|
||||
|
||||
import { IdentityAuthMethod, SecretKeyEncoding, TIdentityKubernetesAuthsUpdate } from "@app/db/schemas";
|
||||
import { IdentityAuthMethod, TIdentityKubernetesAuthsUpdate } from "@app/db/schemas";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { isAtLeastAsPrivileged } from "@app/lib/casl";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import {
|
||||
decryptSymmetric,
|
||||
encryptSymmetric,
|
||||
generateAsymmetricKeyPair,
|
||||
generateSymmetricKey,
|
||||
infisicalSymmetricDecrypt,
|
||||
infisicalSymmetricEncypt
|
||||
} from "@app/lib/crypto/encryption";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
|
||||
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
|
||||
|
||||
import { ActorType, AuthTokenType } from "../auth/auth-type";
|
||||
import { TIdentityOrgDALFactory } from "../identity/identity-org-dal";
|
||||
import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal";
|
||||
import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identity-access-token-types";
|
||||
import { TKmsServiceFactory } from "../kms/kms-service";
|
||||
import { KmsDataKey } from "../kms/kms-types";
|
||||
import { TIdentityKubernetesAuthDALFactory } from "./identity-kubernetes-auth-dal";
|
||||
import { extractK8sUsername } from "./identity-kubernetes-auth-fns";
|
||||
import {
|
||||
@@ -43,9 +36,9 @@ type TIdentityKubernetesAuthServiceFactoryDep = {
|
||||
>;
|
||||
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
|
||||
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "findById">;
|
||||
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "transaction" | "create">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
|
||||
};
|
||||
|
||||
export type TIdentityKubernetesAuthServiceFactory = ReturnType<typeof identityKubernetesAuthServiceFactory>;
|
||||
@@ -54,9 +47,9 @@ export const identityKubernetesAuthServiceFactory = ({
|
||||
identityKubernetesAuthDAL,
|
||||
identityOrgMembershipDAL,
|
||||
identityAccessTokenDAL,
|
||||
orgBotDAL,
|
||||
permissionService,
|
||||
licenseService
|
||||
licenseService,
|
||||
kmsService
|
||||
}: TIdentityKubernetesAuthServiceFactoryDep) => {
|
||||
const login = async ({ identityId, jwt: serviceAccountJwt }: TLoginKubernetesAuthDTO) => {
|
||||
const identityKubernetesAuth = await identityKubernetesAuthDAL.findOne({ identityId });
|
||||
@@ -75,42 +68,21 @@ export const identityKubernetesAuthServiceFactory = ({
|
||||
});
|
||||
}
|
||||
|
||||
const orgBot = await orgBotDAL.findOne({ orgId: identityMembershipOrg.orgId });
|
||||
if (!orgBot) {
|
||||
throw new NotFoundError({
|
||||
message: `Organization bot not found for organization with ID ${identityMembershipOrg.orgId}`,
|
||||
name: "OrgBotNotFound"
|
||||
});
|
||||
}
|
||||
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: orgBot.encryptedSymmetricKey,
|
||||
iv: orgBot.symmetricKeyIV,
|
||||
tag: orgBot.symmetricKeyTag,
|
||||
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
const { decryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: identityMembershipOrg.orgId
|
||||
});
|
||||
|
||||
const { encryptedCaCert, caCertIV, caCertTag, encryptedTokenReviewerJwt, tokenReviewerJwtIV, tokenReviewerJwtTag } =
|
||||
identityKubernetesAuth;
|
||||
|
||||
let caCert = "";
|
||||
if (encryptedCaCert && caCertIV && caCertTag) {
|
||||
caCert = decryptSymmetric({
|
||||
ciphertext: encryptedCaCert,
|
||||
iv: caCertIV,
|
||||
tag: caCertTag,
|
||||
key
|
||||
});
|
||||
if (identityKubernetesAuth.encryptedKubernetesCaCertificate) {
|
||||
caCert = decryptor({ cipherTextBlob: identityKubernetesAuth.encryptedKubernetesCaCertificate }).toString();
|
||||
}
|
||||
|
||||
let tokenReviewerJwt = "";
|
||||
if (encryptedTokenReviewerJwt && tokenReviewerJwtIV && tokenReviewerJwtTag) {
|
||||
tokenReviewerJwt = decryptSymmetric({
|
||||
ciphertext: encryptedTokenReviewerJwt,
|
||||
iv: tokenReviewerJwtIV,
|
||||
tag: tokenReviewerJwtTag,
|
||||
key
|
||||
});
|
||||
if (identityKubernetesAuth.encryptedKubernetesTokenReviewerJwt) {
|
||||
tokenReviewerJwt = decryptor({
|
||||
cipherTextBlob: identityKubernetesAuth.encryptedKubernetesTokenReviewerJwt
|
||||
}).toString();
|
||||
}
|
||||
|
||||
const { data } = await axios
|
||||
@@ -297,79 +269,25 @@ export const identityKubernetesAuthServiceFactory = ({
|
||||
return extractIPDetails(accessTokenTrustedIp.ipAddress);
|
||||
});
|
||||
|
||||
const orgBot = await orgBotDAL.transaction(async (tx) => {
|
||||
const doc = await orgBotDAL.findOne({ orgId: identityMembershipOrg.orgId }, tx);
|
||||
if (doc) return doc;
|
||||
|
||||
const { privateKey, publicKey } = generateAsymmetricKeyPair();
|
||||
const key = generateSymmetricKey();
|
||||
const {
|
||||
ciphertext: encryptedPrivateKey,
|
||||
iv: privateKeyIV,
|
||||
tag: privateKeyTag,
|
||||
encoding: privateKeyKeyEncoding,
|
||||
algorithm: privateKeyAlgorithm
|
||||
} = infisicalSymmetricEncypt(privateKey);
|
||||
const {
|
||||
ciphertext: encryptedSymmetricKey,
|
||||
iv: symmetricKeyIV,
|
||||
tag: symmetricKeyTag,
|
||||
encoding: symmetricKeyKeyEncoding,
|
||||
algorithm: symmetricKeyAlgorithm
|
||||
} = infisicalSymmetricEncypt(key);
|
||||
|
||||
return orgBotDAL.create(
|
||||
{
|
||||
name: "Infisical org bot",
|
||||
publicKey,
|
||||
privateKeyIV,
|
||||
encryptedPrivateKey,
|
||||
symmetricKeyIV,
|
||||
symmetricKeyTag,
|
||||
encryptedSymmetricKey,
|
||||
symmetricKeyAlgorithm,
|
||||
orgId: identityMembershipOrg.orgId,
|
||||
privateKeyTag,
|
||||
privateKeyAlgorithm,
|
||||
privateKeyKeyEncoding,
|
||||
symmetricKeyKeyEncoding
|
||||
},
|
||||
tx
|
||||
);
|
||||
const { encryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: identityMembershipOrg.orgId
|
||||
});
|
||||
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: orgBot.encryptedSymmetricKey,
|
||||
iv: orgBot.symmetricKeyIV,
|
||||
tag: orgBot.symmetricKeyTag,
|
||||
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
});
|
||||
|
||||
const { ciphertext: encryptedCaCert, iv: caCertIV, tag: caCertTag } = encryptSymmetric(caCert, key);
|
||||
const {
|
||||
ciphertext: encryptedTokenReviewerJwt,
|
||||
iv: tokenReviewerJwtIV,
|
||||
tag: tokenReviewerJwtTag
|
||||
} = encryptSymmetric(tokenReviewerJwt, key);
|
||||
|
||||
const identityKubernetesAuth = await identityKubernetesAuthDAL.transaction(async (tx) => {
|
||||
const doc = await identityKubernetesAuthDAL.create(
|
||||
{
|
||||
identityId: identityMembershipOrg.identityId,
|
||||
kubernetesHost,
|
||||
encryptedCaCert,
|
||||
caCertIV,
|
||||
caCertTag,
|
||||
encryptedTokenReviewerJwt,
|
||||
tokenReviewerJwtIV,
|
||||
tokenReviewerJwtTag,
|
||||
allowedNamespaces,
|
||||
allowedNames,
|
||||
allowedAudience,
|
||||
accessTokenMaxTTL,
|
||||
accessTokenTTL,
|
||||
accessTokenNumUsesLimit,
|
||||
accessTokenTrustedIps: JSON.stringify(reformattedAccessTokenTrustedIps)
|
||||
accessTokenTrustedIps: JSON.stringify(reformattedAccessTokenTrustedIps),
|
||||
encryptedKubernetesTokenReviewerJwt: encryptor({ plainText: Buffer.from(tokenReviewerJwt) }).cipherTextBlob,
|
||||
encryptedKubernetesCaCertificate: encryptor({ plainText: Buffer.from(caCert) }).cipherTextBlob
|
||||
},
|
||||
tx
|
||||
);
|
||||
@@ -455,61 +373,34 @@ export const identityKubernetesAuthServiceFactory = ({
|
||||
: undefined
|
||||
};
|
||||
|
||||
const orgBot = await orgBotDAL.findOne({ orgId: identityMembershipOrg.orgId });
|
||||
if (!orgBot) {
|
||||
throw new NotFoundError({
|
||||
message: `Organization bot not found for organization with ID ${identityMembershipOrg.orgId}`,
|
||||
name: "OrgBotNotFound"
|
||||
});
|
||||
}
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: orgBot.encryptedSymmetricKey,
|
||||
iv: orgBot.symmetricKeyIV,
|
||||
tag: orgBot.symmetricKeyTag,
|
||||
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
const { encryptor, decryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: identityMembershipOrg.orgId
|
||||
});
|
||||
|
||||
if (caCert !== undefined) {
|
||||
const { ciphertext: encryptedCACert, iv: caCertIV, tag: caCertTag } = encryptSymmetric(caCert, key);
|
||||
updateQuery.encryptedCaCert = encryptedCACert;
|
||||
updateQuery.caCertIV = caCertIV;
|
||||
updateQuery.caCertTag = caCertTag;
|
||||
updateQuery.encryptedKubernetesCaCertificate = encryptor({ plainText: Buffer.from(caCert) }).cipherTextBlob;
|
||||
}
|
||||
|
||||
if (tokenReviewerJwt !== undefined) {
|
||||
const {
|
||||
ciphertext: encryptedTokenReviewerJwt,
|
||||
iv: tokenReviewerJwtIV,
|
||||
tag: tokenReviewerJwtTag
|
||||
} = encryptSymmetric(tokenReviewerJwt, key);
|
||||
updateQuery.encryptedTokenReviewerJwt = encryptedTokenReviewerJwt;
|
||||
updateQuery.tokenReviewerJwtIV = tokenReviewerJwtIV;
|
||||
updateQuery.tokenReviewerJwtTag = tokenReviewerJwtTag;
|
||||
updateQuery.encryptedKubernetesTokenReviewerJwt = encryptor({
|
||||
plainText: Buffer.from(tokenReviewerJwt)
|
||||
}).cipherTextBlob;
|
||||
}
|
||||
|
||||
const updatedKubernetesAuth = await identityKubernetesAuthDAL.updateById(identityKubernetesAuth.id, updateQuery);
|
||||
|
||||
const updatedCACert =
|
||||
updatedKubernetesAuth.encryptedCaCert && updatedKubernetesAuth.caCertIV && updatedKubernetesAuth.caCertTag
|
||||
? decryptSymmetric({
|
||||
ciphertext: updatedKubernetesAuth.encryptedCaCert,
|
||||
iv: updatedKubernetesAuth.caCertIV,
|
||||
tag: updatedKubernetesAuth.caCertTag,
|
||||
key
|
||||
})
|
||||
: "";
|
||||
const updatedCACert = updatedKubernetesAuth.encryptedKubernetesCaCertificate
|
||||
? decryptor({
|
||||
cipherTextBlob: updatedKubernetesAuth.encryptedKubernetesCaCertificate
|
||||
}).toString()
|
||||
: "";
|
||||
|
||||
const updatedTokenReviewerJwt =
|
||||
updatedKubernetesAuth.encryptedTokenReviewerJwt &&
|
||||
updatedKubernetesAuth.tokenReviewerJwtIV &&
|
||||
updatedKubernetesAuth.tokenReviewerJwtTag
|
||||
? decryptSymmetric({
|
||||
ciphertext: updatedKubernetesAuth.encryptedTokenReviewerJwt,
|
||||
iv: updatedKubernetesAuth.tokenReviewerJwtIV,
|
||||
tag: updatedKubernetesAuth.tokenReviewerJwtTag,
|
||||
key
|
||||
})
|
||||
: "";
|
||||
const updatedTokenReviewerJwt = updatedKubernetesAuth.encryptedKubernetesTokenReviewerJwt
|
||||
? decryptor({
|
||||
cipherTextBlob: updatedKubernetesAuth.encryptedKubernetesTokenReviewerJwt
|
||||
}).toString()
|
||||
: "";
|
||||
|
||||
return {
|
||||
...updatedKubernetesAuth,
|
||||
@@ -545,41 +436,21 @@ export const identityKubernetesAuthServiceFactory = ({
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Identity);
|
||||
|
||||
const orgBot = await orgBotDAL.findOne({ orgId: identityMembershipOrg.orgId });
|
||||
if (!orgBot)
|
||||
throw new NotFoundError({
|
||||
message: `Organization bot not found for organization with ID ${identityMembershipOrg.orgId}`,
|
||||
name: "OrgBotNotFound"
|
||||
});
|
||||
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: orgBot.encryptedSymmetricKey,
|
||||
iv: orgBot.symmetricKeyIV,
|
||||
tag: orgBot.symmetricKeyTag,
|
||||
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
const { decryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: identityMembershipOrg.orgId
|
||||
});
|
||||
|
||||
const { encryptedCaCert, caCertIV, caCertTag, encryptedTokenReviewerJwt, tokenReviewerJwtIV, tokenReviewerJwtTag } =
|
||||
identityKubernetesAuth;
|
||||
|
||||
let caCert = "";
|
||||
if (encryptedCaCert && caCertIV && caCertTag) {
|
||||
caCert = decryptSymmetric({
|
||||
ciphertext: encryptedCaCert,
|
||||
iv: caCertIV,
|
||||
tag: caCertTag,
|
||||
key
|
||||
});
|
||||
if (identityKubernetesAuth.encryptedKubernetesCaCertificate) {
|
||||
caCert = decryptor({ cipherTextBlob: identityKubernetesAuth.encryptedKubernetesCaCertificate }).toString();
|
||||
}
|
||||
|
||||
let tokenReviewerJwt = "";
|
||||
if (encryptedTokenReviewerJwt && tokenReviewerJwtIV && tokenReviewerJwtTag) {
|
||||
tokenReviewerJwt = decryptSymmetric({
|
||||
ciphertext: encryptedTokenReviewerJwt,
|
||||
iv: tokenReviewerJwtIV,
|
||||
tag: tokenReviewerJwtTag,
|
||||
key
|
||||
});
|
||||
if (identityKubernetesAuth.encryptedKubernetesTokenReviewerJwt) {
|
||||
tokenReviewerJwt = decryptor({
|
||||
cipherTextBlob: identityKubernetesAuth.encryptedKubernetesTokenReviewerJwt
|
||||
}).toString();
|
||||
}
|
||||
|
||||
return { ...identityKubernetesAuth, caCert, tokenReviewerJwt, orgId: identityMembershipOrg.orgId };
|
||||
|
@@ -4,20 +4,12 @@ import https from "https";
|
||||
import jwt from "jsonwebtoken";
|
||||
import { JwksClient } from "jwks-rsa";
|
||||
|
||||
import { IdentityAuthMethod, SecretKeyEncoding, TIdentityOidcAuthsUpdate } from "@app/db/schemas";
|
||||
import { IdentityAuthMethod, TIdentityOidcAuthsUpdate } from "@app/db/schemas";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { isAtLeastAsPrivileged } from "@app/lib/casl";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { generateAsymmetricKeyPair } from "@app/lib/crypto";
|
||||
import {
|
||||
decryptSymmetric,
|
||||
encryptSymmetric,
|
||||
generateSymmetricKey,
|
||||
infisicalSymmetricDecrypt,
|
||||
infisicalSymmetricEncypt
|
||||
} from "@app/lib/crypto/encryption";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
|
||||
|
||||
@@ -25,7 +17,8 @@ import { ActorType, AuthTokenType } from "../auth/auth-type";
|
||||
import { TIdentityOrgDALFactory } from "../identity/identity-org-dal";
|
||||
import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal";
|
||||
import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identity-access-token-types";
|
||||
import { TOrgBotDALFactory } from "../org/org-bot-dal";
|
||||
import { TKmsServiceFactory } from "../kms/kms-service";
|
||||
import { KmsDataKey } from "../kms/kms-types";
|
||||
import { TIdentityOidcAuthDALFactory } from "./identity-oidc-auth-dal";
|
||||
import { doesAudValueMatchOidcPolicy, doesFieldValueMatchOidcPolicy } from "./identity-oidc-auth-fns";
|
||||
import {
|
||||
@@ -42,7 +35,7 @@ type TIdentityOidcAuthServiceFactoryDep = {
|
||||
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "transaction" | "create">;
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
|
||||
};
|
||||
|
||||
export type TIdentityOidcAuthServiceFactory = ReturnType<typeof identityOidcAuthServiceFactory>;
|
||||
@@ -53,7 +46,7 @@ export const identityOidcAuthServiceFactory = ({
|
||||
permissionService,
|
||||
licenseService,
|
||||
identityAccessTokenDAL,
|
||||
orgBotDAL
|
||||
kmsService
|
||||
}: TIdentityOidcAuthServiceFactoryDep) => {
|
||||
const login = async ({ identityId, jwt: oidcJwt }: TLoginOidcAuthDTO) => {
|
||||
const identityOidcAuth = await identityOidcAuthDAL.findOne({ identityId });
|
||||
@@ -70,31 +63,14 @@ export const identityOidcAuthServiceFactory = ({
|
||||
});
|
||||
}
|
||||
|
||||
const orgBot = await orgBotDAL.findOne({ orgId: identityMembershipOrg.orgId });
|
||||
if (!orgBot) {
|
||||
throw new NotFoundError({
|
||||
message: `Organization bot not found for organization with ID '${identityMembershipOrg.orgId}'`,
|
||||
name: "OrgBotNotFound"
|
||||
});
|
||||
}
|
||||
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: orgBot.encryptedSymmetricKey,
|
||||
iv: orgBot.symmetricKeyIV,
|
||||
tag: orgBot.symmetricKeyTag,
|
||||
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
const { decryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: identityMembershipOrg.orgId
|
||||
});
|
||||
|
||||
const { encryptedCaCert, caCertIV, caCertTag } = identityOidcAuth;
|
||||
|
||||
let caCert = "";
|
||||
if (encryptedCaCert && caCertIV && caCertTag) {
|
||||
caCert = decryptSymmetric({
|
||||
ciphertext: encryptedCaCert,
|
||||
iv: caCertIV,
|
||||
tag: caCertTag,
|
||||
key
|
||||
});
|
||||
if (identityOidcAuth.encryptedCaCertificate) {
|
||||
caCert = decryptor({ cipherTextBlob: identityOidcAuth.encryptedCaCertificate }).toString();
|
||||
}
|
||||
|
||||
const requestAgent = new https.Agent({ ca: caCert, rejectUnauthorized: !!caCert });
|
||||
@@ -264,64 +240,17 @@ export const identityOidcAuthServiceFactory = ({
|
||||
return extractIPDetails(accessTokenTrustedIp.ipAddress);
|
||||
});
|
||||
|
||||
const orgBot = await orgBotDAL.transaction(async (tx) => {
|
||||
const doc = await orgBotDAL.findOne({ orgId: identityMembershipOrg.orgId }, tx);
|
||||
if (doc) return doc;
|
||||
|
||||
const { privateKey, publicKey } = generateAsymmetricKeyPair();
|
||||
const key = generateSymmetricKey();
|
||||
const {
|
||||
ciphertext: encryptedPrivateKey,
|
||||
iv: privateKeyIV,
|
||||
tag: privateKeyTag,
|
||||
encoding: privateKeyKeyEncoding,
|
||||
algorithm: privateKeyAlgorithm
|
||||
} = infisicalSymmetricEncypt(privateKey);
|
||||
const {
|
||||
ciphertext: encryptedSymmetricKey,
|
||||
iv: symmetricKeyIV,
|
||||
tag: symmetricKeyTag,
|
||||
encoding: symmetricKeyKeyEncoding,
|
||||
algorithm: symmetricKeyAlgorithm
|
||||
} = infisicalSymmetricEncypt(key);
|
||||
|
||||
return orgBotDAL.create(
|
||||
{
|
||||
name: "Infisical org bot",
|
||||
publicKey,
|
||||
privateKeyIV,
|
||||
encryptedPrivateKey,
|
||||
symmetricKeyIV,
|
||||
symmetricKeyTag,
|
||||
encryptedSymmetricKey,
|
||||
symmetricKeyAlgorithm,
|
||||
orgId: identityMembershipOrg.orgId,
|
||||
privateKeyTag,
|
||||
privateKeyAlgorithm,
|
||||
privateKeyKeyEncoding,
|
||||
symmetricKeyKeyEncoding
|
||||
},
|
||||
tx
|
||||
);
|
||||
const { encryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: identityMembershipOrg.orgId
|
||||
});
|
||||
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: orgBot.encryptedSymmetricKey,
|
||||
iv: orgBot.symmetricKeyIV,
|
||||
tag: orgBot.symmetricKeyTag,
|
||||
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
});
|
||||
|
||||
const { ciphertext: encryptedCaCert, iv: caCertIV, tag: caCertTag } = encryptSymmetric(caCert, key);
|
||||
|
||||
const identityOidcAuth = await identityOidcAuthDAL.transaction(async (tx) => {
|
||||
const doc = await identityOidcAuthDAL.create(
|
||||
{
|
||||
identityId: identityMembershipOrg.identityId,
|
||||
oidcDiscoveryUrl,
|
||||
encryptedCaCert,
|
||||
caCertIV,
|
||||
caCertTag,
|
||||
encryptedCaCertificate: encryptor({ plainText: Buffer.from(caCert) }).cipherTextBlob,
|
||||
boundIssuer,
|
||||
boundAudiences,
|
||||
boundClaims,
|
||||
@@ -415,38 +344,19 @@ export const identityOidcAuthServiceFactory = ({
|
||||
: undefined
|
||||
};
|
||||
|
||||
const orgBot = await orgBotDAL.findOne({ orgId: identityMembershipOrg.orgId });
|
||||
if (!orgBot) {
|
||||
throw new NotFoundError({
|
||||
message: `Organization bot not found for organization with ID '${identityMembershipOrg.orgId}'`,
|
||||
name: "OrgBotNotFound"
|
||||
});
|
||||
}
|
||||
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: orgBot.encryptedSymmetricKey,
|
||||
iv: orgBot.symmetricKeyIV,
|
||||
tag: orgBot.symmetricKeyTag,
|
||||
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
const { encryptor, decryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: identityMembershipOrg.orgId
|
||||
});
|
||||
|
||||
if (caCert !== undefined) {
|
||||
const { ciphertext: encryptedCACert, iv: caCertIV, tag: caCertTag } = encryptSymmetric(caCert, key);
|
||||
updateQuery.encryptedCaCert = encryptedCACert;
|
||||
updateQuery.caCertIV = caCertIV;
|
||||
updateQuery.caCertTag = caCertTag;
|
||||
updateQuery.encryptedCaCertificate = encryptor({ plainText: Buffer.from(caCert) }).cipherTextBlob;
|
||||
}
|
||||
|
||||
const updatedOidcAuth = await identityOidcAuthDAL.updateById(identityOidcAuth.id, updateQuery);
|
||||
const updatedCACert =
|
||||
updatedOidcAuth.encryptedCaCert && updatedOidcAuth.caCertIV && updatedOidcAuth.caCertTag
|
||||
? decryptSymmetric({
|
||||
ciphertext: updatedOidcAuth.encryptedCaCert,
|
||||
iv: updatedOidcAuth.caCertIV,
|
||||
tag: updatedOidcAuth.caCertTag,
|
||||
key
|
||||
})
|
||||
: "";
|
||||
const updatedCACert = updatedOidcAuth.encryptedCaCertificate
|
||||
? decryptor({ cipherTextBlob: updatedOidcAuth.encryptedCaCertificate }).toString()
|
||||
: "";
|
||||
|
||||
return {
|
||||
...updatedOidcAuth,
|
||||
@@ -476,27 +386,14 @@ export const identityOidcAuthServiceFactory = ({
|
||||
|
||||
const identityOidcAuth = await identityOidcAuthDAL.findOne({ identityId });
|
||||
|
||||
const orgBot = await orgBotDAL.findOne({ orgId: identityMembershipOrg.orgId });
|
||||
if (!orgBot) {
|
||||
throw new NotFoundError({
|
||||
message: `Organization bot not found for organization with ID ${identityMembershipOrg.orgId}`,
|
||||
name: "OrgBotNotFound"
|
||||
});
|
||||
}
|
||||
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: orgBot.encryptedSymmetricKey,
|
||||
iv: orgBot.symmetricKeyIV,
|
||||
tag: orgBot.symmetricKeyTag,
|
||||
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
const { decryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: identityMembershipOrg.orgId
|
||||
});
|
||||
|
||||
const caCert = decryptSymmetric({
|
||||
ciphertext: identityOidcAuth.encryptedCaCert,
|
||||
iv: identityOidcAuth.caCertIV,
|
||||
tag: identityOidcAuth.caCertTag,
|
||||
key
|
||||
});
|
||||
const caCert = identityOidcAuth.encryptedCaCertificate
|
||||
? decryptor({ cipherTextBlob: identityOidcAuth.encryptedCaCertificate }).toString()
|
||||
: "";
|
||||
|
||||
return { ...identityOidcAuth, orgId: identityMembershipOrg.orgId, caCert };
|
||||
};
|
||||
|
@@ -1,10 +1,24 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify } from "@app/lib/knex";
import { Knex } from "knex";

export type TKmsRootConfigDALFactory = ReturnType<typeof kmsRootConfigDALFactory>;

export const kmsRootConfigDALFactory = (db: TDbClient) => {
const kmsOrm = ormify(db, TableName.KmsServerRootConfig);
return kmsOrm;

const findById = async (id: string, tx?: Knex) => {
try {
const result = await (tx || db)(TableName.KmsServerRootConfig)
.where({ id } as never)
.first("*");
return result;
} catch (error) {
throw new DatabaseError({ error, name: "Find by id" });
}
};

return { ...kmsOrm, findById };
};
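The overridden findById above takes an optional Knex transaction handle, which, judging from the kms-service hunks later in this diff, lets the root-config lookup run inside the same transaction that holds the Postgres advisory lock during KMS start-up. A hedged usage sketch; KMS_ROOT_CONFIG_UUID and PgSqlLock.KmsRootKeyInit come from the surrounding diff, and passing tx to findById here is an assumption about the intended use of the new signature:

const kmsRootConfig = await kmsRootConfigDAL.transaction(async (tx) => {
  // Serialize start-up across instances with a Postgres advisory lock...
  await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.KmsRootKeyInit]);

  // ...then read the singleton root-config row inside that same transaction.
  return kmsRootConfigDAL.findById(KMS_ROOT_CONFIG_UUID, tx);
});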
@@ -12,8 +12,8 @@ import {
|
||||
TExternalKmsProviderFns
|
||||
} from "@app/ee/services/external-kms/providers/model";
|
||||
import { THsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
|
||||
import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { KeyStorePrefixes, PgSqlLock, TKeyStoreFactory } from "@app/keystore/keystore";
|
||||
import { TEnvConfig } from "@app/lib/config/env";
|
||||
import { randomSecureBytes } from "@app/lib/crypto";
|
||||
import { symmetricCipherService, SymmetricEncryption } from "@app/lib/crypto/cipher";
|
||||
import { generateHash } from "@app/lib/crypto/encryption";
|
||||
@@ -44,23 +44,22 @@ type TKmsServiceFactoryDep = {
|
||||
kmsDAL: TKmsKeyDALFactory;
|
||||
projectDAL: Pick<TProjectDALFactory, "findById" | "updateById" | "transaction">;
|
||||
orgDAL: Pick<TOrgDALFactory, "findById" | "updateById" | "transaction">;
|
||||
kmsRootConfigDAL: Pick<TKmsRootConfigDALFactory, "findById" | "create" | "updateById">;
|
||||
kmsRootConfigDAL: Pick<TKmsRootConfigDALFactory, "findById" | "create" | "updateById" | "transaction">;
|
||||
keyStore: Pick<TKeyStoreFactory, "acquireLock" | "waitTillReady" | "setItemWithExpiry">;
|
||||
internalKmsDAL: Pick<TInternalKmsDALFactory, "create">;
|
||||
hsmService: THsmServiceFactory;
|
||||
envConfig: Pick<TEnvConfig, "ENCRYPTION_KEY" | "ROOT_ENCRYPTION_KEY">;
|
||||
};
|
||||
|
||||
export type TKmsServiceFactory = ReturnType<typeof kmsServiceFactory>;
|
||||
|
||||
const KMS_ROOT_CREATION_WAIT_KEY = "wait_till_ready_kms_root_key";
|
||||
const KMS_ROOT_CREATION_WAIT_TIME = 10;
|
||||
|
||||
// akhilmhdh: Don't edit this value. This is measured for blob concatination in kms
|
||||
const KMS_VERSION = "v01";
|
||||
const KMS_VERSION_BLOB_LENGTH = 3;
|
||||
const KmsSanitizedSchema = KmsKeysSchema.extend({ isExternal: z.boolean() });
|
||||
|
||||
export const kmsServiceFactory = ({
|
||||
envConfig,
|
||||
kmsDAL,
|
||||
kmsRootConfigDAL,
|
||||
keyStore,
|
||||
@@ -473,7 +472,8 @@ export const kmsServiceFactory = ({
|
||||
}
|
||||
|
||||
const kmsDecryptor = await decryptWithKmsKey({
|
||||
kmsId: kmsKeyId
|
||||
kmsId: kmsKeyId,
|
||||
tx: trx
|
||||
});
|
||||
|
||||
return kmsDecryptor({
|
||||
@@ -635,10 +635,8 @@ export const kmsServiceFactory = ({
|
||||
};
|
||||
|
||||
const $getBasicEncryptionKey = () => {
|
||||
const appCfg = getConfig();
|
||||
|
||||
const encryptionKey = appCfg.ENCRYPTION_KEY || appCfg.ROOT_ENCRYPTION_KEY;
|
||||
const isBase64 = !appCfg.ENCRYPTION_KEY;
|
||||
const encryptionKey = envConfig.ENCRYPTION_KEY || envConfig.ROOT_ENCRYPTION_KEY;
|
||||
const isBase64 = !envConfig.ENCRYPTION_KEY;
|
||||
if (!encryptionKey)
|
||||
throw new Error(
|
||||
"Root encryption key not found for KMS service. Did you set the ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY environment variables?"
|
||||
@@ -874,54 +872,33 @@ export const kmsServiceFactory = ({
|
||||
return { id, name, orgId, isExternal };
|
||||
};
|
||||
|
||||
// akhilmhdh: a copy of this is made in migrations/utils/kms
|
||||
const startService = async () => {
|
||||
const lock = await keyStore.acquireLock([`KMS_ROOT_CFG_LOCK`], 3000, { retryCount: 3 }).catch(() => null);
|
||||
if (!lock) {
|
||||
await keyStore.waitTillReady({
|
||||
key: KMS_ROOT_CREATION_WAIT_KEY,
|
||||
keyCheckCb: (val) => val === "true",
|
||||
waitingCb: () => logger.info("KMS. Waiting for leader to finish creation of KMS Root Key")
|
||||
const kmsRootConfig = await kmsRootConfigDAL.transaction(async (tx) => {
|
||||
await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.KmsRootKeyInit]);
|
||||
// check if KMS root key was already generated and saved in DB
|
||||
const existingRootConfig = await kmsRootConfigDAL.findById(KMS_ROOT_CONFIG_UUID);
|
||||
if (existingRootConfig) return existingRootConfig;
|
||||
|
||||
logger.info("KMS: Generating new ROOT Key");
|
||||
const newRootKey = randomSecureBytes(32);
|
||||
const encryptedRootKey = await $encryptRootKey(newRootKey, RootKeyEncryptionStrategy.Software).catch((err) => {
|
||||
logger.error({ hsmEnabled: hsmService.isActive() }, "KMS: Failed to encrypt ROOT Key");
|
||||
throw err;
|
||||
});
|
||||
}
|
||||
|
||||
// check if KMS root key was already generated and saved in DB
|
||||
const kmsRootConfig = await kmsRootConfigDAL.findById(KMS_ROOT_CONFIG_UUID);
|
||||
|
||||
// case 1: a root key already exists in the DB
|
||||
if (kmsRootConfig) {
|
||||
if (lock) await lock.release();
|
||||
logger.info(`KMS: Encrypted ROOT Key found from DB. Decrypting. [strategy=${kmsRootConfig.encryptionStrategy}]`);
|
||||
|
||||
const decryptedRootKey = await $decryptRootKey(kmsRootConfig);
|
||||
|
||||
// set the flag so that other instance nodes can start
|
||||
await keyStore.setItemWithExpiry(KMS_ROOT_CREATION_WAIT_KEY, KMS_ROOT_CREATION_WAIT_TIME, "true");
|
||||
logger.info("KMS: Loading ROOT Key into Memory.");
|
||||
ROOT_ENCRYPTION_KEY = decryptedRootKey;
|
||||
return;
|
||||
}
|
||||
|
||||
// case 2: no config is found, so we create a new root key with basic encryption
|
||||
logger.info("KMS: Generating new ROOT Key");
|
||||
const newRootKey = randomSecureBytes(32);
|
||||
const encryptedRootKey = await $encryptRootKey(newRootKey, RootKeyEncryptionStrategy.Software).catch((err) => {
|
||||
logger.error({ hsmEnabled: hsmService.isActive() }, "KMS: Failed to encrypt ROOT Key");
|
||||
throw err;
|
||||
const newRootConfig = await kmsRootConfigDAL.create({
|
||||
// @ts-expect-error id is kept as fixed for idempotence and to avoid race condition
|
||||
id: KMS_ROOT_CONFIG_UUID,
|
||||
encryptedRootKey,
|
||||
encryptionStrategy: RootKeyEncryptionStrategy.Software
|
||||
});
|
||||
return newRootConfig;
|
||||
});
|
||||
|
||||
await kmsRootConfigDAL.create({
|
||||
// @ts-expect-error id is kept as fixed for idempotence and to avoid race condition
|
||||
id: KMS_ROOT_CONFIG_UUID,
|
||||
encryptedRootKey,
|
||||
encryptionStrategy: RootKeyEncryptionStrategy.Software
|
||||
});
|
||||
const decryptedRootKey = await $decryptRootKey(kmsRootConfig);
|
||||
|
||||
// set the flag so that other instance nodes can start
|
||||
await keyStore.setItemWithExpiry(KMS_ROOT_CREATION_WAIT_KEY, KMS_ROOT_CREATION_WAIT_TIME, "true");
|
||||
logger.info("KMS: Saved and loaded ROOT Key into memory");
|
||||
if (lock) await lock.release();
|
||||
ROOT_ENCRYPTION_KEY = newRootKey;
|
||||
logger.info("KMS: Loading ROOT Key into Memory.");
|
||||
ROOT_ENCRYPTION_KEY = decryptedRootKey;
|
||||
};
|
||||
|
||||
const updateEncryptionStrategy = async (strategy: RootKeyEncryptionStrategy) => {
|
||||
|
@@ -9,7 +9,7 @@ import {
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";

import { ActorAuthMethod } from "../auth/auth-type";
import { TIdentityProjectMembershipRoleDALFactory } from "../identity-project/identity-project-membership-role-dal";
@@ -4,8 +4,17 @@ import { ProjectType, TProjectKeys } from "@app/db/schemas";
import { TProjectPermission } from "@app/lib/types";

import { ActorAuthMethod, ActorType } from "../auth/auth-type";
import { CaStatus } from "../certificate-authority/certificate-authority-types";
import { KmsType } from "../kms/kms-types";

enum KmsType {
External = "external",
Internal = "internal"
}

enum CaStatus {
ACTIVE = "active",
DISABLED = "disabled",
PENDING_CERTIFICATE = "pending-certificate"
}

export enum ProjectFilterType {
ID = "id",
@@ -10,14 +10,14 @@ import {
} from "@app/services/secret-sync/secret-sync-schemas";

const AwsParameterStoreSyncDestinationConfigSchema = z.object({
region: z.nativeEnum(AWSRegion).describe(SecretSyncs.DESTINATION_CONFIG.AWS_PARAMETER_STORE.REGION),
region: z.nativeEnum(AWSRegion).describe(SecretSyncs.DESTINATION_CONFIG.AWS_PARAMETER_STORE.region),
path: z
.string()
.trim()
.min(1, "Parameter Store Path required")
.max(2048, "Cannot exceed 2048 characters")
.regex(/^\/([/]|(([\w-]+\/)+))?$/, 'Invalid path - must follow "/example/path/" format')
.describe(SecretSyncs.DESTINATION_CONFIG.AWS_PARAMETER_STORE.PATH)
.describe(SecretSyncs.DESTINATION_CONFIG.AWS_PARAMETER_STORE.path)
});

export const AwsParameterStoreSyncSchema = BaseSecretSyncSchema(SecretSync.AWSParameterStore).extend({
@@ -15,12 +15,12 @@ const AwsSecretsManagerSyncDestinationConfigSchema = z
z.object({
mappingBehavior: z
.literal(AwsSecretsManagerSyncMappingBehavior.OneToOne)
.describe(SecretSyncs.DESTINATION_CONFIG.AWS_SECRETS_MANAGER.MAPPING_BEHAVIOR)
.describe(SecretSyncs.DESTINATION_CONFIG.AWS_SECRETS_MANAGER.mappingBehavior)
}),
z.object({
mappingBehavior: z
.literal(AwsSecretsManagerSyncMappingBehavior.ManyToOne)
.describe(SecretSyncs.DESTINATION_CONFIG.AWS_SECRETS_MANAGER.MAPPING_BEHAVIOR),
.describe(SecretSyncs.DESTINATION_CONFIG.AWS_SECRETS_MANAGER.mappingBehavior),
secretName: z
.string()
.regex(
@@ -29,12 +29,12 @@ const AwsSecretsManagerSyncDestinationConfigSchema = z
)
.min(1, "Secret name is required")
.max(256, "Secret name cannot exceed 256 characters")
.describe(SecretSyncs.DESTINATION_CONFIG.AWS_SECRETS_MANAGER.SECRET_NAME)
.describe(SecretSyncs.DESTINATION_CONFIG.AWS_SECRETS_MANAGER.secretName)
})
])
.and(
z.object({
region: z.nativeEnum(AWSRegion).describe(SecretSyncs.DESTINATION_CONFIG.AWS_SECRETS_MANAGER.REGION)
region: z.nativeEnum(AWSRegion).describe(SecretSyncs.DESTINATION_CONFIG.AWS_SECRETS_MANAGER.region)
})
);
Some files were not shown because too many files have changed in this diff.