Compare commits
50 Commits
daniel/min...doc/add-ca

SHA1
0d4d73b61d
198b607e2e
1f5a73047d
0366df6e19
c77e0c0666
8e70731c4c
21c6700db2
619062033b
36973b1b5c
1ca578ee03
8a7f7ac9fd
049fd8e769
2c825616a6
febbd4ade5
874dc01692
b44b8bf647
258e561b84
5802638fc4
e2e7004583
7826324435
2bfc1caec5
4b9e3e44e2
b2a680ebd7
b269bb81fe
5ca7ff4f2d
ec12d57862
2d16f5f258
93912da528
ffc5e61faa
70e68f4441
a004934a28
0811192eed
1e09487572
86202caa95
285fca4ded
30fb60b441
e531390922
e88ce49463
9214c93ece
7a3bfa9e4c
7aa0e8572c
72780c61b4
c4da0305ba
4fdfdc1a39
4de8888843
da35ec90bc
ecf2cb6e51
1e5a9a6020
00e69e6632
cedb22a39a
.github/workflows/deployment-pipeline.yml (vendored, 262 changed lines)
@@ -1,262 +0,0 @@
-name: Deployment pipeline
-on: [workflow_dispatch]
-
-permissions:
-  id-token: write
-  contents: read
-
-concurrency:
-  group: "infisical-core-deployment"
-  cancel-in-progress: true
-
-jobs:
-  infisical-tests:
-    name: Integration tests
-    # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
-    uses: ./.github/workflows/run-backend-tests.yml
-
-  infisical-image:
-    name: Build
-    runs-on: ubuntu-latest
-    needs: [infisical-tests]
-    steps:
-      - name: ☁️ Checkout source
-        uses: actions/checkout@v3
-      - name: 📦 Install dependencies to test all dependencies
-        run: npm ci --only-production
-        working-directory: backend
-      - name: Save commit hashes for tag
-        id: commit
-        uses: pr-mpt/actions-commit-hash@v2
-      - name: 🔧 Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-      - name: 🐋 Login to Docker Hub
-        uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: Set up Depot CLI
-        uses: depot/setup-action@v1
-      - name: 🏗️ Build backend and push to docker hub
-        uses: depot/build-push-action@v1
-        with:
-          project: 64mmf0n610
-          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
-          push: true
-          context: .
-          file: Dockerfile.standalone-infisical
-          tags: |
-            infisical/staging_infisical:${{ steps.commit.outputs.short }}
-            infisical/staging_infisical:latest
-          platforms: linux/amd64,linux/arm64
-          build-args: |
-            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
-            INFISICAL_PLATFORM_VERSION=${{ steps.commit.outputs.short }}
-
-  gamma-deployment:
-    name: Deploy to gamma
-    runs-on: ubuntu-latest
-    needs: [infisical-image]
-    environment:
-      name: Gamma
-    steps:
-      - uses: twingate/github-action@v1
-        with:
-          # The Twingate Service Key used to connect Twingate to the proper service
-          # Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
-          #
-          # Required
-          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
-      - name: Checkout code
-        uses: actions/checkout@v2
-      - name: Setup Node.js environment
-        uses: actions/setup-node@v2
-        with:
-          node-version: "20"
-      - name: Change directory to backend and install dependencies
-        env:
-          DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
-        run: |
-          cd backend
-          npm install
-          npm run migration:latest
-      - name: Configure AWS Credentials
-        uses: aws-actions/configure-aws-credentials@v4
-        with:
-          audience: sts.amazonaws.com
-          aws-region: us-east-1
-          role-to-assume: arn:aws:iam::905418227878:role/deploy-new-ecs-img
-      - name: Save commit hashes for tag
-        id: commit
-        uses: pr-mpt/actions-commit-hash@v2
-      - name: Download task definition
-        run: |
-          aws ecs describe-task-definition --task-definition infisical-core-gamma-stage --query taskDefinition > task-definition.json
-      - name: Render Amazon ECS task definition
-        id: render-web-container
-        uses: aws-actions/amazon-ecs-render-task-definition@v1
-        with:
-          task-definition: task-definition.json
-          container-name: infisical-core
-          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
-          environment-variables: "LOG_LEVEL=info"
-      - name: Deploy to Amazon ECS service
-        uses: aws-actions/amazon-ecs-deploy-task-definition@v2
-        with:
-          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
-          service: infisical-core-gamma-stage
-          cluster: infisical-gamma-stage
-          wait-for-service-stability: true
-
-  production-us:
-    name: US production deploy
-    runs-on: ubuntu-latest
-    needs: [gamma-deployment]
-    environment:
-      name: Production
-    steps:
-      - uses: twingate/github-action@v1
-        with:
-          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
-      - name: Checkout code
-        uses: actions/checkout@v2
-      - name: Setup Node.js environment
-        uses: actions/setup-node@v2
-        with:
-          node-version: "20"
-      - name: Change directory to backend and install dependencies
-        env:
-          DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
-          AUDIT_LOGS_DB_CONNECTION_URI: ${{ secrets.AUDIT_LOGS_DB_CONNECTION_URI }}
-        run: |
-          cd backend
-          npm install
-          npm run migration:latest
-      - name: Configure AWS Credentials
-        uses: aws-actions/configure-aws-credentials@v4
-        with:
-          audience: sts.amazonaws.com
-          aws-region: us-east-1
-          role-to-assume: arn:aws:iam::381492033652:role/gha-make-prod-deployment
-      - name: Save commit hashes for tag
-        id: commit
-        uses: pr-mpt/actions-commit-hash@v2
-      - name: Download task definition
-        run: |
-          aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
-      - name: Render Amazon ECS task definition
-        id: render-web-container
-        uses: aws-actions/amazon-ecs-render-task-definition@v1
-        with:
-          task-definition: task-definition.json
-          container-name: infisical-core-platform
-          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
-          environment-variables: "LOG_LEVEL=info"
-      - name: Deploy to Amazon ECS service
-        uses: aws-actions/amazon-ecs-deploy-task-definition@v2
-        with:
-          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
-          service: infisical-core-platform
-          cluster: infisical-core-platform
-          wait-for-service-stability: true
-      - name: Post slack message
-        uses: slackapi/slack-github-action@v2.0.0
-        with:
-          webhook: ${{ secrets.SLACK_DEPLOYMENT_WEBHOOK_URL }}
-          webhook-type: incoming-webhook
-          payload: |
-            text: "*Deployment Status Update*: ${{ job.status }}"
-            blocks:
-              - type: "section"
-                text:
-                  type: "mrkdwn"
-                  text: "*Deployment Status Update*: ${{ job.status }}"
-              - type: "section"
-                fields:
-                  - type: "mrkdwn"
-                    text: "*Application:*\nInfisical Core"
-                  - type: "mrkdwn"
-                    text: "*Instance Type:*\nShared Infisical Cloud"
-              - type: "section"
-                fields:
-                  - type: "mrkdwn"
-                    text: "*Region:*\nUS"
-                  - type: "mrkdwn"
-                    text: "*Git Tag:*\n<https://github.com/Infisical/infisical/commit/${{ steps.commit.outputs.short }}>"
-
-  production-eu:
-    name: EU production deploy
-    runs-on: ubuntu-latest
-    needs: [production-us]
-    environment:
-      name: production-eu
-    steps:
-      - uses: twingate/github-action@v1
-        with:
-          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
-      - name: Configure AWS Credentials
-        uses: aws-actions/configure-aws-credentials@v4
-        with:
-          audience: sts.amazonaws.com
-          aws-region: eu-central-1
-          role-to-assume: arn:aws:iam::345594589636:role/gha-make-prod-deployment
-      - name: Checkout code
-        uses: actions/checkout@v2
-      - name: Setup Node.js environment
-        uses: actions/setup-node@v2
-        with:
-          node-version: "20"
-      - name: Change directory to backend and install dependencies
-        env:
-          DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
-        run: |
-          cd backend
-          npm install
-          npm run migration:latest
-      - name: Save commit hashes for tag
-        id: commit
-        uses: pr-mpt/actions-commit-hash@v2
-      - name: Download task definition
-        run: |
-          aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
-      - name: Render Amazon ECS task definition
-        id: render-web-container
-        uses: aws-actions/amazon-ecs-render-task-definition@v1
-        with:
-          task-definition: task-definition.json
-          container-name: infisical-core-platform
-          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
-          environment-variables: "LOG_LEVEL=info"
-      - name: Deploy to Amazon ECS service
-        uses: aws-actions/amazon-ecs-deploy-task-definition@v2
-        with:
-          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
-          service: infisical-core-platform
-          cluster: infisical-core-platform
-          wait-for-service-stability: true
-      - name: Post slack message
-        uses: slackapi/slack-github-action@v2.0.0
-        with:
-          webhook: ${{ secrets.SLACK_DEPLOYMENT_WEBHOOK_URL }}
-          webhook-type: incoming-webhook
-          payload: |
-            text: "*Deployment Status Update*: ${{ job.status }}"
-            blocks:
-              - type: "section"
-                text:
-                  type: "mrkdwn"
-                  text: "*Deployment Status Update*: ${{ job.status }}"
-              - type: "section"
-                fields:
-                  - type: "mrkdwn"
-                    text: "*Application:*\nInfisical Core"
-                  - type: "mrkdwn"
-                    text: "*Instance Type:*\nShared Infisical Cloud"
-              - type: "section"
-                fields:
-                  - type: "mrkdwn"
-                    text: "*Region:*\nEU"
-                  - type: "mrkdwn"
-                    text: "*Git Tag:*\n<https://github.com/Infisical/infisical/commit/${{ steps.commit.outputs.short }}>"
@@ -535,6 +535,107 @@ describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }])
     );
   });

+  test.each(secretTestCases)("Bulk upsert secrets in path $path", async ({ secret, path }) => {
+    const updateSharedSecRes = await testServer.inject({
+      method: "PATCH",
+      url: `/api/v3/secrets/batch/raw`,
+      headers: {
+        authorization: `Bearer ${authToken}`
+      },
+      body: {
+        workspaceId: seedData1.projectV3.id,
+        environment: seedData1.environment.slug,
+        secretPath: path,
+        mode: "upsert",
+        secrets: Array.from(Array(5)).map((_e, i) => ({
+          secretKey: `BULK-${secret.key}-${i + 1}`,
+          secretValue: "update-value",
+          secretComment: secret.comment
+        }))
+      }
+    });
+    expect(updateSharedSecRes.statusCode).toBe(200);
+    const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
+    expect(updateSharedSecPayload).toHaveProperty("secrets");
+
+    // bulk ones should exist
+    const secrets = await getSecrets(seedData1.environment.slug, path);
+    expect(secrets).toEqual(
+      expect.arrayContaining(
+        Array.from(Array(5)).map((_e, i) =>
+          expect.objectContaining({
+            secretKey: `BULK-${secret.key}-${i + 1}`,
+            secretValue: "update-value",
+            type: SecretType.Shared
+          })
+        )
+      )
+    );
+    await Promise.all(
+      Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
+    );
+  });
+
+  test("Bulk upsert secrets in path multiple paths", async () => {
+    const firstBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
+      secretKey: `BULK-KEY-${secretTestCases[0].secret.key}-${i + 1}`,
+      secretValue: "update-value",
+      secretComment: "comment",
+      secretPath: secretTestCases[0].path
+    }));
+    const secondBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
+      secretKey: `BULK-KEY-${secretTestCases[1].secret.key}-${i + 1}`,
+      secretValue: "update-value",
+      secretComment: "comment",
+      secretPath: secretTestCases[1].path
+    }));
+    const testSecrets = [...firstBatchSecrets, ...secondBatchSecrets];
+
+    const updateSharedSecRes = await testServer.inject({
+      method: "PATCH",
+      url: `/api/v3/secrets/batch/raw`,
+      headers: {
+        authorization: `Bearer ${authToken}`
+      },
+      body: {
+        workspaceId: seedData1.projectV3.id,
+        environment: seedData1.environment.slug,
+        mode: "upsert",
+        secrets: testSecrets
+      }
+    });
+    expect(updateSharedSecRes.statusCode).toBe(200);
+    const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
+    expect(updateSharedSecPayload).toHaveProperty("secrets");
+
+    // bulk ones should exist
+    const firstBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[0].path);
+    expect(firstBatchSecretsOnInfisical).toEqual(
+      expect.arrayContaining(
+        firstBatchSecrets.map((el) =>
+          expect.objectContaining({
+            secretKey: el.secretKey,
+            secretValue: "update-value",
+            type: SecretType.Shared
+          })
+        )
+      )
+    );
+    const secondBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[1].path);
+    expect(secondBatchSecretsOnInfisical).toEqual(
+      expect.arrayContaining(
+        secondBatchSecrets.map((el) =>
+          expect.objectContaining({
+            secretKey: el.secretKey,
+            secretValue: "update-value",
+            type: SecretType.Shared
+          })
+        )
+      )
+    );
+    await Promise.all(testSecrets.map((el) => deleteSecret({ path: el.secretPath, key: el.secretKey })));
+  });
+
   test.each(secretTestCases)("Bulk delete secrets in path $path", async ({ secret, path }) => {
     await Promise.all(
       Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path }))
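The new tests drive the batch endpoint end to end. A minimal client-side sketch of the same call, assuming an INFISICAL_URL base and a bearer token (both placeholders, not part of the diff):

// Placeholders: adjust for a real deployment.
const INFISICAL_URL = "https://app.infisical.com";
const authToken = process.env.INFISICAL_TOKEN;

const res = await fetch(`${INFISICAL_URL}/api/v3/secrets/batch/raw`, {
  method: "PATCH",
  headers: {
    authorization: `Bearer ${authToken}`,
    "content-type": "application/json"
  },
  body: JSON.stringify({
    workspaceId: "<project-id>",
    environment: "dev",
    mode: "upsert", // create missing secrets instead of failing the request
    secrets: [
      // a per-secret secretPath overrides the request-level default
      { secretKey: "DB_HOST", secretValue: "localhost", secretPath: "/app" },
      { secretKey: "DB_PORT", secretValue: "5432" }
    ]
  })
});
if (!res.ok) throw new Error(`Batch upsert failed: ${res.status}`);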
backend/package-lock.json (generated, 103 changed lines)
@@ -21,7 +21,7 @@
         "@fastify/etag": "^5.1.0",
         "@fastify/formbody": "^7.4.0",
         "@fastify/helmet": "^11.1.1",
-        "@fastify/multipart": "8.3.0",
+        "@fastify/multipart": "^8.3.1",
         "@fastify/passport": "^2.4.0",
         "@fastify/rate-limit": "^9.0.0",
         "@fastify/request-context": "^5.1.0",
@@ -48,8 +48,8 @@
         "@peculiar/x509": "^1.12.1",
         "@serdnam/pino-cloudwatch-transport": "^1.0.4",
         "@sindresorhus/slugify": "1.1.0",
-        "@slack/oauth": "^3.0.1",
-        "@slack/web-api": "^7.3.4",
+        "@slack/oauth": "^3.0.2",
+        "@slack/web-api": "^7.8.0",
         "@ucast/mongo2js": "^1.3.4",
         "ajv": "^8.12.0",
         "argon2": "^0.31.2",
@@ -81,7 +81,7 @@
         "mongodb": "^6.8.1",
         "ms": "^2.1.3",
         "mysql2": "^3.9.8",
-        "nanoid": "^3.3.4",
+        "nanoid": "^3.3.8",
         "nodemailer": "^6.9.9",
         "odbc": "^2.4.9",
         "openid-client": "^5.6.5",
@@ -5423,13 +5423,10 @@
       }
     },
     "node_modules/@fastify/busboy": {
-      "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz",
-      "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==",
-      "license": "MIT",
-      "engines": {
-        "node": ">=14"
-      }
+      "version": "3.1.1",
+      "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-3.1.1.tgz",
+      "integrity": "sha512-5DGmA8FTdB2XbDeEwc/5ZXBl6UbBAyBOOLlPuBnZ/N1SwdH9Ii+cOX3tBROlDgcTXxjOYnLMVoKk9+FXAw0CJw==",
+      "license": "MIT"
     },
     "node_modules/@fastify/cookie": {
       "version": "9.3.1",
@@ -5502,19 +5499,41 @@
       }
     },
     "node_modules/@fastify/multipart": {
-      "version": "8.3.0",
-      "resolved": "https://registry.npmjs.org/@fastify/multipart/-/multipart-8.3.0.tgz",
-      "integrity": "sha512-A8h80TTyqUzaMVH0Cr9Qcm6RxSkVqmhK/MVBYHYeRRSUbUYv08WecjWKSlG2aSnD4aGI841pVxAjC+G1GafUeQ==",
+      "version": "8.3.1",
+      "resolved": "https://registry.npmjs.org/@fastify/multipart/-/multipart-8.3.1.tgz",
+      "integrity": "sha512-pncbnG28S6MIskFSVRtzTKE9dK+GrKAJl0NbaQ/CG8ded80okWFsYKzSlP9haaLNQhNRDOoHqmGQNvgbiPVpWQ==",
       "license": "MIT",
       "dependencies": {
-        "@fastify/busboy": "^2.1.0",
-        "@fastify/deepmerge": "^1.0.0",
-        "@fastify/error": "^3.0.0",
+        "@fastify/busboy": "^3.0.0",
+        "@fastify/deepmerge": "^2.0.0",
+        "@fastify/error": "^4.0.0",
        "fastify-plugin": "^4.0.0",
        "secure-json-parse": "^2.4.0",
        "stream-wormhole": "^1.1.0"
      }
    },
+    "node_modules/@fastify/multipart/node_modules/@fastify/deepmerge": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/@fastify/deepmerge/-/deepmerge-2.0.1.tgz",
+      "integrity": "sha512-hx+wJQr9Ph1hY/dyzY0SxqjumMyqZDlIF6oe71dpRKDHUg7dFQfjG94qqwQ274XRjmUrwKiYadex8XplNHx3CA==",
+      "funding": [
+        {
+          "type": "github",
+          "url": "https://github.com/sponsors/fastify"
+        },
+        {
+          "type": "opencollective",
+          "url": "https://opencollective.com/fastify"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/@fastify/multipart/node_modules/@fastify/error": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/@fastify/error/-/error-4.0.0.tgz",
+      "integrity": "sha512-OO/SA8As24JtT1usTUTKgGH7uLvhfwZPwlptRi2Dp5P4KKmJI3gvsZ8MIHnNwDs4sLf/aai5LzTyl66xr7qMxA==",
+      "license": "MIT"
+    },
     "node_modules/@fastify/passport": {
       "version": "2.4.0",
       "resolved": "https://registry.npmjs.org/@fastify/passport/-/passport-2.4.0.tgz",
@@ -9049,6 +9068,7 @@
       "version": "4.0.0",
       "resolved": "https://registry.npmjs.org/@slack/logger/-/logger-4.0.0.tgz",
       "integrity": "sha512-Wz7QYfPAlG/DR+DfABddUZeNgoeY7d1J39OCR2jR+v7VBsB8ezulDK5szTnDDPDwLH5IWhLvXIHlCFZV7MSKgA==",
+      "license": "MIT",
       "dependencies": {
         "@types/node": ">=18.0.0"
       },
@@ -9058,12 +9078,13 @@
       }
     },
     "node_modules/@slack/oauth": {
-      "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/@slack/oauth/-/oauth-3.0.1.tgz",
-      "integrity": "sha512-TuR9PI6bYKX6qHC7FQI4keMnhj45TNfSNQtTU3mtnHUX4XLM2dYLvRkUNADyiLTle2qu2rsOQtCIsZJw6H0sDA==",
+      "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/@slack/oauth/-/oauth-3.0.2.tgz",
+      "integrity": "sha512-MdPS8AP9n3u/hBeqRFu+waArJLD/q+wOSZ48ktMTwxQLc6HJyaWPf8soqAyS/b0D6IlvI5TxAdyRyyv3wQ5IVw==",
       "license": "MIT",
       "dependencies": {
         "@slack/logger": "^4",
-        "@slack/web-api": "^7.3.4",
+        "@slack/web-api": "^7.8.0",
         "@types/jsonwebtoken": "^9",
         "@types/node": ">=18",
         "jsonwebtoken": "^9",
@@ -9075,24 +9096,26 @@
       }
     },
     "node_modules/@slack/types": {
-      "version": "2.12.0",
-      "resolved": "https://registry.npmjs.org/@slack/types/-/types-2.12.0.tgz",
-      "integrity": "sha512-yFewzUomYZ2BYaGJidPuIgjoYj5wqPDmi7DLSaGIkf+rCi4YZ2Z3DaiYIbz7qb/PL2NmamWjCvB7e9ArI5HkKg==",
+      "version": "2.14.0",
+      "resolved": "https://registry.npmjs.org/@slack/types/-/types-2.14.0.tgz",
+      "integrity": "sha512-n0EGm7ENQRxlXbgKSrQZL69grzg1gHLAVd+GlRVQJ1NSORo0FrApR7wql/gaKdu2n4TO83Sq/AmeUOqD60aXUA==",
       "license": "MIT",
       "engines": {
         "node": ">= 12.13.0",
         "npm": ">= 6.12.0"
       }
     },
     "node_modules/@slack/web-api": {
-      "version": "7.3.4",
-      "resolved": "https://registry.npmjs.org/@slack/web-api/-/web-api-7.3.4.tgz",
-      "integrity": "sha512-KwLK8dlz2lhr3NO7kbYQ7zgPTXPKrhq1JfQc0etJ0K8LSJhYYnf8GbVznvgDT/Uz1/pBXfFQnoXjrQIOKAdSuw==",
+      "version": "7.8.0",
+      "resolved": "https://registry.npmjs.org/@slack/web-api/-/web-api-7.8.0.tgz",
+      "integrity": "sha512-d4SdG+6UmGdzWw38a4sN3lF/nTEzsDxhzU13wm10ejOpPehtmRoqBKnPztQUfFiWbNvSb4czkWYJD4kt+5+Fuw==",
       "license": "MIT",
       "dependencies": {
         "@slack/logger": "^4.0.0",
         "@slack/types": "^2.9.0",
         "@types/node": ">=18.0.0",
         "@types/retry": "0.12.0",
-        "axios": "^1.7.4",
+        "axios": "^1.7.8",
         "eventemitter3": "^5.0.1",
         "form-data": "^4.0.0",
         "is-electron": "2.2.2",
@@ -9110,6 +9133,7 @@
       "version": "2.0.1",
       "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
       "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
+      "license": "MIT",
       "engines": {
         "node": ">=8"
       },
@@ -10526,7 +10550,8 @@
     "node_modules/@types/retry": {
       "version": "0.12.0",
       "resolved": "https://registry.npmjs.org/@types/retry/-/retry-0.12.0.tgz",
-      "integrity": "sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA=="
+      "integrity": "sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA==",
+      "license": "MIT"
     },
     "node_modules/@types/safe-regex": {
       "version": "1.1.6",
@@ -11969,9 +11994,10 @@
       }
     },
     "node_modules/axios": {
-      "version": "1.7.4",
-      "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.4.tgz",
-      "integrity": "sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw==",
+      "version": "1.7.9",
+      "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz",
+      "integrity": "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==",
       "license": "MIT",
       "dependencies": {
         "follow-redirects": "^1.15.6",
         "form-data": "^4.0.0",
@@ -13926,7 +13952,8 @@
     "node_modules/eventemitter3": {
       "version": "5.0.1",
       "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz",
-      "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA=="
+      "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==",
+      "license": "MIT"
     },
     "node_modules/events": {
       "version": "3.3.0",
@@ -15942,7 +15969,8 @@
     "node_modules/is-electron": {
       "version": "2.2.2",
       "resolved": "https://registry.npmjs.org/is-electron/-/is-electron-2.2.2.tgz",
-      "integrity": "sha512-FO/Rhvz5tuw4MCWkpMzHFKWD2LsfHzIb7i6MdPYZ/KW7AlxawyLkqdy+jPZP1WubqEADE3O4FUENlJHDfQASRg=="
+      "integrity": "sha512-FO/Rhvz5tuw4MCWkpMzHFKWD2LsfHzIb7i6MdPYZ/KW7AlxawyLkqdy+jPZP1WubqEADE3O4FUENlJHDfQASRg==",
+      "license": "MIT"
     },
     "node_modules/is-extglob": {
       "version": "2.1.1",
@@ -18182,6 +18210,7 @@
       "version": "1.0.0",
       "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz",
       "integrity": "sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==",
+      "license": "MIT",
       "engines": {
         "node": ">=4"
       }
@@ -18228,6 +18257,7 @@
       "version": "6.6.2",
       "resolved": "https://registry.npmjs.org/p-queue/-/p-queue-6.6.2.tgz",
       "integrity": "sha512-RwFpb72c/BhQLEXIZ5K2e+AhgNVmIejGlTgiB9MzZ0e93GRvqZ7uSi0dvRF7/XIXDeNkra2fNHBxTyPDGySpjQ==",
+      "license": "MIT",
      "dependencies": {
        "eventemitter3": "^4.0.4",
        "p-timeout": "^3.2.0"
@@ -18242,12 +18272,14 @@
     "node_modules/p-queue/node_modules/eventemitter3": {
       "version": "4.0.7",
       "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz",
-      "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw=="
+      "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==",
+      "license": "MIT"
     },
     "node_modules/p-retry": {
       "version": "4.6.2",
       "resolved": "https://registry.npmjs.org/p-retry/-/p-retry-4.6.2.tgz",
       "integrity": "sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ==",
+      "license": "MIT",
      "dependencies": {
        "@types/retry": "0.12.0",
        "retry": "^0.13.1"
@@ -18271,6 +18303,7 @@
       "version": "3.2.0",
       "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-3.2.0.tgz",
       "integrity": "sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==",
+      "license": "MIT",
      "dependencies": {
        "p-finally": "^1.0.0"
      },
@@ -129,7 +129,7 @@
     "@fastify/etag": "^5.1.0",
     "@fastify/formbody": "^7.4.0",
     "@fastify/helmet": "^11.1.1",
-    "@fastify/multipart": "8.3.0",
+    "@fastify/multipart": "8.3.1",
     "@fastify/passport": "^2.4.0",
     "@fastify/rate-limit": "^9.0.0",
     "@fastify/request-context": "^5.1.0",
@@ -156,8 +156,8 @@
     "@peculiar/x509": "^1.12.1",
     "@serdnam/pino-cloudwatch-transport": "^1.0.4",
     "@sindresorhus/slugify": "1.1.0",
-    "@slack/oauth": "^3.0.1",
-    "@slack/web-api": "^7.3.4",
+    "@slack/oauth": "^3.0.2",
+    "@slack/web-api": "^7.8.0",
     "@ucast/mongo2js": "^1.3.4",
     "ajv": "^8.12.0",
     "argon2": "^0.31.2",
@@ -189,7 +189,7 @@
     "mongodb": "^6.8.1",
     "ms": "^2.1.3",
     "mysql2": "^3.9.8",
-    "nanoid": "^3.3.4",
+    "nanoid": "^3.3.8",
     "nodemailer": "^6.9.9",
     "odbc": "^2.4.9",
     "openid-client": "^5.6.5",
@@ -42,7 +42,7 @@ export const getMigrationEnvConfig = () => {
     console.error("Invalid environment variables. Check the error below");
     // eslint-disable-next-line no-console
     console.error(
-      "Migration is now automatic at startup. Please remove this step from your workflow and start the application as normal."
+      "Infisical now automatically runs database migrations during boot up, so you no longer need to run them separately."
     );
     // eslint-disable-next-line no-console
     console.error(parsedEnv.error.issues);
@@ -352,6 +352,7 @@ interface CreateSecretBatchEvent {
   secrets: Array<{
     secretId: string;
     secretKey: string;
+    secretPath?: string;
     secretVersion: number;
     secretMetadata?: TSecretMetadata;
   }>;
@@ -374,8 +375,14 @@ interface UpdateSecretBatchEvent {
   type: EventType.UPDATE_SECRETS;
   metadata: {
     environment: string;
-    secretPath: string;
-    secrets: Array<{ secretId: string; secretKey: string; secretVersion: number; secretMetadata?: TSecretMetadata }>;
+    secretPath?: string;
+    secrets: Array<{
+      secretId: string;
+      secretKey: string;
+      secretVersion: number;
+      secretMetadata?: TSecretMetadata;
+      secretPath?: string;
+    }>;
   };
 }
@@ -111,7 +111,7 @@ export const groupDALFactory = (db: TDbClient) => {
     }

     if (search) {
-      // void query.andWhereRaw(`CONCAT_WS(' ', "firstName", "lastName", "username") ilike '%${search}%'`);
+      void query.andWhereRaw(`CONCAT_WS(' ', "firstName", "lastName", "username") ilike ?`, [`%${search}%`]);
     } else if (username) {
       void query.andWhere(`${TableName.Users}.username`, "ilike", `%${username}%`);
     }
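The search fix swaps string interpolation inside the raw SQL for a bound parameter, so the term travels as data instead of SQL. A minimal sketch of the difference, assuming a Knex/Postgres setup:

import knex from "knex";

const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });

const search = "o'brien"; // user-controlled input

// Unsafe: the quote in the input breaks out of the SQL string literal,
// letting an attacker inject arbitrary SQL.
// db("users").whereRaw(`"username" ilike '%${search}%'`);

// Safe: `?` is a placeholder; the driver escapes the bound value.
void db("users").whereRaw(`"username" ilike ?`, [`%${search}%`]);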
@@ -721,7 +721,7 @@ export const RAW_SECRETS = {
     secretName: "The name of the secret to update.",
     secretComment: "Update comment to the secret.",
     environment: "The slug of the environment where the secret is located.",
-    secretPath: "The path of the secret to update.",
+    mode: "Defines how the system should handle missing secrets during an update.",
+    secretPath: "The default path for secrets to update or upsert, if not provided in the secret details.",
     secretValue: "The new value of the secret.",
     skipMultilineEncoding: "Skip multiline encoding for the secret value.",
     type: "The type of the secret to update.",
@@ -1718,36 +1719,40 @@ export const SecretSyncs = {
   SYNC_OPTIONS: (destination: SecretSync) => {
     const destinationName = SECRET_SYNC_NAME_MAP[destination];
     return {
-      INITIAL_SYNC_BEHAVIOR: `Specify how Infisical should resolve the initial sync to the ${destinationName} destination.`,
-      PREPEND_PREFIX: `Optionally prepend a prefix to your secrets' keys when syncing to ${destinationName}.`,
-      APPEND_SUFFIX: `Optionally append a suffix to your secrets' keys when syncing to ${destinationName}.`
+      initialSyncBehavior: `Specify how Infisical should resolve the initial sync to the ${destinationName} destination.`
     };
   },
   DESTINATION_CONFIG: {
     AWS_PARAMETER_STORE: {
-      REGION: "The AWS region to sync secrets to.",
-      PATH: "The Parameter Store path to sync secrets to."
+      region: "The AWS region to sync secrets to.",
+      path: "The Parameter Store path to sync secrets to."
     },
     AWS_SECRETS_MANAGER: {
-      REGION: "The AWS region to sync secrets to.",
-      MAPPING_BEHAVIOR:
-        "How secrets from Infisical should be mapped to AWS Secrets Manager; one-to-one or many-to-one.",
-      SECRET_NAME: "The secret name in AWS Secrets Manager to sync to when using mapping behavior many-to-one."
+      region: "The AWS region to sync secrets to.",
+      mappingBehavior: "How secrets from Infisical should be mapped to AWS Secrets Manager; one-to-one or many-to-one.",
+      secretName: "The secret name in AWS Secrets Manager to sync to when using mapping behavior many-to-one."
     },
     GITHUB: {
-      ORG: "The name of the GitHub organization.",
-      OWNER: "The name of the GitHub account owner of the repository.",
-      REPO: "The name of the GitHub repository.",
-      ENV: "The name of the GitHub environment."
+      scope: "The GitHub scope that secrets should be synced to",
+      org: "The name of the GitHub organization.",
+      owner: "The name of the GitHub account owner of the repository.",
+      repo: "The name of the GitHub repository.",
+      env: "The name of the GitHub environment."
     },
     AZURE_KEY_VAULT: {
-      VAULT_BASE_URL:
-        "The base URL of the Azure Key Vault to sync secrets to. Example: https://example.vault.azure.net/"
+      vaultBaseUrl: "The base URL of the Azure Key Vault to sync secrets to. Example: https://example.vault.azure.net/"
     },
     AZURE_APP_CONFIGURATION: {
-      CONFIGURATION_URL:
+      configurationUrl:
         "The URL of the Azure App Configuration to sync secrets to. Example: https://example.azconfig.io/",
-      LABEL: "An optional label to assign to secrets created in Azure App Configuration."
+      label: "An optional label to assign to secrets created in Azure App Configuration."
     },
     GCP: {
       scope: "The Google project scope that secrets should be synced to.",
       projectId: "The ID of the Google project secrets should be synced to."
     },
+    DATABRICKS: {
+      scope: "The Databricks secret scope that secrets should be synced to."
+    }
   }
 };
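These description maps feed zod `.describe()` calls that generate the public API docs; renaming the keys to camelCase makes them line up one-to-one with the schema fields they document. An illustrative sketch of the pattern (the constant and schema here are reduced stand-ins, not the exact ones in the codebase):

import { z } from "zod";

// With camelCase keys, each field's description is looked up by its own name.
const DOCS = {
  region: "The AWS region to sync secrets to.",
  path: "The Parameter Store path to sync secrets to."
};

const AwsParameterStoreDestinationConfig = z.object({
  region: z.string().describe(DOCS.region),
  path: z.string().describe(DOCS.path)
});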
@@ -12,6 +12,10 @@ import {
   AzureKeyVaultConnectionListItemSchema,
   SanitizedAzureKeyVaultConnectionSchema
 } from "@app/services/app-connection/azure-key-vault";
+import {
+  DatabricksConnectionListItemSchema,
+  SanitizedDatabricksConnectionSchema
+} from "@app/services/app-connection/databricks";
 import { GcpConnectionListItemSchema, SanitizedGcpConnectionSchema } from "@app/services/app-connection/gcp";
 import { GitHubConnectionListItemSchema, SanitizedGitHubConnectionSchema } from "@app/services/app-connection/github";
 import { AuthMode } from "@app/services/auth/auth-type";
@@ -22,7 +26,8 @@ const SanitizedAppConnectionSchema = z.union([
   ...SanitizedGitHubConnectionSchema.options,
   ...SanitizedGcpConnectionSchema.options,
   ...SanitizedAzureKeyVaultConnectionSchema.options,
-  ...SanitizedAzureAppConfigurationConnectionSchema.options
+  ...SanitizedAzureAppConfigurationConnectionSchema.options,
+  ...SanitizedDatabricksConnectionSchema.options
 ]);

 const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
@@ -30,7 +35,8 @@ const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
   GitHubConnectionListItemSchema,
   GcpConnectionListItemSchema,
   AzureKeyVaultConnectionListItemSchema,
-  AzureAppConfigurationConnectionListItemSchema
+  AzureAppConfigurationConnectionListItemSchema,
+  DatabricksConnectionListItemSchema
 ]);

 export const registerAppConnectionRouter = async (server: FastifyZodProvider) => {
@@ -0,0 +1,54 @@
+import { z } from "zod";
+
+import { readLimit } from "@app/server/config/rateLimiter";
+import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
+import { AppConnection } from "@app/services/app-connection/app-connection-enums";
+import {
+  CreateDatabricksConnectionSchema,
+  SanitizedDatabricksConnectionSchema,
+  UpdateDatabricksConnectionSchema
+} from "@app/services/app-connection/databricks";
+import { AuthMode } from "@app/services/auth/auth-type";
+
+import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
+
+export const registerDatabricksConnectionRouter = async (server: FastifyZodProvider) => {
+  registerAppConnectionEndpoints({
+    app: AppConnection.Databricks,
+    server,
+    sanitizedResponseSchema: SanitizedDatabricksConnectionSchema,
+    createSchema: CreateDatabricksConnectionSchema,
+    updateSchema: UpdateDatabricksConnectionSchema
+  });
+
+  // The below endpoints are not exposed and for Infisical App use
+
+  server.route({
+    method: "GET",
+    url: `/:connectionId/secret-scopes`,
+    config: {
+      rateLimit: readLimit
+    },
+    schema: {
+      params: z.object({
+        connectionId: z.string().uuid()
+      }),
+      response: {
+        200: z.object({
+          secretScopes: z.object({ name: z.string() }).array()
+        })
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    handler: async (req) => {
+      const { connectionId } = req.params;
+
+      const secretScopes = await server.services.appConnection.databricks.listSecretScopes(
+        connectionId,
+        req.permission
+      );
+
+      return { secretScopes };
+    }
+  });
+};
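The secret-scopes route is internal, presumably backing the app's scope picker when configuring a Databricks sync. A hypothetical call; the URL prefix and token handling here are assumptions, not confirmed by the diff:

// Hypothetical: assumes the route is mounted under an app-connection prefix.
const INFISICAL_URL = "https://app.infisical.com";
const connectionId = "<connection-uuid>";
const jwt = process.env.INFISICAL_JWT;

const res = await fetch(
  `${INFISICAL_URL}/api/v1/app-connections/databricks/${connectionId}/secret-scopes`,
  { headers: { authorization: `Bearer ${jwt}` } }
);
const { secretScopes } = (await res.json()) as { secretScopes: { name: string }[] };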
@@ -41,7 +41,7 @@ export const registerGitHubConnectionRouter = async (server: FastifyZodProvider) => {
       })
     }
   },
-  onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
+  onRequest: verifyAuth([AuthMode.JWT]),
   handler: async (req) => {
     const { connectionId } = req.params;

@@ -67,7 +67,7 @@ export const registerGitHubConnectionRouter = async (server: FastifyZodProvider) => {
       })
     }
   },
-  onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
+  onRequest: verifyAuth([AuthMode.JWT]),
   handler: async (req) => {
     const { connectionId } = req.params;

@@ -97,7 +97,7 @@ export const registerGitHubConnectionRouter = async (server: FastifyZodProvider) => {
       })
     }
   },
-  onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
+  onRequest: verifyAuth([AuthMode.JWT]),
   handler: async (req) => {
     const { connectionId } = req.params;
     const { repo, owner } = req.query;
@@ -3,6 +3,7 @@ import { AppConnection } from "@app/services/app-connection/app-connection-enums";
 import { registerAwsConnectionRouter } from "./aws-connection-router";
 import { registerAzureAppConfigurationConnectionRouter } from "./azure-app-configuration-connection-router";
 import { registerAzureKeyVaultConnectionRouter } from "./azure-key-vault-connection-router";
+import { registerDatabricksConnectionRouter } from "./databricks-connection-router";
 import { registerGcpConnectionRouter } from "./gcp-connection-router";
 import { registerGitHubConnectionRouter } from "./github-connection-router";

@@ -14,5 +15,6 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server: FastifyZodProvider) => Promise<void>> = {
   [AppConnection.GitHub]: registerGitHubConnectionRouter,
   [AppConnection.GCP]: registerGcpConnectionRouter,
   [AppConnection.AzureKeyVault]: registerAzureKeyVaultConnectionRouter,
-  [AppConnection.AzureAppConfiguration]: registerAzureAppConfigurationConnectionRouter
+  [AppConnection.AzureAppConfiguration]: registerAzureAppConfigurationConnectionRouter,
+  [AppConnection.Databricks]: registerDatabricksConnectionRouter
 };
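Because these registries are typed `Record<AppConnection, ...>`, adding the Databricks enum member without wiring up a router would fail to compile. A reduced sketch of why the pattern is self-enforcing (the names here are illustrative):

enum AppConnection {
  GitHub = "github",
  Databricks = "databricks"
}

// Record<AppConnection, ...> requires a key for every enum member; omitting
// Databricks here fails with "Property 'databricks' is missing in type ...".
const ROUTER_MAP: Record<AppConnection, () => void> = {
  [AppConnection.GitHub]: () => {},
  [AppConnection.Databricks]: () => {}
};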
@@ -0,0 +1,17 @@
+import {
+  CreateDatabricksSyncSchema,
+  DatabricksSyncSchema,
+  UpdateDatabricksSyncSchema
+} from "@app/services/secret-sync/databricks";
+import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
+
+import { registerSyncSecretsEndpoints } from "./secret-sync-endpoints";
+
+export const registerDatabricksSyncRouter = async (server: FastifyZodProvider) =>
+  registerSyncSecretsEndpoints({
+    destination: SecretSync.Databricks,
+    server,
+    responseSchema: DatabricksSyncSchema,
+    createSchema: CreateDatabricksSyncSchema,
+    updateSchema: UpdateDatabricksSyncSchema
+  });
@@ -4,6 +4,7 @@ import { registerAwsParameterStoreSyncRouter } from "./aws-parameter-store-sync-router";
 import { registerAwsSecretsManagerSyncRouter } from "./aws-secrets-manager-sync-router";
 import { registerAzureAppConfigurationSyncRouter } from "./azure-app-configuration-sync-router";
 import { registerAzureKeyVaultSyncRouter } from "./azure-key-vault-sync-router";
+import { registerDatabricksSyncRouter } from "./databricks-sync-router";
 import { registerGcpSyncRouter } from "./gcp-sync-router";
 import { registerGitHubSyncRouter } from "./github-sync-router";

@@ -15,5 +16,6 @@ export const SECRET_SYNC_REGISTER_ROUTER_MAP: Record<SecretSync, (server: FastifyZodProvider) => Promise<void>> = {
   [SecretSync.GitHub]: registerGitHubSyncRouter,
   [SecretSync.GCPSecretManager]: registerGcpSyncRouter,
   [SecretSync.AzureKeyVault]: registerAzureKeyVaultSyncRouter,
-  [SecretSync.AzureAppConfiguration]: registerAzureAppConfigurationSyncRouter
+  [SecretSync.AzureAppConfiguration]: registerAzureAppConfigurationSyncRouter,
+  [SecretSync.Databricks]: registerDatabricksSyncRouter
 };
@@ -18,6 +18,7 @@ import {
   AzureAppConfigurationSyncSchema
 } from "@app/services/secret-sync/azure-app-configuration";
 import { AzureKeyVaultSyncListItemSchema, AzureKeyVaultSyncSchema } from "@app/services/secret-sync/azure-key-vault";
+import { DatabricksSyncListItemSchema, DatabricksSyncSchema } from "@app/services/secret-sync/databricks";
 import { GcpSyncListItemSchema, GcpSyncSchema } from "@app/services/secret-sync/gcp";
 import { GitHubSyncListItemSchema, GitHubSyncSchema } from "@app/services/secret-sync/github";

@@ -27,7 +28,8 @@ const SecretSyncSchema = z.discriminatedUnion("destination", [
   GitHubSyncSchema,
   GcpSyncSchema,
   AzureKeyVaultSyncSchema,
-  AzureAppConfigurationSyncSchema
+  AzureAppConfigurationSyncSchema,
+  DatabricksSyncSchema
 ]);

 const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
@@ -36,7 +38,8 @@ const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
   GitHubSyncListItemSchema,
   GcpSyncListItemSchema,
   AzureKeyVaultSyncListItemSchema,
-  AzureAppConfigurationSyncListItemSchema
+  AzureAppConfigurationSyncListItemSchema,
+  DatabricksSyncListItemSchema
 ]);

 export const registerSecretSyncRouter = async (server: FastifyZodProvider) => {
@@ -20,6 +20,7 @@ import { ActorType, AuthMode } from "@app/services/auth/auth-type";
 import { ProjectFilterType } from "@app/services/project/project-types";
 import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";
 import { SecretOperations, SecretProtectionType } from "@app/services/secret/secret-types";
+import { SecretUpdateMode } from "@app/services/secret-v2-bridge/secret-v2-bridge-types";
 import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";

 import { secretRawSchema } from "../sanitizedSchemas";
@@ -2030,6 +2031,11 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
             .default("/")
             .transform(removeTrailingSlash)
             .describe(RAW_SECRETS.UPDATE.secretPath),
+          mode: z
+            .nativeEnum(SecretUpdateMode)
+            .optional()
+            .default(SecretUpdateMode.FailOnNotFound)
+            .describe(RAW_SECRETS.UPDATE.mode),
           secrets: z
             .object({
               secretKey: SecretNameSchema.describe(RAW_SECRETS.UPDATE.secretName),
@@ -2037,6 +2043,12 @@
               .string()
               .transform((val) => (val.at(-1) === "\n" ? `${val.trim()}\n` : val.trim()))
               .describe(RAW_SECRETS.UPDATE.secretValue),
+            secretPath: z
+              .string()
+              .trim()
+              .transform(removeTrailingSlash)
+              .optional()
+              .describe(RAW_SECRETS.UPDATE.secretPath),
             secretComment: z.string().trim().optional().describe(RAW_SECRETS.UPDATE.secretComment),
             skipMultilineEncoding: z.boolean().optional().describe(RAW_SECRETS.UPDATE.skipMultilineEncoding),
             newSecretName: SecretNameSchema.optional().describe(RAW_SECRETS.UPDATE.newSecretName),
@@ -2073,7 +2085,8 @@
         environment,
         projectSlug,
         projectId: req.body.workspaceId,
-        secrets: inputSecrets
+        secrets: inputSecrets,
+        mode: req.body.mode
       });
       if (secretOperation.type === SecretProtectionType.Approval) {
         return { approval: secretOperation.approval };
@@ -2092,15 +2105,39 @@
           metadata: {
             environment: req.body.environment,
             secretPath: req.body.secretPath,
-            secrets: secrets.map((secret) => ({
-              secretId: secret.id,
-              secretKey: secret.secretKey,
-              secretVersion: secret.version,
-              secretMetadata: secretMetadataMap.get(secret.secretKey)
-            }))
+            secrets: secrets
+              .filter((el) => el.version > 1)
+              .map((secret) => ({
+                secretId: secret.id,
+                secretPath: secret.secretPath,
+                secretKey: secret.secretKey,
+                secretVersion: secret.version,
+                secretMetadata: secretMetadataMap.get(secret.secretKey)
+              }))
           }
         }
       });
+      const createdSecrets = secrets.filter((el) => el.version === 1);
+      if (createdSecrets.length) {
+        await server.services.auditLog.createAuditLog({
+          projectId: secrets[0].workspace,
+          ...req.auditLogInfo,
+          event: {
+            type: EventType.CREATE_SECRETS,
+            metadata: {
+              environment: req.body.environment,
+              secretPath: req.body.secretPath,
+              secrets: createdSecrets.map((secret) => ({
+                secretId: secret.id,
+                secretPath: secret.secretPath,
+                secretKey: secret.secretKey,
+                secretVersion: secret.version,
+                secretMetadata: secretMetadataMap.get(secret.secretKey)
+              }))
+            }
+          }
+        });
+      }

       await server.services.telemetry.sendPostHogEvents({
         event: PostHogEventTypes.SecretUpdated,
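With `mode: "upsert"` a single batch request can now both create and update secrets, and the handler distinguishes the two for auditing by version: a secret created by this request comes back as version 1. A reduced sketch of the split, with simplified types:

type UpsertedSecret = { id: string; secretKey: string; version: number };

const splitForAudit = (secrets: UpsertedSecret[]) => ({
  // version 1 means the upsert created the secret in this request
  created: secrets.filter((s) => s.version === 1),
  // version > 1 means an existing secret was bumped to a new version
  updated: secrets.filter((s) => s.version > 1)
});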
@@ -1,6 +1,7 @@
 export enum AppConnection {
   GitHub = "github",
   AWS = "aws",
+  Databricks = "databricks",
   GCP = "gcp",
   AzureKeyVault = "azure-key-vault",
   AzureAppConfiguration = "azure-app-configuration"
@@ -5,12 +5,17 @@ import { TAppConnectionServiceFactoryDep } from "@app/services/app-connection/app-connection-service";
 import { TAppConnection, TAppConnectionConfig } from "@app/services/app-connection/app-connection-types";
 import {
   AwsConnectionMethod,
-  getAwsAppConnectionListItem,
+  getAwsConnectionListItem,
   validateAwsConnectionCredentials
 } from "@app/services/app-connection/aws";
+import {
+  DatabricksConnectionMethod,
+  getDatabricksConnectionListItem,
+  validateDatabricksConnectionCredentials
+} from "@app/services/app-connection/databricks";
 import {
   GcpConnectionMethod,
-  getGcpAppConnectionListItem,
+  getGcpConnectionListItem,
   validateGcpConnectionCredentials
 } from "@app/services/app-connection/gcp";
 import {
@@ -33,11 +38,12 @@
 export const listAppConnectionOptions = () => {
   return [
-    getAwsAppConnectionListItem(),
+    getAwsConnectionListItem(),
     getGitHubConnectionListItem(),
-    getGcpAppConnectionListItem(),
+    getGcpConnectionListItem(),
     getAzureKeyVaultConnectionListItem(),
-    getAzureAppConfigurationConnectionListItem()
+    getAzureAppConfigurationConnectionListItem(),
+    getDatabricksConnectionListItem()
   ].sort((a, b) => a.name.localeCompare(b.name));
 };

@@ -90,6 +96,8 @@ export const validateAppConnectionCredentials = async (
   switch (app) {
     case AppConnection.AWS:
       return validateAwsConnectionCredentials(appConnection);
+    case AppConnection.Databricks:
+      return validateDatabricksConnectionCredentials(appConnection);
     case AppConnection.GitHub:
       return validateGitHubConnectionCredentials(appConnection);
     case AppConnection.GCP:
@@ -118,6 +126,8 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) => {
       return "Assume Role";
     case GcpConnectionMethod.ServiceAccountImpersonation:
       return "Service Account Impersonation";
+    case DatabricksConnectionMethod.ServicePrincipal:
+      return "Service Principal";
     default:
       // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
       throw new Error(`Unhandled App Connection Method: ${method}`);
@@ -5,5 +5,6 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
   [AppConnection.GitHub]: "GitHub",
   [AppConnection.GCP]: "GCP",
   [AppConnection.AzureKeyVault]: "Azure Key Vault",
-  [AppConnection.AzureAppConfiguration]: "Azure App Configuration"
+  [AppConnection.AzureAppConfiguration]: "Azure App Configuration",
+  [AppConnection.Databricks]: "Databricks"
 };
@@ -23,6 +23,8 @@ import {
   TValidateAppConnectionCredentials
 } from "@app/services/app-connection/app-connection-types";
 import { ValidateAwsConnectionCredentialsSchema } from "@app/services/app-connection/aws";
+import { ValidateDatabricksConnectionCredentialsSchema } from "@app/services/app-connection/databricks";
+import { databricksConnectionService } from "@app/services/app-connection/databricks/databricks-connection-service";
 import { ValidateGitHubConnectionCredentialsSchema } from "@app/services/app-connection/github";
 import { githubConnectionService } from "@app/services/app-connection/github/github-connection-service";
 import { TKmsServiceFactory } from "@app/services/kms/kms-service";
@@ -46,7 +48,8 @@ const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAppConnectionCredentialsSchema> = {
   [AppConnection.GitHub]: ValidateGitHubConnectionCredentialsSchema,
   [AppConnection.GCP]: ValidateGcpConnectionCredentialsSchema,
   [AppConnection.AzureKeyVault]: ValidateAzureKeyVaultConnectionCredentialsSchema,
-  [AppConnection.AzureAppConfiguration]: ValidateAzureAppConfigurationConnectionCredentialsSchema
+  [AppConnection.AzureAppConfiguration]: ValidateAzureAppConfigurationConnectionCredentialsSchema,
+  [AppConnection.Databricks]: ValidateDatabricksConnectionCredentialsSchema
 };

 export const appConnectionServiceFactory = ({
@@ -365,6 +368,7 @@
     connectAppConnectionById,
     listAvailableAppConnectionsForUser,
     github: githubConnectionService(connectAppConnectionById),
-    gcp: gcpConnectionService(connectAppConnectionById)
+    gcp: gcpConnectionService(connectAppConnectionById),
+    databricks: databricksConnectionService(connectAppConnectionById, appConnectionDAL, kmsService)
   };
 };
@@ -4,6 +4,12 @@ import {
   TAwsConnectionInput,
   TValidateAwsConnectionCredentials
 } from "@app/services/app-connection/aws";
+import {
+  TDatabricksConnection,
+  TDatabricksConnectionConfig,
+  TDatabricksConnectionInput,
+  TValidateDatabricksConnectionCredentials
+} from "@app/services/app-connection/databricks";
 import {
   TGitHubConnection,
   TGitHubConnectionConfig,
@@ -31,6 +37,7 @@ export type TAppConnection = { id: string } & (
   | TGcpConnection
   | TAzureKeyVaultConnection
   | TAzureAppConfigurationConnection
+  | TDatabricksConnection
 );

 export type TAppConnectionInput = { id: string } & (
@@ -39,6 +46,7 @@
   | TGcpConnectionInput
   | TAzureKeyVaultConnectionInput
   | TAzureAppConfigurationConnectionInput
+  | TDatabricksConnectionInput
 );

 export type TCreateAppConnectionDTO = Pick<
@@ -55,11 +63,13 @@ export type TAppConnectionConfig =
   | TGitHubConnectionConfig
   | TGcpConnectionConfig
   | TAzureKeyVaultConnectionConfig
-  | TAzureAppConfigurationConnectionConfig;
+  | TAzureAppConfigurationConnectionConfig
+  | TDatabricksConnectionConfig;

 export type TValidateAppConnectionCredentials =
   | TValidateAwsConnectionCredentials
   | TValidateGitHubConnectionCredentials
   | TValidateGcpConnectionCredentials
   | TValidateAzureKeyVaultConnectionCredentials
-  | TValidateAzureAppConfigurationConnectionCredentials;
+  | TValidateAzureAppConfigurationConnectionCredentials
+  | TValidateDatabricksConnectionCredentials;
@@ -9,7 +9,7 @@ import { AppConnection, AWSRegion } from "@app/services/app-connection/app-connection-enums";
 import { AwsConnectionMethod } from "./aws-connection-enums";
 import { TAwsConnectionConfig } from "./aws-connection-types";

-export const getAwsAppConnectionListItem = () => {
+export const getAwsConnectionListItem = () => {
   const { INF_APP_CONNECTION_AWS_ACCESS_KEY_ID } = getConfig();

   return {
@@ -0,0 +1,3 @@
+export enum DatabricksConnectionMethod {
+  ServicePrincipal = "service-principal"
+}
@@ -0,0 +1,92 @@
+import { request } from "@app/lib/config/request";
+import { BadRequestError } from "@app/lib/errors";
+import { removeTrailingSlash } from "@app/lib/fn";
+import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
+import { AppConnection } from "@app/services/app-connection/app-connection-enums";
+import { encryptAppConnectionCredentials } from "@app/services/app-connection/app-connection-fns";
+import { TKmsServiceFactory } from "@app/services/kms/kms-service";
+
+import { DatabricksConnectionMethod } from "./databricks-connection-enums";
+import {
+  TAuthorizeDatabricksConnection,
+  TDatabricksConnection,
+  TDatabricksConnectionConfig
+} from "./databricks-connection-types";
+
+export const getDatabricksConnectionListItem = () => {
+  return {
+    name: "Databricks" as const,
+    app: AppConnection.Databricks as const,
+    methods: Object.values(DatabricksConnectionMethod) as [DatabricksConnectionMethod.ServicePrincipal]
+  };
+};
+
+const authorizeDatabricksConnection = async ({
+  clientId,
+  clientSecret,
+  workspaceUrl
+}: Pick<TDatabricksConnection["credentials"], "workspaceUrl" | "clientId" | "clientSecret">) => {
+  const { data } = await request.post<TAuthorizeDatabricksConnection>(
+    `${removeTrailingSlash(workspaceUrl)}/oidc/v1/token`,
+    "grant_type=client_credentials&scope=all-apis",
+    {
+      auth: {
+        username: clientId,
+        password: clientSecret
+      },
+      headers: {
+        "Content-Type": "application/x-www-form-urlencoded"
+      }
+    }
+  );
+
+  return { accessToken: data.access_token, expiresAt: data.expires_in * 1000 + Date.now() };
+};
+
+export const getDatabricksConnectionAccessToken = async (
+  { id, orgId, credentials }: TDatabricksConnection,
+  appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">,
+  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
+) => {
+  const { clientSecret, clientId, workspaceUrl, accessToken, expiresAt } = credentials;
+
+  // reuse the cached token unless it is within 10 seconds of expiry
+  if (Date.now() < expiresAt - 10_000) {
+    return accessToken;
+  }
+
+  const authData = await authorizeDatabricksConnection({ clientId, clientSecret, workspaceUrl });
+
+  const updatedCredentials: TDatabricksConnection["credentials"] = {
+    ...credentials,
+    ...authData
+  };
+
+  const encryptedCredentials = await encryptAppConnectionCredentials({
+    credentials: updatedCredentials,
+    orgId,
+    kmsService
+  });
+
+  await appConnectionDAL.updateById(id, { encryptedCredentials });
+
+  return authData.accessToken;
+};
+
+export const validateDatabricksConnectionCredentials = async (appConnection: TDatabricksConnectionConfig) => {
+  const { credentials } = appConnection;
+
+  try {
+    const { accessToken, expiresAt } = await authorizeDatabricksConnection(appConnection.credentials);
+
+    return {
+      ...credentials,
+      accessToken,
+      expiresAt
+    };
+  } catch (e: unknown) {
+    throw new BadRequestError({
+      message: `Unable to validate connection - verify credentials`
+    });
+  }
+};
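The token helper caches the OAuth access token inside the connection's encrypted credentials and only re-authorizes near expiry. A reduced sketch of the caching rule (the 10-second margin mirrors the guard above; the refresh callback stands in for the authorize-and-persist step):

type CachedToken = { accessToken: string; expiresAt: number };

const REFRESH_MARGIN_MS = 10_000; // matches the guard in the diff

const getToken = async (cached: CachedToken, refresh: () => Promise<CachedToken>) => {
  // still comfortably valid: serve from cache, no network round trip
  if (Date.now() < cached.expiresAt - REFRESH_MARGIN_MS) return cached.accessToken;
  // about to expire: re-authorize; the caller persists the fresh credentials
  const fresh = await refresh();
  return fresh.accessToken;
};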
@@ -0,0 +1,77 @@
import { z } from "zod";

import { AppConnections } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
  BaseAppConnectionSchema,
  GenericCreateAppConnectionFieldsSchema,
  GenericUpdateAppConnectionFieldsSchema
} from "@app/services/app-connection/app-connection-schemas";

import { DatabricksConnectionMethod } from "./databricks-connection-enums";

export const DatabricksConnectionServicePrincipalInputCredentialsSchema = z.object({
  clientId: z.string().trim().min(1, "Client ID required"),
  clientSecret: z.string().trim().min(1, "Client Secret required"),
  workspaceUrl: z.string().trim().url().min(1, "Workspace URL required")
});

export const DatabricksConnectionServicePrincipalOutputCredentialsSchema = z
  .object({
    accessToken: z.string(),
    expiresAt: z.number()
  })
  .merge(DatabricksConnectionServicePrincipalInputCredentialsSchema);

const BaseDatabricksConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.Databricks) });

export const DatabricksConnectionSchema = z.intersection(
  BaseDatabricksConnectionSchema,
  z.discriminatedUnion("method", [
    z.object({
      method: z.literal(DatabricksConnectionMethod.ServicePrincipal),
      credentials: DatabricksConnectionServicePrincipalOutputCredentialsSchema
    })
  ])
);

export const SanitizedDatabricksConnectionSchema = z.discriminatedUnion("method", [
  BaseDatabricksConnectionSchema.extend({
    method: z.literal(DatabricksConnectionMethod.ServicePrincipal),
    credentials: DatabricksConnectionServicePrincipalOutputCredentialsSchema.pick({
      clientId: true,
      workspaceUrl: true
    })
  })
]);

export const ValidateDatabricksConnectionCredentialsSchema = z.discriminatedUnion("method", [
  z.object({
    method: z
      .literal(DatabricksConnectionMethod.ServicePrincipal)
      .describe(AppConnections.CREATE(AppConnection.Databricks).method),
    credentials: DatabricksConnectionServicePrincipalInputCredentialsSchema.describe(
      AppConnections.CREATE(AppConnection.Databricks).credentials
    )
  })
]);

export const CreateDatabricksConnectionSchema = ValidateDatabricksConnectionCredentialsSchema.and(
  GenericCreateAppConnectionFieldsSchema(AppConnection.Databricks)
);

export const UpdateDatabricksConnectionSchema = z
  .object({
    credentials: DatabricksConnectionServicePrincipalInputCredentialsSchema.optional().describe(
      AppConnections.UPDATE(AppConnection.Databricks).credentials
    )
  })
  .and(GenericUpdateAppConnectionFieldsSchema(AppConnection.Databricks));

export const DatabricksConnectionListItemSchema = z.object({
  name: z.literal("Databricks"),
  app: z.literal(AppConnection.Databricks),
  // the below is preferable but currently breaks with our zod to json schema parser
  // methods: z.tuple([z.literal(DatabricksConnectionMethod.ServicePrincipal)]),
  methods: z.nativeEnum(DatabricksConnectionMethod).array()
});
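Reviewer note: a quick sketch of how the input credentials schema above behaves (values are illustrative, and this snippet is not part of the PR):

// Hypothetical usage of the schema defined above.
import { DatabricksConnectionServicePrincipalInputCredentialsSchema } from "./databricks-connection-schemas";

// Parses: all fields present and workspaceUrl is a valid URL.
const ok = DatabricksConnectionServicePrincipalInputCredentialsSchema.safeParse({
  clientId: "service-principal-client-id",
  clientSecret: "service-principal-secret",
  workspaceUrl: "https://adb-1234567890123456.7.azuredatabricks.net"
});
console.log(ok.success); // true

// Fails with "Client ID required" because the trimmed value is empty.
const bad = DatabricksConnectionServicePrincipalInputCredentialsSchema.safeParse({
  clientId: "   ",
  clientSecret: "service-principal-secret",
  workspaceUrl: "https://example.cloud.databricks.com"
});
console.log(bad.success); // false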
@@ -0,0 +1,60 @@
import { request } from "@app/lib/config/request";
import { removeTrailingSlash } from "@app/lib/fn";
import { OrgServiceActor } from "@app/lib/types";
import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { getDatabricksConnectionAccessToken } from "@app/services/app-connection/databricks/databricks-connection-fns";
import {
  TDatabricksConnection,
  TDatabricksListSecretScopesResponse
} from "@app/services/app-connection/databricks/databricks-connection-types";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";

type TGetAppConnectionFunc = (
  app: AppConnection,
  connectionId: string,
  actor: OrgServiceActor
) => Promise<TDatabricksConnection>;

const listDatabricksSecretScopes = async (
  appConnection: TDatabricksConnection,
  appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">,
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
  const {
    credentials: { workspaceUrl }
  } = appConnection;

  const accessToken = await getDatabricksConnectionAccessToken(appConnection, appConnectionDAL, kmsService);

  const { data } = await request.get<TDatabricksListSecretScopesResponse>(
    `${removeTrailingSlash(workspaceUrl)}/api/2.0/secrets/scopes/list`,
    {
      headers: {
        Authorization: `Bearer ${accessToken}`,
        "Accept-Encoding": "application/json"
      }
    }
  );

  // not present in response if no scopes exist
  return data.scopes ?? [];
};

export const databricksConnectionService = (
  getAppConnection: TGetAppConnectionFunc,
  appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">,
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
  const listSecretScopes = async (connectionId: string, actor: OrgServiceActor) => {
    const appConnection = await getAppConnection(AppConnection.Databricks, connectionId, actor);

    const secretScopes = await listDatabricksSecretScopes(appConnection, appConnectionDAL, kmsService);

    return secretScopes;
  };

  return {
    listSecretScopes
  };
};
@@ -0,0 +1,36 @@
import { z } from "zod";

import { DiscriminativePick } from "@app/lib/types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

import {
  CreateDatabricksConnectionSchema,
  DatabricksConnectionSchema,
  ValidateDatabricksConnectionCredentialsSchema
} from "./databricks-connection-schemas";

export type TDatabricksConnection = z.infer<typeof DatabricksConnectionSchema>;

export type TDatabricksConnectionInput = z.infer<typeof CreateDatabricksConnectionSchema> & {
  app: AppConnection.Databricks;
};

export type TValidateDatabricksConnectionCredentials = typeof ValidateDatabricksConnectionCredentialsSchema;

export type TDatabricksConnectionConfig = DiscriminativePick<
  TDatabricksConnection,
  "method" | "app" | "credentials"
> & {
  orgId: string;
};

export type TAuthorizeDatabricksConnection = {
  access_token: string;
  scope: string;
  token_type: string;
  expires_in: number;
};

export type TDatabricksListSecretScopesResponse = {
  scopes?: { name: string; backend_type: string; keyvault_metadata: { resource_id: string; dns_name: string } }[];
};
4
backend/src/services/app-connection/databricks/index.ts
Normal file
@@ -0,0 +1,4 @@
export * from "./databricks-connection-enums";
export * from "./databricks-connection-fns";
export * from "./databricks-connection-schemas";
export * from "./databricks-connection-types";
@@ -17,7 +17,7 @@ import {
  TGcpConnectionConfig
} from "./gcp-connection-types";

export const getGcpAppConnectionListItem = () => {
export const getGcpConnectionListItem = () => {
  return {
    name: "GCP" as const,
    app: AppConnection.GCP as const,

@@ -10,14 +10,14 @@ import {
} from "@app/services/secret-sync/secret-sync-schemas";

const AwsParameterStoreSyncDestinationConfigSchema = z.object({
  region: z.nativeEnum(AWSRegion).describe(SecretSyncs.DESTINATION_CONFIG.AWS_PARAMETER_STORE.REGION),
  region: z.nativeEnum(AWSRegion).describe(SecretSyncs.DESTINATION_CONFIG.AWS_PARAMETER_STORE.region),
  path: z
    .string()
    .trim()
    .min(1, "Parameter Store Path required")
    .max(2048, "Cannot exceed 2048 characters")
    .regex(/^\/([/]|(([\w-]+\/)+))?$/, 'Invalid path - must follow "/example/path/" format')
    .describe(SecretSyncs.DESTINATION_CONFIG.AWS_PARAMETER_STORE.PATH)
    .describe(SecretSyncs.DESTINATION_CONFIG.AWS_PARAMETER_STORE.path)
});

export const AwsParameterStoreSyncSchema = BaseSecretSyncSchema(SecretSync.AWSParameterStore).extend({

@@ -15,12 +15,12 @@ const AwsSecretsManagerSyncDestinationConfigSchema = z
    z.object({
      mappingBehavior: z
        .literal(AwsSecretsManagerSyncMappingBehavior.OneToOne)
        .describe(SecretSyncs.DESTINATION_CONFIG.AWS_SECRETS_MANAGER.MAPPING_BEHAVIOR)
        .describe(SecretSyncs.DESTINATION_CONFIG.AWS_SECRETS_MANAGER.mappingBehavior)
    }),
    z.object({
      mappingBehavior: z
        .literal(AwsSecretsManagerSyncMappingBehavior.ManyToOne)
        .describe(SecretSyncs.DESTINATION_CONFIG.AWS_SECRETS_MANAGER.MAPPING_BEHAVIOR),
        .describe(SecretSyncs.DESTINATION_CONFIG.AWS_SECRETS_MANAGER.mappingBehavior),
      secretName: z
        .string()
        .regex(
@@ -29,12 +29,12 @@ const AwsSecretsManagerSyncDestinationConfigSchema = z
        )
        .min(1, "Secret name is required")
        .max(256, "Secret name cannot exceed 256 characters")
        .describe(SecretSyncs.DESTINATION_CONFIG.AWS_SECRETS_MANAGER.SECRET_NAME)
        .describe(SecretSyncs.DESTINATION_CONFIG.AWS_SECRETS_MANAGER.secretName)
    })
  ])
  .and(
    z.object({
      region: z.nativeEnum(AWSRegion).describe(SecretSyncs.DESTINATION_CONFIG.AWS_SECRETS_MANAGER.REGION)
      region: z.nativeEnum(AWSRegion).describe(SecretSyncs.DESTINATION_CONFIG.AWS_SECRETS_MANAGER.region)
    })
  );

@@ -11,7 +11,7 @@ import { TSecretMap } from "@app/services/secret-sync/secret-sync-types";

import { TAzureAppConfigurationSyncWithCredentials } from "./azure-app-configuration-sync-types";

type TAzureAppConfigurationSecretSyncFactoryDeps = {
type TAzureAppConfigurationSyncFactoryDeps = {
  appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update">;
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
@@ -22,10 +22,10 @@ interface AzureAppConfigKeyValue {
  label?: string;
}

export const azureAppConfigurationSecretSyncFactory = ({
export const azureAppConfigurationSyncFactory = ({
  kmsService,
  appConnectionDAL
}: TAzureAppConfigurationSecretSyncFactoryDeps) => {
}: TAzureAppConfigurationSyncFactoryDeps) => {
  const $getCompleteAzureAppConfigValues = async (accessToken: string, baseURL: string, url: string) => {
    let result: AzureAppConfigKeyValue[] = [];
    let currentUrl = url;

@@ -14,8 +14,8 @@ const AzureAppConfigurationSyncDestinationConfigSchema = z.object({
  configurationUrl: z
    .string()
    .min(1, "App Configuration URL required")
    .describe(SecretSyncs.DESTINATION_CONFIG.AZURE_APP_CONFIGURATION.CONFIGURATION_URL),
  label: z.string().optional().describe(SecretSyncs.DESTINATION_CONFIG.AZURE_APP_CONFIGURATION.LABEL)
    .describe(SecretSyncs.DESTINATION_CONFIG.AZURE_APP_CONFIGURATION.configurationUrl),
  label: z.string().optional().describe(SecretSyncs.DESTINATION_CONFIG.AZURE_APP_CONFIGURATION.label)
});

const AzureAppConfigurationSyncOptionsConfig: TSyncOptionsConfig = { canImportSecrets: true };

@@ -10,15 +10,12 @@ import { TSecretMap } from "@app/services/secret-sync/secret-sync-types";
import { SecretSyncError } from "../secret-sync-errors";
import { GetAzureKeyVaultSecret, TAzureKeyVaultSyncWithCredentials } from "./azure-key-vault-sync-types";

type TAzureKeyVaultSecretSyncFactoryDeps = {
type TAzureKeyVaultSyncFactoryDeps = {
  appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update">;
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};

export const azureKeyVaultSecretSyncFactory = ({
  kmsService,
  appConnectionDAL
}: TAzureKeyVaultSecretSyncFactoryDeps) => {
export const azureKeyVaultSyncFactory = ({ kmsService, appConnectionDAL }: TAzureKeyVaultSyncFactoryDeps) => {
  const $getAzureKeyVaultSecrets = async (accessToken: string, vaultBaseUrl: string) => {
    const paginateAzureKeyVaultSecrets = async () => {
      let result: GetAzureKeyVaultSecret[] = [];

@@ -15,7 +15,7 @@ const AzureKeyVaultSyncDestinationConfigSchema = z.object({
    .string()
    .url("Invalid vault base URL format")
    .min(1, "Vault base URL required")
    .describe(SecretSyncs.DESTINATION_CONFIG.AZURE_KEY_VAULT.VAULT_BASE_URL)
    .describe(SecretSyncs.DESTINATION_CONFIG.AZURE_KEY_VAULT.vaultBaseUrl)
});

const AzureKeyVaultSyncOptionsConfig: TSyncOptionsConfig = { canImportSecrets: true };

@@ -0,0 +1,10 @@
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import { TSecretSyncListItem } from "@app/services/secret-sync/secret-sync-types";

export const DATABRICKS_SYNC_LIST_OPTION: TSecretSyncListItem = {
  name: "Databricks",
  destination: SecretSync.Databricks,
  connection: AppConnection.Databricks,
  canImportSecrets: false
};
@@ -0,0 +1,164 @@
import { request } from "@app/lib/config/request";
import { removeTrailingSlash } from "@app/lib/fn";
import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
import { getDatabricksConnectionAccessToken } from "@app/services/app-connection/databricks";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import {
  TDatabricksDeleteSecret,
  TDatabricksListSecretKeys,
  TDatabricksListSecretKeysResponse,
  TDatabricksPutSecret,
  TDatabricksSyncWithCredentials
} from "@app/services/secret-sync/databricks/databricks-sync-types";
import { SecretSyncError } from "@app/services/secret-sync/secret-sync-errors";
import { SECRET_SYNC_NAME_MAP } from "@app/services/secret-sync/secret-sync-maps";

import { TSecretMap } from "../secret-sync-types";

type TDatabricksSecretSyncFactoryDeps = {
  appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">;
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};

const DATABRICKS_SCOPE_SECRET_LIMIT = 1000;

const listDatabricksSecrets = async ({ workspaceUrl, scope, accessToken }: TDatabricksListSecretKeys) => {
  const { data } = await request.get<TDatabricksListSecretKeysResponse>(
    `${removeTrailingSlash(workspaceUrl)}/api/2.0/secrets/list`,
    {
      params: {
        scope
      },
      headers: {
        Authorization: `Bearer ${accessToken}`,
        "Accept-Encoding": "application/json"
      }
    }
  );

  // not present in response if no secrets exist in scope
  return data.secrets ?? [];
};

const putDatabricksSecret = async ({ workspaceUrl, scope, key, value, accessToken }: TDatabricksPutSecret) =>
  request.post(
    `${removeTrailingSlash(workspaceUrl)}/api/2.0/secrets/put`,
    {
      scope,
      key,
      string_value: value
    },
    {
      headers: {
        Authorization: `Bearer ${accessToken}`,
        "Accept-Encoding": "application/json"
      }
    }
  );

const deleteDatabricksSecrets = async ({ workspaceUrl, scope, key, accessToken }: TDatabricksDeleteSecret) =>
  request.post(
    `${removeTrailingSlash(workspaceUrl)}/api/2.0/secrets/delete`,
    {
      scope,
      key
    },
    {
      headers: {
        Authorization: `Bearer ${accessToken}`,
        "Accept-Encoding": "application/json"
      }
    }
  );

export const databricksSyncFactory = ({ kmsService, appConnectionDAL }: TDatabricksSecretSyncFactoryDeps) => {
  const syncSecrets = async (secretSync: TDatabricksSyncWithCredentials, secretMap: TSecretMap) => {
    if (Object.keys(secretMap).length > DATABRICKS_SCOPE_SECRET_LIMIT) {
      throw new Error(
        `Databricks does not support storing more than ${DATABRICKS_SCOPE_SECRET_LIMIT} secrets per scope.`
      );
    }

    const {
      destinationConfig: { scope },
      connection
    } = secretSync;

    const { workspaceUrl } = connection.credentials;

    const accessToken = await getDatabricksConnectionAccessToken(connection, appConnectionDAL, kmsService);

    for await (const entry of Object.entries(secretMap)) {
      const [key, { value }] = entry;

      try {
        await putDatabricksSecret({
          key,
          value,
          workspaceUrl,
          scope,
          accessToken
        });
      } catch (error) {
        throw new SecretSyncError({
          error,
          secretKey: key
        });
      }
    }

    const databricksSecretKeys = await listDatabricksSecrets({
      workspaceUrl,
      scope,
      accessToken
    });

    for await (const secret of databricksSecretKeys) {
      if (!(secret.key in secretMap)) {
        await deleteDatabricksSecrets({
          key: secret.key,
          workspaceUrl,
          scope,
          accessToken
        });
      }
    }
  };

  const removeSecrets = async (secretSync: TDatabricksSyncWithCredentials, secretMap: TSecretMap) => {
    const {
      destinationConfig: { scope },
      connection
    } = secretSync;

    const { workspaceUrl } = connection.credentials;

    const accessToken = await getDatabricksConnectionAccessToken(connection, appConnectionDAL, kmsService);

    const databricksSecretKeys = await listDatabricksSecrets({
      workspaceUrl,
      scope,
      accessToken
    });

    for await (const secret of databricksSecretKeys) {
      if (secret.key in secretMap) {
        await deleteDatabricksSecrets({
          key: secret.key,
          workspaceUrl,
          scope,
          accessToken
        });
      }
    }
  };

  const getSecrets = async (secretSync: TDatabricksSyncWithCredentials) => {
    throw new Error(`${SECRET_SYNC_NAME_MAP[secretSync.destination]} does not support importing secrets.`);
  };

  return {
    syncSecrets,
    removeSecrets,
    getSecrets
  };
};
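Reviewer note: `syncSecrets` above follows a write-then-prune reconciliation: every entry in the source map is upserted, then any remote key absent from the map is deleted. The same pattern in isolation (a sketch with illustrative names, not part of the PR):

// Generic sketch of the reconciliation loop used by syncSecrets.
const reconcile = async (
  desired: Record<string, string>,
  listRemoteKeys: () => Promise<string[]>,
  put: (key: string, value: string) => Promise<void>,
  del: (key: string) => Promise<void>
): Promise<void> => {
  // 1) upsert everything that should exist
  for (const [key, value] of Object.entries(desired)) {
    await put(key, value);
  }
  // 2) prune anything remote that is no longer desired
  for (const key of await listRemoteKeys()) {
    if (!(key in desired)) await del(key);
  }
};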
@@ -0,0 +1,43 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { SecretSyncs } from "@app/lib/api-docs";
|
||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
|
||||
import {
|
||||
BaseSecretSyncSchema,
|
||||
GenericCreateSecretSyncFieldsSchema,
|
||||
GenericUpdateSecretSyncFieldsSchema
|
||||
} from "@app/services/secret-sync/secret-sync-schemas";
|
||||
import { TSyncOptionsConfig } from "@app/services/secret-sync/secret-sync-types";
|
||||
|
||||
const DatabricksSyncDestinationConfigSchema = z.object({
|
||||
scope: z.string().trim().min(1, "Databricks scope required").describe(SecretSyncs.DESTINATION_CONFIG.DATABRICKS.scope)
|
||||
});
|
||||
|
||||
const DatabricksSyncOptionsConfig: TSyncOptionsConfig = { canImportSecrets: false };
|
||||
|
||||
export const DatabricksSyncSchema = BaseSecretSyncSchema(SecretSync.Databricks, DatabricksSyncOptionsConfig).extend({
|
||||
destination: z.literal(SecretSync.Databricks),
|
||||
destinationConfig: DatabricksSyncDestinationConfigSchema
|
||||
});
|
||||
|
||||
export const CreateDatabricksSyncSchema = GenericCreateSecretSyncFieldsSchema(
|
||||
SecretSync.Databricks,
|
||||
DatabricksSyncOptionsConfig
|
||||
).extend({
|
||||
destinationConfig: DatabricksSyncDestinationConfigSchema
|
||||
});
|
||||
|
||||
export const UpdateDatabricksSyncSchema = GenericUpdateSecretSyncFieldsSchema(
|
||||
SecretSync.Databricks,
|
||||
DatabricksSyncOptionsConfig
|
||||
).extend({
|
||||
destinationConfig: DatabricksSyncDestinationConfigSchema.optional()
|
||||
});
|
||||
|
||||
export const DatabricksSyncListItemSchema = z.object({
|
||||
name: z.literal("Databricks"),
|
||||
connection: z.literal(AppConnection.Databricks),
|
||||
destination: z.literal(SecretSync.Databricks),
|
||||
canImportSecrets: z.literal(false)
|
||||
});
|
@@ -0,0 +1,40 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { TDatabricksConnection } from "@app/services/app-connection/databricks";
|
||||
|
||||
import {
|
||||
CreateDatabricksSyncSchema,
|
||||
DatabricksSyncListItemSchema,
|
||||
DatabricksSyncSchema
|
||||
} from "./databricks-sync-schemas";
|
||||
|
||||
export type TDatabricksSync = z.infer<typeof DatabricksSyncSchema>;
|
||||
|
||||
export type TDatabricksSyncInput = z.infer<typeof CreateDatabricksSyncSchema>;
|
||||
|
||||
export type TDatabricksSyncListItem = z.infer<typeof DatabricksSyncListItemSchema>;
|
||||
|
||||
export type TDatabricksSyncWithCredentials = TDatabricksSync & {
|
||||
connection: TDatabricksConnection;
|
||||
};
|
||||
|
||||
export type TDatabricksListSecretKeysResponse = {
|
||||
secrets?: { key: string; last_updated_timestamp: number }[];
|
||||
};
|
||||
|
||||
type TBaseDatabricksSecretRequest = {
|
||||
scope: string;
|
||||
workspaceUrl: string;
|
||||
accessToken: string;
|
||||
};
|
||||
|
||||
export type TDatabricksListSecretKeys = TBaseDatabricksSecretRequest;
|
||||
|
||||
export type TDatabricksPutSecret = {
|
||||
key: string;
|
||||
value?: string;
|
||||
} & TBaseDatabricksSecretRequest;
|
||||
|
||||
export type TDatabricksDeleteSecret = {
|
||||
key: string;
|
||||
} & TBaseDatabricksSecretRequest;
|
4
backend/src/services/secret-sync/databricks/index.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
export * from "./databricks-sync-constants";
|
||||
export * from "./databricks-sync-fns";
|
||||
export * from "./databricks-sync-schemas";
|
||||
export * from "./databricks-sync-types";
|
@@ -1,5 +1,6 @@
|
||||
import z from "zod";
|
||||
|
||||
import { SecretSyncs } from "@app/lib/api-docs";
|
||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||
import {
|
||||
BaseSecretSyncSchema,
|
||||
@@ -14,8 +15,8 @@ import { GcpSyncScope } from "./gcp-sync-enums";
|
||||
const GcpSyncOptionsConfig: TSyncOptionsConfig = { canImportSecrets: true };
|
||||
|
||||
const GcpSyncDestinationConfigSchema = z.object({
|
||||
scope: z.literal(GcpSyncScope.Global),
|
||||
projectId: z.string().min(1, "Project ID is required")
|
||||
scope: z.literal(GcpSyncScope.Global).describe(SecretSyncs.DESTINATION_CONFIG.GCP.scope),
|
||||
projectId: z.string().min(1, "Project ID is required").describe(SecretSyncs.DESTINATION_CONFIG.GCP.projectId)
|
||||
});
|
||||
|
||||
export const GcpSyncSchema = BaseSecretSyncSchema(SecretSync.GCPSecretManager, GcpSyncOptionsConfig).extend({
|
||||
|
@@ -14,21 +14,21 @@ import { TSyncOptionsConfig } from "@app/services/secret-sync/secret-sync-types"
|
||||
const GitHubSyncDestinationConfigSchema = z
|
||||
.discriminatedUnion("scope", [
|
||||
z.object({
|
||||
scope: z.literal(GitHubSyncScope.Organization),
|
||||
org: z.string().min(1, "Organization name required").describe(SecretSyncs.DESTINATION_CONFIG.GITHUB.ORG),
|
||||
scope: z.literal(GitHubSyncScope.Organization).describe(SecretSyncs.DESTINATION_CONFIG.GITHUB.scope),
|
||||
org: z.string().min(1, "Organization name required").describe(SecretSyncs.DESTINATION_CONFIG.GITHUB.org),
|
||||
visibility: z.nativeEnum(GitHubSyncVisibility),
|
||||
selectedRepositoryIds: z.number().array().optional()
|
||||
}),
|
||||
z.object({
|
||||
scope: z.literal(GitHubSyncScope.Repository),
|
||||
owner: z.string().min(1, "Repository owner name required").describe(SecretSyncs.DESTINATION_CONFIG.GITHUB.OWNER),
|
||||
repo: z.string().min(1, "Repository name required").describe(SecretSyncs.DESTINATION_CONFIG.GITHUB.REPO)
|
||||
scope: z.literal(GitHubSyncScope.Repository).describe(SecretSyncs.DESTINATION_CONFIG.GITHUB.scope),
|
||||
owner: z.string().min(1, "Repository owner name required").describe(SecretSyncs.DESTINATION_CONFIG.GITHUB.owner),
|
||||
repo: z.string().min(1, "Repository name required").describe(SecretSyncs.DESTINATION_CONFIG.GITHUB.repo)
|
||||
}),
|
||||
z.object({
|
||||
scope: z.literal(GitHubSyncScope.RepositoryEnvironment),
|
||||
owner: z.string().min(1, "Repository owner name required").describe(SecretSyncs.DESTINATION_CONFIG.GITHUB.OWNER),
|
||||
repo: z.string().min(1, "Repository name required").describe(SecretSyncs.DESTINATION_CONFIG.GITHUB.REPO),
|
||||
env: z.string().min(1, "Environment name required").describe(SecretSyncs.DESTINATION_CONFIG.GITHUB.ENV)
|
||||
scope: z.literal(GitHubSyncScope.RepositoryEnvironment).describe(SecretSyncs.DESTINATION_CONFIG.GITHUB.scope),
|
||||
owner: z.string().min(1, "Repository owner name required").describe(SecretSyncs.DESTINATION_CONFIG.GITHUB.owner),
|
||||
repo: z.string().min(1, "Repository name required").describe(SecretSyncs.DESTINATION_CONFIG.GITHUB.repo),
|
||||
env: z.string().min(1, "Environment name required").describe(SecretSyncs.DESTINATION_CONFIG.GITHUB.env)
|
||||
})
|
||||
])
|
||||
.superRefine((options, ctx) => {
|
||||
|
@@ -4,7 +4,8 @@ export enum SecretSync {
|
||||
GitHub = "github",
|
||||
GCPSecretManager = "gcp-secret-manager",
|
||||
AzureKeyVault = "azure-key-vault",
|
||||
AzureAppConfiguration = "azure-app-configuration"
|
||||
AzureAppConfiguration = "azure-app-configuration",
|
||||
Databricks = "databricks"
|
||||
}
|
||||
|
||||
export enum SecretSyncInitialSyncBehavior {
|
||||
|
@@ -8,6 +8,7 @@ import {
|
||||
AWS_SECRETS_MANAGER_SYNC_LIST_OPTION,
|
||||
AwsSecretsManagerSyncFns
|
||||
} from "@app/services/secret-sync/aws-secrets-manager";
|
||||
import { DATABRICKS_SYNC_LIST_OPTION, databricksSyncFactory } from "@app/services/secret-sync/databricks";
|
||||
import { GITHUB_SYNC_LIST_OPTION, GithubSyncFns } from "@app/services/secret-sync/github";
|
||||
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
|
||||
import { SecretSyncError } from "@app/services/secret-sync/secret-sync-errors";
|
||||
@@ -19,11 +20,8 @@ import {
|
||||
|
||||
import { TAppConnectionDALFactory } from "../app-connection/app-connection-dal";
|
||||
import { TKmsServiceFactory } from "../kms/kms-service";
|
||||
import {
|
||||
AZURE_APP_CONFIGURATION_SYNC_LIST_OPTION,
|
||||
azureAppConfigurationSecretSyncFactory
|
||||
} from "./azure-app-configuration";
|
||||
import { AZURE_KEY_VAULT_SYNC_LIST_OPTION, azureKeyVaultSecretSyncFactory } from "./azure-key-vault";
|
||||
import { AZURE_APP_CONFIGURATION_SYNC_LIST_OPTION, azureAppConfigurationSyncFactory } from "./azure-app-configuration";
|
||||
import { AZURE_KEY_VAULT_SYNC_LIST_OPTION, azureKeyVaultSyncFactory } from "./azure-key-vault";
|
||||
import { GCP_SYNC_LIST_OPTION } from "./gcp";
|
||||
import { GcpSyncFns } from "./gcp/gcp-sync-fns";
|
||||
|
||||
@@ -33,7 +31,8 @@ const SECRET_SYNC_LIST_OPTIONS: Record<SecretSync, TSecretSyncListItem> = {
|
||||
[SecretSync.GitHub]: GITHUB_SYNC_LIST_OPTION,
|
||||
[SecretSync.GCPSecretManager]: GCP_SYNC_LIST_OPTION,
|
||||
[SecretSync.AzureKeyVault]: AZURE_KEY_VAULT_SYNC_LIST_OPTION,
|
||||
[SecretSync.AzureAppConfiguration]: AZURE_APP_CONFIGURATION_SYNC_LIST_OPTION
|
||||
[SecretSync.AzureAppConfiguration]: AZURE_APP_CONFIGURATION_SYNC_LIST_OPTION,
|
||||
[SecretSync.Databricks]: DATABRICKS_SYNC_LIST_OPTION
|
||||
};
|
||||
|
||||
export const listSecretSyncOptions = () => {
|
||||
@@ -41,7 +40,7 @@ export const listSecretSyncOptions = () => {
|
||||
};
|
||||
|
||||
type TSyncSecretDeps = {
|
||||
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update">;
|
||||
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update" | "updateById">;
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
|
||||
};
|
||||
|
||||
@@ -103,12 +102,17 @@ export const SecretSyncFns = {
|
||||
case SecretSync.GCPSecretManager:
|
||||
return GcpSyncFns.syncSecrets(secretSync, secretMap);
|
||||
case SecretSync.AzureKeyVault:
|
||||
return azureKeyVaultSecretSyncFactory({
|
||||
return azureKeyVaultSyncFactory({
|
||||
appConnectionDAL,
|
||||
kmsService
|
||||
}).syncSecrets(secretSync, secretMap);
|
||||
case SecretSync.AzureAppConfiguration:
|
||||
return azureAppConfigurationSecretSyncFactory({
|
||||
return azureAppConfigurationSyncFactory({
|
||||
appConnectionDAL,
|
||||
kmsService
|
||||
}).syncSecrets(secretSync, secretMap);
|
||||
case SecretSync.Databricks:
|
||||
return databricksSyncFactory({
|
||||
appConnectionDAL,
|
||||
kmsService
|
||||
}).syncSecrets(secretSync, secretMap);
|
||||
@@ -137,17 +141,22 @@ export const SecretSyncFns = {
|
||||
secretMap = await GcpSyncFns.getSecrets(secretSync);
|
||||
break;
|
||||
case SecretSync.AzureKeyVault:
|
||||
secretMap = await azureKeyVaultSecretSyncFactory({
|
||||
secretMap = await azureKeyVaultSyncFactory({
|
||||
appConnectionDAL,
|
||||
kmsService
|
||||
}).getSecrets(secretSync);
|
||||
break;
|
||||
case SecretSync.AzureAppConfiguration:
|
||||
secretMap = await azureAppConfigurationSecretSyncFactory({
|
||||
secretMap = await azureAppConfigurationSyncFactory({
|
||||
appConnectionDAL,
|
||||
kmsService
|
||||
}).getSecrets(secretSync);
|
||||
break;
|
||||
case SecretSync.Databricks:
|
||||
return databricksSyncFactory({
|
||||
appConnectionDAL,
|
||||
kmsService
|
||||
}).getSecrets(secretSync);
|
||||
default:
|
||||
throw new Error(
|
||||
`Unhandled sync destination for get secrets fns: ${(secretSync as TSecretSyncWithCredentials).destination}`
|
||||
@@ -174,12 +183,17 @@ export const SecretSyncFns = {
|
||||
case SecretSync.GCPSecretManager:
|
||||
return GcpSyncFns.removeSecrets(secretSync, secretMap);
|
||||
case SecretSync.AzureKeyVault:
|
||||
return azureKeyVaultSecretSyncFactory({
|
||||
return azureKeyVaultSyncFactory({
|
||||
appConnectionDAL,
|
||||
kmsService
|
||||
}).removeSecrets(secretSync, secretMap);
|
||||
case SecretSync.AzureAppConfiguration:
|
||||
return azureAppConfigurationSecretSyncFactory({
|
||||
return azureAppConfigurationSyncFactory({
|
||||
appConnectionDAL,
|
||||
kmsService
|
||||
}).removeSecrets(secretSync, secretMap);
|
||||
case SecretSync.Databricks:
|
||||
return databricksSyncFactory({
|
||||
appConnectionDAL,
|
||||
kmsService
|
||||
}).removeSecrets(secretSync, secretMap);
|
||||
|
@@ -7,7 +7,8 @@ export const SECRET_SYNC_NAME_MAP: Record<SecretSync, string> = {
|
||||
[SecretSync.GitHub]: "GitHub",
|
||||
[SecretSync.GCPSecretManager]: "GCP Secret Manager",
|
||||
[SecretSync.AzureKeyVault]: "Azure Key Vault",
|
||||
[SecretSync.AzureAppConfiguration]: "Azure App Configuration"
|
||||
[SecretSync.AzureAppConfiguration]: "Azure App Configuration",
|
||||
[SecretSync.Databricks]: "Databricks"
|
||||
};
|
||||
|
||||
export const SECRET_SYNC_CONNECTION_MAP: Record<SecretSync, AppConnection> = {
|
||||
@@ -16,5 +17,6 @@ export const SECRET_SYNC_CONNECTION_MAP: Record<SecretSync, AppConnection> = {
|
||||
[SecretSync.GitHub]: AppConnection.GitHub,
|
||||
[SecretSync.GCPSecretManager]: AppConnection.GCP,
|
||||
[SecretSync.AzureKeyVault]: AppConnection.AzureKeyVault,
|
||||
[SecretSync.AzureAppConfiguration]: AppConnection.AzureAppConfiguration
|
||||
[SecretSync.AzureAppConfiguration]: AppConnection.AzureAppConfiguration,
|
||||
[SecretSync.Databricks]: AppConnection.Databricks
|
||||
};
|
||||
|
@@ -64,7 +64,7 @@ export type TSecretSyncQueueFactory = ReturnType<typeof secretSyncQueueFactory>;
|
||||
type TSecretSyncQueueFactoryDep = {
|
||||
queueService: Pick<TQueueServiceFactory, "queue" | "start">;
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
|
||||
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update">;
|
||||
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update" | "updateById">;
|
||||
keyStore: Pick<TKeyStoreFactory, "acquireLock" | "setItemWithExpiry" | "getItem">;
|
||||
folderDAL: TSecretFolderDALFactory;
|
||||
secretV2BridgeDAL: Pick<
|
||||
|
@@ -13,7 +13,7 @@ const SyncOptionsSchema = (secretSync: SecretSync, options: TSyncOptionsConfig =
|
||||
initialSyncBehavior: (options.canImportSecrets
|
||||
? z.nativeEnum(SecretSyncInitialSyncBehavior)
|
||||
: z.literal(SecretSyncInitialSyncBehavior.OverwriteDestination)
|
||||
).describe(SecretSyncs.SYNC_OPTIONS(secretSync).INITIAL_SYNC_BEHAVIOR)
|
||||
).describe(SecretSyncs.SYNC_OPTIONS(secretSync).initialSyncBehavior)
|
||||
// prependPrefix: z
|
||||
// .string()
|
||||
// .trim()
|
||||
|
@@ -8,6 +8,12 @@ import {
|
||||
TAwsSecretsManagerSyncListItem,
|
||||
TAwsSecretsManagerSyncWithCredentials
|
||||
} from "@app/services/secret-sync/aws-secrets-manager";
|
||||
import {
|
||||
TDatabricksSync,
|
||||
TDatabricksSyncInput,
|
||||
TDatabricksSyncListItem,
|
||||
TDatabricksSyncWithCredentials
|
||||
} from "@app/services/secret-sync/databricks";
|
||||
import {
|
||||
TGitHubSync,
|
||||
TGitHubSyncInput,
|
||||
@@ -43,7 +49,8 @@ export type TSecretSync =
|
||||
| TGitHubSync
|
||||
| TGcpSync
|
||||
| TAzureKeyVaultSync
|
||||
| TAzureAppConfigurationSync;
|
||||
| TAzureAppConfigurationSync
|
||||
| TDatabricksSync;
|
||||
|
||||
export type TSecretSyncWithCredentials =
|
||||
| TAwsParameterStoreSyncWithCredentials
|
||||
@@ -51,7 +58,8 @@ export type TSecretSyncWithCredentials =
|
||||
| TGitHubSyncWithCredentials
|
||||
| TGcpSyncWithCredentials
|
||||
| TAzureKeyVaultSyncWithCredentials
|
||||
| TAzureAppConfigurationSyncWithCredentials;
|
||||
| TAzureAppConfigurationSyncWithCredentials
|
||||
| TDatabricksSyncWithCredentials;
|
||||
|
||||
export type TSecretSyncInput =
|
||||
| TAwsParameterStoreSyncInput
|
||||
@@ -59,7 +67,8 @@ export type TSecretSyncInput =
|
||||
| TGitHubSyncInput
|
||||
| TGcpSyncInput
|
||||
| TAzureKeyVaultSyncInput
|
||||
| TAzureAppConfigurationSyncInput;
|
||||
| TAzureAppConfigurationSyncInput
|
||||
| TDatabricksSyncInput;
|
||||
|
||||
export type TSecretSyncListItem =
|
||||
| TAwsParameterStoreSyncListItem
|
||||
@@ -67,7 +76,8 @@ export type TSecretSyncListItem =
|
||||
| TGitHubSyncListItem
|
||||
| TGcpSyncListItem
|
||||
| TAzureKeyVaultSyncListItem
|
||||
| TAzureAppConfigurationSyncListItem;
|
||||
| TAzureAppConfigurationSyncListItem
|
||||
| TDatabricksSyncListItem;
|
||||
|
||||
export type TSyncOptionsConfig = {
|
||||
canImportSecrets: boolean;
|
||||
|
@@ -1,7 +1,15 @@
|
||||
import { ForbiddenError, PureAbility, subject } from "@casl/ability";
|
||||
import { Knex } from "knex";
|
||||
import { z } from "zod";
|
||||
|
||||
import { ActionProjectType, ProjectMembershipRole, SecretsV2Schema, SecretType, TableName } from "@app/db/schemas";
|
||||
import {
|
||||
ActionProjectType,
|
||||
ProjectMembershipRole,
|
||||
SecretsV2Schema,
|
||||
SecretType,
|
||||
TableName,
|
||||
TSecretsV2
|
||||
} from "@app/db/schemas";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
|
||||
@@ -36,6 +44,7 @@ import {
|
||||
} from "./secret-v2-bridge-fns";
|
||||
import {
|
||||
SecretOperations,
|
||||
SecretUpdateMode,
|
||||
TBackFillSecretReferencesDTO,
|
||||
TCreateManySecretDTO,
|
||||
TCreateSecretDTO,
|
||||
@@ -103,12 +112,13 @@ export const secretV2BridgeServiceFactory = ({
|
||||
const $validateSecretReferences = async (
|
||||
projectId: string,
|
||||
permission: PureAbility,
|
||||
references: ReturnType<typeof getAllSecretReferences>["nestedReferences"]
|
||||
references: ReturnType<typeof getAllSecretReferences>["nestedReferences"],
|
||||
tx?: Knex
|
||||
) => {
|
||||
if (!references.length) return;
|
||||
|
||||
const uniqueReferenceEnvironmentSlugs = Array.from(new Set(references.map((el) => el.environment)));
|
||||
const referencesEnvironments = await projectEnvDAL.findBySlugs(projectId, uniqueReferenceEnvironmentSlugs);
|
||||
const referencesEnvironments = await projectEnvDAL.findBySlugs(projectId, uniqueReferenceEnvironmentSlugs, tx);
|
||||
if (referencesEnvironments.length !== uniqueReferenceEnvironmentSlugs.length)
|
||||
throw new BadRequestError({
|
||||
message: `Referenced environment not found. Missing ${diff(
|
||||
@@ -122,36 +132,41 @@ export const secretV2BridgeServiceFactory = ({
|
||||
references.map((el) => ({
|
||||
secretPath: el.secretPath,
|
||||
envId: referencesEnvironmentGroupBySlug[el.environment][0].id
|
||||
}))
|
||||
})),
|
||||
tx
|
||||
);
|
||||
const referencesFolderGroupByPath = groupBy(referredFolders.filter(Boolean), (i) => `${i?.envId}-${i?.path}`);
|
||||
const referredSecrets = await secretDAL.find({
|
||||
$complex: {
|
||||
operator: "or",
|
||||
value: references.map((el) => {
|
||||
const folderId =
|
||||
referencesFolderGroupByPath[`${referencesEnvironmentGroupBySlug[el.environment][0].id}-${el.secretPath}`][0]
|
||||
?.id;
|
||||
if (!folderId) throw new BadRequestError({ message: `Referenced path ${el.secretPath} doesn't exist` });
|
||||
const referredSecrets = await secretDAL.find(
|
||||
{
|
||||
$complex: {
|
||||
operator: "or",
|
||||
value: references.map((el) => {
|
||||
const folderId =
|
||||
referencesFolderGroupByPath[
|
||||
`${referencesEnvironmentGroupBySlug[el.environment][0].id}-${el.secretPath}`
|
||||
][0]?.id;
|
||||
if (!folderId) throw new BadRequestError({ message: `Referenced path ${el.secretPath} doesn't exist` });
|
||||
|
||||
return {
|
||||
operator: "and",
|
||||
value: [
|
||||
{
|
||||
operator: "eq",
|
||||
field: "folderId",
|
||||
value: folderId
|
||||
},
|
||||
{
|
||||
operator: "eq",
|
||||
field: `${TableName.SecretV2}.key` as "key",
|
||||
value: el.secretKey
|
||||
}
|
||||
]
|
||||
};
|
||||
})
|
||||
}
|
||||
});
|
||||
return {
|
||||
operator: "and",
|
||||
value: [
|
||||
{
|
||||
operator: "eq",
|
||||
field: "folderId",
|
||||
value: folderId
|
||||
},
|
||||
{
|
||||
operator: "eq",
|
||||
field: `${TableName.SecretV2}.key` as "key",
|
||||
value: el.secretKey
|
||||
}
|
||||
]
|
||||
};
|
||||
})
|
||||
}
|
||||
},
|
||||
{ tx }
|
||||
);
|
||||
|
||||
if (
|
||||
referredSecrets.length !==
|
||||
@@ -1245,8 +1260,9 @@ export const secretV2BridgeServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
environment,
|
||||
projectId,
|
||||
secretPath,
|
||||
secrets: inputSecrets
|
||||
secretPath: defaultSecretPath = "/",
|
||||
secrets: inputSecrets,
|
||||
mode: updateMode
|
||||
}: TUpdateManySecretDTO) => {
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
@@ -1257,196 +1273,280 @@ export const secretV2BridgeServiceFactory = ({
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
|
||||
if (!folder)
|
||||
const secretsToUpdateGroupByPath = groupBy(inputSecrets, (el) => el.secretPath || defaultSecretPath);
|
||||
const projectEnvironment = await projectEnvDAL.findOne({ projectId, slug: environment });
|
||||
if (!projectEnvironment) {
|
||||
throw new NotFoundError({
|
||||
message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found`,
|
||||
message: `Environment with slug '${environment}' in project with ID '${projectId}' not found`
|
||||
});
|
||||
}
|
||||
|
||||
const folders = await folderDAL.findByManySecretPath(
|
||||
Object.keys(secretsToUpdateGroupByPath).map((el) => ({ envId: projectEnvironment.id, secretPath: el }))
|
||||
);
|
||||
if (folders.length !== Object.keys(secretsToUpdateGroupByPath).length)
|
||||
throw new NotFoundError({
|
||||
message: `Folder with path '${null}' in environment with slug '${environment}' not found`,
|
||||
name: "UpdateManySecret"
|
||||
});
|
||||
const folderId = folder.id;
|
||||
|
||||
const secretsToUpdate = await secretDAL.find({
|
||||
folderId,
|
||||
$complex: {
|
||||
operator: "and",
|
||||
value: [
|
||||
{
|
||||
operator: "or",
|
||||
value: inputSecrets.map((el) => ({
|
||||
operator: "and",
|
||||
value: [
|
||||
{
|
||||
operator: "eq",
|
||||
field: `${TableName.SecretV2}.key` as "key",
|
||||
value: el.secretKey
|
||||
},
|
||||
{
|
||||
operator: "eq",
|
||||
field: "type",
|
||||
value: SecretType.Shared
|
||||
}
|
||||
]
|
||||
}))
|
||||
}
|
||||
]
|
||||
}
|
||||
});
|
||||
if (secretsToUpdate.length !== inputSecrets.length) {
|
||||
const secretsToUpdateNames = secretsToUpdate.map((secret) => secret.key);
|
||||
const invalidSecrets = inputSecrets.filter((secret) => !secretsToUpdateNames.includes(secret.secretKey));
|
||||
throw new NotFoundError({
|
||||
message: `Secret does not exist: ${invalidSecrets.map((el) => el.secretKey).join(",")}`
|
||||
});
|
||||
}
|
||||
const secretsToUpdateInDBGroupedByKey = groupBy(secretsToUpdate, (i) => i.key);
|
||||
|
||||
secretsToUpdate.forEach((el) => {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment,
|
||||
secretPath,
|
||||
secretName: el.key,
|
||||
secretTags: el.tags.map((i) => i.slug)
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
// get all tags
|
||||
const sanitizedTagIds = inputSecrets.flatMap(({ tagIds = [] }) => tagIds);
|
||||
const tags = sanitizedTagIds.length ? await secretTagDAL.findManyTagsById(projectId, sanitizedTagIds) : [];
|
||||
if (tags.length !== sanitizedTagIds.length) throw new NotFoundError({ message: "Tag not found" });
|
||||
const tagsGroupByID = groupBy(tags, (i) => i.id);
|
||||
|
||||
// check again to avoid non authorized tags are removed
|
||||
inputSecrets.forEach((el) => {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment,
|
||||
secretPath,
|
||||
secretName: el.secretKey,
|
||||
secretTags: (el.tagIds || []).map((i) => tagsGroupByID[i][0].slug)
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
// now find any secret that needs to update its name
|
||||
// same process as above
|
||||
const secretsWithNewName = inputSecrets.filter(({ newSecretName }) => Boolean(newSecretName));
|
||||
if (secretsWithNewName.length) {
|
||||
const secrets = await secretDAL.find({
|
||||
folderId,
|
||||
$complex: {
|
||||
operator: "and",
|
||||
value: [
|
||||
{
|
||||
operator: "or",
|
||||
value: secretsWithNewName.map((el) => ({
|
||||
operator: "and",
|
||||
value: [
|
||||
{
|
||||
operator: "eq",
|
||||
field: `${TableName.SecretV2}.key` as "key",
|
||||
value: el.secretKey
|
||||
},
|
||||
{
|
||||
operator: "eq",
|
||||
field: "type",
|
||||
value: SecretType.Shared
|
||||
}
|
||||
]
|
||||
}))
|
||||
}
|
||||
]
|
||||
}
|
||||
});
|
||||
if (secrets.length)
|
||||
throw new BadRequestError({
|
||||
message: `Secret with new name already exists: ${secretsWithNewName.map((el) => el.newSecretName).join(",")}`
|
||||
});
|
||||
|
||||
secretsWithNewName.forEach((el) => {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Create,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment,
|
||||
secretPath,
|
||||
secretName: el.newSecretName as string,
|
||||
secretTags: (el.tagIds || []).map((i) => tagsGroupByID[i][0].slug)
|
||||
})
|
||||
);
|
||||
});
|
||||
}
|
||||
// now get all secret references made and validate the permission
|
||||
const secretReferencesGroupByInputSecretKey: Record<string, ReturnType<typeof getAllSecretReferences>> = {};
|
||||
const secretReferences: TSecretReference[] = [];
|
||||
inputSecrets.forEach((el) => {
|
||||
if (el.secretValue) {
|
||||
const references = getAllSecretReferences(el.secretValue);
|
||||
secretReferencesGroupByInputSecretKey[el.secretKey] = references;
|
||||
secretReferences.push(...references.nestedReferences);
|
||||
references.localReferences.forEach((localRefKey) => {
|
||||
secretReferences.push({ secretKey: localRefKey, secretPath, environment });
|
||||
});
|
||||
}
|
||||
});
|
||||
await $validateSecretReferences(projectId, permission, secretReferences);
|
||||
|
||||
const { encryptor: secretManagerEncryptor, decryptor: secretManagerDecryptor } =
|
||||
await kmsService.createCipherPairWithDataKey({ type: KmsDataKey.SecretManager, projectId });
|
||||
|
||||
const secrets = await secretDAL.transaction(async (tx) =>
|
||||
fnSecretBulkUpdate({
|
||||
folderId,
|
||||
orgId: actorOrgId,
|
||||
tx,
|
||||
inputSecrets: inputSecrets.map((el) => {
|
||||
const originalSecret = secretsToUpdateInDBGroupedByKey[el.secretKey][0];
|
||||
const encryptedValue =
|
||||
typeof el.secretValue !== "undefined"
|
||||
? {
|
||||
encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(el.secretValue) }).cipherTextBlob,
|
||||
references: secretReferencesGroupByInputSecretKey[el.secretKey]?.nestedReferences
|
||||
}
|
||||
: {};
|
||||
const updatedSecrets: Array<TSecretsV2 & { secretPath: string }> = [];
|
||||
await secretDAL.transaction(async (tx) => {
|
||||
for await (const folder of folders) {
|
||||
if (!folder) throw new NotFoundError({ message: "Folder not found" });
|
||||
|
||||
return {
|
||||
filter: { id: originalSecret.id, type: SecretType.Shared },
|
||||
data: {
|
||||
reminderRepeatDays: el.secretReminderRepeatDays,
|
||||
encryptedComment: setKnexStringValue(
|
||||
el.secretComment,
|
||||
(value) => secretManagerEncryptor({ plainText: Buffer.from(value) }).cipherTextBlob
|
||||
),
|
||||
reminderNote: el.secretReminderNote,
|
||||
skipMultilineEncoding: el.skipMultilineEncoding,
|
||||
key: el.newSecretName || el.secretKey,
|
||||
tags: el.tagIds,
|
||||
secretMetadata: el.secretMetadata,
|
||||
...encryptedValue
|
||||
const folderId = folder.id;
|
||||
const secretPath = folder.path;
|
||||
let secretsToUpdate = secretsToUpdateGroupByPath[secretPath];
|
||||
const secretsToUpdateInDB = await secretDAL.find(
|
||||
{
|
||||
folderId,
|
||||
$complex: {
|
||||
operator: "and",
|
||||
value: [
|
||||
{
|
||||
operator: "or",
|
||||
value: secretsToUpdate.map((el) => ({
|
||||
operator: "and",
|
||||
value: [
|
||||
{
|
||||
operator: "eq",
|
||||
field: `${TableName.SecretV2}.key` as "key",
|
||||
value: el.secretKey
|
||||
},
|
||||
{
|
||||
operator: "eq",
|
||||
field: "type",
|
||||
value: SecretType.Shared
|
||||
}
|
||||
]
|
||||
}))
|
||||
}
|
||||
]
|
||||
}
|
||||
};
|
||||
}),
|
||||
secretDAL,
|
||||
secretVersionDAL,
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL,
|
||||
resourceMetadataDAL
|
||||
})
|
||||
);
|
||||
await snapshotService.performSnapshot(folderId);
|
||||
await secretQueueService.syncSecrets({
|
||||
actor,
|
||||
actorId,
|
||||
secretPath,
|
||||
projectId,
|
||||
orgId: actorOrgId,
|
||||
environmentSlug: folder.environment.slug
|
||||
},
|
||||
{ tx }
|
||||
);
|
||||
if (secretsToUpdateInDB.length !== secretsToUpdate.length && updateMode === SecretUpdateMode.FailOnNotFound)
|
||||
throw new NotFoundError({
|
||||
message: `Secret does not exist: ${diff(
|
||||
secretsToUpdate.map((el) => el.secretKey),
|
||||
secretsToUpdateInDB.map((el) => el.key)
|
||||
).join(", ")} in path ${folder.path}`
|
||||
});
|
||||
|
||||
const secretsToUpdateInDBGroupedByKey = groupBy(secretsToUpdateInDB, (i) => i.key);
|
||||
const secretsToCreate = secretsToUpdate.filter((el) => !secretsToUpdateInDBGroupedByKey?.[el.secretKey]);
|
||||
secretsToUpdate = secretsToUpdate.filter((el) => secretsToUpdateInDBGroupedByKey?.[el.secretKey]);
|
||||
|
||||
secretsToUpdateInDB.forEach((el) => {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment,
|
||||
secretPath,
|
||||
secretName: el.key,
|
||||
secretTags: el.tags.map((i) => i.slug)
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
// get all tags
|
||||
const sanitizedTagIds = secretsToUpdate.flatMap(({ tagIds = [] }) => tagIds);
|
||||
const tags = sanitizedTagIds.length ? await secretTagDAL.findManyTagsById(projectId, sanitizedTagIds, tx) : [];
|
||||
if (tags.length !== sanitizedTagIds.length) throw new NotFoundError({ message: "Tag not found" });
|
||||
const tagsGroupByID = groupBy(tags, (i) => i.id);
|
||||
|
||||
// check create permission allowed in upsert mode
|
||||
if (updateMode === SecretUpdateMode.Upsert) {
|
||||
secretsToCreate.forEach((el) => {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Create,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment,
|
||||
secretPath,
|
||||
secretName: el.secretKey,
|
||||
secretTags: (el.tagIds || []).map((i) => tagsGroupByID[i][0].slug)
|
||||
})
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
// check again to avoid non authorized tags are removed
|
||||
secretsToUpdate.forEach((el) => {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment,
|
||||
secretPath,
|
||||
secretName: el.secretKey,
|
||||
secretTags: (el.tagIds || []).map((i) => tagsGroupByID[i][0].slug)
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
// now find any secret that needs to update its name
|
||||
// same process as above
|
||||
const secretsWithNewName = secretsToUpdate.filter(({ newSecretName }) => Boolean(newSecretName));
|
||||
if (secretsWithNewName.length) {
|
||||
const secrets = await secretDAL.find(
|
||||
{
|
||||
folderId,
|
||||
$complex: {
|
||||
operator: "and",
|
||||
value: [
|
||||
{
|
||||
operator: "or",
|
||||
value: secretsWithNewName.map((el) => ({
|
||||
operator: "and",
|
||||
value: [
|
||||
{
|
||||
operator: "eq",
|
||||
field: `${TableName.SecretV2}.key` as "key",
|
||||
value: el.secretKey
|
||||
},
|
||||
{
|
||||
operator: "eq",
|
||||
field: "type",
|
||||
value: SecretType.Shared
|
||||
}
|
||||
]
|
||||
}))
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{ tx }
|
||||
);
|
||||
if (secrets.length)
|
||||
throw new BadRequestError({
|
||||
message: `Secret with new name already exists: ${secretsWithNewName
|
||||
.map((el) => el.newSecretName)
|
||||
.join(", ")}`
|
||||
});
|
||||
|
||||
secretsWithNewName.forEach((el) => {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Create,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment,
|
||||
secretPath,
|
||||
secretName: el.newSecretName as string,
|
||||
secretTags: (el.tagIds || []).map((i) => tagsGroupByID[i][0].slug)
|
||||
})
|
||||
);
|
||||
});
|
||||
}
|
||||
// now get all secret references made and validate the permission
|
||||
const secretReferencesGroupByInputSecretKey: Record<string, ReturnType<typeof getAllSecretReferences>> = {};
|
||||
const secretReferences: TSecretReference[] = [];
|
||||
secretsToUpdate.concat(SecretUpdateMode.Upsert === updateMode ? secretsToCreate : []).forEach((el) => {
|
||||
if (el.secretValue) {
|
||||
const references = getAllSecretReferences(el.secretValue);
|
||||
secretReferencesGroupByInputSecretKey[el.secretKey] = references;
|
||||
secretReferences.push(...references.nestedReferences);
|
||||
references.localReferences.forEach((localRefKey) => {
|
||||
secretReferences.push({ secretKey: localRefKey, secretPath, environment });
|
||||
});
|
||||
}
|
||||
});
|
||||
await $validateSecretReferences(projectId, permission, secretReferences, tx);
|
||||
|
||||
const bulkUpdatedSecrets = await fnSecretBulkUpdate({
|
||||
folderId,
|
||||
orgId: actorOrgId,
|
||||
tx,
|
||||
inputSecrets: secretsToUpdate.map((el) => {
|
||||
const originalSecret = secretsToUpdateInDBGroupedByKey[el.secretKey][0];
|
||||
const encryptedValue =
|
||||
typeof el.secretValue !== "undefined"
|
||||
? {
|
||||
encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(el.secretValue) }).cipherTextBlob,
|
||||
references: secretReferencesGroupByInputSecretKey[el.secretKey]?.nestedReferences
|
||||
}
|
||||
: {};
|
||||
|
||||
return {
|
||||
filter: { id: originalSecret.id, type: SecretType.Shared },
|
||||
data: {
|
||||
reminderRepeatDays: el.secretReminderRepeatDays,
|
||||
encryptedComment: setKnexStringValue(
|
||||
el.secretComment,
|
||||
(value) => secretManagerEncryptor({ plainText: Buffer.from(value) }).cipherTextBlob
|
||||
),
|
||||
reminderNote: el.secretReminderNote,
|
||||
skipMultilineEncoding: el.skipMultilineEncoding,
|
||||
key: el.newSecretName || el.secretKey,
|
||||
tags: el.tagIds,
|
||||
secretMetadata: el.secretMetadata,
|
||||
...encryptedValue
|
||||
}
|
||||
};
|
||||
}),
|
||||
secretDAL,
|
||||
secretVersionDAL,
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL,
|
||||
resourceMetadataDAL
|
||||
});
|
||||
updatedSecrets.push(...bulkUpdatedSecrets.map((el) => ({ ...el, secretPath: folder.path })));
|
||||
if (updateMode === SecretUpdateMode.Upsert) {
|
||||
const bulkInsertedSecrets = await fnSecretBulkInsert({
|
||||
inputSecrets: secretsToCreate.map((el) => {
|
||||
const references = secretReferencesGroupByInputSecretKey[el.secretKey]?.nestedReferences;
|
||||
|
||||
return {
|
||||
version: 1,
|
||||
encryptedComment: setKnexStringValue(
|
||||
el.secretComment,
|
||||
(value) => secretManagerEncryptor({ plainText: Buffer.from(value) }).cipherTextBlob
|
||||
),
|
||||
encryptedValue: el.secretValue
|
||||
? secretManagerEncryptor({ plainText: Buffer.from(el.secretValue) }).cipherTextBlob
|
||||
: undefined,
|
||||
skipMultilineEncoding: el.skipMultilineEncoding,
|
||||
key: el.secretKey,
|
||||
tagIds: el.tagIds,
|
||||
references,
|
||||
secretMetadata: el.secretMetadata,
|
||||
type: SecretType.Shared
|
||||
};
|
||||
}),
|
||||
folderId,
|
||||
orgId: actorOrgId,
|
||||
secretDAL,
|
||||
resourceMetadataDAL,
|
||||
secretVersionDAL,
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL,
|
||||
tx
|
||||
});
|
||||
updatedSecrets.push(...bulkInsertedSecrets.map((el) => ({ ...el, secretPath: folder.path })));
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return secrets.map((el) =>
|
||||
reshapeBridgeSecret(projectId, environment, secretPath, {
|
||||
await Promise.allSettled(folders.map((el) => (el?.id ? snapshotService.performSnapshot(el.id) : undefined)));
|
||||
await Promise.allSettled(
|
||||
folders.map((el) =>
|
||||
el
|
||||
? secretQueueService.syncSecrets({
|
||||
actor,
|
||||
actorId,
|
||||
secretPath: el.path,
|
||||
projectId,
|
||||
orgId: actorOrgId,
|
||||
environmentSlug: environment
|
||||
})
|
||||
: undefined
|
||||
)
|
||||
);
|
||||
|
||||
return updatedSecrets.map((el) =>
|
||||
reshapeBridgeSecret(projectId, environment, el.secretPath, {
|
||||
...el,
|
||||
value: el.encryptedValue ? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString() : "",
|
||||
comment: el.encryptedComment ? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString() : ""
|
||||
|
@@ -23,6 +23,12 @@ export type TSecretReferenceDTO = {
|
||||
secretKey: string;
|
||||
};
|
||||
|
||||
export enum SecretUpdateMode {
|
||||
Ignore = "ignore",
|
||||
Upsert = "upsert",
|
||||
FailOnNotFound = "failOnNotFound"
|
||||
}
|
||||
|
||||
export type TGetSecretsDTO = {
|
||||
expandSecretReferences?: boolean;
|
||||
path: string;
|
||||
@@ -113,6 +119,7 @@ export type TUpdateManySecretDTO = Omit<TProjectPermission, "projectId"> & {
|
||||
secretPath: string;
|
||||
projectId: string;
|
||||
environment: string;
|
||||
mode: SecretUpdateMode;
|
||||
secrets: {
|
||||
secretKey: string;
|
||||
newSecretName?: string;
|
||||
@@ -123,6 +130,7 @@ export type TUpdateManySecretDTO = Omit<TProjectPermission, "projectId"> & {
|
||||
secretReminderRepeatDays?: number | null;
|
||||
secretReminderNote?: string | null;
|
||||
secretMetadata?: ResourceMetadataDTO;
|
||||
secretPath?: string;
|
||||
}[];
|
||||
};
|
||||
|
||||
|
@@ -30,7 +30,10 @@ import { groupBy, pick } from "@app/lib/fn";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { OrgServiceActor } from "@app/lib/types";
|
||||
import { TGetSecretsRawByFolderMappingsDTO } from "@app/services/secret-v2-bridge/secret-v2-bridge-types";
|
||||
import {
|
||||
SecretUpdateMode,
|
||||
TGetSecretsRawByFolderMappingsDTO
|
||||
} from "@app/services/secret-v2-bridge/secret-v2-bridge-types";
|
||||
|
||||
import { ActorType } from "../auth/auth-type";
|
||||
import { TProjectDALFactory } from "../project/project-dal";
|
||||
@@ -2012,6 +2015,7 @@ export const secretServiceFactory = ({
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
secretPath,
|
||||
mode = SecretUpdateMode.FailOnNotFound,
|
||||
secrets: inputSecrets = []
|
||||
}: TUpdateManySecretRawDTO) => {
|
||||
if (!projectSlug && !optionalProjectId)
|
||||
@@ -2076,7 +2080,8 @@ export const secretServiceFactory = ({
|
||||
actorOrgId,
|
||||
actor,
|
||||
actorId,
|
||||
secrets: inputSecrets
|
||||
secrets: inputSecrets,
|
||||
mode
|
||||
});
|
||||
return { type: SecretProtectionType.Direct as const, secrets };
|
||||
}
|
||||
|
@@ -17,6 +17,7 @@ import { TKmsServiceFactory } from "../kms/kms-service";
import { TResourceMetadataDALFactory } from "../resource-metadata/resource-metadata-dal";
import { ResourceMetadataDTO } from "../resource-metadata/resource-metadata-schema";
import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal";
import { SecretUpdateMode } from "../secret-v2-bridge/secret-v2-bridge-types";
import { TSecretVersionV2DALFactory } from "../secret-v2-bridge/secret-version-dal";
import { TSecretVersionV2TagDALFactory } from "../secret-v2-bridge/secret-version-tag-dal";

@@ -274,6 +275,7 @@ export type TUpdateManySecretRawDTO = Omit<TProjectPermission, "projectId"> & {
  projectId?: string;
  projectSlug?: string;
  environment: string;
  mode: SecretUpdateMode;
  secrets: {
    secretKey: string;
    newSecretName?: string;
@@ -0,0 +1,4 @@
---
title: "Available"
openapi: "GET /api/v1/app-connections/databricks/available"
---
@@ -0,0 +1,4 @@
---
title: "Create"
openapi: "POST /api/v1/app-connections/databricks"
---
@@ -0,0 +1,4 @@
---
title: "Delete"
openapi: "DELETE /api/v1/app-connections/databricks/{connectionId}"
---
@@ -0,0 +1,4 @@
---
title: "Get by ID"
openapi: "GET /api/v1/app-connections/databricks/{connectionId}"
---
@@ -0,0 +1,4 @@
---
title: "Get by Name"
openapi: "GET /api/v1/app-connections/databricks/connection-name/{connectionName}"
---
@@ -0,0 +1,4 @@
---
title: "List"
openapi: "GET /api/v1/app-connections/databricks"
---
@@ -0,0 +1,4 @@
---
title: "Update"
openapi: "PATCH /api/v1/app-connections/databricks/{connectionId}"
---
@@ -0,0 +1,4 @@
---
title: "Create"
openapi: "POST /api/v1/secret-syncs/databricks"
---
@@ -0,0 +1,4 @@
---
title: "Delete"
openapi: "DELETE /api/v1/secret-syncs/databricks/{syncId}"
---
@@ -0,0 +1,4 @@
---
title: "Get by ID"
openapi: "GET /api/v1/secret-syncs/databricks/{syncId}"
---
@@ -0,0 +1,4 @@
---
title: "Get by Name"
openapi: "GET /api/v1/secret-syncs/databricks/sync-name/{syncName}"
---
@@ -0,0 +1,4 @@
---
title: "List"
openapi: "GET /api/v1/secret-syncs/databricks"
---
@@ -0,0 +1,4 @@
---
title: "Remove Secrets"
openapi: "POST /api/v1/secret-syncs/databricks/{syncId}/remove-secrets"
---
@@ -0,0 +1,4 @@
---
title: "Sync Secrets"
openapi: "POST /api/v1/secret-syncs/databricks/{syncId}/sync-secrets"
---
@@ -0,0 +1,4 @@
---
title: "Update"
openapi: "PATCH /api/v1/secret-syncs/databricks/{syncId}"
---
@@ -4,9 +4,6 @@ title: "Backend development guide"

Suppose you're interested in implementing a new feature in Infisical's backend; let's call it "feature-x." Here are the general steps you should follow.

## Database schema migration

To run [schema migrations](https://en.wikipedia.org/wiki/Schema_migration#:~:text=A%20schema%20migration%20is%20performed,some%20newer%20or%20older%20version), you need to expose your database connection string. Create a `.env.migration` file to set the database connection URI for migration scripts, or alternatively, export the `DB_CONNECTION_URI` environment variable.

## Creating a new database model

If your feature involves a change in the database, you first need to address this by generating the necessary database schemas.
@@ -0,0 +1,87 @@
---
title: "Terraform Cloud"
description: "How to authenticate with Infisical from Terraform Cloud using OIDC."
---

This guide will walk you through setting up Terraform Cloud to inject a [workload identity token](https://developer.hashicorp.com/terraform/cloud-docs/workspaces/dynamic-provider-credentials/workload-identity-tokens) and use it for OIDC-based authentication with the Infisical Terraform provider. You'll start by creating a machine identity in Infisical, then configure Terraform Cloud to pass the injected token into your Terraform runs.

<Steps>
  <Step title="Create a Machine Identity in Infisical">
    Follow the instructions [in this documentation](/documentation/platform/identities/oidc-auth/general) to create a machine identity with OIDC auth. Use the following Infisical OIDC configuration values for Terraform Cloud:
    1. Set the OIDC Discovery URL to https://app.terraform.io.
    2. Set the Issuer to https://app.terraform.io.
    3. Configure the Audience to match the value you will use for **TFC_WORKLOAD_IDENTITY_AUDIENCE** in Terraform Cloud in the next step.

    To view all possible claims available from Terraform Cloud, visit [HashiCorp's documentation](https://developer.hashicorp.com/terraform/cloud-docs/workspaces/dynamic-provider-credentials/workload-identity-tokens#token-structure).

  </Step>
  <Step title="Enable Workload Identity Token Injection in Terraform Cloud">

    <Tabs>
      <Tab title="Generate single token">
        1. **Navigate to your workspace** in Terraform Cloud.
        2. **Add a workspace variable** named `TFC_WORKLOAD_IDENTITY_AUDIENCE`:
           - **Key**: `TFC_WORKLOAD_IDENTITY_AUDIENCE`
           - **Value**: For example, `my-infisical-audience`
           - **Category**: Environment

        > **Important**:
        > - The presence of `TFC_WORKLOAD_IDENTITY_AUDIENCE` is required for Terraform Cloud to inject a token.
        > - If you are self-hosting HCP Terraform agents, ensure they are **v1.7.0 or above**.

        Once set, Terraform Cloud will inject a workload identity token into the run environment as `TFC_WORKLOAD_IDENTITY_TOKEN`.
      </Tab>
      <Tab title="(Optional) Generate Multiple Tokens">
        If you need multiple tokens (each with a different audience), create additional variables:

        ```
        TFC_WORKLOAD_IDENTITY_AUDIENCE_[YOUR_TAG_HERE]
        ```

        For example:
        - `TFC_WORKLOAD_IDENTITY_AUDIENCE_INFISICAL`
        - `TFC_WORKLOAD_IDENTITY_AUDIENCE_OTHER_SERVICE`

        Terraform Cloud will then inject:
        - `TFC_WORKLOAD_IDENTITY_TOKEN_INFISICAL`
        - `TFC_WORKLOAD_IDENTITY_TOKEN_OTHER_SERVICE`

        > **Note**:
        > - The `[YOUR_TAG_HERE]` can only contain letters, numbers, and underscores.
        > - You **cannot** use the reserved keyword `TYPE`.
        > - Generating multiple tokens requires **v1.12.0 or later** if you are self-hosting agents.
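
        To consume one of these tagged tokens from the Infisical provider, point `token_environment_variable_name` at the matching variable. This is a minimal sketch based on the provider block shown in the next step; the identity ID is a placeholder:

        ```hcl
        provider "infisical" {
          auth = {
            oidc = {
              identity_id = "<identity-id>"
              # Matches the tagged token Terraform Cloud injects:
              token_environment_variable_name = "TFC_WORKLOAD_IDENTITY_TOKEN_INFISICAL"
            }
          }
        }
        ```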
      </Tab>
    </Tabs>

    <Warning>
      If you are running on self-hosted HCP Terraform agents, you must use v1.7.0 or later to enable token injection. If you need to generate multiple tokens, you must use v1.12.0 or later.
    </Warning>
  </Step>
  <Step title="Configure the Infisical Provider">
    In your Terraform configuration, reference the injected token by name. For example:

    ```hcl
    provider "infisical" {
      host = "https://app.infisical.com"

      auth = {
        oidc = {
          identity_id = "<identity-id>"
          # This must match the environment variable Terraform injects:
          token_environment_variable_name = "TFC_WORKLOAD_IDENTITY_TOKEN"
        }
      }
    }
    ```

    - **`host`**: Defaults to `https://app.infisical.com`. Override if using a self-hosted Infisical instance.
    - **`identity_id`**: The OIDC identity ID from Infisical.
    - **`token_environment_variable_name`**: Must match the injected variable name from Terraform Cloud. If using a single token, use `TFC_WORKLOAD_IDENTITY_TOKEN`. If using multiple tokens, choose the one you want to use (e.g., `TFC_WORKLOAD_IDENTITY_TOKEN_INFISICAL`).
  </Step>
  <Step title="Validate Your Setup">
    1. Run a plan and apply in Terraform Cloud.
    2. Verify that the Infisical provider authenticates successfully. If you run into authentication errors, double-check that the Infisical identity has the correct roles and permissions in Infisical. A minimal smoke test is sketched below.
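
    As a quick smoke test (a sketch only; the environment slug and project ID are illustrative placeholders), fetch one folder of secrets and confirm the run succeeds:

    ```hcl
    data "infisical_secrets" "smoke_test" {
      env_slug     = "dev"
      workspace_id = "<project-id>"
      folder_path  = "/"
    }
    ```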
  </Step>
</Steps>
docs/documentation/setup/networking.mdx
Normal file
@@ -0,0 +1,36 @@
---
title: "Networking"
sidebarTitle: "Networking"
description: "Network configuration details for Infisical Cloud"
---

## Overview

When integrating your infrastructure with Infisical Cloud, you may need to configure network access controls. This page provides the IP addresses that Infisical uses to communicate with your services.

## Egress IP Addresses

Infisical Cloud operates from two regions: US and EU. If your infrastructure has strict network policies, you may need to allow traffic from Infisical by adding the following IP addresses to your ingress rules. These are the egress IPs Infisical uses when making outbound requests to your services.

### US Region

To allow connections from Infisical US, add these IP addresses to your ingress rules:

- `3.213.63.16`
- `54.164.68.7`

### EU Region

To allow connections from Infisical EU, add these IP addresses to your ingress rules:

- `3.77.89.19`
- `3.125.209.189`

## Common Use Cases

You may need to allow Infisical's egress IPs if your services require inbound connections for the following (an example ingress rule is sketched after this list):

- Secret rotation - When Infisical needs to send requests to your systems to automatically rotate credentials
- Dynamic secrets - When Infisical generates and manages temporary credentials for your cloud services
- Secret integrations - When syncing secrets with third-party services like Azure Key Vault
- Native authentication with machine identities - When using methods like Kubernetes authentication
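
As a minimal sketch of what such an ingress rule can look like (assuming an AWS security group managed with Terraform; the security group ID and port are illustrative placeholders):

```hcl
# Allow inbound HTTPS from Infisical Cloud (US region) egress IPs.
resource "aws_security_group_rule" "allow_infisical_us" {
  type              = "ingress"
  from_port         = 443
  to_port           = 443
  protocol          = "tcp"
  cidr_blocks       = ["3.213.63.16/32", "54.164.68.7/32"]
  security_group_id = "sg-0123456789abcdef0" # illustrative placeholder
  description       = "Infisical Cloud US egress IPs"
}
```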
BIN docs/images/app-connections/databricks/add-service-principal.png (new file)
BIN docs/images/app-connections/databricks/service-principal-ids.png (new file)
BIN docs/images/app-connections/databricks/workspace-settings.png (new file)
BIN docs/images/secret-syncs/databricks/databricks-created.png (new file)
BIN docs/images/secret-syncs/databricks/databricks-destination.png (new file)
BIN docs/images/secret-syncs/databricks/databricks-details.png (new file)
BIN docs/images/secret-syncs/databricks/databricks-options.png (new file)
BIN docs/images/secret-syncs/databricks/databricks-review.png (new file)
BIN docs/images/secret-syncs/databricks/databricks-source.png (new file)
BIN docs/images/secret-syncs/databricks/select-databricks-option.png (new file)
@@ -62,7 +62,7 @@ Infisical currently only supports one method for connecting to Azure, which is O

## Setup Azure Connection in Infisical

<Steps>
  <Step title="Navigate to the App Connections">
  <Step title="Navigate to App Connections">
    Navigate to the **App Connections** tab on the **Organization Settings** page. 
  </Step>

@@ -61,7 +61,7 @@ Infisical currently only supports one method for connecting to Azure, which is O

## Setup Azure Connection in Infisical

<Steps>
  <Step title="Navigate to the App Connections">
  <Step title="Navigate to App Connections">
    Navigate to the **App Connections** tab on the **Organization Settings** page. 
  </Step>
docs/integrations/app-connections/databricks.mdx
Normal file
@@ -0,0 +1,64 @@
---
title: "Databricks Connection"
description: "Learn how to configure a Databricks Connection for Infisical."
---

Infisical supports the use of [service principals](https://docs.databricks.com/en/admin/users-groups/service-principals.html) to connect with your Databricks workspaces.

## Configure a Service Principal for Infisical

<Steps>
  <Step title="Databricks Workspace Settings">
    Navigate to your Databricks Workspace **Settings** via the dropdown in the top right.
    
  </Step>
  <Step title="Manage Service Principals">
    Under the **Identity & Access** tab, click the **Manage** button in the **Service Principals** section.

    
  </Step>
  <Step title="Service Principal Management">
    Click the **Add Service Principal** button.

    
  </Step>
  <Step title="Add Service Principal">
    Select the **Add New** option and create a service principal for Infisical.

    
  </Step>
  <Step title="Generate Service Principal Secret">
    Click on your new service principal, select the **Secrets** tab and click the **Generate Secret** button.

    
  </Step>
  <Step title="Service Principal Secret">
    Copy your service principal **Secret** and **Client ID** for use in the following steps.

    
  </Step>
</Steps>

## Setup Databricks Connection in Infisical

<Steps>
  <Step title="Navigate to App Connections">
    Navigate to the **App Connections** tab on the **Organization Settings** page. 
  </Step>
  <Step title="Add Connection">
    Select the **Databricks Connection** option from the connection options modal.
    
  </Step>
  <Step title="Authorize Connection">
    Select the **Service Principal** method, add your **workspace URL** and **service principal credentials**, then click **Connect to Databricks**. 
  </Step>
  <Step title="Connection Created">
    Your **Databricks Connection** is now available for use. 
  </Step>
</Steps>
@@ -81,7 +81,7 @@ Infisical supports [service account impersonation](https://cloud.google.com/iam/

## Setup GCP Connection in Infisical

<Steps>
  <Step title="Navigate to the App Connections">
  <Step title="Navigate to App Connections">
    Navigate to the **App Connections** tab on the **Organization Settings** page. 

@@ -69,7 +69,7 @@ Infisical supports two methods for connecting to GitHub.

## Setup GitHub Connection in Infisical

<Steps>
  <Step title="Navigate to the App Connections">
  <Step title="Navigate to App Connections">
    Navigate to the **App Connections** tab on the **Organization Settings** page.
    
  </Step>
@@ -135,7 +135,7 @@ Infisical supports two methods for connecting to GitHub.

## Setup GitHub Connection in Infisical

<Steps>
  <Step title="Navigate to the App Connections">
  <Step title="Navigate to App Connections">
    Navigate to the **App Connections** tab on the **Organization Settings** page.
    
  </Step>
@@ -1,102 +1,237 @@
---
title: "Terraform Provider"
description: "Learn how to fetch Secrets From Infisical With Terraform."
url: "https://registry.terraform.io/providers/Infisical/infisical/latest/docs"
title: "Terraform"
description: "Learn how to fetch secrets from Infisical with Terraform using both traditional data sources and ephemeral resources"
---
{/*
This guide provides step-by-step guidance on how to fetch secrets from Infisical using Terraform.

This guide demonstrates how to use Infisical to manage secrets in your Terraform infrastructure code, supporting both traditional data sources and ephemeral resources for enhanced security. It uses:

- Infisical (you can use [Infisical Cloud](https://app.infisical.com) or a [self-hosted instance of Infisical](https://infisical.com/docs/self-hosting/overview)) to store your secrets
- The [Terraform Provider](https://registry.terraform.io/providers/Infisical/infisical/latest) to fetch secrets for your infrastructure

## Prerequisites

- Basic understanding of Terraform
- Install [Terraform](https://www.terraform.io/downloads.html)
Before you begin, make sure you have:

## Steps
- [Terraform](https://www.terraform.io/downloads.html) installed (v1.10.0+ for ephemeral resources)
- An Infisical account with access to a project
- Basic understanding of Terraform and infrastructure as code

### 1. Define Required Providers
## Project Setup

Specify `infisical` in the `required_providers` block within the `terraform` block of your configuration file. If you would like to use a specific version of the provider, uncomment and replace `<latest version>` with the version of the Infisical provider that you want to use.
### Configure Provider

```hcl main.tf
First, specify the Infisical provider in your Terraform configuration:

```hcl
terraform {
  required_providers {
    infisical = {
      # version = <latest version>
      source = "infisical/infisical"
    }
  }
}
```

### 2. Configure the Infisical Provider
### Authentication

Set up the Infisical provider by specifying the `host` and `service_token`. Replace `<>` in `service_token` with your actual token. The `host` is only required if you are using a self-hosted instance of Infisical.
Configure the provider using one of these authentication methods:

```hcl main.tf
#### Machine Identity (Recommended)

Using a Machine Identity, you can authenticate your Terraform provider using either [OIDC Auth](https://infisical.com/docs/documentation/platform/identities/oidc-auth/general) or [Universal Auth](https://infisical.com/docs/documentation/platform/identities/universal-auth) methods.

```hcl
provider "infisical" {
  host = "https://app.infisical.com" # Only required if using a self-hosted instance of Infisical, default is https://app.infisical.com
  client_id = "<>"
  client_secret = "<>"
  service_token = "<>" # DEPRECATED, USE MACHINE IDENTITY AUTH INSTEAD
  host = "https://app.infisical.com" # Optional for cloud, required for self-hosted
  auth {
    universal { # or use the oidc authentication method by providing an identity_id
      client_id = var.infisical_client_id
      client_secret = var.infisical_client_secret
    }
  }
}
```
Learn more about [machine identities](/documentation/platform/identities/machine-identities).

#### Service Token (Legacy)

<Warning>
  Machine Identity authentication is strongly recommended as the secure and modern method. Service tokens are considered legacy and will be deprecated in a future release.
</Warning>

```hcl
provider "infisical" {
  host = "https://app.infisical.com"
  service_token = var.infisical_service_token
}
```

## Using Secrets in Terraform

Infisical provides two methods to fetch and use secrets in your Terraform configurations:

### Method 1: Ephemeral Resources (Recommended)

Ephemeral resources, introduced in Terraform v1.10, provide enhanced security by ensuring sensitive values are never persisted in state files. This is the recommended approach for handling secrets in your infrastructure code.

```hcl
# Fetch database credentials ephemerally
ephemeral "infisical_secret" "db_creds" {
  name = "DB_CREDENTIALS"
  env_slug = "prod"
  workspace_id = var.infisical_workspace_id
  folder_path = "/database"
}

# Use the credentials to configure a provider
provider "postgresql" {
  host = data.aws_db_instance.example.address
  port = data.aws_db_instance.example.port
  username = jsondecode(ephemeral.infisical_secret.db_creds.value)["username"]
  password = jsondecode(ephemeral.infisical_secret.db_creds.value)["password"]
}
```

Key benefits:
- Values are never stored in state files
- Secrets are fetched on-demand during each Terraform operation
- Perfect for GitOps workflows
- Improved security posture for your infrastructure as code

### Method 2: Data Sources

For backwards compatibility or when working with older Terraform versions, you can use the traditional data source approach:

```hcl
# Fetch all secrets in a folder
data "infisical_secrets" "my_secrets" {
  env_slug = "dev"
  workspace_id = var.infisical_workspace_id
  folder_path = "/api"
}

# Use individual secrets
resource "aws_db_instance" "example" {
  username = data.infisical_secrets.my_secrets.secrets["DB_USER"]
  password = data.infisical_secrets.my_secrets.secrets["DB_PASS"]
}
```

<Warning>
  It is recommended to use Terraform variables to pass your service token dynamically to avoid hard-coding it.
  When using data sources, secret values are stored in Terraform's state file. Ensure your state file is properly secured.
</Warning>
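
If you do expose data-source values as Terraform outputs, you can at least mark them as sensitive so they are redacted from CLI output. A minimal sketch reusing the data source above (note this does not remove the value from state):

```hcl
output "db_user" {
  value     = data.infisical_secrets.my_secrets.secrets["DB_USER"]
  sensitive = true # redacts the value in plan/apply output
}
```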

### 3. Fetch Infisical Secrets
## Common Use Cases

Use the `infisical_secrets` data source to fetch your secrets. In this block, you must set the `env_slug` and `folder_path` to scope the secrets you want.
### Secure Database Credential Management

`env_slug` is the slug of the environment name. This slug name can be found under the project settings page on the Infisical dashboard.

`folder_path` is the path to the folder in a given environment. The path `/` is the root of the environment, whereas `/folder1` is a folder at the root of the environment.

```hcl main.tf
data "infisical_secrets" "my-secrets" {
  env_slug = "dev"
  folder_path = "/some-folder/another-folder"
  workspace_id = "your-project-id"
}
```

### 4. Define Outputs

As an example, we are going to output your fetched secrets. Replace `SECRET-NAME` with the actual name of your secret.

For a single secret:

```hcl main.tf
output "single-secret" {
  value = data.infisical_secrets.my-secrets.secrets["SECRET-NAME"]
}
```

For all secrets:
Manage database credentials securely without exposing sensitive information in your state files:

```hcl
output "all-secrets" {
  value = data.infisical_secrets.my-secrets.secrets
# Fetch database credentials securely
ephemeral "infisical_secret" "db_creds" {
  name = "DB_CREDENTIALS"
  env_slug = "prod"
  workspace_id = var.infisical_workspace_id
  folder_path = "/database"
}

# Use the credentials in your database instance
resource "aws_db_instance" "example" {
  identifier = "my-database"
  allocated_storage = 20
  engine = "postgres"
  engine_version = "14.0"
  instance_class = "db.t3.micro"

  # Securely inject credentials from Infisical
  username = jsondecode(ephemeral.infisical_secret.db_creds.value)["username"]
  password = jsondecode(ephemeral.infisical_secret.db_creds.value)["password"]
}
```

### 5. Run Terraform
### GitOps Workflow with OIDC

Once your configuration is complete, initialize your Terraform working directory:
To eliminate the need for static credentials, you can authenticate your workflow using [OpenID Connect (OIDC)](https://infisical.com/docs/documentation/platform/identities/oidc-auth/general) through providers like the [Infisical Secrets GitHub Action](https://github.com/Infisical/secrets-action).
Once authenticated, you can securely access secrets through the Infisical provider:

```bash
$ terraform init
```

```hcl
provider "infisical" {
  # Auth credentials automatically injected from the environment
}

# Fetch deployment credentials
ephemeral "infisical_secret" "deploy_token" {
  name = "DEPLOY_TOKEN"
  env_slug = "prod"
  workspace_id = var.infisical_workspace_id
  folder_path = "/deployment"
}
```
For detailed instructions on setting up OIDC authentication with GitHub Actions, refer to our [GitHub Actions OIDC guide](https://infisical.com/docs/documentation/platform/identities/oidc-auth/github).

Then, run the plan command to view the fetched secrets:
## Best Practices

```bash
$ terraform plan
```

1. **Use Ephemeral Resources**: Whenever possible, use ephemeral resources instead of data sources for improved security.

Terraform will now fetch your secrets from Infisical and display them as output according to your configuration.
2. **Organize Secrets**: Structure your secrets in Infisical using folders to maintain clean separation:
   ```hcl
   ephemeral "infisical_secret" "db_secret" {
     folder_path = "/databases/postgresql" # Organized by service
     # ...
   }
   ```

## Conclusion
3. **Variable Usage**: Use Terraform variables for workspace IDs and environment slugs:
   ```hcl
   variable "environment" {
     description = "Environment (dev, staging, prod)"
     type = string
   }

You have now successfully set up and used the Infisical provider with Terraform to fetch secrets. For more information, visit the [Infisical documentation](https://registry.terraform.io/providers/Infisical/infisical/latest/docs). */}
   ephemeral "infisical_secret" "secret" {
     env_slug = var.environment
     # ...
   }
   ```

4. **Error Handling**: Add lifecycle blocks for critical secrets:
   ```hcl
   ephemeral "infisical_secret" "critical_secret" {
     # ...
     lifecycle {
       postcondition {
         condition = length(self.value) > 0
         error_message = "Critical secret must not be empty"
       }
     }
   }
   ```

## FAQ

<AccordionGroup>
  <Accordion title="What happens if I'm using an older version of Terraform?">
    If you're using Terraform < v1.10.0, you'll need to use the data source approach.
    Consider upgrading to take advantage of the enhanced security features provided
    by ephemeral resources.
  </Accordion>
  <Accordion title="Can I mix ephemeral resources and data sources?">
    Yes, you can use both in the same configuration. However, we recommend using
    ephemeral resources for any sensitive values to ensure they're not stored in state.
  </Accordion>
  <Accordion title="How do I secure my state file when using data sources?">
    When using data sources, follow Terraform's best practices for state management:
    - Use remote state with encryption at rest (see the sketch after this list)
    - Implement proper access controls
    - Consider using state encryption
    - Treat the state like a secret
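
    For example, a minimal remote-state sketch (assuming an S3 backend; the bucket name, key, and region are illustrative placeholders):

    ```hcl
    terraform {
      backend "s3" {
        bucket  = "my-terraform-state"          # illustrative placeholder
        key     = "infisical/terraform.tfstate" # illustrative placeholder
        region  = "us-east-1"
        encrypt = true # encrypt state at rest
      }
    }
    ```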
    Better yet, use ephemeral resources to avoid storing sensitive values in state entirely.
  </Accordion>
</AccordionGroup>

See also:
- [Machine Identity setup guide](/documentation/platform/identities/machine-identities)
- [Terraform Provider Registry](https://registry.terraform.io/providers/Infisical/infisical/latest/docs)
- [GitOps Best Practices](https://www.infisical.com/blog/gitops-best-practices)
docs/integrations/secret-syncs/databricks.mdx
Normal file
@@ -0,0 +1,140 @@
---
title: "Databricks Sync"
description: "Learn how to configure a Databricks Sync for Infisical."
---

**Prerequisites:**

- Set up and add secrets to [Infisical Cloud](https://app.infisical.com)
- Create a [Databricks Connection](/integrations/app-connections/databricks)

<Tabs>
  <Tab title="Infisical UI">
    1. Navigate to **Project** > **Integrations** and select the **Secret Syncs** tab. Click on the **Add Sync** button.
    

    2. Select the **Databricks** option.
    

    3. Configure the **Source** from which secrets should be retrieved, then click **Next**.
    

    - **Environment**: The project environment to retrieve secrets from.
    - **Secret Path**: The folder path to retrieve secrets from.

    <Tip>
      If you need to sync secrets from multiple folder locations, check out [secret imports](/documentation/platform/secret-reference#secret-imports).
    </Tip>

    4. Configure the **Destination** to which secrets should be deployed, then click **Next**.
    

    - **Databricks Connection**: The Databricks Connection to authenticate with.
    - **Scope**: The Databricks secret scope to sync secrets to.

    <Note>
      You must create a secret scope in your Databricks workspace prior to configuration. Ensure your service principal has [Write permissions](https://docs.databricks.com/en/security/auth/access-control/index.html#secret-acls) for the specified secret scope.
    </Note>

    5. Configure the **Sync Options** to specify how secrets should be synced, then click **Next**.
    

    - **Initial Sync Behavior**: Determines how Infisical should resolve the initial sync.
      - **Overwrite Destination Secrets**: Removes any secrets at the destination endpoint not present in Infisical.
        <Note>
          Databricks does not support importing secrets.
        </Note>
    - **Auto-Sync Enabled**: If enabled, secrets will automatically be synced from the source location when changes occur. Disable to enforce manual syncing only.

    6. Configure the **Details** of your Databricks Sync, then click **Next**.
    

    - **Name**: The name of your sync. Must be slug-friendly.
    - **Description**: An optional description for your sync.

    7. Review your Databricks Sync configuration, then click **Create Sync**.
    

    8. If enabled, your Databricks Sync will begin syncing your secrets to the destination endpoint.
    

  </Tab>
  <Tab title="API">
    To create a **Databricks Sync**, make an API request to the [Create Databricks Sync](/api-reference/endpoints/secret-syncs/databricks/create) API endpoint.

    ### Sample request

    ```bash Request
    curl --request POST \
      --url https://app.infisical.com/api/v1/secret-syncs/databricks \
      --header 'Content-Type: application/json' \
      --data '{
        "name": "my-databricks-sync",
        "projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
        "description": "an example sync",
        "connectionId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
        "environment": "dev",
        "secretPath": "/my-secrets",
        "isEnabled": true,
        "syncOptions": {
          "initialSyncBehavior": "overwrite-destination"
        },
        "destinationConfig": {
          "scope": "my-scope"
        }
      }'
    ```

    ### Sample response

    ```bash Response
    {
      "secretSync": {
        "id": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
        "name": "my-databricks-sync",
        "description": "an example sync",
        "isEnabled": true,
        "version": 1,
        "folderId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
        "connectionId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
        "createdAt": "2023-11-07T05:31:56Z",
        "updatedAt": "2023-11-07T05:31:56Z",
        "syncStatus": "succeeded",
        "lastSyncJobId": "123",
        "lastSyncMessage": null,
        "lastSyncedAt": "2023-11-07T05:31:56Z",
        "importStatus": null,
        "lastImportJobId": null,
        "lastImportMessage": null,
        "lastImportedAt": null,
        "removeStatus": null,
        "lastRemoveJobId": null,
        "lastRemoveMessage": null,
        "lastRemovedAt": null,
        "syncOptions": {
          "initialSyncBehavior": "overwrite-destination"
        },
        "projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
        "connection": {
          "app": "databricks",
          "name": "my-databricks-connection",
          "id": "3c90c3cc-0d44-4b50-8888-8dd25736052a"
        },
        "environment": {
          "slug": "dev",
          "name": "Development",
          "id": "3c90c3cc-0d44-4b50-8888-8dd25736052a"
        },
        "folder": {
          "id": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
          "path": "/my-secrets"
        },
        "destination": "databricks",
        "destinationConfig": {
          "scope": "my-scope"
        }
      }
    }
    ```
  </Tab>
</Tabs>
@@ -1,28 +1,34 @@
---
title: "Components"
description: "Infisical's components span multiple clients, an API, and a storage backend."
description: "Understand Infisical's core architectural components and how they work together."
---

## Infisical API
## Overview

The Infisical API (sometimes referred to as the **backend**) contains the core platform logic.
Infisical is architected around several key components that work in concert to provide a secure and streamlined secret management experience. These components span the client, API, and storage layers, ensuring that secrets are protected at every stage of their lifecycle.

## Storage backend
## 1. API (Backend)

Infisical relies on a storage backend to store data including users and secrets. Infisical's storage backend is Postgres.
Infisical exposes a well-documented [REST API](https://infisical.com/docs/api-reference/overview/introduction) that enables programmatic interaction with the platform, supporting a wide range of use cases.

## Redis
## 2. Storage Backend

Infisical uses [Redis](https://redis.com) to enable more complex workflows including a queuing system to manage long running asynchronous tasks, cron jobs, as well as reliable cache for frequently used resources.
Infisical relies on a robust storage backend to durably store secrets, users, and other platform data. Infisical's storage backend is [PostgreSQL](https://www.postgresql.org/).

## Infisical Web UI
## 3. Caching Layer

The Web UI is the browser-based portal that connects to the Infisical API.
Infisical uses [Redis](https://redis.com) to enable more complex workflows, including a queuing system to manage long-running asynchronous tasks and cron jobs, as well as a reliable cache for frequently used resources.

## Infisical clients
## 4. Clients

Clients are any application or infrastructure that connects to the Infisical API using one of the below methods:
- Public API: Making API requests directly to the Infisical API.
- Client SDK: A platform-specific library with method abstractions for working with secrets. Currently, there are three official SDKs: [Node SDK](https://infisical.com/docs/sdks/languages/node), [Python SDK](https://infisical.com/docs/sdks/languages/python), and [Java SDK](https://infisical.com/docs/sdks/languages/java).
- CLI: A terminal-based interface for interacting with the Infisical API.
- Kubernetes Operator: This operator retrieves secrets from Infisical and securely stores them.
Clients are interfaces through which users and applications interact with the Infisical API:

- **Web UI**: A browser-based portal providing a user-friendly interface for managing secrets, configurations, and performing administrative tasks.

- [**CLI**](https://infisical.com/docs/cli): A terminal-based tool for interacting with the Infisical API, enabling automation, scripting, and integration into CI/CD pipelines.

- **SDKs (Software Development Kits)**: Platform-specific libraries with method abstractions for working with secrets. Supported languages include [Node.js](https://infisical.com/docs/sdks/languages/node), [Python](https://infisical.com/docs/sdks/languages/python), [Java](https://infisical.com/docs/sdks/languages/java), [Golang](https://infisical.com/docs/sdks/languages/go), [Ruby](https://infisical.com/docs/sdks/languages/ruby), and [.NET](https://infisical.com/docs/sdks/languages/csharp).

- [**Kubernetes Operator**](https://infisical.com/docs/integrations/platforms/kubernetes): A Kubernetes-native component that facilitates the secure retrieval and management of secrets within a Kubernetes cluster. The operator supports multiple custom resource definitions (CRDs) for syncing secrets.

- [**Infisical Agent**](https://infisical.com/docs/integrations/platforms/infisical-agent): A daemon that automatically fetches and manages access tokens and secrets to be used in various client resources.
docs/mint.json
@@ -85,6 +85,10 @@
"documentation/guides/microsoft-power-apps",
"documentation/guides/organization-structure"
]
},
{
"group": "Setup",
"pages": ["documentation/setup/networking"]
}
]
},
@@ -232,7 +236,8 @@
"documentation/platform/identities/oidc-auth/general",
"documentation/platform/identities/oidc-auth/github",
"documentation/platform/identities/oidc-auth/circleci",
"documentation/platform/identities/oidc-auth/gitlab"
"documentation/platform/identities/oidc-auth/gitlab",
"documentation/platform/identities/oidc-auth/terraform-cloud"
]
},
"documentation/platform/mfa",
@@ -285,7 +290,7 @@
"pages": [
"self-hosting/overview",
{
"group": "Containerized installation methods",
"group": "Installation methods",
"pages": [
"self-hosting/deployment-options/standalone-infisical",
"self-hosting/deployment-options/docker-swarm",
@@ -293,12 +298,12 @@
"self-hosting/deployment-options/kubernetes-helm"
]
},
"self-hosting/guides/upgrading-infisical",
"self-hosting/configuration/envars",
"self-hosting/configuration/requirements",
{
"group": "Guides",
"pages": [
"self-hosting/configuration/schema-migrations",
"self-hosting/guides/mongo-to-postgres",
"self-hosting/guides/custom-certificates"
]
@@ -392,10 +397,11 @@
"group": "Connections",
"pages": [
"integrations/app-connections/aws",
"integrations/app-connections/github",
"integrations/app-connections/gcp",
"integrations/app-connections/azure-app-configuration",
"integrations/app-connections/azure-key-vault",
"integrations/app-connections/azure-app-configuration"
"integrations/app-connections/databricks",
"integrations/app-connections/gcp",
"integrations/app-connections/github"
]
}
]
@@ -409,10 +415,11 @@
"pages": [
"integrations/secret-syncs/aws-parameter-store",
"integrations/secret-syncs/aws-secrets-manager",
"integrations/secret-syncs/github",
"integrations/secret-syncs/gcp-secret-manager",
"integrations/secret-syncs/azure-app-configuration",
"integrations/secret-syncs/azure-key-vault",
"integrations/secret-syncs/azure-app-configuration"
"integrations/secret-syncs/databricks",
"integrations/secret-syncs/gcp-secret-manager",
"integrations/secret-syncs/github"
]
}
]
@@ -631,7 +638,8 @@
"api-reference/endpoints/oidc-auth/attach",
"api-reference/endpoints/oidc-auth/retrieve",
"api-reference/endpoints/oidc-auth/update",
"api-reference/endpoints/oidc-auth/revoke"
"api-reference/endpoints/oidc-auth/revoke",
"integrations/frameworks/terraform-cloud"
]
},
{
@@ -825,27 +833,15 @@
]
},
{
"group": "GitHub",
"group": "Azure App Configuration",
"pages": [
"api-reference/endpoints/app-connections/github/list",
"api-reference/endpoints/app-connections/github/available",
"api-reference/endpoints/app-connections/github/get-by-id",
"api-reference/endpoints/app-connections/github/get-by-name",
"api-reference/endpoints/app-connections/github/create",
"api-reference/endpoints/app-connections/github/update",
"api-reference/endpoints/app-connections/github/delete"
]
},
{
"group": "GCP",
"pages": [
"api-reference/endpoints/app-connections/gcp/list",
"api-reference/endpoints/app-connections/gcp/available",
"api-reference/endpoints/app-connections/gcp/get-by-id",
"api-reference/endpoints/app-connections/gcp/get-by-name",
"api-reference/endpoints/app-connections/gcp/create",
"api-reference/endpoints/app-connections/gcp/update",
"api-reference/endpoints/app-connections/gcp/delete"
"api-reference/endpoints/app-connections/azure-app-configuration/list",
"api-reference/endpoints/app-connections/azure-app-configuration/available",
"api-reference/endpoints/app-connections/azure-app-configuration/get-by-id",
"api-reference/endpoints/app-connections/azure-app-configuration/get-by-name",
"api-reference/endpoints/app-connections/azure-app-configuration/create",
"api-reference/endpoints/app-connections/azure-app-configuration/update",
"api-reference/endpoints/app-connections/azure-app-configuration/delete"
]
},
{
@@ -861,15 +857,39 @@
]
},
{
"group": "Azure App Configuration",
"group": "Databricks",
"pages": [
"api-reference/endpoints/app-connections/azure-app-configuration/list",
"api-reference/endpoints/app-connections/azure-app-configuration/available",
"api-reference/endpoints/app-connections/azure-app-configuration/get-by-id",
"api-reference/endpoints/app-connections/azure-app-configuration/get-by-name",
"api-reference/endpoints/app-connections/azure-app-configuration/create",
"api-reference/endpoints/app-connections/azure-app-configuration/update",
"api-reference/endpoints/app-connections/azure-app-configuration/delete"
"api-reference/endpoints/app-connections/databricks/list",
"api-reference/endpoints/app-connections/databricks/available",
"api-reference/endpoints/app-connections/databricks/get-by-id",
"api-reference/endpoints/app-connections/databricks/get-by-name",
"api-reference/endpoints/app-connections/databricks/create",
"api-reference/endpoints/app-connections/databricks/update",
"api-reference/endpoints/app-connections/databricks/delete"
]
},
{
"group": "GCP",
"pages": [
"api-reference/endpoints/app-connections/gcp/list",
"api-reference/endpoints/app-connections/gcp/available",
"api-reference/endpoints/app-connections/gcp/get-by-id",
"api-reference/endpoints/app-connections/gcp/get-by-name",
"api-reference/endpoints/app-connections/gcp/create",
"api-reference/endpoints/app-connections/gcp/update",
"api-reference/endpoints/app-connections/gcp/delete"
]
},
{
"group": "GitHub",
"pages": [
"api-reference/endpoints/app-connections/github/list",
"api-reference/endpoints/app-connections/github/available",
"api-reference/endpoints/app-connections/github/get-by-id",
"api-reference/endpoints/app-connections/github/get-by-name",
"api-reference/endpoints/app-connections/github/create",
"api-reference/endpoints/app-connections/github/update",
"api-reference/endpoints/app-connections/github/delete"
]
}
]
@@ -908,30 +928,17 @@
]
},
{
"group": "GitHub",
"group": "Azure App Configuration",
"pages": [
"api-reference/endpoints/secret-syncs/github/list",
"api-reference/endpoints/secret-syncs/github/get-by-id",
"api-reference/endpoints/secret-syncs/github/get-by-name",
"api-reference/endpoints/secret-syncs/github/create",
"api-reference/endpoints/secret-syncs/github/update",
"api-reference/endpoints/secret-syncs/github/delete",
"api-reference/endpoints/secret-syncs/github/sync-secrets",
"api-reference/endpoints/secret-syncs/github/remove-secrets"
]
},
{
"group": "GCP Secret Manager",
"pages": [
"api-reference/endpoints/secret-syncs/gcp-secret-manager/list",
"api-reference/endpoints/secret-syncs/gcp-secret-manager/get-by-id",
"api-reference/endpoints/secret-syncs/gcp-secret-manager/get-by-name",
"api-reference/endpoints/secret-syncs/gcp-secret-manager/create",
"api-reference/endpoints/secret-syncs/gcp-secret-manager/update",
"api-reference/endpoints/secret-syncs/gcp-secret-manager/delete",
"api-reference/endpoints/secret-syncs/gcp-secret-manager/sync-secrets",
"api-reference/endpoints/secret-syncs/gcp-secret-manager/import-secrets",
"api-reference/endpoints/secret-syncs/gcp-secret-manager/remove-secrets"
"api-reference/endpoints/secret-syncs/azure-app-configuration/list",
"api-reference/endpoints/secret-syncs/azure-app-configuration/get-by-id",
"api-reference/endpoints/secret-syncs/azure-app-configuration/get-by-name",
"api-reference/endpoints/secret-syncs/azure-app-configuration/create",
"api-reference/endpoints/secret-syncs/azure-app-configuration/update",
"api-reference/endpoints/secret-syncs/azure-app-configuration/delete",
"api-reference/endpoints/secret-syncs/azure-app-configuration/sync-secrets",
"api-reference/endpoints/secret-syncs/azure-app-configuration/import-secrets",
"api-reference/endpoints/secret-syncs/azure-app-configuration/remove-secrets"
]
},
{
@@ -949,20 +956,45 @@
]
},
{
"group": "Azure App Configuration",
"group": "Databricks",
"pages": [
"api-reference/endpoints/secret-syncs/azure-app-configuration/list",
"api-reference/endpoints/secret-syncs/azure-app-configuration/get-by-id",
"api-reference/endpoints/secret-syncs/azure-app-configuration/get-by-name",
"api-reference/endpoints/secret-syncs/azure-app-configuration/create",
"api-reference/endpoints/secret-syncs/azure-app-configuration/update",
"api-reference/endpoints/secret-syncs/azure-app-configuration/delete",
"api-reference/endpoints/secret-syncs/azure-app-configuration/sync-secrets",
"api-reference/endpoints/secret-syncs/azure-app-configuration/import-secrets",
"api-reference/endpoints/secret-syncs/azure-app-configuration/remove-secrets"
"api-reference/endpoints/secret-syncs/databricks/list",
"api-reference/endpoints/secret-syncs/databricks/get-by-id",
"api-reference/endpoints/secret-syncs/databricks/get-by-name",
"api-reference/endpoints/secret-syncs/databricks/create",
"api-reference/endpoints/secret-syncs/databricks/update",
"api-reference/endpoints/secret-syncs/databricks/delete",
"api-reference/endpoints/secret-syncs/databricks/sync-secrets",
"api-reference/endpoints/secret-syncs/databricks/remove-secrets"
]
},
{
"group": "GCP Secret Manager",
"pages": [
"api-reference/endpoints/secret-syncs/gcp-secret-manager/list",
"api-reference/endpoints/secret-syncs/gcp-secret-manager/get-by-id",
"api-reference/endpoints/secret-syncs/gcp-secret-manager/get-by-name",
"api-reference/endpoints/secret-syncs/gcp-secret-manager/create",
"api-reference/endpoints/secret-syncs/gcp-secret-manager/update",
"api-reference/endpoints/secret-syncs/gcp-secret-manager/delete",
"api-reference/endpoints/secret-syncs/gcp-secret-manager/sync-secrets",
"api-reference/endpoints/secret-syncs/gcp-secret-manager/import-secrets",
"api-reference/endpoints/secret-syncs/gcp-secret-manager/remove-secrets"
]
},
{
"group": "GitHub",
"pages": [
"api-reference/endpoints/secret-syncs/github/list",
"api-reference/endpoints/secret-syncs/github/get-by-id",
"api-reference/endpoints/secret-syncs/github/get-by-name",
"api-reference/endpoints/secret-syncs/github/create",
"api-reference/endpoints/secret-syncs/github/update",
"api-reference/endpoints/secret-syncs/github/delete",
"api-reference/endpoints/secret-syncs/github/sync-secrets",
"api-reference/endpoints/secret-syncs/github/remove-secrets"
]
}
]
},
{
@@ -4,8 +4,6 @@ sidebarTitle: "Go"
|
||||
icon: "golang"
|
||||
---
|
||||
|
||||
|
||||
|
||||
If you're working with Go Lang, the official [Infisical Go SDK](https://github.com/infisical/go-sdk) package is the easiest way to fetch and work with secrets for your application.
|
||||
|
||||
- [Package](https://pkg.go.dev/github.com/infisical/go-sdk)
|
||||
@@ -57,7 +55,9 @@ func main() {
|
||||
This example demonstrates how to use the Infisical Go SDK in a simple Go application. The application retrieves a secret named `API_KEY` from the `dev` environment of the `YOUR_PROJECT_ID` project.
|
||||
|
||||
<Warning>
|
||||
We do not recommend hardcoding your [Machine Identity Tokens](/platform/identities/overview). Setting it as an environment variable would be best.
|
||||
We do not recommend hardcoding your [Machine Identity
|
||||
Tokens](/platform/identities/overview). Setting it as an environment variable
|
||||
would be best.
|
||||
</Warning>
|
||||
|
||||
# Installation
|
||||
@@ -95,6 +95,10 @@ client := infisical.NewInfisicalClient(context.Background(), infisical.Config{
|
||||
<ParamField query="SilentMode" type="boolean" default={false} optional>
|
||||
Whether or not to suppress logs such as warnings from the token refreshing process. Defaults to false if not specified.
|
||||
</ParamField>
|
||||
|
||||
<ParamField query="CacheExpiryInSeconds" type="number" default={0} optional>
|
||||
Defines how long certain responses should be cached in memory, in seconds. When set to a positive value, responses from specific methods (like secret fetching) will be cached for this duration. Set to 0 to disable caching.
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
|
||||
</ParamField>
|
||||
@@ -140,6 +144,7 @@ Call `.Auth().UniversalAuthLogin()` with empty arguments to use the following en
|
||||
- `INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET` - Your machine identity client secret.
|
||||
|
||||
**Using the SDK directly**
|
||||
|
||||
```go
|
||||
_, err := client.Auth().UniversalAuthLogin("CLIENT_ID", "CLIENT_SECRET")
|
||||
|
||||
@@ -150,9 +155,12 @@ if err != nil {
|
||||
```
|
||||
|
||||
#### GCP ID Token Auth
|
||||
|
||||
<Info>
|
||||
Please note that this authentication method will only work if you're running your application on Google Cloud Platform.
|
||||
Please [read more](/documentation/platform/identities/gcp-auth) about this authentication method.
|
||||
Please note that this authentication method will only work if you're running
|
||||
your application on Google Cloud Platform. Please [read
|
||||
more](/documentation/platform/identities/gcp-auth) about this authentication
|
||||
method.
|
||||
</Info>
|
||||
|
||||
**Using environment variables**
|
||||
@@ -162,6 +170,7 @@ Call `.Auth().GcpIdTokenAuthLogin()` with empty arguments to use the following e
|
||||
- `INFISICAL_GCP_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID.
|
||||
|
||||
**Using the SDK directly**
|
||||
|
||||
```go
|
||||
_, err := client.Auth().GcpIdTokenAuthLogin("YOUR_MACHINE_IDENTITY_ID")
|
||||
|
||||
@@ -181,6 +190,7 @@ Call `.Auth().GcpIamAuthLogin()` with empty arguments to use the following envir
|
||||
- `INFISICAL_GCP_IAM_SERVICE_ACCOUNT_KEY_FILE_PATH` - The path to your GCP service account key file.
|
||||
|
||||
**Using the SDK directly**
|
||||
|
||||
```go
|
||||
_, err = client.Auth().GcpIamAuthLogin("MACHINE_IDENTITY_ID", "SERVICE_ACCOUNT_KEY_FILE_PATH")
|
||||
|
||||
@@ -191,9 +201,12 @@ if err != nil {
|
||||
```
|
||||
|
||||
#### AWS IAM Auth
|
||||
|
||||
<Info>
|
||||
Please note that this authentication method will only work if you're running your application on AWS.
|
||||
Please [read more](/documentation/platform/identities/aws-auth) about this authentication method.
|
||||
Please note that this authentication method will only work if you're running
|
||||
your application on AWS. Please [read
|
||||
more](/documentation/platform/identities/aws-auth) about this authentication
|
||||
method.
|
||||
</Info>
|
||||
|
||||
**Using environment variables**
|
||||
@@ -203,6 +216,7 @@ Call `.Auth().AwsIamAuthLogin()` with empty arguments to use the following envir
|
||||
- `INFISICAL_AWS_IAM_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID.
|
||||
|
||||
**Using the SDK directly**
|
||||
|
||||
```go
|
||||
_, err = client.Auth().AwsIamAuthLogin("MACHINE_IDENTITY_ID")
|
||||
|
||||
@@ -212,11 +226,13 @@ if err != nil {
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
#### Azure Auth
|
||||
|
||||
<Info>
|
||||
Please note that this authentication method will only work if you're running your application on Azure.
|
||||
Please [read more](/documentation/platform/identities/azure-auth) about this authentication method.
|
||||
Please note that this authentication method will only work if you're running
|
||||
your application on Azure. Please [read
|
||||
more](/documentation/platform/identities/azure-auth) about this authentication
|
||||
method.
|
||||
</Info>
|
||||
|
||||
**Using environment variables**
|
||||
@@ -226,6 +242,7 @@ Call `.Auth().AzureAuthLogin()` with empty arguments to use the following enviro
|
||||
- `INFISICAL_AZURE_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID.
|
||||
|
||||
**Using the SDK directly**
|
||||
|
||||
```go
|
||||
_, err = client.Auth().AzureAuthLogin("MACHINE_IDENTITY_ID")
|
||||
|
||||
@@ -236,9 +253,12 @@ if err != nil {
|
||||
```
|
||||
|
||||
#### Kubernetes Auth
|
||||
|
||||
<Info>
|
||||
Please note that this authentication method will only work if you're running your application on Kubernetes.
|
||||
Please [read more](/documentation/platform/identities/kubernetes-auth) about this authentication method.
|
||||
Please note that this authentication method will only work if you're running
|
||||
your application on Kubernetes. Please [read
|
||||
more](/documentation/platform/identities/kubernetes-auth) about this
|
||||
authentication method.
|
||||
</Info>
|
||||
|
||||
**Using environment variables**
|
||||
@@ -249,6 +269,7 @@ Call `.Auth().KubernetesAuthLogin()` with empty arguments to use the following e
|
||||
- `INFISICAL_KUBERNETES_SERVICE_ACCOUNT_TOKEN_PATH_ENV_NAME` - The environment variable name that contains the path to the service account token. This is optional and will default to `/var/run/secrets/kubernetes.io/serviceaccount/token`.
|
||||
|
||||
**Using the SDK directly**
|
||||
|
||||
```go
|
||||
// Service account token path will default to /var/run/secrets/kubernetes.io/serviceaccount/token if empty value is passed
|
||||
_, err = client.Auth().KubernetesAuthLogin("MACHINE_IDENTITY_ID", "SERVICE_ACCOUNT_TOKEN_PATH")
|
||||
@@ -262,6 +283,7 @@ if err != nil {
|
||||
## Working With Secrets
|
||||
|
||||
### List Secrets
|
||||
|
||||
`client.Secrets().List(options)`
|
||||
|
||||
Retrieve all secrets within the Infisical project and environment that client is connected to.
|
||||
@@ -311,7 +333,9 @@ secrets, err := client.Secrets().List(infisical.ListSecretsOptions{
|
||||
</ParamField>
|
||||
|
||||
###

### Retrieve Secret

`client.Secrets().Retrieve(options)`

Retrieve a secret from Infisical. By default, `Secrets().Retrieve()` fetches and returns a shared secret.

@@ -327,27 +351,31 @@ secret, err := client.Secrets().Retrieve(infisical.RetrieveSecretOptions{
### Parameters

<ParamField query="Parameters" type="object" optional>
  <Expandable title="properties">
    <ParamField query="SecretKey" type="string" required>
      The key of the secret to retrieve.
    </ParamField>
    <ParamField query="ProjectID" type="string" required>
      The ID of the project that the secret lives in.
    </ParamField>
    <ParamField query="Environment" type="string" required>
      The slug name (dev, prod, etc.) of the environment from which the secret should be fetched.
    </ParamField>
    <ParamField query="SecretPath" type="string" optional>
      The path from which the secret should be fetched.
    </ParamField>
    <ParamField query="Type" type="string" optional>
      The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared".
    </ParamField>
  </Expandable>
</ParamField>
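A short sketch built from the parameters above; the `SecretValue` field on the returned secret is an assumption about the SDK's model, and an `fmt` import is assumed:

```go
// Fetch a single shared secret by key from the dev environment.
secret, err := client.Secrets().Retrieve(infisical.RetrieveSecretOptions{
    SecretKey:   "API_KEY", // placeholder
    ProjectID:   "PROJECT_ID",
    Environment: "dev",
    SecretPath:  "/",
    Type:        "shared", // optional; defaults to "shared"
})
if err != nil {
    // handle error
}
fmt.Println(secret.SecretValue) // field name assumed from the SDK's secret model
```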
###

### Create Secret

`client.Secrets().Create(options)`

Create a new secret in Infisical.

@@ -363,36 +391,38 @@ secret, err := client.Secrets().Create(infisical.CreateSecretOptions{
})
```
### Parameters

<ParamField query="Parameters" type="object" optional>
  <Expandable title="properties">
    <ParamField query="SecretKey" type="string" required>
      The key of the secret to create.
    </ParamField>
    <ParamField query="SecretValue" type="string" required>
      The value of the secret.
    </ParamField>
    <ParamField query="SecretComment" type="string" optional>
      A comment for the secret.
    </ParamField>
    <ParamField query="ProjectID" type="string" required>
      The ID of the project where the secret should be created.
    </ParamField>
    <ParamField query="Environment" type="string" required>
      The slug name (dev, prod, etc.) of the environment where the secret should be created.
    </ParamField>
    <ParamField query="SecretPath" type="string" optional>
      The path where the secret should be created.
    </ParamField>
    <ParamField query="Type" type="string" optional>
      The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared".
    </ParamField>
  </Expandable>
</ParamField>
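Putting the parameters above together, a minimal sketch of a `Create` call (placeholder values throughout):

```go
// Create a new shared secret at the root path of the dev environment.
_, err = client.Secrets().Create(infisical.CreateSecretOptions{
    SecretKey:     "API_KEY",
    SecretValue:   "some-value",
    SecretComment: "created via the Go SDK", // optional
    ProjectID:     "PROJECT_ID",
    Environment:   "dev",
    SecretPath:    "/", // optional
})
if err != nil {
    // handle error
}
```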
###

### Update Secret

`client.Secrets().Update(options)`

Update an existing secret in Infisical.

@@ -412,33 +442,42 @@ secret, err := client.Secrets().Update(infisical.UpdateSecretOptions{
### Parameters

<ParamField query="Parameters" type="object" optional>
  <Expandable title="properties">
    <ParamField query="SecretKey" type="string" required>
      The key of the secret to update.
    </ParamField>
    <ParamField query="NewSecretValue" type="string" required>
      The new value of the secret.
    </ParamField>
    <ParamField query="NewSkipMultilineEncoding" type="boolean" default="false" optional>
      Whether or not to skip multiline encoding for the new secret value.
    </ParamField>
    <ParamField query="ProjectID" type="string" required>
      The ID of the project that the secret lives in.
    </ParamField>
    <ParamField query="Environment" type="string" required>
      The slug name (dev, prod, etc.) of the environment where the secret should be updated.
    </ParamField>
    <ParamField query="SecretPath" type="string" optional>
      The path where the secret should be updated.
    </ParamField>
    <ParamField query="Type" type="string" optional>
      The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared".
    </ParamField>
  </Expandable>
</ParamField>
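A minimal sketch of an `Update` call using the parameters above (placeholder values):

```go
// Rotate an existing secret's value in place.
_, err = client.Secrets().Update(infisical.UpdateSecretOptions{
    SecretKey:                "API_KEY",
    NewSecretValue:           "new-value",
    NewSkipMultilineEncoding: false, // optional; defaults to false
    ProjectID:                "PROJECT_ID",
    Environment:              "dev",
    SecretPath:               "/", // optional
})
if err != nil {
    // handle error
}
```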
###

### Delete Secret

`client.Secrets().Delete(options)`

Delete a secret in Infisical.

@@ -454,30 +493,33 @@ secret, err := client.Secrets().Delete(infisical.DeleteSecretOptions{
### Parameters

<ParamField query="Parameters" type="object" optional>
  <Expandable title="properties">
    <ParamField query="SecretKey" type="string">
      The key of the secret to delete.
    </ParamField>
    <ParamField query="ProjectID" type="string" required>
      The ID of the project that the secret lives in.
    </ParamField>
    <ParamField query="Environment" type="string" required>
      The slug name (dev, prod, etc.) of the environment where the secret should be deleted.
    </ParamField>
    <ParamField query="SecretPath" type="string" optional>
      The path where the secret should be deleted.
    </ParamField>
    <ParamField query="Type" type="string" optional>
      The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared".
    </ParamField>
  </Expandable>
</ParamField>
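And a matching sketch for `Delete` (placeholder values):

```go
// Delete a shared secret by key.
_, err = client.Secrets().Delete(infisical.DeleteSecretOptions{
    SecretKey:   "API_KEY",
    ProjectID:   "PROJECT_ID",
    Environment: "dev",
    SecretPath:  "/", // optional
})
if err != nil {
    // handle error
}
```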
## Working With Folders

###

### List Folders

`client.Folders().List(options)`

Retrieve all folders within the Infisical project and environment that the client is connected to.

@@ -510,7 +552,9 @@ folders, err := client.Folders().List(infisical.ListFoldersOptions{
</ParamField>
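A hedged sketch of a folder `List` call: the option fields are assumed to mirror the folder parameters documented below, the `Name` field on the returned items is an assumption about the SDK's folder model, and an `fmt` import is assumed:

```go
// List folders at the root path and print their names.
folders, err := client.Folders().List(infisical.ListFoldersOptions{
    ProjectID:   "PROJECT_ID",
    Environment: "dev",
    Path:        "/",
})
if err != nil {
    // handle error
}
for _, f := range folders {
    fmt.Println(f.Name) // field name assumed from the SDK's folder model
}
```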
###

### Create Folder

`client.Folders().Create(options)`

Create a new folder in Infisical.

@@ -527,25 +571,27 @@ folder, err := client.Folders().Create(infisical.CreateFolderOptions{
### Parameters

<ParamField query="Parameters" type="object" optional>
  <Expandable title="properties">
    <ParamField query="ProjectID" type="string" required>
      The ID of the project where the folder will be created.
    </ParamField>
    <ParamField query="Environment" type="string" required>
      The slug name (dev, prod, etc.) of the environment where the folder will be created.
    </ParamField>
    <ParamField query="Path" type="string" optional>
      The path to create the folder in. The root path is `/`.
    </ParamField>
    <ParamField query="Name" type="string" optional>
      The name of the folder to create.
    </ParamField>
  </Expandable>
</ParamField>
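A minimal sketch combining the parameters above (placeholder values):

```go
// Create a folder named "backend" under the root path.
_, err = client.Folders().Create(infisical.CreateFolderOptions{
    ProjectID:   "PROJECT_ID",
    Environment: "dev",
    Path:        "/", // optional; root path
    Name:        "backend",
})
if err != nil {
    // handle error
}
```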
###

### Update Folder

`client.Folders().Update(options)`

Update an existing folder in Infisical.

@@ -563,27 +609,30 @@ folder, err := client.Folders().Update(infisical.UpdateFolderOptions{
### Parameters

<ParamField query="Parameters" type="object" optional>
  <Expandable title="properties">
    <ParamField query="ProjectID" type="string" required>
      The ID of the project where the folder will be updated.
    </ParamField>
    <ParamField query="Environment" type="string" required>
      The slug name (dev, prod, etc.) of the environment that the folder lives in.
    </ParamField>
    <ParamField query="Path" type="string" optional>
      The path where the folder should be updated.
    </ParamField>
    <ParamField query="FolderID" type="string" required>
      The ID of the folder to update.
    </ParamField>
    <ParamField query="NewName" type="string" required>
      The new name of the folder.
    </ParamField>
  </Expandable>
</ParamField>
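A minimal sketch of a rename using the parameters above:

```go
// Rename an existing folder, addressed by its ID.
_, err = client.Folders().Update(infisical.UpdateFolderOptions{
    ProjectID:   "PROJECT_ID",
    Environment: "dev",
    Path:        "/", // optional
    FolderID:    "FOLDER_ID",
    NewName:     "backend-v2",
})
if err != nil {
    // handle error
}
```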
###

### Delete Folder

`client.Folders().Delete(options)`

Delete a folder in Infisical.

@@ -620,6 +669,5 @@ deletedFolder, err := client.Folders().Delete(infisical.DeleteFolderOptions{
      The path where the folder should be deleted.
    </ParamField>
  </Expandable>
</ParamField>
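To round out the folder operations, a hedged sketch of `Delete`; apart from `Path` (documented above), the option fields are assumptions that mirror the other folder operations:

```go
// Delete a folder; the returned value is assumed to describe the deleted folder.
deletedFolder, err := client.Folders().Delete(infisical.DeleteFolderOptions{
    FolderID:    "FOLDER_ID", // assumed field, mirroring Update
    ProjectID:   "PROJECT_ID",
    Environment: "dev",
    Path:        "/",
})
if err != nil {
    // handle error
}
_ = deletedFolder
```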
@@ -22,33 +22,34 @@ The actual resource requirements will vary in direct proportion to the operation

Infisical doesn’t require file storage as all persisted data is saved in the database.
However, its logs and metrics are saved to disk for later viewing. As a result, we recommend provisioning 1-2 GB of storage.

### CPU and Memory (Per Container/Instance)

Infisical is stateless and scales horizontally by running across multiple containers/instances. Each instance typically does **not** need more than **2–4 CPU cores** and **4–8 GB** of memory.
If you need additional capacity, simply increase the **number** of containers/instances running in parallel.

| **Deployment Size** | **CPU (Cores, per container)** | **Memory (GB, per container)** | **Recommended Number of Containers** |
| ------------------- | ------------------------------ | ------------------------------ | ------------------------------------ |
| **Small**           | 2                              | 4                              | 2+                                   |
| **Medium**          | 2–4                            | 4–8                            | 5+                                   |
| **Large**           | 2–4                            | 4–8                            | 10+                                  |

> **Note:**
> - Adding more containers (horizontal scaling) is generally the best way to handle spikes in secret operations.
> - If you prefer, you can increase CPU/memory on a single container (vertical scaling), but horizontal scaling is more flexible and resilient.
## Database & caching layer

### Postgres

PostgreSQL is the only database supported by Infisical. Infisical has been extensively tested with Postgres version 16. We recommend using versions 14 and up for optimal compatibility.
The compute required for Postgres is largely dependent on the number of secret operations (reads and writes) you expect. The more frequently you read and write secrets, the more compute you will need.
You'll notice that the storage requirements are high; this is because audit logs are stored in the database by default.

Recommended resource allocation based on deployment size. You may require more resources if you have a large number of secrets or high transaction volume:
- **small:** 2 vCPU / 8 GB RAM / 100 GB Disk
- **medium:** 4 vCPU / 16 GB RAM / 200 GB Disk
- **large:** 8 vCPU / 32 GB RAM / 500 GB Disk

### Redis
@@ -1,60 +0,0 @@

---
title: "Schema migration"
description: "Learn how to run Postgres schema migrations."
---

Running schema migrations is a requirement before deploying Infisical.
Each time you decide to upgrade your version of Infisical, it's necessary to run schema migrations for that specific version.
The guide below provides step-by-step instructions to help you manually run schema migrations for Infisical.

### Prerequisites
- Docker installed on your machine
- An active PostgreSQL database
- A Postgres database connection string

<Steps>
  <Step title="Pull the Infisical Docker Image">
    First, ensure you have the correct version of the Infisical Docker image. You can pull it from Docker Hub using the following command:
    ```bash
    docker pull infisical/infisical:<version>
    ```
    Replace `<version>` with the specific version number you intend to deploy. View available versions [here](https://hub.docker.com/r/infisical/infisical/tags).
  </Step>

  <Step title="Set Up the Environment Variable">
    The Docker image requires a `DB_CONNECTION_URI` environment variable. This connection string should point to your PostgreSQL database. The format generally looks like this: `postgresql://username:password@host:port/database`.
  </Step>

  <Step title="Run the Migration">
    To run the schema migration for the version of Infisical you want to deploy, use the following Docker command:

    ```bash
    docker run --env DB_CONNECTION_URI=<your_connection_string> infisical/infisical:<version> npm run migration:latest
    ```
    Replace `<your_connection_string>` with your actual PostgreSQL connection string, and `<version>` with the desired version number.
  </Step>

  <Step title="Verify the Migration">
    After running the migration, it's good practice to check whether it was successful. You can do this by checking the logs or inspecting your database to confirm the schema has been updated accordingly.
  </Step>
  <Step title="Rollback If Needed">
    If you need to roll back a migration by one step, use the following command:

    ```bash
    docker run --env DB_CONNECTION_URI=<your_connection_string> infisical/infisical:<version> npm run migration:rollback
    ```
  </Step>

  <Step title="Repeat for Each Version">
    It's important to run schema migrations for each version of Infisical you deploy. For instance, if you're updating from `infisical/infisical:1` to `infisical/infisical:2`, ensure you run the schema migrations for `infisical/infisical:2` before deploying it.
  </Step>
</Steps>

<Tip>
  In a production setting, we recommend a more structured approach to deploying migrations prior to upgrading Infisical. This can be accomplished via CI automation.
</Tip>

### Additional discussion
- Always back up your database before running migrations, especially in a production environment.
- Test the migration process in a staging environment before applying it to production.
- Keep track of the versions and their corresponding migrations to avoid any inconsistencies.
@@ -157,23 +157,6 @@ The [Docker stack file](https://github.com/Infisical/infisical/tree/main/docker-

3lznscvk7k5t infisical_spolo2 replicated 1/1 ghcr.io/zalando/spilo-16:3.2-p2
v04ml7rz2j5q infisical_spolo3 replicated 1/1 ghcr.io/zalando/spilo-16:3.2-p2
```

<Note>
You'll notice that the service `infisical_infisical` will not be in a running state.
This is expected as the database does not yet have the desired schemas.
Once the database schema migrations have been successfully applied, this issue should be resolved.
</Note>
</Step>

<Step title="Run schema migrations">
Run the schema migration to initialize the database. Follow the [guide here](/self-hosting/configuration/schema-migrations) to learn how.

To run the migrations, you'll need to connect to the Postgres instance deployed on your Docker swarm. The default Postgres user credentials are defined in the Docker swarm: username: `postgres`, password: `postgres`, and database: `postgres`.
We recommend that you change these credentials when deploying to production and create a separate database for Infisical.

<Info>
After running the schema migrations, be sure to update the `.env` file to have the correct `DB_CONNECTION_URI`.
</Info>
</Step>

<Step title="View service status">
@@ -78,18 +78,6 @@ description: "Learn how to use Helm chart to install Infisical on your Kubernete

</Tabs>
</Step>

<Step title="Database schema migration">
Infisical relies on a relational database, which means that database schemas need to be migrated before the instance can become operational.

To automate this process, the chart includes an option named `infisical.autoDatabaseSchemaMigration`.
When this option is enabled, a deployment/upgrade will only occur _after_ a successful schema migration.

<Info>
If you are using in-cluster Postgres, you may notice the migration job failing initially.
This is expected as it is waiting for the database to be in a ready state.
</Info>
</Step>

<Step title="Routing traffic to Infisical">
By default, this chart uses Nginx as its Ingress controller to direct traffic to Infisical services.