Compare commits
176 commits
daniel/fix...feat/add-i
Commit SHA1s:
d4f9faf24d, f9a1accf84, ca86f3d2b6, de466b4b86, 106dc261de, 548a0aed2a, b7b606ab9a, 00617ea7e8,
6d9330e870, d026a9b988, c2c693d295, c9c77f6c58, 36a34b0f58, 45c153e592, eeaabe44ec, 4b37c0f1c4,
973403c7f9, 52fcf53d0e, cbef9ea514, d0f8394f50, 9c06cab99d, c43a18904d, dc0fe6920c, 077cbc97d5,
f3da676b88, 988c612048, 7cf7eb5acb, a2fd071b62, 0d7a07dea3, f676b44335, 00d83f9136, eca6871cbc,
97cff783cf, 3767ec9521, f31340cf53, 908358b841, b2a88a4384, ab73e77499, 095a049661, 3a51155d23,
c5f361a3e5, 5ace8ed073, 193d6dad54, 0f36fc46b3, 4a1a399fd8, d19e2f64f0, 1e0f54d9a4, 8d55c2802e,
e9639df8ce, e0f5ecbe7b, 3e230555fb, 31e27ad1d7, 4962a63888, ad92565783, 6c98c96a15, f0a70d8769,
9e9de9f527, 6af4a06c02, fe6dc248b6, d64e2fa243, 7d380f9b43, 76c8410081, 6df90fa825, c042bafba3,
8067df821e, 1906896e56, a8ccfd9c92, 32609b95a0, 08d3436217, 2ae45dc1cc, 44a898fb15, 4d194052b5,
1d622bb121, ecca6f4db5, b198f97930, 63a9e46936, 7c067551a4, 5c149c6ac6, c19f8839ff, 1193ddbed1,
c6c71a04e8, 6457c34712, 6a83b58de4, d47c586a52, 88156c8cd8, 27d5d90d02, 0100ddfb99, 2bc6db1c47,
92f2f16656, 07ca1ed424, 18c5dd3cbd, 467e3aab56, 577b432861, dda6b1d233, e83f31249a, 18e69578f0,
0685a5ea8b, 3142d36ea1, bdc7c018eb, 9506b60d02, ed25b82113, 83bd97fc70, 1d5115972b, d26521be0b,
473f8137fd, bcd65333c0, 719d0ea30f, 371b96a13a, c5c00b520c, 8de4443be1, 96ad3b0264, aaef339e21,
e3beeb68eb, d0c76ae4b4, a5cf6f40c7, f121f8e828, 54c8da8ab6, 6e0dfc72e4, b226fdac9d, 3c36d5dbd2,
a5f895ad91, 9f66b9bb4d, 80e55a9341, 5142d6f3c1, c8677ac548, df51d05c46, 4f2f7b2f70, d79ffbe37e,
2c237ee277, 56cc248425, 61fcb2b605, 992cc03eca, f0e7c459e2, 29d0694a16, 66e5edcfc0, f13930bc6b,
0d5514834d, b495156444, 65a2b0116b, 21c6160c84, 8a2268956a, 2675aa6969, 6bad13738f, dbae6968c9,
e019f3811b, 24935f4e07, 1835777832, cb237831c7, 49d2ea6f2e, 3b2a2d1a73, f490fb6616, c4f9a3b31e,
afcf15df55, bf8aee25fe, ebdfe31c17, e65ce932dd, c119f506fd, 93638baba7, bad97774c4, 68f5be2ff1,
0b54099789, 9b2a2eda0c, ccef9646c6, 458639e93d, 35998e98cf, e19b67f9a2, f41ec46a35, 33aa9ea1a7,
21bd468307, e95109c446, 93d5180dfc, a9bec84d27, e3f87382a3, 736f067178, f3ea7b3dfd, 777dfd5f58
@@ -50,6 +50,13 @@ jobs:
environment:
name: Gamma
steps:
- uses: twingate/github-action@v1
with:
# The Twingate Service Key used to connect Twingate to the proper service
# Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
#
# Required
service-key: ${{ secrets.TWINGATE_GAMMA_SERVICE_KEY }}
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
@@ -74,21 +81,21 @@ jobs:
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
aws ecs describe-task-definition --task-definition infisical-core-gamma-stage --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core-platform
container-name: infisical-core
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-platform
cluster: infisical-core-platform
service: infisical-core-gamma-stage
cluster: infisical-gamma-stage
wait-for-service-stability: true

production-postgres-deployment:
README.md (31 changed lines)
@@ -48,25 +48,26 @@

## Introduction

**[Infisical](https://infisical.com)** is the open source secret management platform that teams use to centralize their secrets like API keys, database credentials, and configurations.
**[Infisical](https://infisical.com)** is the open source secret management platform that teams use to centralize their application configuration and secrets like API keys and database credentials, as well as manage their internal PKI.

We're on a mission to make secret management more accessible to everyone, not just security teams, and that means redesigning the entire developer experience from the ground up.
We're on a mission to make security tooling more accessible to everyone, not just security teams, and that means redesigning the entire developer experience from the ground up.

## Features

- **[User-friendly dashboard](https://infisical.com/docs/documentation/platform/project)** to manage secrets across projects and environments (e.g. development, production, etc.).
- **[Client SDKs](https://infisical.com/docs/sdks/overview)** to fetch secrets for your apps and infrastructure on demand.
- **[Infisical CLI](https://infisical.com/docs/cli/overview)** to fetch and inject secrets into any framework in local development and CI/CD.
- **[Infisical API](https://infisical.com/docs/api-reference/overview/introduction)** to perform CRUD operations on secrets, users, projects, and any other resource in Infisical.
- **[Native integrations](https://infisical.com/docs/integrations/overview)** with platforms like [GitHub](https://infisical.com/docs/integrations/cicd/githubactions), [Vercel](https://infisical.com/docs/integrations/cloud/vercel), [AWS](https://infisical.com/docs/integrations/cloud/aws-secret-manager), and tools like [Terraform](https://infisical.com/docs/integrations/frameworks/terraform), [Ansible](https://infisical.com/docs/integrations/platforms/ansible), and more.
- **[User-friendly dashboard](https://infisical.com/docs/documentation/platform/project)** to manage secrets across projects and environments (e.g. development, production, etc.).
- **[Client SDKs](https://infisical.com/docs/sdks/overview)** to fetch secrets for your apps and infrastructure on demand.
- **[Infisical CLI](https://infisical.com/docs/cli/overview)** to fetch and inject secrets into any framework in local development and CI/CD.
- **[Infisical API](https://infisical.com/docs/api-reference/overview/introduction)** to perform CRUD operations on secrets, users, projects, and any other resource in Infisical.
- **[Native integrations](https://infisical.com/docs/integrations/overview)** with platforms like [GitHub](https://infisical.com/docs/integrations/cicd/githubactions), [Vercel](https://infisical.com/docs/integrations/cloud/vercel), [AWS](https://infisical.com/docs/integrations/cloud/aws-secret-manager), and tools like [Terraform](https://infisical.com/docs/integrations/frameworks/terraform), [Ansible](https://infisical.com/docs/integrations/platforms/ansible), and more.
- **[Infisical Kubernetes operator](https://infisical.com/docs/documentation/getting-started/kubernetes)** to manage secrets in k8s, automatically reload deployments, and more.
- **[Infisical Agent](https://infisical.com/docs/infisical-agent/overview)** to inject secrets into your applications without modifying any code logic.
- **[Infisical Agent](https://infisical.com/docs/infisical-agent/overview)** to inject secrets into your applications without modifying any code logic.
- **[Self-hosting and on-prem](https://infisical.com/docs/self-hosting/overview)** to get complete control over your data.
- **[Secret versioning](https://infisical.com/docs/documentation/platform/secret-versioning)** and **[Point-in-Time Recovery](https://infisical.com/docs/documentation/platform/pit-recovery)** to version every secret and project state.
- **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)** to record every action taken in a project.
- **[Role-based Access Controls](https://infisical.com/docs/documentation/platform/role-based-access-controls)** to create permission sets on any resource in Infisical and assign those to user or machine identities.
- **[Secret versioning](https://infisical.com/docs/documentation/platform/secret-versioning)** and **[Point-in-Time Recovery](https://infisical.com/docs/documentation/platform/pit-recovery)** to version every secret and project state.
- **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)** to record every action taken in a project.
- **[Role-based Access Controls](https://infisical.com/docs/documentation/platform/role-based-access-controls)** to create permission sets on any resource in Infisical and assign those to user or machine identities.
- **[Simple on-premise deployments](https://infisical.com/docs/self-hosting/overview)** to AWS, Digital Ocean, and more.
- **[Secret Scanning and Leak Prevention](https://infisical.com/docs/cli/scanning-overview)** to prevent secrets from leaking to git.
- **[Internal PKI](https://infisical.com/docs/documentation/platform/pki/private-ca)** to create Private CA hierarchies and start issuing and managing X.509 digital certificates.
- **[Secret Scanning and Leak Prevention](https://infisical.com/docs/cli/scanning-overview)** to prevent secrets from leaking to git.

And much more.

@@ -74,9 +75,9 @@ And much more.

Check out the [Quickstart Guides](https://infisical.com/docs/getting-started/introduction)

| Use Infisical Cloud | Deploy Infisical on premise |
| ------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |
| Use Infisical Cloud | Deploy Infisical on premise |
| ------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------ |
| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |

### Run Infisical locally
144
backend/package-lock.json
generated
@@ -38,6 +38,7 @@
|
||||
"bcrypt": "^5.1.1",
|
||||
"bullmq": "^5.4.2",
|
||||
"cassandra-driver": "^4.7.2",
|
||||
"connect-redis": "^7.1.1",
|
||||
"cron": "^3.1.7",
|
||||
"dotenv": "^16.4.1",
|
||||
"fastify": "^4.26.0",
|
||||
@@ -57,6 +58,7 @@
|
||||
"mysql2": "^3.9.8",
|
||||
"nanoid": "^5.0.4",
|
||||
"nodemailer": "^6.9.9",
|
||||
"openid-client": "^5.6.5",
|
||||
"ora": "^7.0.1",
|
||||
"oracledb": "^6.4.0",
|
||||
"passport-github": "^1.1.0",
|
||||
@@ -6459,12 +6461,12 @@
|
||||
}
|
||||
},
|
||||
"node_modules/braces": {
|
||||
"version": "3.0.2",
|
||||
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
|
||||
"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
|
||||
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"fill-range": "^7.0.1"
|
||||
"fill-range": "^7.1.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
@@ -6790,6 +6792,17 @@
|
||||
"integrity": "sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/connect-redis": {
|
||||
"version": "7.1.1",
|
||||
"resolved": "https://registry.npmjs.org/connect-redis/-/connect-redis-7.1.1.tgz",
|
||||
"integrity": "sha512-M+z7alnCJiuzKa8/1qAYdGUXHYfDnLolOGAUjOioB07pP39qxjG+X9ibsud7qUBc4jMV5Mcy3ugGv8eFcgamJQ==",
|
||||
"engines": {
|
||||
"node": ">=16"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"express-session": ">=1"
|
||||
}
|
||||
},
|
||||
"node_modules/console-control-strings": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz",
|
||||
@@ -7896,6 +7909,55 @@
|
||||
"node": ">= 0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/express-session": {
|
||||
"version": "1.18.0",
|
||||
"resolved": "https://registry.npmjs.org/express-session/-/express-session-1.18.0.tgz",
|
||||
"integrity": "sha512-m93QLWr0ju+rOwApSsyso838LQwgfs44QtOP/WBiwtAgPIo/SAh1a5c6nn2BR6mFNZehTpqKDESzP+fRHVbxwQ==",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"cookie": "0.6.0",
|
||||
"cookie-signature": "1.0.7",
|
||||
"debug": "2.6.9",
|
||||
"depd": "~2.0.0",
|
||||
"on-headers": "~1.0.2",
|
||||
"parseurl": "~1.3.3",
|
||||
"safe-buffer": "5.2.1",
|
||||
"uid-safe": "~2.1.5"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/express-session/node_modules/cookie": {
|
||||
"version": "0.6.0",
|
||||
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz",
|
||||
"integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/express-session/node_modules/cookie-signature": {
|
||||
"version": "1.0.7",
|
||||
"resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.7.tgz",
|
||||
"integrity": "sha512-NXdYc3dLr47pBkpUCHtKSwIOQXLVn8dZEuywboCOJY/osA0wFSLlSawr3KN8qXJEyX66FcONTH8EIlVuK0yyFA==",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/express-session/node_modules/debug": {
|
||||
"version": "2.6.9",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
|
||||
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"ms": "2.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/express-session/node_modules/ms": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
||||
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/express/node_modules/cookie": {
|
||||
"version": "0.6.0",
|
||||
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz",
|
||||
@@ -8115,9 +8177,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/fill-range": {
|
||||
"version": "7.0.1",
|
||||
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
|
||||
"integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
|
||||
"version": "7.1.1",
|
||||
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
|
||||
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"to-regex-range": "^5.0.1"
|
||||
@@ -9603,6 +9665,14 @@
|
||||
"node": ">= 0.6.0"
|
||||
}
|
||||
},
|
||||
"node_modules/jose": {
|
||||
"version": "4.15.5",
|
||||
"resolved": "https://registry.npmjs.org/jose/-/jose-4.15.5.tgz",
|
||||
"integrity": "sha512-jc7BFxgKPKi94uOvEmzlSWFFe2+vASyXaKUpdQKatWAESU2MWjDfFf0fdfc83CDKcA5QecabZeNLyfhe3yKNkg==",
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/panva"
|
||||
}
|
||||
},
|
||||
"node_modules/joycon": {
|
||||
"version": "3.1.1",
|
||||
"resolved": "https://registry.npmjs.org/joycon/-/joycon-3.1.1.tgz",
|
||||
@@ -10728,6 +10798,14 @@
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/object-hash": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/object-hash/-/object-hash-2.2.0.tgz",
|
||||
"integrity": "sha512-gScRMn0bS5fH+IuwyIFgnh9zBdo4DV+6GhygmWM9HyNJSgS0hScp1f5vjtm7oIIOiT9trXrShAkLFSc2IqKNgw==",
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/object-inspect": {
|
||||
"version": "1.13.1",
|
||||
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz",
|
||||
@@ -10851,6 +10929,14 @@
|
||||
"@octokit/core": ">=5"
|
||||
}
|
||||
},
|
||||
"node_modules/oidc-token-hash": {
|
||||
"version": "5.0.3",
|
||||
"resolved": "https://registry.npmjs.org/oidc-token-hash/-/oidc-token-hash-5.0.3.tgz",
|
||||
"integrity": "sha512-IF4PcGgzAr6XXSff26Sk/+P4KZFJVuHAJZj3wgO3vX2bMdNVp/QXTP3P7CEm9V1IdG8lDLY3HhiqpsE/nOwpPw==",
|
||||
"engines": {
|
||||
"node": "^10.13.0 || >=12.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/on-exit-leak-free": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/on-exit-leak-free/-/on-exit-leak-free-2.1.2.tgz",
|
||||
@@ -10870,6 +10956,15 @@
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/on-headers": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz",
|
||||
"integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/once": {
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
|
||||
@@ -10897,6 +10992,20 @@
|
||||
"resolved": "https://registry.npmjs.org/openapi-types/-/openapi-types-12.1.3.tgz",
|
||||
"integrity": "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw=="
|
||||
},
|
||||
"node_modules/openid-client": {
|
||||
"version": "5.6.5",
|
||||
"resolved": "https://registry.npmjs.org/openid-client/-/openid-client-5.6.5.tgz",
|
||||
"integrity": "sha512-5P4qO9nGJzB5PI0LFlhj4Dzg3m4odt0qsJTfyEtZyOlkgpILwEioOhVVJOrS1iVH494S4Ee5OCjjg6Bf5WOj3w==",
|
||||
"dependencies": {
|
||||
"jose": "^4.15.5",
|
||||
"lru-cache": "^6.0.0",
|
||||
"object-hash": "^2.2.0",
|
||||
"oidc-token-hash": "^5.0.3"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/panva"
|
||||
}
|
||||
},
|
||||
"node_modules/optionator": {
|
||||
"version": "0.9.3",
|
||||
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz",
|
||||
@@ -11948,6 +12057,15 @@
|
||||
"resolved": "https://registry.npmjs.org/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz",
|
||||
"integrity": "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg=="
|
||||
},
|
||||
"node_modules/random-bytes": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/random-bytes/-/random-bytes-1.0.0.tgz",
|
||||
"integrity": "sha512-iv7LhNVO047HzYR3InF6pUcUsPQiHTM1Qal51DcGSuZFBil1aBBWG5eHPNek7bvILMaYJ/8RU1e8w1AMdHmLQQ==",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/randombytes": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz",
|
||||
@@ -14027,6 +14145,18 @@
|
||||
"node": ">=0.8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/uid-safe": {
|
||||
"version": "2.1.5",
|
||||
"resolved": "https://registry.npmjs.org/uid-safe/-/uid-safe-2.1.5.tgz",
|
||||
"integrity": "sha512-KPHm4VL5dDXKz01UuEd88Df+KzynaohSL9fBh096KWAxSKZQDI2uBrVqtvRM4rwrIrRRKsdLNML/lnaaVSRioA==",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"random-bytes": "~1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/uid2": {
|
||||
"version": "0.0.4",
|
||||
"resolved": "https://registry.npmjs.org/uid2/-/uid2-0.0.4.tgz",
|
||||
|
@@ -99,6 +99,7 @@
"bcrypt": "^5.1.1",
"bullmq": "^5.4.2",
"cassandra-driver": "^4.7.2",
"connect-redis": "^7.1.1",
"cron": "^3.1.7",
"dotenv": "^16.4.1",
"fastify": "^4.26.0",
@@ -118,6 +119,7 @@
"mysql2": "^3.9.8",
"nanoid": "^5.0.4",
"nodemailer": "^6.9.9",
"openid-client": "^5.6.5",
"ora": "^7.0.1",
"oracledb": "^6.4.0",
"passport-github": "^1.1.0",
backend/src/@types/fastify.d.ts (2 changed lines, vendored)
@@ -13,6 +13,7 @@ import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
@@ -102,6 +103,7 @@ declare module "fastify" {
permission: TPermissionServiceFactory;
org: TOrgServiceFactory;
orgRole: TOrgRoleServiceFactory;
oidc: TOidcConfigServiceFactory;
superAdmin: TSuperAdminServiceFactory;
user: TUserServiceFactory;
group: TGroupServiceFactory;
backend/src/@types/knex.d.ts (4 changed lines, vendored)
@@ -134,6 +134,9 @@ import {
TLdapGroupMaps,
TLdapGroupMapsInsert,
TLdapGroupMapsUpdate,
TOidcConfigs,
TOidcConfigsInsert,
TOidcConfigsUpdate,
TOrganizations,
TOrganizationsInsert,
TOrganizationsUpdate,
@@ -549,6 +552,7 @@ declare module "knex/types/tables" {
TDynamicSecretLeasesUpdate
>;
[TableName.SamlConfig]: Knex.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
[TableName.OidcConfig]: Knex.CompositeTableType<TOidcConfigs, TOidcConfigsInsert, TOidcConfigsUpdate>;
[TableName.LdapConfig]: Knex.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
[TableName.LdapGroupMap]: Knex.CompositeTableType<TLdapGroupMaps, TLdapGroupMapsInsert, TLdapGroupMapsUpdate>;
[TableName.OrgBot]: Knex.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
@@ -0,0 +1,61 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesPasswordFieldExist = await knex.schema.hasColumn(TableName.UserEncryptionKey, "hashedPassword");
  const doesPrivateKeyFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKey"
  );
  const doesPrivateKeyIVFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKeyIV"
  );
  const doesPrivateKeyTagFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKeyTag"
  );
  const doesPrivateKeyEncodingFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKeyEncoding"
  );
  if (await knex.schema.hasTable(TableName.UserEncryptionKey)) {
    await knex.schema.alterTable(TableName.UserEncryptionKey, (t) => {
      if (!doesPasswordFieldExist) t.string("hashedPassword");
      if (!doesPrivateKeyFieldExist) t.text("serverEncryptedPrivateKey");
      if (!doesPrivateKeyIVFieldExist) t.text("serverEncryptedPrivateKeyIV");
      if (!doesPrivateKeyTagFieldExist) t.text("serverEncryptedPrivateKeyTag");
      if (!doesPrivateKeyEncodingFieldExist) t.text("serverEncryptedPrivateKeyEncoding");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesPasswordFieldExist = await knex.schema.hasColumn(TableName.UserEncryptionKey, "hashedPassword");
  const doesPrivateKeyFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKey"
  );
  const doesPrivateKeyIVFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKeyIV"
  );
  const doesPrivateKeyTagFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKeyTag"
  );
  const doesPrivateKeyEncodingFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKeyEncoding"
  );
  if (await knex.schema.hasTable(TableName.UserEncryptionKey)) {
    await knex.schema.alterTable(TableName.UserEncryptionKey, (t) => {
      if (doesPasswordFieldExist) t.dropColumn("hashedPassword");
      if (doesPrivateKeyFieldExist) t.dropColumn("serverEncryptedPrivateKey");
      if (doesPrivateKeyIVFieldExist) t.dropColumn("serverEncryptedPrivateKeyIV");
      if (doesPrivateKeyTagFieldExist) t.dropColumn("serverEncryptedPrivateKeyTag");
      if (doesPrivateKeyEncodingFieldExist) t.dropColumn("serverEncryptedPrivateKeyEncoding");
    });
  }
}
backend/src/db/migrations/20240624161942_add-oidc-auth.ts (49 changed lines, Normal file)
@@ -0,0 +1,49 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.OidcConfig))) {
    await knex.schema.createTable(TableName.OidcConfig, (tb) => {
      tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      tb.string("discoveryURL");
      tb.string("issuer");
      tb.string("authorizationEndpoint");
      tb.string("jwksUri");
      tb.string("tokenEndpoint");
      tb.string("userinfoEndpoint");
      tb.text("encryptedClientId").notNullable();
      tb.string("configurationType").notNullable();
      tb.string("clientIdIV").notNullable();
      tb.string("clientIdTag").notNullable();
      tb.text("encryptedClientSecret").notNullable();
      tb.string("clientSecretIV").notNullable();
      tb.string("clientSecretTag").notNullable();
      tb.string("allowedEmailDomains").nullable();
      tb.boolean("isActive").notNullable();
      tb.timestamps(true, true, true);
      tb.uuid("orgId").notNullable().unique();
      tb.foreign("orgId").references("id").inTable(TableName.Organization);
    });
  }

  if (await knex.schema.hasTable(TableName.SuperAdmin)) {
    if (!(await knex.schema.hasColumn(TableName.SuperAdmin, "trustOidcEmails"))) {
      await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
        tb.boolean("trustOidcEmails").defaultTo(false);
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.OidcConfig);

  if (await knex.schema.hasTable(TableName.SuperAdmin)) {
    if (await knex.schema.hasColumn(TableName.SuperAdmin, "trustOidcEmails")) {
      await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
        t.dropColumn("trustOidcEmails");
      });
    }
  }
}
@@ -43,6 +43,7 @@ export * from "./kms-root-config";
export * from "./ldap-configs";
export * from "./ldap-group-maps";
export * from "./models";
export * from "./oidc-configs";
export * from "./org-bots";
export * from "./org-memberships";
export * from "./org-roles";
@@ -78,6 +78,7 @@ export enum TableName {
SecretRotationOutput = "secret_rotation_outputs",
SamlConfig = "saml_configs",
LdapConfig = "ldap_configs",
OidcConfig = "oidc_configs",
LdapGroupMap = "ldap_group_maps",
AuditLog = "audit_logs",
AuditLogStream = "audit_log_streams",
backend/src/db/schemas/oidc-configs.ts (34 changed lines, Normal file)
@@ -0,0 +1,34 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const OidcConfigsSchema = z.object({
  id: z.string().uuid(),
  discoveryURL: z.string().nullable().optional(),
  issuer: z.string().nullable().optional(),
  authorizationEndpoint: z.string().nullable().optional(),
  jwksUri: z.string().nullable().optional(),
  tokenEndpoint: z.string().nullable().optional(),
  userinfoEndpoint: z.string().nullable().optional(),
  encryptedClientId: z.string(),
  configurationType: z.string(),
  clientIdIV: z.string(),
  clientIdTag: z.string(),
  encryptedClientSecret: z.string(),
  clientSecretIV: z.string(),
  clientSecretTag: z.string(),
  allowedEmailDomains: z.string().nullable().optional(),
  isActive: z.boolean(),
  createdAt: z.date(),
  updatedAt: z.date(),
  orgId: z.string().uuid()
});

export type TOidcConfigs = z.infer<typeof OidcConfigsSchema>;
export type TOidcConfigsInsert = Omit<z.input<typeof OidcConfigsSchema>, TImmutableDBKeys>;
export type TOidcConfigsUpdate = Partial<Omit<z.input<typeof OidcConfigsSchema>, TImmutableDBKeys>>;
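As a brief aside, here is a minimal sketch of how a generated zod schema like the one above is typically consumed: validating an untyped database row before it is used. The sample row values and the relative import path are hypothetical; only OidcConfigsSchema, its field rules, and safeParse behaviour come from the file above.

```ts
import { OidcConfigsSchema, TOidcConfigs } from "./oidc-configs";

// Hypothetical raw row, e.g. the result of a Knex query against oidc_configs.
const rawRow: unknown = {
  id: "f47ac10b-58cc-4372-a567-0e02b2c3d479",
  encryptedClientId: "<ciphertext>",
  configurationType: "custom",
  clientIdIV: "<iv>",
  clientIdTag: "<tag>",
  encryptedClientSecret: "<ciphertext>",
  clientSecretIV: "<iv>",
  clientSecretTag: "<tag>",
  isActive: true,
  createdAt: new Date(),
  updatedAt: new Date(),
  orgId: "9b2f9c1e-3d44-4c1a-8f0a-2a7c5d6e1b23"
};

// safeParse returns a success/error union instead of throwing on malformed rows.
const parsed = OidcConfigsSchema.safeParse(rawRow);
if (!parsed.success) {
  throw new Error(parsed.error.message);
}
const config: TOidcConfigs = parsed.data; // typed row; nullable/optional columns stay undefined
```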
@@ -16,7 +16,8 @@ export const SuperAdminSchema = z.object({
allowedSignUpDomain: z.string().nullable().optional(),
instanceId: z.string().uuid().default("00000000-0000-0000-0000-000000000000"),
trustSamlEmails: z.boolean().default(false).nullable().optional(),
trustLdapEmails: z.boolean().default(false).nullable().optional()
trustLdapEmails: z.boolean().default(false).nullable().optional(),
trustOidcEmails: z.boolean().default(false).nullable().optional()
});

export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;
@@ -21,7 +21,12 @@ export const UserEncryptionKeysSchema = z.object({
tag: z.string(),
salt: z.string(),
verifier: z.string(),
userId: z.string().uuid()
userId: z.string().uuid(),
hashedPassword: z.string().nullable().optional(),
serverEncryptedPrivateKey: z.string().nullable().optional(),
serverEncryptedPrivateKeyIV: z.string().nullable().optional(),
serverEncryptedPrivateKeyTag: z.string().nullable().optional(),
serverEncryptedPrivateKeyEncoding: z.string().nullable().optional()
});

export type TUserEncryptionKeys = z.infer<typeof UserEncryptionKeysSchema>;
@@ -8,6 +8,7 @@ import { registerGroupRouter } from "./group-router";
import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
import { registerLdapRouter } from "./ldap-router";
import { registerLicenseRouter } from "./license-router";
import { registerOidcRouter } from "./oidc-router";
import { registerOrgRoleRouter } from "./org-role-router";
import { registerProjectRoleRouter } from "./project-role-router";
import { registerProjectRouter } from "./project-router";
@@ -64,7 +65,14 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
{ prefix: "/pki" }
);

await server.register(registerSamlRouter, { prefix: "/sso" });
await server.register(
  async (ssoRouter) => {
    await ssoRouter.register(registerSamlRouter);
    await ssoRouter.register(registerOidcRouter, { prefix: "/oidc" });
  },
  { prefix: "/sso" }
);

await server.register(registerScimRouter, { prefix: "/scim" });
await server.register(registerLdapRouter, { prefix: "/ldap" });
await server.register(registerSecretScanningRouter, { prefix: "/secret-scanning" });
@@ -53,7 +53,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
// eslint-disable-next-line
async (req: IncomingMessage, user, cb) => {
  try {
    if (!user.email) throw new BadRequestError({ message: "Invalid request. Missing email." });
    if (!user.mail) throw new BadRequestError({ message: "Invalid request. Missing mail attribute on user." });
    const ldapConfig = (req as unknown as FastifyRequest).ldapConfig as TLDAPConfig;

    let groups: { dn: string; cn: string }[] | undefined;
355
backend/src/ee/routes/v1/oidc-router.ts
Normal file
@@ -0,0 +1,355 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-return */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-call */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-argument */
|
||||
// All the any rules are disabled because passport typesense with fastify is really poor
|
||||
|
||||
import { Authenticator, Strategy } from "@fastify/passport";
|
||||
import fastifySession from "@fastify/session";
|
||||
import RedisStore from "connect-redis";
|
||||
import { Redis } from "ioredis";
|
||||
import { z } from "zod";
|
||||
|
||||
import { OidcConfigsSchema } from "@app/db/schemas/oidc-configs";
|
||||
import { OIDCConfigurationType } from "@app/ee/services/oidc/oidc-config-types";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerOidcRouter = async (server: FastifyZodProvider) => {
|
||||
const appCfg = getConfig();
|
||||
const redis = new Redis(appCfg.REDIS_URL);
|
||||
const passport = new Authenticator({ key: "oidc", userProperty: "passportUser" });
|
||||
|
||||
/*
|
||||
- OIDC protocol cannot work without sessions: https://github.com/panva/node-openid-client/issues/190
|
||||
- Current redis usage is not ideal and will eventually have to be refactored to use a better structure
|
||||
- Fastify session <> Redis structure is based on the ff: https://github.com/fastify/session/blob/master/examples/redis.js
|
||||
*/
|
||||
const redisStore = new RedisStore({
|
||||
client: redis,
|
||||
prefix: "oidc-session:",
|
||||
ttl: 600 // 10 minutes
|
||||
});
|
||||
|
||||
await server.register(fastifySession, {
|
||||
secret: appCfg.COOKIE_SECRET_SIGN_KEY,
|
||||
store: redisStore,
|
||||
cookie: {
|
||||
secure: appCfg.HTTPS_ENABLED,
|
||||
sameSite: "lax" // we want cookies to be sent to Infisical in redirects originating from IDP server
|
||||
}
|
||||
});
|
||||
|
||||
await server.register(passport.initialize());
|
||||
await server.register(passport.secureSession());
|
||||
|
||||
// redirect to IDP for login
|
||||
server.route({
|
||||
url: "/login",
|
||||
method: "GET",
|
||||
config: {
|
||||
rateLimit: authRateLimit
|
||||
},
|
||||
schema: {
|
||||
querystring: z.object({
|
||||
orgSlug: z.string().trim(),
|
||||
callbackPort: z.string().trim().optional()
|
||||
})
|
||||
},
|
||||
preValidation: [
|
||||
async (req, res) => {
|
||||
const { orgSlug, callbackPort } = req.query;
|
||||
|
||||
// ensure fresh session state per login attempt
|
||||
await req.session.regenerate();
|
||||
|
||||
req.session.set<any>("oidcOrgSlug", orgSlug);
|
||||
|
||||
if (callbackPort) {
|
||||
req.session.set<any>("callbackPort", callbackPort);
|
||||
}
|
||||
|
||||
const oidcStrategy = await server.services.oidc.getOrgAuthStrategy(orgSlug, callbackPort);
|
||||
return (
|
||||
passport.authenticate(oidcStrategy as Strategy, {
|
||||
scope: "profile email openid"
|
||||
}) as any
|
||||
)(req, res);
|
||||
}
|
||||
],
|
||||
handler: () => {}
|
||||
});
|
||||
|
||||
// callback route after login from IDP
|
||||
server.route({
|
||||
url: "/callback",
|
||||
method: "GET",
|
||||
preValidation: [
|
||||
async (req, res) => {
|
||||
const oidcOrgSlug = req.session.get<any>("oidcOrgSlug");
|
||||
const callbackPort = req.session.get<any>("callbackPort");
|
||||
const oidcStrategy = await server.services.oidc.getOrgAuthStrategy(oidcOrgSlug, callbackPort);
|
||||
|
||||
return (
|
||||
passport.authenticate(oidcStrategy as Strategy, {
|
||||
failureRedirect: "/api/v1/sso/oidc/login/error",
|
||||
session: false,
|
||||
failureMessage: true
|
||||
}) as any
|
||||
)(req, res);
|
||||
}
|
||||
],
|
||||
handler: async (req, res) => {
|
||||
await req.session.destroy();
|
||||
|
||||
if (req.passportUser.isUserCompleted) {
|
||||
return res.redirect(
|
||||
`${appCfg.SITE_URL}/login/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}`
|
||||
);
|
||||
}
|
||||
|
||||
// signup
|
||||
return res.redirect(
|
||||
`${appCfg.SITE_URL}/signup/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}`
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/login/error",
|
||||
method: "GET",
|
||||
handler: async (req, res) => {
|
||||
await req.session.destroy();
|
||||
|
||||
return res.status(500).send({
|
||||
error: "Authentication error",
|
||||
details: req.query
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/config",
|
||||
method: "GET",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
querystring: z.object({
|
||||
orgSlug: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: OidcConfigsSchema.pick({
|
||||
id: true,
|
||||
issuer: true,
|
||||
authorizationEndpoint: true,
|
||||
jwksUri: true,
|
||||
tokenEndpoint: true,
|
||||
userinfoEndpoint: true,
|
||||
configurationType: true,
|
||||
discoveryURL: true,
|
||||
isActive: true,
|
||||
orgId: true,
|
||||
allowedEmailDomains: true
|
||||
}).extend({
|
||||
clientId: z.string(),
|
||||
clientSecret: z.string()
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { orgSlug } = req.query;
|
||||
const oidc = await server.services.oidc.getOidc({
|
||||
orgSlug,
|
||||
type: "external",
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod
|
||||
});
|
||||
|
||||
return oidc;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "PATCH",
|
||||
url: "/config",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
body: z
|
||||
.object({
|
||||
allowedEmailDomains: z
|
||||
.string()
|
||||
.trim()
|
||||
.optional()
|
||||
.default("")
|
||||
.transform((data) => {
|
||||
if (data === "") return "";
|
||||
// Trim each ID and join with ', ' to ensure formatting
|
||||
return data
|
||||
.split(",")
|
||||
.map((id) => id.trim())
|
||||
.join(", ");
|
||||
}),
|
||||
discoveryURL: z.string().trim(),
|
||||
configurationType: z.nativeEnum(OIDCConfigurationType),
|
||||
issuer: z.string().trim(),
|
||||
authorizationEndpoint: z.string().trim(),
|
||||
jwksUri: z.string().trim(),
|
||||
tokenEndpoint: z.string().trim(),
|
||||
userinfoEndpoint: z.string().trim(),
|
||||
clientId: z.string().trim(),
|
||||
clientSecret: z.string().trim(),
|
||||
isActive: z.boolean()
|
||||
})
|
||||
.partial()
|
||||
.merge(z.object({ orgSlug: z.string() })),
|
||||
response: {
|
||||
200: OidcConfigsSchema.pick({
|
||||
id: true,
|
||||
issuer: true,
|
||||
authorizationEndpoint: true,
|
||||
configurationType: true,
|
||||
discoveryURL: true,
|
||||
jwksUri: true,
|
||||
tokenEndpoint: true,
|
||||
userinfoEndpoint: true,
|
||||
orgId: true,
|
||||
allowedEmailDomains: true,
|
||||
isActive: true
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const oidc = await server.services.oidc.updateOidcCfg({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
return oidc;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/config",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
body: z
|
||||
.object({
|
||||
allowedEmailDomains: z
|
||||
.string()
|
||||
.trim()
|
||||
.optional()
|
||||
.default("")
|
||||
.transform((data) => {
|
||||
if (data === "") return "";
|
||||
// Trim each ID and join with ', ' to ensure formatting
|
||||
return data
|
||||
.split(",")
|
||||
.map((id) => id.trim())
|
||||
.join(", ");
|
||||
}),
|
||||
configurationType: z.nativeEnum(OIDCConfigurationType),
|
||||
issuer: z.string().trim().optional().default(""),
|
||||
discoveryURL: z.string().trim().optional().default(""),
|
||||
authorizationEndpoint: z.string().trim().optional().default(""),
|
||||
jwksUri: z.string().trim().optional().default(""),
|
||||
tokenEndpoint: z.string().trim().optional().default(""),
|
||||
userinfoEndpoint: z.string().trim().optional().default(""),
|
||||
clientId: z.string().trim(),
|
||||
clientSecret: z.string().trim(),
|
||||
isActive: z.boolean(),
|
||||
orgSlug: z.string().trim()
|
||||
})
|
||||
.superRefine((data, ctx) => {
|
||||
if (data.configurationType === OIDCConfigurationType.CUSTOM) {
|
||||
if (!data.issuer) {
|
||||
ctx.addIssue({
|
||||
path: ["issuer"],
|
||||
message: "Issuer is required",
|
||||
code: z.ZodIssueCode.custom
|
||||
});
|
||||
}
|
||||
if (!data.authorizationEndpoint) {
|
||||
ctx.addIssue({
|
||||
path: ["authorizationEndpoint"],
|
||||
message: "Authorization endpoint is required",
|
||||
code: z.ZodIssueCode.custom
|
||||
});
|
||||
}
|
||||
if (!data.jwksUri) {
|
||||
ctx.addIssue({
|
||||
path: ["jwksUri"],
|
||||
message: "JWKS URI is required",
|
||||
code: z.ZodIssueCode.custom
|
||||
});
|
||||
}
|
||||
if (!data.tokenEndpoint) {
|
||||
ctx.addIssue({
|
||||
path: ["tokenEndpoint"],
|
||||
message: "Token endpoint is required",
|
||||
code: z.ZodIssueCode.custom
|
||||
});
|
||||
}
|
||||
if (!data.userinfoEndpoint) {
|
||||
ctx.addIssue({
|
||||
path: ["userinfoEndpoint"],
|
||||
message: "Userinfo endpoint is required",
|
||||
code: z.ZodIssueCode.custom
|
||||
});
|
||||
}
|
||||
} else {
|
||||
// eslint-disable-next-line no-lonely-if
|
||||
if (!data.discoveryURL) {
|
||||
ctx.addIssue({
|
||||
path: ["discoveryURL"],
|
||||
message: "Discovery URL is required",
|
||||
code: z.ZodIssueCode.custom
|
||||
});
|
||||
}
|
||||
}
|
||||
}),
|
||||
response: {
|
||||
200: OidcConfigsSchema.pick({
|
||||
id: true,
|
||||
issuer: true,
|
||||
authorizationEndpoint: true,
|
||||
configurationType: true,
|
||||
discoveryURL: true,
|
||||
jwksUri: true,
|
||||
tokenEndpoint: true,
|
||||
userinfoEndpoint: true,
|
||||
orgId: true,
|
||||
isActive: true,
|
||||
allowedEmailDomains: true
|
||||
})
|
||||
}
|
||||
},
|
||||
|
||||
handler: async (req) => {
|
||||
const oidc = await server.services.oidc.createOidcCfg({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
return oidc;
|
||||
}
|
||||
});
|
||||
};
|
@@ -73,7 +73,13 @@ type TLdapConfigServiceFactoryDep = {
>;
userDAL: Pick<
  TUserDALFactory,
  "create" | "findOne" | "transaction" | "updateById" | "findUserEncKeyByUserIdsBatch" | "find"
  | "create"
  | "findOne"
  | "transaction"
  | "updateById"
  | "findUserEncKeyByUserIdsBatch"
  | "find"
  | "findUserEncKeyByUserId"
>;
userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
@@ -592,12 +598,14 @@ export const ldapConfigServiceFactory = ({
});

const isUserCompleted = Boolean(user.isAccepted);
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);

const providerAuthToken = jwt.sign(
  {
    authTokenType: AuthTokenType.PROVIDER_TOKEN,
    userId: user.id,
    username: user.username,
    hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
    ...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
    firstName,
    lastName,
@@ -27,6 +27,7 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
auditLogStreams: false,
auditLogStreamLimit: 3,
samlSSO: false,
oidcSSO: false,
scim: false,
ldap: false,
groups: false,
@@ -44,6 +44,7 @@ export type TFeatureSet = {
auditLogStreams: false;
auditLogStreamLimit: 3;
samlSSO: false;
oidcSSO: false;
scim: false;
ldap: false;
groups: false;
backend/src/ee/services/oidc/oidc-config-dal.ts (11 changed lines, Normal file)
@@ -0,0 +1,11 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TOidcConfigDALFactory = ReturnType<typeof oidcConfigDALFactory>;

export const oidcConfigDALFactory = (db: TDbClient) => {
  const oidcCfgOrm = ormify(db, TableName.OidcConfig);

  return { ...oidcCfgOrm };
};
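A minimal usage sketch, assuming ormify exposes the findOne helper that the OIDC config service below relies on; the wiring helpers and their names are hypothetical and only illustrate how the factory takes the shared db client.

```ts
import { TDbClient } from "@app/db";

import { oidcConfigDALFactory, TOidcConfigDALFactory } from "./oidc-config-dal";

// Hypothetical wiring: build the DAL once from the shared Knex-backed client at startup
// and hand it to oidcConfigServiceFactory as its oidcConfigDAL dependency.
export const buildOidcConfigDAL = (db: TDbClient): TOidcConfigDALFactory => oidcConfigDALFactory(db);

// Hypothetical read helper: fetch the single OIDC config row for an organization,
// mirroring the oidcConfigDAL.findOne({ orgId }) call made in oidc-config-service.ts.
export const findOidcConfigForOrg = async (oidcConfigDAL: TOidcConfigDALFactory, orgId: string) => {
  const config = await oidcConfigDAL.findOne({ orgId });
  return config ?? null;
};
```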
637
backend/src/ee/services/oidc/oidc-config-service.ts
Normal file
@@ -0,0 +1,637 @@
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-call */
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import jwt from "jsonwebtoken";
|
||||
import { Issuer, Issuer as OpenIdIssuer, Strategy as OpenIdStrategy, TokenSet } from "openid-client";
|
||||
|
||||
import { OrgMembershipRole, OrgMembershipStatus, SecretKeyEncoding, TableName, TUsers } from "@app/db/schemas";
|
||||
import { TOidcConfigsUpdate } from "@app/db/schemas/oidc-configs";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import {
|
||||
decryptSymmetric,
|
||||
encryptSymmetric,
|
||||
generateAsymmetricKeyPair,
|
||||
generateSymmetricKey,
|
||||
infisicalSymmetricDecrypt,
|
||||
infisicalSymmetricEncypt
|
||||
} from "@app/lib/crypto/encryption";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
|
||||
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
|
||||
import { TokenType } from "@app/services/auth-token/auth-token-types";
|
||||
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
|
||||
import { TOrgDALFactory } from "@app/services/org/org-dal";
|
||||
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
|
||||
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
|
||||
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
|
||||
import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||
import { normalizeUsername } from "@app/services/user/user-fns";
|
||||
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
|
||||
import { UserAliasType } from "@app/services/user-alias/user-alias-types";
|
||||
|
||||
import { TOidcConfigDALFactory } from "./oidc-config-dal";
|
||||
import {
|
||||
OIDCConfigurationType,
|
||||
TCreateOidcCfgDTO,
|
||||
TGetOidcCfgDTO,
|
||||
TOidcLoginDTO,
|
||||
TUpdateOidcCfgDTO
|
||||
} from "./oidc-config-types";
|
||||
|
||||
type TOidcConfigServiceFactoryDep = {
|
||||
userDAL: Pick<
|
||||
TUserDALFactory,
|
||||
"create" | "findOne" | "transaction" | "updateById" | "findById" | "findUserEncKeyByUserId"
|
||||
>;
|
||||
userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
|
||||
orgDAL: Pick<
|
||||
TOrgDALFactory,
|
||||
"createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById"
|
||||
>;
|
||||
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "create">;
|
||||
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
|
||||
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
|
||||
smtpService: Pick<TSmtpService, "sendMail">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
oidcConfigDAL: Pick<TOidcConfigDALFactory, "findOne" | "update" | "create">;
|
||||
};
|
||||
|
||||
export type TOidcConfigServiceFactory = ReturnType<typeof oidcConfigServiceFactory>;
|
||||
|
||||
export const oidcConfigServiceFactory = ({
|
||||
orgDAL,
|
||||
orgMembershipDAL,
|
||||
userDAL,
|
||||
userAliasDAL,
|
||||
licenseService,
|
||||
permissionService,
|
||||
tokenService,
|
||||
orgBotDAL,
|
||||
smtpService,
|
||||
oidcConfigDAL
|
||||
}: TOidcConfigServiceFactoryDep) => {
|
||||
const getOidc = async (dto: TGetOidcCfgDTO) => {
|
||||
const org = await orgDAL.findOne({ slug: dto.orgSlug });
|
||||
if (!org) {
|
||||
throw new BadRequestError({
|
||||
message: "Organization not found",
|
||||
name: "OrgNotFound"
|
||||
});
|
||||
}
|
||||
if (dto.type === "external") {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
dto.actor,
|
||||
dto.actorId,
|
||||
org.id,
|
||||
dto.actorAuthMethod,
|
||||
dto.actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Sso);
|
||||
}
|
||||
|
||||
const oidcCfg = await oidcConfigDAL.findOne({
|
||||
orgId: org.id
|
||||
});
|
||||
|
||||
if (!oidcCfg) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to find organization OIDC configuration"
|
||||
});
|
||||
}
|
||||
|
||||
// decrypt and return cfg
|
||||
const orgBot = await orgBotDAL.findOne({ orgId: oidcCfg.orgId });
|
||||
if (!orgBot) {
|
||||
throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" });
|
||||
}
|
||||
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: orgBot.encryptedSymmetricKey,
|
||||
iv: orgBot.symmetricKeyIV,
|
||||
tag: orgBot.symmetricKeyTag,
|
||||
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
});
|
||||
|
||||
const { encryptedClientId, clientIdIV, clientIdTag, encryptedClientSecret, clientSecretIV, clientSecretTag } =
|
||||
oidcCfg;
|
||||
|
||||
let clientId = "";
|
||||
if (encryptedClientId && clientIdIV && clientIdTag) {
|
||||
clientId = decryptSymmetric({
|
||||
ciphertext: encryptedClientId,
|
||||
key,
|
||||
tag: clientIdTag,
|
||||
iv: clientIdIV
|
||||
});
|
||||
}
|
||||
|
||||
let clientSecret = "";
|
||||
if (encryptedClientSecret && clientSecretIV && clientSecretTag) {
|
||||
clientSecret = decryptSymmetric({
|
||||
key,
|
||||
tag: clientSecretTag,
|
||||
iv: clientSecretIV,
|
||||
ciphertext: encryptedClientSecret
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
id: oidcCfg.id,
|
||||
issuer: oidcCfg.issuer,
|
||||
authorizationEndpoint: oidcCfg.authorizationEndpoint,
|
||||
configurationType: oidcCfg.configurationType,
|
||||
discoveryURL: oidcCfg.discoveryURL,
|
||||
jwksUri: oidcCfg.jwksUri,
|
||||
tokenEndpoint: oidcCfg.tokenEndpoint,
|
||||
userinfoEndpoint: oidcCfg.userinfoEndpoint,
|
||||
orgId: oidcCfg.orgId,
|
||||
isActive: oidcCfg.isActive,
|
||||
allowedEmailDomains: oidcCfg.allowedEmailDomains,
|
||||
clientId,
|
||||
clientSecret
|
||||
};
|
||||
};
|
||||
|
||||
const oidcLogin = async ({ externalId, email, firstName, lastName, orgId, callbackPort }: TOidcLoginDTO) => {
|
||||
const serverCfg = await getServerCfg();
|
||||
const appCfg = getConfig();
|
||||
const userAlias = await userAliasDAL.findOne({
|
||||
externalId,
|
||||
orgId,
|
||||
aliasType: UserAliasType.OIDC
|
||||
});
|
||||
|
||||
const organization = await orgDAL.findOrgById(orgId);
|
||||
if (!organization) throw new BadRequestError({ message: "Org not found" });
|
||||
|
||||
let user: TUsers;
|
||||
if (userAlias) {
|
||||
user = await userDAL.transaction(async (tx) => {
|
||||
const foundUser = await userDAL.findById(userAlias.userId, tx);
|
||||
const [orgMembership] = await orgDAL.findMembership(
|
||||
{
|
||||
[`${TableName.OrgMembership}.userId` as "userId"]: foundUser.id,
|
||||
[`${TableName.OrgMembership}.orgId` as "id"]: orgId
|
||||
},
|
||||
{ tx }
|
||||
);
|
||||
if (!orgMembership) {
|
||||
await orgMembershipDAL.create(
|
||||
{
|
||||
userId: userAlias.userId,
|
||||
inviteEmail: email,
|
||||
orgId,
|
||||
role: OrgMembershipRole.Member,
|
||||
status: foundUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
|
||||
},
|
||||
tx
|
||||
);
|
||||
// Only update the membership to Accepted if the user account is already completed.
|
||||
} else if (orgMembership.status === OrgMembershipStatus.Invited && foundUser.isAccepted) {
|
||||
await orgDAL.updateMembershipById(
|
||||
orgMembership.id,
|
||||
{
|
||||
status: OrgMembershipStatus.Accepted
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
|
||||
return foundUser;
|
||||
});
|
||||
} else {
|
||||
user = await userDAL.transaction(async (tx) => {
|
||||
let newUser: TUsers | undefined;
|
||||
|
||||
if (serverCfg.trustOidcEmails) {
|
||||
newUser = await userDAL.findOne(
|
||||
{
|
||||
email,
|
||||
isEmailVerified: true
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
|
||||
if (!newUser) {
|
||||
const uniqueUsername = await normalizeUsername(externalId, userDAL);
|
||||
newUser = await userDAL.create(
|
||||
{
|
||||
email,
|
||||
firstName,
|
||||
isEmailVerified: serverCfg.trustOidcEmails,
|
||||
username: serverCfg.trustOidcEmails ? email : uniqueUsername,
|
||||
lastName,
|
||||
authMethods: [],
|
||||
isGhost: false
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
|
||||
await userAliasDAL.create(
|
||||
{
|
||||
userId: newUser.id,
|
||||
aliasType: UserAliasType.OIDC,
|
||||
externalId,
|
||||
emails: email ? [email] : [],
|
||||
orgId
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
const [orgMembership] = await orgDAL.findMembership(
|
||||
{
|
||||
[`${TableName.OrgMembership}.userId` as "userId"]: newUser.id,
|
||||
[`${TableName.OrgMembership}.orgId` as "id"]: orgId
|
||||
},
|
||||
{ tx }
|
||||
);
|
||||
|
||||
if (!orgMembership) {
|
||||
await orgMembershipDAL.create(
|
||||
{
|
||||
userId: newUser.id,
|
||||
inviteEmail: email,
|
||||
orgId,
|
||||
role: OrgMembershipRole.Member,
|
||||
status: newUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
|
||||
},
|
||||
tx
|
||||
);
|
||||
// Only update the membership to Accepted if the user account is already completed.
|
||||
} else if (orgMembership.status === OrgMembershipStatus.Invited && newUser.isAccepted) {
|
||||
await orgDAL.updateMembershipById(
|
||||
orgMembership.id,
|
||||
{
|
||||
status: OrgMembershipStatus.Accepted
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
|
||||
return newUser;
|
||||
});
|
||||
}
|
||||
|
||||
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
|
||||
|
||||
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
|
||||
const isUserCompleted = Boolean(user.isAccepted);
|
||||
const providerAuthToken = jwt.sign(
|
||||
{
|
||||
authTokenType: AuthTokenType.PROVIDER_TOKEN,
|
||||
userId: user.id,
|
||||
username: user.username,
|
||||
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
|
||||
firstName,
|
||||
lastName,
|
||||
organizationName: organization.name,
|
||||
organizationId: organization.id,
|
||||
organizationSlug: organization.slug,
|
||||
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
|
||||
authMethod: AuthMethod.OIDC,
|
||||
authType: UserAliasType.OIDC,
|
||||
isUserCompleted,
|
||||
...(callbackPort && { callbackPort })
|
||||
},
|
||||
appCfg.AUTH_SECRET,
|
||||
{
|
||||
expiresIn: appCfg.JWT_PROVIDER_AUTH_LIFETIME
|
||||
}
|
||||
);
|
||||
|
||||
if (user.email && !user.isEmailVerified) {
|
||||
const token = await tokenService.createTokenForUser({
|
||||
type: TokenType.TOKEN_EMAIL_VERIFICATION,
|
||||
userId: user.id
|
||||
});
|
||||
|
||||
await smtpService.sendMail({
|
||||
template: SmtpTemplates.EmailVerification,
|
||||
subjectLine: "Infisical confirmation code",
|
||||
recipients: [user.email],
|
||||
substitutions: {
|
||||
code: token
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return { isUserCompleted, providerAuthToken };
|
||||
};
|
||||
|
||||
const updateOidcCfg = async ({
|
||||
orgSlug,
|
||||
allowedEmailDomains,
|
||||
configurationType,
|
||||
discoveryURL,
|
||||
actor,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorId,
|
||||
issuer,
|
||||
isActive,
|
||||
authorizationEndpoint,
|
||||
jwksUri,
|
||||
tokenEndpoint,
|
||||
userinfoEndpoint,
|
||||
clientId,
|
||||
clientSecret
|
||||
}: TUpdateOidcCfgDTO) => {
|
||||
const org = await orgDAL.findOne({
|
||||
slug: orgSlug
|
||||
});
|
||||
|
||||
if (!org) {
|
||||
throw new BadRequestError({
|
||||
message: "Organization not found"
|
||||
});
|
||||
}
|
||||
|
||||
const plan = await licenseService.getPlan(org.id);
|
||||
if (!plan.oidcSSO)
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to update OIDC SSO configuration due to plan restriction. Upgrade plan to update SSO configuration."
|
||||
});
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
org.id,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Sso);
|
||||
|
||||
const orgBot = await orgBotDAL.findOne({ orgId: org.id });
|
||||
if (!orgBot) throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" });
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: orgBot.encryptedSymmetricKey,
|
||||
iv: orgBot.symmetricKeyIV,
|
||||
tag: orgBot.symmetricKeyTag,
|
||||
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
});
|
||||
|
||||
const updateQuery: TOidcConfigsUpdate = {
|
||||
allowedEmailDomains,
|
||||
configurationType,
|
||||
discoveryURL,
|
||||
issuer,
|
||||
authorizationEndpoint,
|
||||
tokenEndpoint,
|
||||
userinfoEndpoint,
|
||||
jwksUri,
|
||||
isActive
|
||||
};
|
||||
|
||||
if (clientId !== undefined) {
|
||||
const { ciphertext: encryptedClientId, iv: clientIdIV, tag: clientIdTag } = encryptSymmetric(clientId, key);
|
||||
updateQuery.encryptedClientId = encryptedClientId;
|
||||
updateQuery.clientIdIV = clientIdIV;
|
||||
updateQuery.clientIdTag = clientIdTag;
|
||||
}
|
||||
|
||||
if (clientSecret !== undefined) {
|
||||
const {
|
||||
ciphertext: encryptedClientSecret,
|
||||
iv: clientSecretIV,
|
||||
tag: clientSecretTag
|
||||
} = encryptSymmetric(clientSecret, key);
|
||||
|
||||
updateQuery.encryptedClientSecret = encryptedClientSecret;
|
||||
updateQuery.clientSecretIV = clientSecretIV;
|
||||
updateQuery.clientSecretTag = clientSecretTag;
|
||||
}
|
||||
|
||||
const [ssoConfig] = await oidcConfigDAL.update({ orgId: org.id }, updateQuery);
|
||||
return ssoConfig;
|
||||
};
|
||||
|
||||
const createOidcCfg = async ({
|
||||
orgSlug,
|
||||
allowedEmailDomains,
|
||||
configurationType,
|
||||
discoveryURL,
|
||||
actor,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorId,
|
||||
issuer,
|
||||
isActive,
|
||||
authorizationEndpoint,
|
||||
jwksUri,
|
||||
tokenEndpoint,
|
||||
userinfoEndpoint,
|
||||
clientId,
|
||||
clientSecret
|
||||
}: TCreateOidcCfgDTO) => {
|
||||
const org = await orgDAL.findOne({
|
||||
slug: orgSlug
|
||||
});
|
||||
if (!org) {
|
||||
throw new BadRequestError({
|
||||
message: "Organization not found"
|
||||
});
|
||||
}
|
||||
|
||||
const plan = await licenseService.getPlan(org.id);
|
||||
if (!plan.oidcSSO)
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to create OIDC SSO configuration due to plan restriction. Upgrade plan to update SSO configuration."
|
||||
});
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
org.id,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Sso);
|
||||
|
||||
const orgBot = await orgBotDAL.transaction(async (tx) => {
|
||||
const doc = await orgBotDAL.findOne({ orgId: org.id }, tx);
|
||||
if (doc) return doc;
|
||||
|
||||
const { privateKey, publicKey } = generateAsymmetricKeyPair();
|
||||
const key = generateSymmetricKey();
|
||||
const {
|
||||
ciphertext: encryptedPrivateKey,
|
||||
iv: privateKeyIV,
|
||||
tag: privateKeyTag,
|
||||
encoding: privateKeyKeyEncoding,
|
||||
algorithm: privateKeyAlgorithm
|
||||
} = infisicalSymmetricEncypt(privateKey);
|
||||
const {
|
||||
ciphertext: encryptedSymmetricKey,
|
||||
iv: symmetricKeyIV,
|
||||
tag: symmetricKeyTag,
|
||||
encoding: symmetricKeyKeyEncoding,
|
||||
algorithm: symmetricKeyAlgorithm
|
||||
} = infisicalSymmetricEncypt(key);
|
||||
|
||||
return orgBotDAL.create(
|
||||
{
|
||||
name: "Infisical org bot",
|
||||
publicKey,
|
||||
privateKeyIV,
|
||||
encryptedPrivateKey,
|
||||
symmetricKeyIV,
|
||||
symmetricKeyTag,
|
||||
encryptedSymmetricKey,
|
||||
symmetricKeyAlgorithm,
|
||||
orgId: org.id,
|
||||
privateKeyTag,
|
||||
privateKeyAlgorithm,
|
||||
privateKeyKeyEncoding,
|
||||
symmetricKeyKeyEncoding
|
||||
},
|
||||
tx
|
||||
);
|
||||
});
|
||||
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: orgBot.encryptedSymmetricKey,
|
||||
iv: orgBot.symmetricKeyIV,
|
||||
tag: orgBot.symmetricKeyTag,
|
||||
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
});
|
||||
|
||||
const { ciphertext: encryptedClientId, iv: clientIdIV, tag: clientIdTag } = encryptSymmetric(clientId, key);
|
||||
const {
|
||||
ciphertext: encryptedClientSecret,
|
||||
iv: clientSecretIV,
|
||||
tag: clientSecretTag
|
||||
} = encryptSymmetric(clientSecret, key);
|
||||
|
||||
const oidcCfg = await oidcConfigDAL.create({
|
||||
issuer,
|
||||
isActive,
|
||||
configurationType,
|
||||
discoveryURL,
|
||||
authorizationEndpoint,
|
||||
allowedEmailDomains,
|
||||
jwksUri,
|
||||
tokenEndpoint,
|
||||
userinfoEndpoint,
|
||||
orgId: org.id,
|
||||
encryptedClientId,
|
||||
clientIdIV,
|
||||
clientIdTag,
|
||||
encryptedClientSecret,
|
||||
clientSecretIV,
|
||||
clientSecretTag
|
||||
});
|
||||
|
||||
return oidcCfg;
|
||||
};
|
||||
|
||||
const getOrgAuthStrategy = async (orgSlug: string, callbackPort?: string) => {
|
||||
const appCfg = getConfig();
|
||||
|
||||
const org = await orgDAL.findOne({
|
||||
slug: orgSlug
|
||||
});
|
||||
|
||||
if (!org) {
|
||||
throw new BadRequestError({
|
||||
message: "Organization not found."
|
||||
});
|
||||
}
|
||||
|
||||
const oidcCfg = await getOidc({
|
||||
type: "internal",
|
||||
orgSlug
|
||||
});
|
||||
|
||||
if (!oidcCfg || !oidcCfg.isActive) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to authenticate with OIDC SSO"
|
||||
});
|
||||
}
|
||||
|
||||
let issuer: Issuer;
|
||||
if (oidcCfg.configurationType === OIDCConfigurationType.DISCOVERY_URL) {
|
||||
if (!oidcCfg.discoveryURL) {
|
||||
throw new BadRequestError({
|
||||
message: "OIDC not configured correctly"
|
||||
});
|
||||
}
|
||||
issuer = await Issuer.discover(oidcCfg.discoveryURL);
|
||||
} else {
|
||||
if (
|
||||
!oidcCfg.issuer ||
|
||||
!oidcCfg.authorizationEndpoint ||
|
||||
!oidcCfg.jwksUri ||
|
||||
!oidcCfg.tokenEndpoint ||
|
||||
!oidcCfg.userinfoEndpoint
|
||||
) {
|
||||
throw new BadRequestError({
|
||||
message: "OIDC not configured correctly"
|
||||
});
|
||||
}
|
||||
issuer = new OpenIdIssuer({
|
||||
issuer: oidcCfg.issuer,
|
||||
authorization_endpoint: oidcCfg.authorizationEndpoint,
|
||||
jwks_uri: oidcCfg.jwksUri,
|
||||
token_endpoint: oidcCfg.tokenEndpoint,
|
||||
userinfo_endpoint: oidcCfg.userinfoEndpoint
|
||||
});
|
||||
}
|
||||
|
||||
const client = new issuer.Client({
|
||||
client_id: oidcCfg.clientId,
|
||||
client_secret: oidcCfg.clientSecret,
|
||||
redirect_uris: [`${appCfg.SITE_URL}/api/v1/sso/oidc/callback`]
|
||||
});
|
||||
|
||||
const strategy = new OpenIdStrategy(
|
||||
{
|
||||
client,
|
||||
passReqToCallback: true
|
||||
},
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(_req: any, tokenSet: TokenSet, cb: any) => {
|
||||
const claims = tokenSet.claims();
|
||||
if (!claims.email || !claims.given_name) {
|
||||
throw new BadRequestError({
|
||||
message: "Invalid request. Missing email or first name"
|
||||
});
|
||||
}
|
||||
|
||||
if (oidcCfg.allowedEmailDomains) {
|
||||
const allowedDomains = oidcCfg.allowedEmailDomains.split(", ");
|
||||
if (!allowedDomains.includes(claims.email.split("@")[1])) {
|
||||
throw new BadRequestError({
|
||||
message: "Email not allowed."
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
oidcLogin({
|
||||
email: claims.email,
|
||||
externalId: claims.sub,
|
||||
firstName: claims.given_name ?? "",
|
||||
lastName: claims.family_name ?? "",
|
||||
orgId: org.id,
|
||||
callbackPort
|
||||
})
|
||||
.then(({ isUserCompleted, providerAuthToken }) => {
|
||||
cb(null, { isUserCompleted, providerAuthToken });
|
||||
})
|
||||
.catch((error) => {
|
||||
cb(error);
|
||||
});
|
||||
}
|
||||
);
|
||||
|
||||
return strategy;
|
||||
};
|
||||
|
||||
return { oidcLogin, getOrgAuthStrategy, getOidc, updateOidcCfg, createOidcCfg };
|
||||
};
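As an aside, the allowed-email-domain guard used in the strategy's verify callback above can be read as a small standalone helper. The sketch below is illustrative only and assumes, as the code above does, that allowedEmailDomains is stored as a single comma-plus-space separated string.

// Illustrative sketch of the domain allow-list check used in the OIDC verify callback above.
const isEmailDomainAllowed = (email: string, allowedEmailDomains?: string | null): boolean => {
  if (!allowedEmailDomains) return true; // no restriction configured
  const allowedDomains = allowedEmailDomains.split(", "); // e.g. "acme.com, example.org"
  return allowedDomains.includes(email.split("@")[1]);
};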
|
backend/src/ee/services/oidc/oidc-config-types.ts (new file, 56 lines)
@@ -0,0 +1,56 @@
import { TGenericPermission } from "@app/lib/types";

export enum OIDCConfigurationType {
  CUSTOM = "custom",
  DISCOVERY_URL = "discoveryURL"
}

export type TOidcLoginDTO = {
  externalId: string;
  email: string;
  firstName: string;
  lastName?: string;
  orgId: string;
  callbackPort?: string;
};

export type TGetOidcCfgDTO =
  | ({
      type: "external";
      orgSlug: string;
    } & TGenericPermission)
  | {
      type: "internal";
      orgSlug: string;
    };

export type TCreateOidcCfgDTO = {
  issuer?: string;
  authorizationEndpoint?: string;
  discoveryURL?: string;
  configurationType: OIDCConfigurationType;
  allowedEmailDomains?: string;
  jwksUri?: string;
  tokenEndpoint?: string;
  userinfoEndpoint?: string;
  clientId: string;
  clientSecret: string;
  isActive: boolean;
  orgSlug: string;
} & TGenericPermission;

export type TUpdateOidcCfgDTO = Partial<{
  issuer: string;
  authorizationEndpoint: string;
  allowedEmailDomains: string;
  discoveryURL: string;
  jwksUri: string;
  configurationType: OIDCConfigurationType;
  tokenEndpoint: string;
  userinfoEndpoint: string;
  clientId: string;
  clientSecret: string;
  isActive: boolean;
  orgSlug: string;
}> &
  TGenericPermission;
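To make the two configuration modes concrete, here is a hedged sketch of how TCreateOidcCfgDTO might be populated: a discovery-based config only needs the discovery URL, while a custom config supplies each endpoint explicitly. All values and the relative import path below are placeholders, and the TGenericPermission actor fields are omitted for brevity.

// Hypothetical usage sketch — values are placeholders, not taken from the changeset.
import { TGenericPermission } from "@app/lib/types";
import { OIDCConfigurationType, TCreateOidcCfgDTO } from "./oidc-config-types";

const discoveryCfg: Omit<TCreateOidcCfgDTO, keyof TGenericPermission> = {
  configurationType: OIDCConfigurationType.DISCOVERY_URL,
  discoveryURL: "https://idp.example.com/.well-known/openid-configuration",
  allowedEmailDomains: "example.com",
  clientId: "example-client-id",
  clientSecret: "example-client-secret",
  isActive: true,
  orgSlug: "example-org"
};

const customCfg: Omit<TCreateOidcCfgDTO, keyof TGenericPermission> = {
  configurationType: OIDCConfigurationType.CUSTOM,
  issuer: "https://idp.example.com",
  authorizationEndpoint: "https://idp.example.com/oauth2/authorize",
  tokenEndpoint: "https://idp.example.com/oauth2/token",
  userinfoEndpoint: "https://idp.example.com/oauth2/userinfo",
  jwksUri: "https://idp.example.com/.well-known/jwks.json",
  clientId: "example-client-id",
  clientSecret: "example-client-secret",
  isActive: true,
  orgSlug: "example-org"
};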
@@ -116,7 +116,6 @@ const buildMemberPermission = () => {
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Role);
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Billing);
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Sso);
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.IncidentAccount);
|
||||
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.SecretScanning);
|
||||
|
@@ -41,7 +41,10 @@ import { TCreateSamlCfgDTO, TGetSamlCfgDTO, TSamlLoginDTO, TUpdateSamlCfgDTO } f
|
||||
|
||||
type TSamlConfigServiceFactoryDep = {
|
||||
samlConfigDAL: Pick<TSamlConfigDALFactory, "create" | "findOne" | "update" | "findById">;
|
||||
userDAL: Pick<TUserDALFactory, "create" | "findOne" | "transaction" | "updateById" | "findById">;
|
||||
userDAL: Pick<
|
||||
TUserDALFactory,
|
||||
"create" | "findOne" | "transaction" | "updateById" | "findById" | "findUserEncKeyByUserId"
|
||||
>;
|
||||
userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
|
||||
orgDAL: Pick<
|
||||
TOrgDALFactory,
|
||||
@@ -452,6 +455,7 @@ export const samlConfigServiceFactory = ({
|
||||
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
|
||||
|
||||
const isUserCompleted = Boolean(user.isAccepted);
|
||||
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
|
||||
const providerAuthToken = jwt.sign(
|
||||
{
|
||||
authTokenType: AuthTokenType.PROVIDER_TOKEN,
|
||||
@@ -464,6 +468,7 @@ export const samlConfigServiceFactory = ({
|
||||
organizationId: organization.id,
|
||||
organizationSlug: organization.slug,
|
||||
authMethod: authProvider,
|
||||
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
|
||||
authType: UserAliasType.SAML,
|
||||
isUserCompleted,
|
||||
...(relayState
|
||||
|
@@ -331,8 +331,8 @@ export const snapshotDALFactory = (db: TDbClient) => {
|
||||
* Prunes excess snapshots from the database to ensure only a specified number of recent snapshots are retained for each folder.
|
||||
*
|
||||
* This function operates in three main steps:
|
||||
* 1. Pruning snapshots from root/non-versioned folders.
|
||||
* 2. Pruning snapshots from versioned folders.
|
||||
* 1. Pruning snapshots from current folders.
|
||||
* 2. Pruning snapshots from non-current folders (versioned ones).
|
||||
* 3. Removing orphaned snapshots that do not belong to any existing folder or folder version.
|
||||
*
|
||||
* The function processes snapshots in batches, determined by the `PRUNE_FOLDER_BATCH_SIZE` constant,
|
||||
@@ -350,7 +350,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
|
||||
|
||||
try {
|
||||
let uuidOffset = "00000000-0000-0000-0000-000000000000";
|
||||
// cleanup snapshots from root/non-versioned folders
|
||||
// cleanup snapshots from current folders
|
||||
// eslint-disable-next-line no-constant-condition, no-unreachable-loop
|
||||
while (true) {
|
||||
const folderBatch = await db(TableName.SecretFolder)
|
||||
@@ -382,12 +382,11 @@ export const snapshotDALFactory = (db: TDbClient) => {
|
||||
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`)
|
||||
.join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`)
|
||||
.join("snapshot_cte", "snapshot_cte.id", `${TableName.Snapshot}.id`)
|
||||
.whereNull(`${TableName.SecretFolder}.parentId`)
|
||||
.whereRaw(`snapshot_cte.row_num > ${TableName.Project}."pitVersionLimit"`)
|
||||
.delete();
|
||||
} catch (err) {
|
||||
logger.error(
|
||||
`Failed to prune snapshots from root/non-versioned folders in range ${batchEntries[0]}:${
|
||||
`Failed to prune snapshots from current folders in range ${batchEntries[0]}:${
|
||||
batchEntries[batchEntries.length - 1]
|
||||
}`
|
||||
);
|
||||
@@ -399,7 +398,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
}
|
||||
|
||||
// cleanup snapshots from versioned folders
|
||||
// cleanup snapshots from non-current folders
|
||||
uuidOffset = "00000000-0000-0000-0000-000000000000";
|
||||
// eslint-disable-next-line no-constant-condition
|
||||
while (true) {
|
||||
@@ -440,7 +439,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
|
||||
.delete();
|
||||
} catch (err) {
|
||||
logger.error(
|
||||
`Failed to prune snapshots from versioned folders in range ${batchEntries[0]}:${
|
||||
`Failed to prune snapshots from non-current folders in range ${batchEntries[0]}:${
|
||||
batchEntries[batchEntries.length - 1]
|
||||
}`
|
||||
);
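The pruning routine above walks folders in fixed-size batches keyed by an ever-increasing UUID cursor, as described in its doc comment. A rough, non-authoritative sketch of that keyset pattern follows; the constant name and batch size are assumptions, not values from the changeset.

// Sketch of the UUID-keyset batching loop used for pruning; not the exact implementation.
const PRUNE_FOLDER_BATCH_SIZE = 10000; // assumed batch size
let uuidOffset = "00000000-0000-0000-0000-000000000000";
// eslint-disable-next-line no-constant-condition
while (true) {
  const folderBatch = await db(TableName.SecretFolder)
    .where("id", ">", uuidOffset)
    .orderBy("id", "asc")
    .limit(PRUNE_FOLDER_BATCH_SIZE)
    .select("id");
  if (!folderBatch.length) break; // nothing left to prune
  // ...delete snapshots in this batch whose row number exceeds the project's pitVersionLimit...
  uuidOffset = folderBatch[folderBatch.length - 1].id; // advance the cursor past this batch
}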
|
||||
|
@@ -508,12 +508,27 @@ export const SECRET_TAGS = {
|
||||
LIST: {
|
||||
projectId: "The ID of the project to list tags from."
|
||||
},
|
||||
GET_TAG_BY_ID: {
|
||||
projectId: "The ID of the project to get tags from.",
|
||||
tagId: "The ID of the tag to get details"
|
||||
},
|
||||
GET_TAG_BY_SLUG: {
|
||||
projectId: "The ID of the project to get tags from.",
|
||||
tagSlug: "The slug of the tag to get details"
|
||||
},
|
||||
CREATE: {
|
||||
projectId: "The ID of the project to create the tag in.",
|
||||
name: "The name of the tag to create.",
|
||||
slug: "The slug of the tag to create.",
|
||||
color: "The color of the tag to create."
|
||||
},
|
||||
UPDATE: {
|
||||
projectId: "The ID of the project to update the tag in.",
|
||||
tagId: "The ID of the tag to get details",
|
||||
name: "The name of the tag to update.",
|
||||
slug: "The slug of the tag to update.",
|
||||
color: "The color of the tag to update."
|
||||
},
|
||||
DELETE: {
|
||||
tagId: "The ID of the tag to delete.",
|
||||
projectId: "The ID of the project to delete the tag from."
|
||||
|
@@ -29,7 +29,7 @@ const envSchema = z
|
||||
DB_USER: zpStr(z.string().describe("Postgres database username").optional()),
|
||||
DB_PASSWORD: zpStr(z.string().describe("Postgres database password").optional()),
|
||||
DB_NAME: zpStr(z.string().describe("Postgres database name").optional()),
|
||||
|
||||
BCRYPT_SALT_ROUND: z.number().default(12),
|
||||
NODE_ENV: z.enum(["development", "test", "production"]).default("production"),
|
||||
SALT_ROUNDS: z.coerce.number().default(10),
|
||||
INITIAL_ORGANIZATION_NAME: zpStr(z.string().optional()),
|
||||
|
@@ -6,7 +6,7 @@ import tweetnacl from "tweetnacl-util";
|
||||
|
||||
import { TUserEncryptionKeys } from "@app/db/schemas";
|
||||
|
||||
import { decryptSymmetric, encryptAsymmetric, encryptSymmetric } from "./encryption";
|
||||
import { decryptSymmetric128BitHexKeyUTF8, encryptAsymmetric, encryptSymmetric } from "./encryption";
|
||||
|
||||
export const generateSrpServerKey = async (salt: string, verifier: string) => {
|
||||
// eslint-disable-next-line new-cap
|
||||
@@ -97,30 +97,55 @@ export const generateUserSrpKeys = async (email: string, password: string) => {
|
||||
};
|
||||
};
|
||||
|
||||
export const getUserPrivateKey = async (password: string, user: TUserEncryptionKeys) => {
|
||||
const derivedKey = await argon2.hash(password, {
|
||||
salt: Buffer.from(user.salt),
|
||||
memoryCost: 65536,
|
||||
timeCost: 3,
|
||||
parallelism: 1,
|
||||
hashLength: 32,
|
||||
type: argon2.argon2id,
|
||||
raw: true
|
||||
});
|
||||
if (!derivedKey) throw new Error("Failed to derive key from password");
|
||||
const key = decryptSymmetric({
|
||||
ciphertext: user.protectedKey!,
|
||||
iv: user.protectedKeyIV!,
|
||||
tag: user.protectedKeyTag!,
|
||||
key: derivedKey.toString("base64")
|
||||
});
|
||||
const privateKey = decryptSymmetric({
|
||||
ciphertext: user.encryptedPrivateKey,
|
||||
iv: user.iv,
|
||||
tag: user.tag,
|
||||
key
|
||||
});
|
||||
return privateKey;
|
||||
export const getUserPrivateKey = async (
|
||||
password: string,
|
||||
user: Pick<
|
||||
TUserEncryptionKeys,
|
||||
| "protectedKeyTag"
|
||||
| "protectedKey"
|
||||
| "protectedKeyIV"
|
||||
| "encryptedPrivateKey"
|
||||
| "iv"
|
||||
| "salt"
|
||||
| "tag"
|
||||
| "encryptionVersion"
|
||||
>
|
||||
) => {
|
||||
if (user.encryptionVersion === 1) {
|
||||
return decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: user.encryptedPrivateKey,
|
||||
iv: user.iv,
|
||||
tag: user.tag,
|
||||
key: password.slice(0, 32).padStart(32 + (password.slice(0, 32).length - new Blob([password]).size), "0")
|
||||
});
|
||||
}
|
||||
if (user.encryptionVersion === 2 && user.protectedKey && user.protectedKeyIV && user.protectedKeyTag) {
|
||||
const derivedKey = await argon2.hash(password, {
|
||||
salt: Buffer.from(user.salt),
|
||||
memoryCost: 65536,
|
||||
timeCost: 3,
|
||||
parallelism: 1,
|
||||
hashLength: 32,
|
||||
type: argon2.argon2id,
|
||||
raw: true
|
||||
});
|
||||
if (!derivedKey) throw new Error("Failed to derive key from password");
|
||||
const key = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: user.protectedKey,
|
||||
iv: user.protectedKeyIV,
|
||||
tag: user.protectedKeyTag,
|
||||
key: derivedKey
|
||||
});
|
||||
|
||||
const privateKey = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: user.encryptedPrivateKey,
|
||||
iv: user.iv,
|
||||
tag: user.tag,
|
||||
key: Buffer.from(key, "hex")
|
||||
});
|
||||
return privateKey;
|
||||
}
|
||||
throw new Error(`GetUserPrivateKey: Encryption version not found`);
|
||||
};
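A minimal usage sketch of the reworked helper, assuming a version-2 user encryption record loaded elsewhere; the object literal mirrors the Pick<> signature above, and userEncRecord is a placeholder name.

// Illustrative only — userEncRecord stands in for a row from the user encryption key DAL.
const privateKey = await getUserPrivateKey(password, {
  encryptionVersion: 2,
  protectedKey: userEncRecord.protectedKey,
  protectedKeyIV: userEncRecord.protectedKeyIV,
  protectedKeyTag: userEncRecord.protectedKeyTag,
  encryptedPrivateKey: userEncRecord.encryptedPrivateKey,
  iv: userEncRecord.iv,
  tag: userEncRecord.tag,
  salt: userEncRecord.salt
});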
|
||||
|
||||
export const buildUserProjectKey = async (privateKey: string, publickey: string) => {
|
||||
|
@@ -59,6 +59,18 @@ export class BadRequestError extends Error {
|
||||
}
|
||||
}
|
||||
|
||||
export class NotFoundError extends Error {
|
||||
name: string;
|
||||
|
||||
error: unknown;
|
||||
|
||||
constructor({ name, error, message }: { message?: string; name?: string; error?: unknown }) {
|
||||
super(message ?? "The requested entity is not found");
|
||||
this.name = name || "NotFound";
|
||||
this.error = error;
|
||||
}
|
||||
}
|
||||
|
||||
export class DisableRotationErrors extends Error {
|
||||
name: string;
|
||||
|
||||
|
@@ -6,6 +6,7 @@ import {
|
||||
BadRequestError,
|
||||
DatabaseError,
|
||||
InternalServerError,
|
||||
NotFoundError,
|
||||
ScimRequestError,
|
||||
UnauthorizedError
|
||||
} from "@app/lib/errors";
|
||||
@@ -15,6 +16,8 @@ export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider
|
||||
req.log.error(error);
|
||||
if (error instanceof BadRequestError) {
|
||||
void res.status(400).send({ statusCode: 400, message: error.message, error: error.name });
|
||||
} else if (error instanceof NotFoundError) {
|
||||
void res.status(404).send({ statusCode: 404, message: error.message, error: error.name });
|
||||
} else if (error instanceof UnauthorizedError) {
|
||||
void res.status(403).send({ statusCode: 403, message: error.message, error: error.name });
|
||||
} else if (error instanceof DatabaseError || error instanceof InternalServerError) {
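With the NotFoundError class added above wired into this handler, a service can simply throw it and let the plugin shape the 404 response. A hedged sketch, reusing orgDAL.findOrgById as it appears elsewhere in this changeset:

// Sketch: throwing the new error class from a service yields a 404 via the handler above.
const getOrgOrThrow = async (orgId: string) => {
  const org = await orgDAL.findOrgById(orgId);
  if (!org) throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` });
  return org;
};
// -> { statusCode: 404, message: "Organization with ID '...' not found", error: "NotFound" }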
|
||||
|
@@ -32,6 +32,8 @@ import { ldapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-conf
|
||||
import { ldapGroupMapDALFactory } from "@app/ee/services/ldap-config/ldap-group-map-dal";
|
||||
import { licenseDALFactory } from "@app/ee/services/license/license-dal";
|
||||
import { licenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { oidcConfigDALFactory } from "@app/ee/services/oidc/oidc-config-dal";
|
||||
import { oidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
|
||||
import { permissionDALFactory } from "@app/ee/services/permission/permission-dal";
|
||||
import { permissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { projectUserAdditionalPrivilegeDALFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-dal";
|
||||
@@ -250,6 +252,7 @@ export const registerRoutes = async (
|
||||
const ldapConfigDAL = ldapConfigDALFactory(db);
|
||||
const ldapGroupMapDAL = ldapGroupMapDALFactory(db);
|
||||
|
||||
const oidcConfigDAL = oidcConfigDALFactory(db);
|
||||
const accessApprovalPolicyDAL = accessApprovalPolicyDALFactory(db);
|
||||
const accessApprovalRequestDAL = accessApprovalRequestDALFactory(db);
|
||||
const accessApprovalPolicyApproverDAL = accessApprovalPolicyApproverDALFactory(db);
|
||||
@@ -903,6 +906,19 @@ export const registerRoutes = async (
|
||||
secretSharingDAL
|
||||
});
|
||||
|
||||
const oidcService = oidcConfigServiceFactory({
|
||||
orgDAL,
|
||||
orgMembershipDAL,
|
||||
userDAL,
|
||||
userAliasDAL,
|
||||
licenseService,
|
||||
tokenService,
|
||||
smtpService,
|
||||
orgBotDAL,
|
||||
permissionService,
|
||||
oidcConfigDAL
|
||||
});
|
||||
|
||||
await superAdminService.initServerCfg();
|
||||
//
|
||||
// setup the communication with license key server
|
||||
@@ -923,6 +939,7 @@ export const registerRoutes = async (
|
||||
permission: permissionService,
|
||||
org: orgService,
|
||||
orgRole: orgRoleService,
|
||||
oidc: oidcService,
|
||||
apiKey: apiKeyService,
|
||||
authToken: tokenService,
|
||||
superAdmin: superAdminService,
|
||||
|
@@ -51,7 +51,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
|
||||
allowSignUp: z.boolean().optional(),
|
||||
allowedSignUpDomain: z.string().optional().nullable(),
|
||||
trustSamlEmails: z.boolean().optional(),
|
||||
trustLdapEmails: z.boolean().optional()
|
||||
trustLdapEmails: z.boolean().optional(),
|
||||
trustOidcEmails: z.boolean().optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@@ -79,6 +80,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
|
||||
schema: {
|
||||
body: z.object({
|
||||
email: z.string().email().trim(),
|
||||
password: z.string().trim(),
|
||||
firstName: z.string().trim(),
|
||||
lastName: z.string().trim().optional(),
|
||||
protectedKey: z.string().trim(),
|
||||
|
@@ -198,7 +198,7 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
identityKubernetesAuth: IdentityKubernetesAuthsSchema
|
||||
identityKubernetesAuth: IdentityKubernetesAuthResponseSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
|
@@ -51,7 +51,8 @@ export const registerPasswordRouter = async (server: FastifyZodProvider) => {
|
||||
encryptedPrivateKeyIV: z.string().trim(),
|
||||
encryptedPrivateKeyTag: z.string().trim(),
|
||||
salt: z.string().trim(),
|
||||
verifier: z.string().trim()
|
||||
verifier: z.string().trim(),
|
||||
password: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
|
@@ -309,4 +309,32 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
|
||||
return { membership };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/:workspaceId/leave",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
workspaceId: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
membership: ProjectMembershipsSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const membership = await server.services.projectMembership.leaveProject({
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
projectId: req.params.workspaceId
|
||||
});
|
||||
return { membership };
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@@ -36,6 +36,67 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:projectId/tags/:tagId",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
projectId: z.string().trim().describe(SECRET_TAGS.GET_TAG_BY_ID.projectId),
|
||||
tagId: z.string().trim().describe(SECRET_TAGS.GET_TAG_BY_ID.tagId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
workspaceTag: SecretTagsSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const workspaceTag = await server.services.secretTag.getTagById({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
id: req.params.tagId
|
||||
});
|
||||
return { workspaceTag };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:projectId/tags/slug/:tagSlug",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
projectId: z.string().trim().describe(SECRET_TAGS.GET_TAG_BY_SLUG.projectId),
|
||||
tagSlug: z.string().trim().describe(SECRET_TAGS.GET_TAG_BY_SLUG.tagSlug)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
workspaceTag: SecretTagsSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const workspaceTag = await server.services.secretTag.getTagBySlug({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
slug: req.params.tagSlug,
|
||||
projectId: req.params.projectId
|
||||
});
|
||||
return { workspaceTag };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/:projectId/tags",
|
||||
@@ -71,6 +132,42 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "PATCH",
|
||||
url: "/:projectId/tags/:tagId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
projectId: z.string().trim().describe(SECRET_TAGS.UPDATE.projectId),
|
||||
tagId: z.string().trim().describe(SECRET_TAGS.UPDATE.tagId)
|
||||
}),
|
||||
body: z.object({
|
||||
name: z.string().trim().describe(SECRET_TAGS.UPDATE.name),
|
||||
slug: z.string().trim().describe(SECRET_TAGS.UPDATE.slug),
|
||||
color: z.string().trim().describe(SECRET_TAGS.UPDATE.color)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
workspaceTag: SecretTagsSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const workspaceTag = await server.services.secretTag.updateTag({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body,
|
||||
id: req.params.tagId
|
||||
});
|
||||
return { workspaceTag };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/:projectId/tags/:tagId",
|
||||
|
@@ -259,4 +259,50 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/token-exchange",
|
||||
method: "POST",
|
||||
schema: {
|
||||
body: z.object({
|
||||
providerAuthToken: z.string(),
|
||||
email: z.string()
|
||||
})
|
||||
},
|
||||
handler: async (req, res) => {
|
||||
const userAgent = req.headers["user-agent"];
|
||||
if (!userAgent) throw new Error("user agent header is required");
|
||||
|
||||
const data = await server.services.login.oauth2TokenExchange({
|
||||
email: req.body.email,
|
||||
ip: req.realIp,
|
||||
userAgent,
|
||||
providerAuthToken: req.body.providerAuthToken
|
||||
});
|
||||
|
||||
if (data.isMfaEnabled) {
|
||||
return { mfaEnabled: true, token: data.token } as const; // for discriminated union
|
||||
}
|
||||
|
||||
void res.setCookie("jid", data.token.refresh, {
|
||||
httpOnly: true,
|
||||
path: "/",
|
||||
sameSite: "strict",
|
||||
secure: appCfg.HTTPS_ENABLED
|
||||
});
|
||||
|
||||
return {
|
||||
mfaEnabled: false,
|
||||
encryptionVersion: data.user.encryptionVersion,
|
||||
token: data.token.access,
|
||||
publicKey: data.user.publicKey,
|
||||
encryptedPrivateKey: data.user.encryptedPrivateKey,
|
||||
iv: data.user.iv,
|
||||
tag: data.user.tag,
|
||||
protectedKey: data.user.protectedKey || null,
|
||||
protectedKeyIV: data.user.protectedKeyIV || null,
|
||||
protectedKeyTag: data.user.protectedKeyTag || null
|
||||
} as const;
|
||||
}
|
||||
});
|
||||
};
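Seen from the client's side: after the OIDC/SAML callback hands back a short-lived providerAuthToken, the client exchanges it for an Infisical access token roughly as below. The /api/v1/sso prefix is inferred from the redirect URI used earlier, the host is a placeholder, and this is a sketch rather than the frontend's actual code.

// Hypothetical client-side sketch of the token-exchange step.
const exchangeProviderToken = async (providerAuthToken: string, email: string) => {
  const res = await fetch("https://infisical.example.com/api/v1/sso/token-exchange", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ providerAuthToken, email })
  });
  const data = await res.json();
  if (data.mfaEnabled) {
    // short-lived MFA token — prompt for the user's code before continuing
    return { mfaToken: data.token };
  }
  // access token plus the server-held encrypted private key material for local decryption
  return { accessToken: data.token, encryptedPrivateKey: data.encryptedPrivateKey };
};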
|
||||
|
@@ -19,7 +19,23 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
|
||||
schema: {
|
||||
response: {
|
||||
200: z.object({
|
||||
user: UsersSchema.merge(UserEncryptionKeysSchema.omit({ verifier: true }))
|
||||
user: UsersSchema.merge(
|
||||
UserEncryptionKeysSchema.pick({
|
||||
clientPublicKey: true,
|
||||
serverPrivateKey: true,
|
||||
encryptionVersion: true,
|
||||
protectedKey: true,
|
||||
protectedKeyIV: true,
|
||||
protectedKeyTag: true,
|
||||
publicKey: true,
|
||||
encryptedPrivateKey: true,
|
||||
iv: true,
|
||||
tag: true,
|
||||
salt: true,
|
||||
verifier: true,
|
||||
userId: true
|
||||
})
|
||||
)
|
||||
})
|
||||
}
|
||||
},
|
||||
@@ -30,6 +46,26 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/private-key",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
response: {
|
||||
200: z.object({
|
||||
privateKey: z.string()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT], { requireOrg: false }),
|
||||
handler: async (req) => {
|
||||
const privateKey = await server.services.user.getUserPrivateKey(req.permission.id);
|
||||
return { privateKey };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:userId/unlock",
|
||||
|
@@ -255,7 +255,23 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
|
||||
description: "Retrieve the current user on the request",
|
||||
response: {
|
||||
200: z.object({
|
||||
user: UsersSchema.merge(UserEncryptionKeysSchema.omit({ verifier: true }))
|
||||
user: UsersSchema.merge(
|
||||
UserEncryptionKeysSchema.pick({
|
||||
clientPublicKey: true,
|
||||
serverPrivateKey: true,
|
||||
encryptionVersion: true,
|
||||
protectedKey: true,
|
||||
protectedKeyIV: true,
|
||||
protectedKeyTag: true,
|
||||
publicKey: true,
|
||||
encryptedPrivateKey: true,
|
||||
iv: true,
|
||||
tag: true,
|
||||
salt: true,
|
||||
verifier: true,
|
||||
userId: true
|
||||
})
|
||||
)
|
||||
})
|
||||
}
|
||||
},
|
||||
|
@@ -81,7 +81,8 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
|
||||
email: z.string().trim(),
|
||||
providerAuthToken: z.string().trim().optional(),
|
||||
clientProof: z.string().trim(),
|
||||
captchaToken: z.string().trim().optional()
|
||||
captchaToken: z.string().trim().optional(),
|
||||
password: z.string().optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.discriminatedUnion("mfaEnabled", [
|
||||
@@ -112,7 +113,8 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
|
||||
ip: req.realIp,
|
||||
userAgent,
|
||||
providerAuthToken: req.body.providerAuthToken,
|
||||
clientProof: req.body.clientProof
|
||||
clientProof: req.body.clientProof,
|
||||
password: req.body.password
|
||||
});
|
||||
|
||||
if (data.isMfaEnabled) {
|
||||
|
@@ -8,7 +8,7 @@ import {
|
||||
SecretType,
|
||||
ServiceTokenScopes
|
||||
} from "@app/db/schemas";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { RAW_SECRETS, SECRETS } from "@app/lib/api-docs";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
@@ -259,18 +259,20 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
await server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.SecretPulled,
|
||||
distinctId: getTelemetryDistinctId(req),
|
||||
properties: {
|
||||
numberOfSecrets: secrets.length,
|
||||
workspaceId,
|
||||
environment,
|
||||
secretPath: req.query.secretPath,
|
||||
channel: getUserAgentType(req.headers["user-agent"]),
|
||||
...req.auditLogInfo
|
||||
}
|
||||
});
|
||||
if (getUserAgentType(req.headers["user-agent"]) !== UserAgentType.K8_OPERATOR) {
|
||||
await server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.SecretPulled,
|
||||
distinctId: getTelemetryDistinctId(req),
|
||||
properties: {
|
||||
numberOfSecrets: secrets.length,
|
||||
workspaceId,
|
||||
environment,
|
||||
secretPath: req.query.secretPath,
|
||||
channel: getUserAgentType(req.headers["user-agent"]),
|
||||
...req.auditLogInfo
|
||||
}
|
||||
});
|
||||
}
|
||||
return { secrets, imports };
|
||||
}
|
||||
});
|
||||
@@ -367,18 +369,20 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
await server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.SecretPulled,
|
||||
distinctId: getTelemetryDistinctId(req),
|
||||
properties: {
|
||||
numberOfSecrets: 1,
|
||||
workspaceId: secret.workspace,
|
||||
environment,
|
||||
secretPath: req.query.secretPath,
|
||||
channel: getUserAgentType(req.headers["user-agent"]),
|
||||
...req.auditLogInfo
|
||||
}
|
||||
});
|
||||
if (getUserAgentType(req.headers["user-agent"]) !== UserAgentType.K8_OPERATOR) {
|
||||
await server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.SecretPulled,
|
||||
distinctId: getTelemetryDistinctId(req),
|
||||
properties: {
|
||||
numberOfSecrets: 1,
|
||||
workspaceId: secret.workspace,
|
||||
environment,
|
||||
secretPath: req.query.secretPath,
|
||||
channel: getUserAgentType(req.headers["user-agent"]),
|
||||
...req.auditLogInfo
|
||||
}
|
||||
});
|
||||
}
|
||||
return { secret };
|
||||
}
|
||||
});
|
||||
@@ -723,24 +727,22 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
});
|
||||
|
||||
// TODO: Move to telemetry plugin
|
||||
let shouldRecordK8Event = false;
|
||||
if (req.headers["user-agent"] === "k8-operatoer") {
|
||||
const randomNumber = Math.random();
|
||||
if (randomNumber > 0.95) {
|
||||
shouldRecordK8Event = true;
|
||||
}
|
||||
}
|
||||
// let shouldRecordK8Event = false;
|
||||
// if (req.headers["user-agent"] === "k8-operatoer") {
|
||||
// const randomNumber = Math.random();
|
||||
// if (randomNumber > 0.95) {
|
||||
// shouldRecordK8Event = true;
|
||||
// }
|
||||
// }
|
||||
|
||||
const shouldCapture =
|
||||
req.query.workspaceId !== "650e71fbae3e6c8572f436d4" &&
|
||||
(req.headers["user-agent"] !== "k8-operator" || shouldRecordK8Event);
|
||||
const approximateNumberTotalSecrets = secrets.length * 20;
|
||||
req.query.workspaceId !== "650e71fbae3e6c8572f436d4" && req.headers["user-agent"] !== "k8-operator";
|
||||
if (shouldCapture) {
|
||||
await server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.SecretPulled,
|
||||
distinctId: getTelemetryDistinctId(req),
|
||||
properties: {
|
||||
numberOfSecrets: shouldRecordK8Event ? approximateNumberTotalSecrets : secrets.length,
|
||||
numberOfSecrets: secrets.length,
|
||||
workspaceId: req.query.workspaceId,
|
||||
environment: req.query.environment,
|
||||
secretPath: req.query.secretPath,
|
||||
@@ -817,18 +819,20 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
await server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.SecretPulled,
|
||||
distinctId: getTelemetryDistinctId(req),
|
||||
properties: {
|
||||
numberOfSecrets: 1,
|
||||
workspaceId: req.query.workspaceId,
|
||||
environment: req.query.environment,
|
||||
secretPath: req.query.secretPath,
|
||||
channel: getUserAgentType(req.headers["user-agent"]),
|
||||
...req.auditLogInfo
|
||||
}
|
||||
});
|
||||
if (getUserAgentType(req.headers["user-agent"]) !== UserAgentType.K8_OPERATOR) {
|
||||
await server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.SecretPulled,
|
||||
distinctId: getTelemetryDistinctId(req),
|
||||
properties: {
|
||||
numberOfSecrets: 1,
|
||||
workspaceId: req.query.workspaceId,
|
||||
environment: req.query.environment,
|
||||
secretPath: req.query.secretPath,
|
||||
channel: getUserAgentType(req.headers["user-agent"]),
|
||||
...req.auditLogInfo
|
||||
}
|
||||
});
|
||||
}
|
||||
return { secret };
|
||||
}
|
||||
});
|
||||
|
@@ -102,7 +102,8 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => {
|
||||
verifier: z.string().trim(),
|
||||
organizationName: z.string().trim().min(1),
|
||||
providerAuthToken: z.string().trim().optional().nullish(),
|
||||
attributionSource: z.string().trim().optional()
|
||||
attributionSource: z.string().trim().optional(),
|
||||
password: z.string()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@@ -167,6 +168,7 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => {
|
||||
schema: {
|
||||
body: z.object({
|
||||
email: z.string().email().trim(),
|
||||
password: z.string(),
|
||||
firstName: z.string().trim(),
|
||||
lastName: z.string().trim().optional(),
|
||||
protectedKey: z.string().trim(),
|
||||
|
@@ -15,10 +15,10 @@ export const validateProviderAuthToken = (providerToken: string, username?: stri
|
||||
if (decodedToken.username !== username) throw new Error("Invalid auth credentials");
|
||||
|
||||
if (decodedToken.organizationId) {
|
||||
return { orgId: decodedToken.organizationId, authMethod: decodedToken.authMethod };
|
||||
return { orgId: decodedToken.organizationId, authMethod: decodedToken.authMethod, userName: decodedToken.username };
|
||||
}
|
||||
|
||||
return { authMethod: decodedToken.authMethod, orgId: null };
|
||||
return { authMethod: decodedToken.authMethod, orgId: null, userName: decodedToken.username };
|
||||
};
|
||||
|
||||
export const validateSignUpAuthorization = (token: string, userId: string, validate = true) => {
|
||||
|
@@ -1,3 +1,4 @@
|
||||
import bcrypt from "bcrypt";
|
||||
import jwt from "jsonwebtoken";
|
||||
|
||||
import { TUsers, UserDeviceSchema } from "@app/db/schemas";
|
||||
@@ -5,7 +6,10 @@ import { isAuthMethodSaml } from "@app/ee/services/permission/permission-fns";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { request } from "@app/lib/config/request";
|
||||
import { generateSrpServerKey, srpCheckClientProof } from "@app/lib/crypto";
|
||||
import { infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
|
||||
import { getUserPrivateKey } from "@app/lib/crypto/srp";
|
||||
import { BadRequestError, DatabaseError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
|
||||
|
||||
import { TTokenDALFactory } from "../auth-token/auth-token-dal";
|
||||
@@ -19,6 +23,7 @@ import {
|
||||
TLoginClientProofDTO,
|
||||
TLoginGenServerPublicKeyDTO,
|
||||
TOauthLoginDTO,
|
||||
TOauthTokenExchangeDTO,
|
||||
TVerifyMfaTokenDTO
|
||||
} from "./auth-login-type";
|
||||
import { AuthMethod, AuthModeJwtTokenPayload, AuthModeMfaJwtTokenPayload, AuthTokenType } from "./auth-type";
|
||||
@@ -101,7 +106,7 @@ export const authLoginServiceFactory = ({
|
||||
user: TUsers;
|
||||
ip: string;
|
||||
userAgent: string;
|
||||
organizationId: string | undefined;
|
||||
organizationId?: string;
|
||||
authMethod: AuthMethod;
|
||||
}) => {
|
||||
const cfg = getConfig();
|
||||
@@ -178,7 +183,8 @@ export const authLoginServiceFactory = ({
|
||||
ip,
|
||||
userAgent,
|
||||
providerAuthToken,
|
||||
captchaToken
|
||||
captchaToken,
|
||||
password
|
||||
}: TLoginClientProofDTO) => {
|
||||
const appCfg = getConfig();
|
||||
|
||||
@@ -196,7 +202,10 @@ export const authLoginServiceFactory = ({
|
||||
const decodedProviderToken = validateProviderAuthToken(providerAuthToken, email);
|
||||
|
||||
authMethod = decodedProviderToken.authMethod;
|
||||
if ((isAuthMethodSaml(authMethod) || authMethod === AuthMethod.LDAP) && decodedProviderToken.orgId) {
|
||||
if (
|
||||
(isAuthMethodSaml(authMethod) || [AuthMethod.LDAP, AuthMethod.OIDC].includes(authMethod)) &&
|
||||
decodedProviderToken.orgId
|
||||
) {
|
||||
organizationId = decodedProviderToken.orgId;
|
||||
}
|
||||
}
|
||||
@@ -248,14 +257,35 @@ export const authLoginServiceFactory = ({
|
||||
throw new Error("Failed to authenticate. Try again?");
|
||||
}
|
||||
|
||||
await userDAL.updateUserEncryptionByUserId(userEnc.userId, {
|
||||
serverPrivateKey: null,
|
||||
clientPublicKey: null
|
||||
});
|
||||
|
||||
await userDAL.updateById(userEnc.userId, {
|
||||
consecutiveFailedPasswordAttempts: 0
|
||||
});
|
||||
// from password decrypt the private key
|
||||
if (password) {
|
||||
const privateKey = await getUserPrivateKey(password, userEnc).catch((err) => {
|
||||
logger.error(
|
||||
err,
|
||||
`loginExchangeClientProof: private key generation failed for [userId=${user.id}] and [email=${user.email}] `
|
||||
);
|
||||
return "";
|
||||
});
|
||||
const hashedPassword = await bcrypt.hash(password, cfg.BCRYPT_SALT_ROUND);
|
||||
const { iv, tag, ciphertext, encoding } = infisicalSymmetricEncypt(privateKey);
|
||||
await userDAL.updateUserEncryptionByUserId(userEnc.userId, {
|
||||
serverPrivateKey: null,
|
||||
clientPublicKey: null,
|
||||
hashedPassword,
|
||||
serverEncryptedPrivateKey: ciphertext,
|
||||
serverEncryptedPrivateKeyIV: iv,
|
||||
serverEncryptedPrivateKeyTag: tag,
|
||||
serverEncryptedPrivateKeyEncoding: encoding
|
||||
});
|
||||
} else {
|
||||
await userDAL.updateUserEncryptionByUserId(userEnc.userId, {
|
||||
serverPrivateKey: null,
|
||||
clientPublicKey: null
|
||||
});
|
||||
}
|
||||
|
||||
// send multi factor auth token if they it enabled
|
||||
if (userEnc.isMfaEnabled && userEnc.email) {
|
||||
@@ -499,8 +529,14 @@ export const authLoginServiceFactory = ({
|
||||
authMethods: [authMethod],
|
||||
isGhost: false
|
||||
});
|
||||
} else {
|
||||
const isLinkingRequired = !user?.authMethods?.includes(authMethod);
|
||||
if (isLinkingRequired) {
|
||||
user = await userDAL.updateById(user.id, { authMethods: [...(user.authMethods || []), authMethod] });
|
||||
}
|
||||
}
|
||||
const isLinkingRequired = !user?.authMethods?.includes(authMethod);
|
||||
|
||||
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
|
||||
const isUserCompleted = user.isAccepted;
|
||||
const providerAuthToken = jwt.sign(
|
||||
{
|
||||
@@ -511,9 +547,9 @@ export const authLoginServiceFactory = ({
|
||||
isEmailVerified: user.isEmailVerified,
|
||||
firstName: user.firstName,
|
||||
lastName: user.lastName,
|
||||
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
|
||||
authMethod,
|
||||
isUserCompleted,
|
||||
isLinkingRequired,
|
||||
...(callbackPort
|
||||
? {
|
||||
callbackPort
|
||||
@@ -525,10 +561,72 @@ export const authLoginServiceFactory = ({
|
||||
expiresIn: appCfg.JWT_PROVIDER_AUTH_LIFETIME
|
||||
}
|
||||
);
|
||||
|
||||
return { isUserCompleted, providerAuthToken };
|
||||
};
|
||||
|
||||
/**
|
||||
* Handles OAuth2 token exchange for user login with private key handoff.
|
||||
*
|
||||
* The process involves exchanging a provider's authorization token for an Infisical access token.
|
||||
* The provider token is returned to the client, who then sends it back to obtain the Infisical access token.
|
||||
*
|
||||
* This approach is used instead of directly sending the access token for the following reasons:
|
||||
* 1. To facilitate easier logic changes from SRP OAuth to simple OAuth.
|
||||
* 2. To avoid attaching the access token to the URL, which could be logged. The provider token has a very short lifespan, reducing security risks.
|
||||
*/
|
||||
const oauth2TokenExchange = async ({ userAgent, ip, providerAuthToken, email }: TOauthTokenExchangeDTO) => {
|
||||
const decodedProviderToken = validateProviderAuthToken(providerAuthToken, email);
|
||||
|
||||
const appCfg = getConfig();
|
||||
const { authMethod, userName } = decodedProviderToken;
|
||||
if (!userName) throw new BadRequestError({ message: "Missing user name" });
|
||||
const organizationId =
|
||||
(isAuthMethodSaml(authMethod) || [AuthMethod.LDAP, AuthMethod.OIDC].includes(authMethod)) &&
|
||||
decodedProviderToken.orgId
|
||||
? decodedProviderToken.orgId
|
||||
: undefined;
|
||||
|
||||
const userEnc = await userDAL.findUserEncKeyByUsername({
|
||||
username: email
|
||||
});
|
||||
if (!userEnc) throw new BadRequestError({ message: "Invalid token" });
|
||||
if (!userEnc.serverEncryptedPrivateKey)
|
||||
throw new BadRequestError({ message: "Key handoff incomplete. Please try logging in again." });
|
||||
// send multi factor auth token if they it enabled
|
||||
if (userEnc.isMfaEnabled && userEnc.email) {
|
||||
enforceUserLockStatus(Boolean(userEnc.isLocked), userEnc.temporaryLockDateEnd);
|
||||
|
||||
const mfaToken = jwt.sign(
|
||||
{
|
||||
authMethod,
|
||||
authTokenType: AuthTokenType.MFA_TOKEN,
|
||||
userId: userEnc.userId
|
||||
},
|
||||
appCfg.AUTH_SECRET,
|
||||
{
|
||||
expiresIn: appCfg.JWT_MFA_LIFETIME
|
||||
}
|
||||
);
|
||||
|
||||
await sendUserMfaCode({
|
||||
userId: userEnc.userId,
|
||||
email: userEnc.email
|
||||
});
|
||||
|
||||
return { isMfaEnabled: true, token: mfaToken } as const;
|
||||
}
|
||||
|
||||
const token = await generateUserTokens({
|
||||
user: { ...userEnc, id: userEnc.userId },
|
||||
ip,
|
||||
userAgent,
|
||||
authMethod,
|
||||
organizationId
|
||||
});
|
||||
|
||||
return { token, isMfaEnabled: false, user: userEnc } as const;
|
||||
};
|
||||
|
||||
/*
|
||||
* logout user by incrementing the version by 1 meaning any old session will become invalid
|
||||
* as their version number is behind
|
||||
@@ -542,6 +640,7 @@ export const authLoginServiceFactory = ({
|
||||
loginExchangeClientProof,
|
||||
logout,
|
||||
oauth2Login,
|
||||
oauth2TokenExchange,
|
||||
resendMfaToken,
|
||||
verifyMfaToken,
|
||||
selectOrganization,
|
||||
|
@@ -13,6 +13,7 @@ export type TLoginClientProofDTO = {
|
||||
ip: string;
|
||||
userAgent: string;
|
||||
captchaToken?: string;
|
||||
password?: string;
|
||||
};
|
||||
|
||||
export type TVerifyMfaTokenDTO = {
|
||||
@@ -31,3 +32,10 @@ export type TOauthLoginDTO = {
|
||||
authMethod: AuthMethod;
|
||||
callbackPort?: string;
|
||||
};
|
||||
|
||||
export type TOauthTokenExchangeDTO = {
|
||||
providerAuthToken: string;
|
||||
ip: string;
|
||||
userAgent: string;
|
||||
email: string;
|
||||
};
|
||||
|
@@ -1,3 +1,4 @@
|
||||
import bcrypt from "bcrypt";
|
||||
import jwt from "jsonwebtoken";
|
||||
|
||||
import { SecretEncryptionAlgo, SecretKeyEncoding } from "@app/db/schemas";
|
||||
@@ -57,7 +58,8 @@ export const authPaswordServiceFactory = ({
|
||||
encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier,
|
||||
tokenVersionId
|
||||
tokenVersionId,
|
||||
password
|
||||
}: TChangePasswordDTO) => {
|
||||
const userEnc = await userDAL.findUserEncKeyByUserId(userId);
|
||||
if (!userEnc) throw new Error("Failed to find user");
|
||||
@@ -76,6 +78,8 @@ export const authPaswordServiceFactory = ({
|
||||
);
|
||||
if (!isValidClientProof) throw new Error("Failed to authenticate. Try again?");
|
||||
|
||||
const appCfg = getConfig();
|
||||
const hashedPassword = await bcrypt.hash(password, appCfg.BCRYPT_SALT_ROUND);
|
||||
await userDAL.updateUserEncryptionByUserId(userId, {
|
||||
encryptionVersion: 2,
|
||||
protectedKey,
|
||||
@@ -87,7 +91,8 @@ export const authPaswordServiceFactory = ({
|
||||
salt,
|
||||
verifier,
|
||||
serverPrivateKey: null,
|
||||
clientPublicKey: null
|
||||
clientPublicKey: null,
|
||||
hashedPassword
|
||||
});
|
||||
|
||||
if (tokenVersionId) {
|
||||
|
@@ -10,6 +10,7 @@ export type TChangePasswordDTO = {
|
||||
salt: string;
|
||||
verifier: string;
|
||||
tokenVersionId?: string;
|
||||
password: string;
|
||||
};
|
||||
|
||||
export type TResetPasswordViaBackupKeyDTO = {
|
||||
|
@@ -1,3 +1,4 @@
|
||||
import bcrypt from "bcrypt";
|
||||
import jwt from "jsonwebtoken";
|
||||
|
||||
import { OrgMembershipStatus, TableName } from "@app/db/schemas";
|
||||
@@ -6,6 +7,8 @@ import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-grou
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { isAuthMethodSaml } from "@app/ee/services/permission/permission-fns";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
|
||||
import { getUserPrivateKey } from "@app/lib/crypto/srp";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { isDisposableEmail } from "@app/lib/validator";
|
||||
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
|
||||
@@ -119,6 +122,7 @@ export const authSignupServiceFactory = ({
|
||||
|
||||
const completeEmailAccountSignup = async ({
|
||||
email,
|
||||
password,
|
||||
firstName,
|
||||
lastName,
|
||||
providerAuthToken,
|
||||
@@ -137,6 +141,7 @@ export const authSignupServiceFactory = ({
|
||||
userAgent,
|
||||
authorization
|
||||
}: TCompleteAccountSignupDTO) => {
|
||||
const appCfg = getConfig();
|
||||
const user = await userDAL.findOne({ username: email });
|
||||
if (!user || (user && user.isAccepted)) {
|
||||
throw new Error("Failed to complete account for complete user");
|
||||
@@ -152,6 +157,18 @@ export const authSignupServiceFactory = ({
|
||||
validateSignUpAuthorization(authorization, user.id);
|
||||
}
|
||||
|
||||
const hashedPassword = await bcrypt.hash(password, appCfg.BCRYPT_SALT_ROUND);
|
||||
const privateKey = await getUserPrivateKey(password, {
|
||||
salt,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
encryptedPrivateKey,
|
||||
iv: encryptedPrivateKeyIV,
|
||||
tag: encryptedPrivateKeyTag,
|
||||
encryptionVersion: 2
|
||||
});
|
||||
const { tag, encoding, ciphertext, iv } = infisicalSymmetricEncypt(privateKey);
|
||||
const updateduser = await authDAL.transaction(async (tx) => {
|
||||
const us = await userDAL.updateById(user.id, { firstName, lastName, isAccepted: true }, tx);
|
||||
if (!us) throw new Error("User not found");
|
||||
@@ -166,12 +183,20 @@ export const authSignupServiceFactory = ({
|
||||
protectedKeyTag,
|
||||
encryptedPrivateKey,
|
||||
iv: encryptedPrivateKeyIV,
|
||||
tag: encryptedPrivateKeyTag
|
||||
tag: encryptedPrivateKeyTag,
|
||||
hashedPassword,
|
||||
serverEncryptedPrivateKeyEncoding: encoding,
|
||||
serverEncryptedPrivateKeyTag: tag,
|
||||
serverEncryptedPrivateKeyIV: iv,
|
||||
serverEncryptedPrivateKey: ciphertext
|
||||
},
|
||||
tx
|
||||
);
|
||||
// If it's SAML Auth and the organization ID is present, we should check if the user has a pending invite for this org, and accept it
|
||||
if ((isAuthMethodSaml(authMethod) || authMethod === AuthMethod.LDAP) && organizationId) {
|
||||
if (
|
||||
(isAuthMethodSaml(authMethod) || [AuthMethod.LDAP, AuthMethod.OIDC].includes(authMethod as AuthMethod)) &&
|
||||
organizationId
|
||||
) {
|
||||
const [pendingOrgMembership] = await orgDAL.findMembership({
|
||||
[`${TableName.OrgMembership}.userId` as "userId"]: user.id,
|
||||
status: OrgMembershipStatus.Invited,
|
||||
@@ -227,7 +252,6 @@ export const authSignupServiceFactory = ({
|
||||
userId: updateduser.info.id
|
||||
});
|
||||
if (!tokenSession) throw new Error("Failed to create token");
|
||||
const appCfg = getConfig();
|
||||
|
||||
const accessToken = jwt.sign(
|
||||
{
|
||||
@@ -265,6 +289,7 @@ export const authSignupServiceFactory = ({
|
||||
ip,
|
||||
salt,
|
||||
email,
|
||||
password,
|
||||
verifier,
|
||||
firstName,
|
||||
publicKey,
|
||||
@@ -295,6 +320,19 @@ export const authSignupServiceFactory = ({
|
||||
name: "complete account invite"
|
||||
});
|
||||
|
||||
const appCfg = getConfig();
|
||||
const hashedPassword = await bcrypt.hash(password, appCfg.BCRYPT_SALT_ROUND);
|
||||
const privateKey = await getUserPrivateKey(password, {
|
||||
salt,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
encryptedPrivateKey,
|
||||
iv: encryptedPrivateKeyIV,
|
||||
tag: encryptedPrivateKeyTag,
|
||||
encryptionVersion: 2
|
||||
});
|
||||
const { tag, encoding, ciphertext, iv } = infisicalSymmetricEncypt(privateKey);
|
||||
const updateduser = await authDAL.transaction(async (tx) => {
|
||||
const us = await userDAL.updateById(user.id, { firstName, lastName, isAccepted: true }, tx);
|
||||
if (!us) throw new Error("User not found");
|
||||
@@ -310,7 +348,12 @@ export const authSignupServiceFactory = ({
|
||||
protectedKeyTag,
|
||||
encryptedPrivateKey,
|
||||
iv: encryptedPrivateKeyIV,
|
||||
tag: encryptedPrivateKeyTag
|
||||
tag: encryptedPrivateKeyTag,
|
||||
hashedPassword,
|
||||
serverEncryptedPrivateKeyEncoding: encoding,
|
||||
serverEncryptedPrivateKeyTag: tag,
|
||||
serverEncryptedPrivateKeyIV: iv,
|
||||
serverEncryptedPrivateKey: ciphertext
|
||||
},
|
||||
tx
|
||||
);
|
||||
@@ -343,7 +386,6 @@ export const authSignupServiceFactory = ({
|
||||
userId: updateduser.info.id
|
||||
});
|
||||
if (!tokenSession) throw new Error("Failed to create token");
|
||||
const appCfg = getConfig();
|
||||
|
||||
const accessToken = jwt.sign(
|
||||
{
|
||||
|
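The hunks above add a bcrypt hash of the user's password plus a server-side symmetrically encrypted copy of the derived private key (the `serverEncryptedPrivateKey*` columns). A minimal sketch of that pattern, assuming AES-256-GCM, the `bcrypt` npm package, and an illustrative root key source; this is not the actual `infisicalSymmetricEncypt` implementation:

```ts
import crypto from "crypto";
import bcrypt from "bcrypt";

// Illustrative assumption: a 32-byte root key held by the server (in practice, from config).
const ROOT_ENCRYPTION_KEY = crypto.randomBytes(32);

// Assumed default of 10 rounds; the service reads this from BCRYPT_SALT_ROUND config.
export const hashPassword = (password: string, saltRounds = 10) => bcrypt.hash(password, saltRounds);

// Encrypt the user's private key at rest so it can later be recovered server-side.
export const encryptPrivateKeyForServer = (privateKey: string) => {
  const iv = crypto.randomBytes(12); // 96-bit nonce, as recommended for GCM
  const cipher = crypto.createCipheriv("aes-256-gcm", ROOT_ENCRYPTION_KEY, iv);
  const ciphertext = Buffer.concat([cipher.update(privateKey, "utf8"), cipher.final()]);
  return {
    ciphertext: ciphertext.toString("base64"),
    iv: iv.toString("base64"),
    tag: cipher.getAuthTag().toString("base64"),
    encoding: "base64" as const
  };
};
```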
@@ -1,5 +1,6 @@
export type TCompleteAccountSignupDTO = {
  email: string;
  password: string;
  firstName: string;
  lastName?: string;
  protectedKey: string;
@@ -21,6 +22,7 @@ export type TCompleteAccountSignupDTO = {

export type TCompleteAccountInviteDTO = {
  email: string;
  password: string;
  firstName: string;
  lastName?: string;
  protectedKey: string;
@@ -8,7 +8,8 @@ export enum AuthMethod {
  JUMPCLOUD_SAML = "jumpcloud-saml",
  GOOGLE_SAML = "google-saml",
  KEYCLOAK_SAML = "keycloak-saml",
  LDAP = "ldap"
  LDAP = "ldap",
  OIDC = "oidc"
}

export enum AuthTokenType {
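The new `OIDC` member feeds the invite auto-accept check introduced in the signup hunk earlier. A small sketch of that decision, with the enum trimmed to the members shown in this diff and the helper name purely illustrative:

```ts
enum AuthMethod {
  KEYCLOAK_SAML = "keycloak-saml",
  LDAP = "ldap",
  OIDC = "oidc"
}

// SAML-style methods; in the service this is what isAuthMethodSaml encapsulates.
const SAML_METHODS = new Set<AuthMethod>([AuthMethod.KEYCLOAK_SAML]);

// Should a pending org invite be auto-accepted for this login method?
const shouldAutoAcceptOrgInvite = (authMethod: AuthMethod, organizationId?: string) =>
  Boolean(organizationId) &&
  (SAML_METHODS.has(authMethod) || [AuthMethod.LDAP, AuthMethod.OIDC].includes(authMethod));
```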
@@ -442,7 +442,34 @@ export const identityKubernetesAuthServiceFactory = ({
|
||||
|
||||
const updatedKubernetesAuth = await identityKubernetesAuthDAL.updateById(identityKubernetesAuth.id, updateQuery);
|
||||
|
||||
return { ...updatedKubernetesAuth, orgId: identityMembershipOrg.orgId };
|
||||
const updatedCACert =
|
||||
updatedKubernetesAuth.encryptedCaCert && updatedKubernetesAuth.caCertIV && updatedKubernetesAuth.caCertTag
|
||||
? decryptSymmetric({
|
||||
ciphertext: updatedKubernetesAuth.encryptedCaCert,
|
||||
iv: updatedKubernetesAuth.caCertIV,
|
||||
tag: updatedKubernetesAuth.caCertTag,
|
||||
key
|
||||
})
|
||||
: "";
|
||||
|
||||
const updatedTokenReviewerJwt =
|
||||
updatedKubernetesAuth.encryptedTokenReviewerJwt &&
|
||||
updatedKubernetesAuth.tokenReviewerJwtIV &&
|
||||
updatedKubernetesAuth.tokenReviewerJwtTag
|
||||
? decryptSymmetric({
|
||||
ciphertext: updatedKubernetesAuth.encryptedTokenReviewerJwt,
|
||||
iv: updatedKubernetesAuth.tokenReviewerJwtIV,
|
||||
tag: updatedKubernetesAuth.tokenReviewerJwtTag,
|
||||
key
|
||||
})
|
||||
: "";
|
||||
|
||||
return {
|
||||
...updatedKubernetesAuth,
|
||||
orgId: identityMembershipOrg.orgId,
|
||||
caCert: updatedCACert,
|
||||
tokenReviewerJwt: updatedTokenReviewerJwt
|
||||
};
|
||||
};
|
||||
|
||||
const getKubernetesAuth = async ({
|
||||
|
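The guard above only decrypts a field when all three stored parts (ciphertext, IV, tag) are present. A sketch of that shape with a stand-in GCM helper; the real `decryptSymmetric` lives in the project's crypto module and its exact signature is an assumption here:

```ts
import crypto from "crypto";

type EncryptedField = { ciphertext: string; iv: string; tag: string };

// Assumed AES-256-GCM layout with base64-encoded parts; key is a 32-byte Buffer.
const decryptSymmetricSketch = ({ ciphertext, iv, tag }: EncryptedField, key: Buffer) => {
  const decipher = crypto.createDecipheriv("aes-256-gcm", key, Buffer.from(iv, "base64"));
  decipher.setAuthTag(Buffer.from(tag, "base64"));
  return Buffer.concat([decipher.update(Buffer.from(ciphertext, "base64")), decipher.final()]).toString("utf8");
};

// Mirror of the guard in the hunk: decrypt only when every part of the field was stored.
export const decryptOptionalCaCert = (
  row: { encryptedCaCert?: string | null; caCertIV?: string | null; caCertTag?: string | null },
  key: Buffer
) =>
  row.encryptedCaCert && row.caCertIV && row.caCertTag
    ? decryptSymmetricSketch({ ciphertext: row.encryptedCaCert, iv: row.caCertIV, tag: row.caCertTag }, key)
    : "";
```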
@@ -18,7 +18,7 @@ import {
|
||||
UpdateSecretCommand
|
||||
} from "@aws-sdk/client-secrets-manager";
|
||||
import { Octokit } from "@octokit/rest";
|
||||
import AWS from "aws-sdk";
|
||||
import AWS, { AWSError } from "aws-sdk";
|
||||
import { AxiosError } from "axios";
|
||||
import sodium from "libsodium-wrappers";
|
||||
import isEqual from "lodash.isequal";
|
||||
@@ -257,14 +257,27 @@ const syncSecretsGCPSecretManager = async ({
|
||||
const syncSecretsAzureKeyVault = async ({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken
|
||||
accessToken,
|
||||
createManySecretsRawFn,
|
||||
updateManySecretsRawFn
|
||||
}: {
|
||||
integration: TIntegrations;
|
||||
integration: TIntegrations & {
|
||||
projectId: string;
|
||||
environment: {
|
||||
id: string;
|
||||
name: string;
|
||||
slug: string;
|
||||
};
|
||||
secretPath: string;
|
||||
};
|
||||
secrets: Record<string, { value: string; comment?: string }>;
|
||||
accessToken: string;
|
||||
createManySecretsRawFn: (params: TCreateManySecretsRawFn) => Promise<Array<TSecrets & { _id: string }>>;
|
||||
updateManySecretsRawFn: (params: TUpdateManySecretsRawFn) => Promise<Array<TSecrets & { _id: string }>>;
|
||||
}) => {
|
||||
interface GetAzureKeyVaultSecret {
|
||||
id: string; // secret URI
|
||||
value: string;
|
||||
attributes: {
|
||||
enabled: true;
|
||||
created: number;
|
||||
@@ -361,6 +374,83 @@ const syncSecretsAzureKeyVault = async ({
|
||||
}
|
||||
});
|
||||
|
||||
const secretsToAdd: { [key: string]: string } = {};
|
||||
const secretsToUpdate: { [key: string]: string } = {};
|
||||
const secretKeysToRemoveFromDelete = new Set<string>();
|
||||
|
||||
const metadata = IntegrationMetadataSchema.parse(integration.metadata);
|
||||
if (!integration.lastUsed) {
|
||||
Object.keys(res).forEach((key) => {
|
||||
// first time using integration
|
||||
const underscoredKey = key.replace(/-/g, "_");
|
||||
|
||||
// -> apply initial sync behavior
|
||||
switch (metadata.initialSyncBehavior) {
|
||||
case IntegrationInitialSyncBehavior.PREFER_TARGET: {
|
||||
if (!(underscoredKey in secrets)) {
|
||||
secretsToAdd[underscoredKey] = res[key].value;
|
||||
setSecrets.push({
|
||||
key,
|
||||
value: res[key].value
|
||||
});
|
||||
} else if (secrets[underscoredKey]?.value !== res[key].value) {
|
||||
secretsToUpdate[underscoredKey] = res[key].value;
|
||||
const toEditSecretIndex = setSecrets.findIndex((secret) => secret.key === key);
|
||||
if (toEditSecretIndex >= 0) {
|
||||
setSecrets[toEditSecretIndex].value = res[key].value;
|
||||
}
|
||||
}
|
||||
|
||||
secretKeysToRemoveFromDelete.add(key);
|
||||
|
||||
break;
|
||||
}
|
||||
case IntegrationInitialSyncBehavior.PREFER_SOURCE: {
|
||||
if (!(underscoredKey in secrets)) {
|
||||
secretsToAdd[underscoredKey] = res[key].value;
|
||||
setSecrets.push({
|
||||
key,
|
||||
value: res[key].value
|
||||
});
|
||||
}
|
||||
|
||||
secretKeysToRemoveFromDelete.add(key);
|
||||
break;
|
||||
}
|
||||
default:
|
||||
break;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (Object.keys(secretsToUpdate).length) {
|
||||
await updateManySecretsRawFn({
|
||||
projectId: integration.projectId,
|
||||
environment: integration.environment.slug,
|
||||
path: integration.secretPath,
|
||||
secrets: Object.keys(secretsToUpdate).map((key) => ({
|
||||
secretName: key,
|
||||
secretValue: secretsToUpdate[key],
|
||||
type: SecretType.Shared,
|
||||
secretComment: ""
|
||||
}))
|
||||
});
|
||||
}
|
||||
|
||||
if (Object.keys(secretsToAdd).length) {
|
||||
await createManySecretsRawFn({
|
||||
projectId: integration.projectId,
|
||||
environment: integration.environment.slug,
|
||||
path: integration.secretPath,
|
||||
secrets: Object.keys(secretsToAdd).map((key) => ({
|
||||
secretName: key,
|
||||
secretValue: secretsToAdd[key],
|
||||
type: SecretType.Shared,
|
||||
secretComment: ""
|
||||
}))
|
||||
});
|
||||
}
|
||||
|
||||
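The first-sync logic above decides which Azure Key Vault values flow back into Infisical depending on the integration's initial sync behavior. A pure-function sketch of that decision; the enum member names come from the hunk, while the string values, types, and function name are illustrative:

```ts
enum IntegrationInitialSyncBehavior {
  OVERWRITE_TARGET = "overwrite-target", // assumed value
  PREFER_TARGET = "prefer-target", // assumed value
  PREFER_SOURCE = "prefer-source" // assumed value
}

type Plan = { toAdd: Record<string, string>; toUpdate: Record<string, string>; keepInTarget: Set<string> };

// On the first sync only, merge existing target (vault) values with source (Infisical) values.
const planInitialSync = (
  behavior: IntegrationInitialSyncBehavior,
  source: Record<string, { value: string }>,
  target: Record<string, { value: string }>
): Plan => {
  const plan: Plan = { toAdd: {}, toUpdate: {}, keepInTarget: new Set() };
  for (const key of Object.keys(target)) {
    const underscoredKey = key.replace(/-/g, "_"); // vault keys use dashes, Infisical keys use underscores
    if (behavior === IntegrationInitialSyncBehavior.OVERWRITE_TARGET) continue; // nothing flows back
    if (!(underscoredKey in source)) {
      plan.toAdd[underscoredKey] = target[key].value; // both PREFER_* behaviors import missing keys
    } else if (
      behavior === IntegrationInitialSyncBehavior.PREFER_TARGET &&
      source[underscoredKey].value !== target[key].value
    ) {
      plan.toUpdate[underscoredKey] = target[key].value; // PREFER_TARGET also overwrites differing values
    }
    plan.keepInTarget.add(key); // keys seen on first sync are never deleted from the vault
  }
  return plan;
};
```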
const setSecretAzureKeyVault = async ({
|
||||
key,
|
||||
value,
|
||||
@@ -428,7 +518,7 @@ const syncSecretsAzureKeyVault = async ({
|
||||
});
|
||||
}
|
||||
|
||||
for await (const deleteSecret of deleteSecrets) {
|
||||
for await (const deleteSecret of deleteSecrets.filter((secret) => !secretKeysToRemoveFromDelete.has(secret.key))) {
|
||||
const { key } = deleteSecret;
|
||||
await request.delete(`${integration.app}/secrets/${key}?api-version=7.3`, {
|
||||
headers: {
|
||||
@@ -452,7 +542,11 @@ const syncSecretsAWSParameterStore = async ({
|
||||
accessId: string | null;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
if (!accessId) return;
|
||||
let response: { isSynced: boolean; syncMessage: string } | null = null;
|
||||
|
||||
if (!accessId) {
|
||||
throw new Error("AWS access ID is required");
|
||||
}
|
||||
|
||||
const config = new AWS.Config({
|
||||
region: integration.region as string,
|
||||
@@ -557,6 +651,11 @@ const syncSecretsAWSParameterStore = async ({
|
||||
`AWS Parameter Store Error [integration=${integration.id}]: double check AWS account permissions (refer to the Infisical docs)`
|
||||
);
|
||||
}
|
||||
|
||||
response = {
|
||||
isSynced: false,
|
||||
syncMessage: (err as AWSError)?.message || "Error syncing with AWS Parameter Store"
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -585,6 +684,8 @@ const syncSecretsAWSParameterStore = async ({
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return response;
|
||||
};
|
||||
|
||||
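Instead of aborting the whole job, the AWS Parameter Store sync now captures provider errors into a `{ isSynced, syncMessage }` result that the caller persists. A minimal sketch of that pattern, assuming a generic `doSync` callback in place of the actual AWS calls:

```ts
type SyncResult = { isSynced: boolean; syncMessage: string } | null;

// Run one provider sync and turn provider-level failures into a status object the caller can store,
// rather than letting them throw past the integration loop.
const runProviderSync = async (doSync: () => Promise<void>): Promise<SyncResult> => {
  let response: SyncResult = null;
  try {
    await doSync();
  } catch (err) {
    response = {
      isSynced: false,
      syncMessage: (err as Error)?.message || "Error syncing with AWS Parameter Store"
    };
  }
  return response;
};
```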
/**
|
||||
@@ -603,7 +704,9 @@ const syncSecretsAWSSecretManager = async ({
|
||||
}) => {
|
||||
const metadata = z.record(z.any()).parse(integration.metadata || {});
|
||||
|
||||
if (!accessId) return;
|
||||
if (!accessId) {
|
||||
throw new Error("AWS access ID is required");
|
||||
}
|
||||
|
||||
const secretsManager = new SecretsManagerClient({
|
||||
region: integration.region as string,
|
||||
@@ -722,7 +825,7 @@ const syncSecretsAWSSecretManager = async ({
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
// case when AWS manager can't find the specified secret
|
||||
// case 1: when AWS manager can't find the specified secret
|
||||
if (err instanceof ResourceNotFoundException && secretsManager) {
|
||||
await secretsManager.send(
|
||||
new CreateSecretCommand({
|
||||
@@ -734,6 +837,9 @@ const syncSecretsAWSSecretManager = async ({
|
||||
: []
|
||||
})
|
||||
);
|
||||
// case 2: something unexpected went wrong, so we'll throw the error to reflect the error in the integration sync status
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
};
|
||||
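The Secrets Manager change above distinguishes a missing secret (create it) from any other failure (rethrow so the sync status reflects it). A sketch of that upsert pattern using the same `@aws-sdk/client-secrets-manager` classes imported earlier; the surrounding function and parameters are illustrative:

```ts
import {
  CreateSecretCommand,
  ResourceNotFoundException,
  SecretsManagerClient,
  UpdateSecretCommand
} from "@aws-sdk/client-secrets-manager";

// Case 1: the secret exists, so update it. Case 2: AWS reports it missing, so create it.
// Anything else is rethrown so the integration's sync status reflects the error.
const upsertAwsSecret = async (client: SecretsManagerClient, name: string, value: string) => {
  try {
    await client.send(new UpdateSecretCommand({ SecretId: name, SecretString: value }));
  } catch (err) {
    if (err instanceof ResourceNotFoundException) {
      await client.send(new CreateSecretCommand({ Name: name, SecretString: value }));
    } else {
      throw err;
    }
  }
};
```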
@@ -753,14 +859,12 @@ const syncSecretsAWSSecretManager = async ({
|
||||
const syncSecretsHeroku = async ({
|
||||
createManySecretsRawFn,
|
||||
updateManySecretsRawFn,
|
||||
integrationDAL,
|
||||
integration,
|
||||
secrets,
|
||||
accessToken
|
||||
}: {
|
||||
createManySecretsRawFn: (params: TCreateManySecretsRawFn) => Promise<Array<TSecrets & { _id: string }>>;
|
||||
updateManySecretsRawFn: (params: TUpdateManySecretsRawFn) => Promise<Array<TSecrets & { _id: string }>>;
|
||||
integrationDAL: Pick<TIntegrationDALFactory, "updateById">;
|
||||
integration: TIntegrations & {
|
||||
projectId: string;
|
||||
environment: {
|
||||
@@ -862,10 +966,6 @@ const syncSecretsHeroku = async ({
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
await integrationDAL.updateById(integration.id, {
|
||||
lastUsed: new Date()
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -2656,7 +2756,9 @@ const syncSecretsHashiCorpVault = async ({
|
||||
accessId: string | null;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
if (!accessId) return;
|
||||
if (!accessId) {
|
||||
throw new Error("Access ID is required");
|
||||
}
|
||||
|
||||
interface LoginAppRoleRes {
|
||||
auth: {
|
||||
@@ -3486,6 +3588,8 @@ export const syncIntegrationSecrets = async ({
|
||||
accessToken: string;
|
||||
appendices?: { prefix: string; suffix: string };
|
||||
}) => {
|
||||
let response: { isSynced: boolean; syncMessage: string } | null = null;
|
||||
|
||||
switch (integration.integration) {
|
||||
case Integrations.GCP_SECRET_MANAGER:
|
||||
await syncSecretsGCPSecretManager({
|
||||
@@ -3498,11 +3602,13 @@ export const syncIntegrationSecrets = async ({
|
||||
await syncSecretsAzureKeyVault({
|
||||
integration,
|
||||
secrets,
|
||||
accessToken
|
||||
accessToken,
|
||||
createManySecretsRawFn,
|
||||
updateManySecretsRawFn
|
||||
});
|
||||
break;
|
||||
case Integrations.AWS_PARAMETER_STORE:
|
||||
await syncSecretsAWSParameterStore({
|
||||
response = await syncSecretsAWSParameterStore({
|
||||
integration,
|
||||
secrets,
|
||||
accessId,
|
||||
@@ -3521,7 +3627,6 @@ export const syncIntegrationSecrets = async ({
|
||||
await syncSecretsHeroku({
|
||||
createManySecretsRawFn,
|
||||
updateManySecretsRawFn,
|
||||
integrationDAL,
|
||||
integration,
|
||||
secrets,
|
||||
accessToken
|
||||
@@ -3727,4 +3832,6 @@ export const syncIntegrationSecrets = async ({
|
||||
default:
|
||||
throw new BadRequestError({ message: "Invalid integration" });
|
||||
}
|
||||
|
||||
return response;
|
||||
};
|
||||
|
@@ -36,6 +36,7 @@ import {
  TDeleteProjectMembershipsDTO,
  TGetProjectMembershipByUsernameDTO,
  TGetProjectMembershipDTO,
  TLeaveProjectDTO,
  TUpdateProjectMembershipDTO
} from "./project-membership-types";
import { TProjectUserMembershipRoleDALFactory } from "./project-user-membership-role-dal";
@@ -531,6 +532,53 @@ export const projectMembershipServiceFactory = ({
    return memberships;
  };

  const leaveProject = async ({ projectId, actorId, actor }: TLeaveProjectDTO) => {
    if (actor !== ActorType.USER) {
      throw new BadRequestError({ message: "Only users can leave projects" });
    }

    const project = await projectDAL.findById(projectId);
    if (!project) throw new BadRequestError({ message: "Project not found" });

    if (project.version !== ProjectVersion.V2) {
      throw new BadRequestError({
        message: "Please ask your project administrator to upgrade the project before leaving."
      });
    }

    const projectMembers = await projectMembershipDAL.findAllProjectMembers(projectId);

    if (!projectMembers?.length) {
      throw new BadRequestError({ message: "Failed to find project members" });
    }

    if (projectMembers.length < 2) {
      throw new BadRequestError({ message: "You cannot leave the project as you are the only member" });
    }

    const adminMembers = projectMembers.filter(
      (member) => member.roles.map((r) => r.role).includes("admin") && member.userId !== actorId
    );
    if (!adminMembers.length) {
      throw new BadRequestError({
        message: "You cannot leave the project as you are the only admin. Promote another user to admin before leaving."
      });
    }

    const deletedMembership = (
      await projectMembershipDAL.delete({
        projectId: project.id,
        userId: actorId
      })
    )?.[0];

    if (!deletedMembership) {
      throw new BadRequestError({ message: "Failed to leave project" });
    }

    return deletedMembership;
  };

  return {
    getProjectMemberships,
    getProjectMembershipByUsername,
@@ -538,6 +586,7 @@ export const projectMembershipServiceFactory = ({
    addUsersToProjectNonE2EE,
    deleteProjectMemberships,
    deleteProjectMembership, // TODO: Remove this
    addUsersToProject
    addUsersToProject,
    leaveProject
  };
};
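The interesting part of `leaveProject` is the pair of safety checks before the membership row is deleted. A pure-function sketch of just those guards, with the member shape reduced to what the checks need:

```ts
type Member = { userId: string; roles: { role: string }[] };

// You cannot leave as the only member, and not as the only admin.
const assertCanLeaveProject = (members: Member[], actorId: string) => {
  if (members.length < 2) {
    throw new Error("You cannot leave the project as you are the only member");
  }
  const otherAdmins = members.filter(
    (member) => member.roles.some((r) => r.role === "admin") && member.userId !== actorId
  );
  if (!otherAdmins.length) {
    throw new Error(
      "You cannot leave the project as you are the only admin. Promote another user to admin before leaving."
    );
  }
};
```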
@@ -1,6 +1,7 @@
import { TProjectPermission } from "@app/lib/types";

export type TGetProjectMembershipDTO = TProjectPermission;
export type TLeaveProjectDTO = Omit<TProjectPermission, "actorOrgId" | "actorAuthMethod">;
export enum ProjectUserMembershipTemporaryMode {
  Relative = "relative"
}
@@ -2,10 +2,17 @@ import { ForbiddenError } from "@casl/ability";
|
||||
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
|
||||
import { TSecretTagDALFactory } from "./secret-tag-dal";
|
||||
import { TCreateTagDTO, TDeleteTagDTO, TListProjectTagsDTO } from "./secret-tag-types";
|
||||
import {
|
||||
TCreateTagDTO,
|
||||
TDeleteTagDTO,
|
||||
TGetTagByIdDTO,
|
||||
TGetTagBySlugDTO,
|
||||
TListProjectTagsDTO,
|
||||
TUpdateTagDTO
|
||||
} from "./secret-tag-types";
|
||||
|
||||
type TSecretTagServiceFactoryDep = {
|
||||
secretTagDAL: TSecretTagDALFactory;
|
||||
@@ -48,6 +55,28 @@ export const secretTagServiceFactory = ({ secretTagDAL, permissionService }: TSe
|
||||
return newTag;
|
||||
};
|
||||
|
||||
const updateTag = async ({ actorId, actor, actorOrgId, actorAuthMethod, id, name, color, slug }: TUpdateTagDTO) => {
|
||||
const tag = await secretTagDAL.findById(id);
|
||||
if (!tag) throw new BadRequestError({ message: "Tag doesn't exist" });
|
||||
|
||||
if (slug) {
|
||||
const existingTag = await secretTagDAL.findOne({ slug, projectId: tag.projectId });
|
||||
if (existingTag && existingTag.id !== tag.id) throw new BadRequestError({ message: "Tag already exist" });
|
||||
}
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
tag.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Tags);
|
||||
|
||||
const updatedTag = await secretTagDAL.updateById(tag.id, { name, color, slug });
|
||||
return updatedTag;
|
||||
};
|
||||
|
||||
const deleteTag = async ({ actorId, actor, actorOrgId, actorAuthMethod, id }: TDeleteTagDTO) => {
|
||||
const tag = await secretTagDAL.findById(id);
|
||||
if (!tag) throw new BadRequestError({ message: "Tag doesn't exist" });
|
||||
@@ -65,6 +94,38 @@ export const secretTagServiceFactory = ({ secretTagDAL, permissionService }: TSe
|
||||
return deletedTag;
|
||||
};
|
||||
|
||||
const getTagById = async ({ actorId, actor, actorOrgId, actorAuthMethod, id }: TGetTagByIdDTO) => {
|
||||
const tag = await secretTagDAL.findById(id);
|
||||
if (!tag) throw new NotFoundError({ message: "Tag doesn't exist" });
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
tag.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Tags);
|
||||
|
||||
return tag;
|
||||
};
|
||||
|
||||
const getTagBySlug = async ({ actorId, actor, actorOrgId, actorAuthMethod, slug, projectId }: TGetTagBySlugDTO) => {
|
||||
const tag = await secretTagDAL.findOne({ projectId, slug });
|
||||
if (!tag) throw new NotFoundError({ message: "Tag doesn't exist" });
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
tag.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Tags);
|
||||
|
||||
return tag;
|
||||
};
|
||||
|
||||
const getProjectTags = async ({ actor, actorId, actorOrgId, actorAuthMethod, projectId }: TListProjectTagsDTO) => {
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
@@ -79,5 +140,5 @@ export const secretTagServiceFactory = ({ secretTagDAL, permissionService }: TSe
|
||||
return tags;
|
||||
};
|
||||
|
||||
return { createTag, deleteTag, getProjectTags };
|
||||
return { createTag, deleteTag, getProjectTags, getTagById, getTagBySlug, updateTag };
|
||||
};
|
||||
|
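`updateTag` only accepts a new slug if no other tag in the same project already uses it. A sketch of that uniqueness check with the DAL reduced to a `findOne` callback; types and names are illustrative:

```ts
type Tag = { id: string; projectId: string; slug: string };

// Reject a slug change when another tag in the same project already uses it.
const assertSlugAvailable = async (
  findOne: (filter: { slug: string; projectId: string }) => Promise<Tag | undefined>,
  tag: Tag,
  newSlug?: string
) => {
  if (!newSlug) return;
  const existing = await findOne({ slug: newSlug, projectId: tag.projectId });
  if (existing && existing.id !== tag.id) throw new Error("Tag already exists");
};
```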
@@ -6,6 +6,21 @@ export type TCreateTagDTO = {
  slug: string;
} & TProjectPermission;

export type TUpdateTagDTO = {
  id: string;
  name?: string;
  slug?: string;
  color?: string;
} & Omit<TProjectPermission, "projectId">;

export type TGetTagByIdDTO = {
  id: string;
} & Omit<TProjectPermission, "projectId">;

export type TGetTagBySlugDTO = {
  slug: string;
} & TProjectPermission;

export type TDeleteTagDTO = {
  id: string;
} & Omit<TProjectPermission, "projectId">;
@@ -421,94 +421,88 @@ export const secretQueueFactory = ({
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
|
||||
if (!folder) {
|
||||
logger.error(new Error("Secret path not found"));
|
||||
return;
|
||||
throw new Error("Secret path not found");
|
||||
}
|
||||
|
||||
// start syncing all linked imports also
|
||||
if (depth < MAX_SYNC_SECRET_DEPTH) {
|
||||
// find all imports made with the given environment and secret path
|
||||
const linkSourceDto = {
|
||||
projectId,
|
||||
importEnv: folder.environment.id,
|
||||
importPath: secretPath,
|
||||
isReplication: false
|
||||
};
|
||||
const imports = await secretImportDAL.find(linkSourceDto);
|
||||
// find all imports made with the given environment and secret path
|
||||
const linkSourceDto = {
|
||||
projectId,
|
||||
importEnv: folder.environment.id,
|
||||
importPath: secretPath,
|
||||
isReplication: false
|
||||
};
|
||||
const imports = await secretImportDAL.find(linkSourceDto);
|
||||
|
||||
if (imports.length) {
|
||||
// keep calling sync secret for all the imports made
|
||||
const importedFolderIds = unique(imports, (i) => i.folderId).map(({ folderId }) => folderId);
|
||||
const importedFolders = await folderDAL.findSecretPathByFolderIds(projectId, importedFolderIds);
|
||||
const foldersGroupedById = groupBy(importedFolders.filter(Boolean), (i) => i?.id as string);
|
||||
logger.info(
|
||||
`getIntegrationSecrets: Syncing secret due to link change [jobId=${job.id}] [projectId=${job.data.projectId}] [environment=${job.data.environment}] [secretPath=${job.data.secretPath}] [depth=${depth}]`
|
||||
);
|
||||
await Promise.all(
|
||||
imports
|
||||
.filter(({ folderId }) => Boolean(foldersGroupedById[folderId][0]?.path as string))
|
||||
// filter out already synced ones
|
||||
.filter(
|
||||
({ folderId }) =>
|
||||
!deDupeQueue[
|
||||
uniqueSecretQueueKey(
|
||||
foldersGroupedById[folderId][0]?.environmentSlug as string,
|
||||
foldersGroupedById[folderId][0]?.path as string
|
||||
)
|
||||
]
|
||||
)
|
||||
.map(({ folderId }) =>
|
||||
syncSecrets({
|
||||
projectId,
|
||||
secretPath: foldersGroupedById[folderId][0]?.path as string,
|
||||
environmentSlug: foldersGroupedById[folderId][0]?.environmentSlug as string,
|
||||
_deDupeQueue: deDupeQueue,
|
||||
_depth: depth + 1,
|
||||
excludeReplication: true
|
||||
})
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
const secretReferences = await secretDAL.findReferencedSecretReferences(
|
||||
projectId,
|
||||
folder.environment.slug,
|
||||
secretPath
|
||||
if (imports.length) {
|
||||
// keep calling sync secret for all the imports made
|
||||
const importedFolderIds = unique(imports, (i) => i.folderId).map(({ folderId }) => folderId);
|
||||
const importedFolders = await folderDAL.findSecretPathByFolderIds(projectId, importedFolderIds);
|
||||
const foldersGroupedById = groupBy(importedFolders.filter(Boolean), (i) => i?.id as string);
|
||||
logger.info(
|
||||
`getIntegrationSecrets: Syncing secret due to link change [jobId=${job.id}] [projectId=${job.data.projectId}] [environment=${job.data.environment}] [secretPath=${job.data.secretPath}] [depth=${depth}]`
|
||||
);
|
||||
await Promise.all(
|
||||
imports
|
||||
.filter(({ folderId }) => Boolean(foldersGroupedById[folderId][0]?.path as string))
|
||||
// filter out already synced ones
|
||||
.filter(
|
||||
({ folderId }) =>
|
||||
!deDupeQueue[
|
||||
uniqueSecretQueueKey(
|
||||
foldersGroupedById[folderId][0]?.environmentSlug as string,
|
||||
foldersGroupedById[folderId][0]?.path as string
|
||||
)
|
||||
]
|
||||
)
|
||||
.map(({ folderId }) =>
|
||||
syncSecrets({
|
||||
projectId,
|
||||
secretPath: foldersGroupedById[folderId][0]?.path as string,
|
||||
environmentSlug: foldersGroupedById[folderId][0]?.environmentSlug as string,
|
||||
_deDupeQueue: deDupeQueue,
|
||||
_depth: depth + 1,
|
||||
excludeReplication: true
|
||||
})
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
const secretReferences = await secretDAL.findReferencedSecretReferences(
|
||||
projectId,
|
||||
folder.environment.slug,
|
||||
secretPath
|
||||
);
|
||||
if (secretReferences.length) {
|
||||
const referencedFolderIds = unique(secretReferences, (i) => i.folderId).map(({ folderId }) => folderId);
|
||||
const referencedFolders = await folderDAL.findSecretPathByFolderIds(projectId, referencedFolderIds);
|
||||
const referencedFoldersGroupedById = groupBy(referencedFolders.filter(Boolean), (i) => i?.id as string);
|
||||
logger.info(
|
||||
`getIntegrationSecrets: Syncing secret due to reference change [jobId=${job.id}] [projectId=${job.data.projectId}] [environment=${job.data.environment}] [secretPath=${job.data.secretPath}] [depth=${depth}]`
|
||||
);
|
||||
await Promise.all(
|
||||
secretReferences
|
||||
.filter(({ folderId }) => Boolean(referencedFoldersGroupedById[folderId][0]?.path))
|
||||
// filter out already synced ones
|
||||
.filter(
|
||||
({ folderId }) =>
|
||||
!deDupeQueue[
|
||||
uniqueSecretQueueKey(
|
||||
referencedFoldersGroupedById[folderId][0]?.environmentSlug as string,
|
||||
referencedFoldersGroupedById[folderId][0]?.path as string
|
||||
)
|
||||
]
|
||||
)
|
||||
.map(({ folderId }) =>
|
||||
syncSecrets({
|
||||
projectId,
|
||||
secretPath: referencedFoldersGroupedById[folderId][0]?.path as string,
|
||||
environmentSlug: referencedFoldersGroupedById[folderId][0]?.environmentSlug as string,
|
||||
_deDupeQueue: deDupeQueue,
|
||||
_depth: depth + 1,
|
||||
excludeReplication: true
|
||||
})
|
||||
)
|
||||
);
|
||||
if (secretReferences.length) {
|
||||
const referencedFolderIds = unique(secretReferences, (i) => i.folderId).map(({ folderId }) => folderId);
|
||||
const referencedFolders = await folderDAL.findSecretPathByFolderIds(projectId, referencedFolderIds);
|
||||
const referencedFoldersGroupedById = groupBy(referencedFolders.filter(Boolean), (i) => i?.id as string);
|
||||
logger.info(
|
||||
`getIntegrationSecrets: Syncing secret due to reference change [jobId=${job.id}] [projectId=${job.data.projectId}] [environment=${job.data.environment}] [secretPath=${job.data.secretPath}] [depth=${depth}]`
|
||||
);
|
||||
await Promise.all(
|
||||
secretReferences
|
||||
.filter(({ folderId }) => Boolean(referencedFoldersGroupedById[folderId][0]?.path))
|
||||
// filter out already synced ones
|
||||
.filter(
|
||||
({ folderId }) =>
|
||||
!deDupeQueue[
|
||||
uniqueSecretQueueKey(
|
||||
referencedFoldersGroupedById[folderId][0]?.environmentSlug as string,
|
||||
referencedFoldersGroupedById[folderId][0]?.path as string
|
||||
)
|
||||
]
|
||||
)
|
||||
.map(({ folderId }) =>
|
||||
syncSecrets({
|
||||
projectId,
|
||||
secretPath: referencedFoldersGroupedById[folderId][0]?.path as string,
|
||||
environmentSlug: referencedFoldersGroupedById[folderId][0]?.environmentSlug as string,
|
||||
_deDupeQueue: deDupeQueue,
|
||||
_depth: depth + 1,
|
||||
excludeReplication: true
|
||||
})
|
||||
)
|
||||
);
|
||||
}
|
||||
} else {
|
||||
logger.info(`getIntegrationSecrets: Secret depth exceeded for [projectId=${projectId}] [folderId=${folder.id}]`);
|
||||
}
|
||||
|
||||
const integrations = await integrationDAL.findByProjectIdV2(projectId, environment); // note: returns array of integrations + integration auths in this environment
|
||||
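The restructured block above walks linked imports and references recursively, but bounds the walk with a depth limit and a de-dupe map so the same environment and path are never synced twice in one job. A self-contained sketch of that traversal; the constant value and key format are illustrative:

```ts
// Illustrative cap; the service defines its own MAX_SYNC_SECRET_DEPTH.
const MAX_SYNC_SECRET_DEPTH = 5;

const uniqueSecretQueueKey = (environmentSlug: string, secretPath: string) => `${environmentSlug}:${secretPath}`;

// Follow links from one folder to the next, stopping at a fixed depth and skipping
// any environment + path combination already visited in this job.
const syncWithLinks = async (
  start: { environmentSlug: string; secretPath: string },
  getLinkedFolders: (folder: {
    environmentSlug: string;
    secretPath: string;
  }) => Promise<{ environmentSlug: string; secretPath: string }[]>,
  deDupeQueue: Record<string, boolean> = {},
  depth = 1
): Promise<void> => {
  if (depth > MAX_SYNC_SECRET_DEPTH) return;
  deDupeQueue[uniqueSecretQueueKey(start.environmentSlug, start.secretPath)] = true;
  const linked = await getLinkedFolders(start);
  await Promise.all(
    linked
      .filter((folder) => !deDupeQueue[uniqueSecretQueueKey(folder.environmentSlug, folder.secretPath)])
      .map((folder) => syncWithLinks(folder, getLinkedFolders, deDupeQueue, depth + 1))
  );
};
```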
@@ -550,7 +544,7 @@ export const secretQueueFactory = ({
|
||||
}
|
||||
|
||||
try {
|
||||
await syncIntegrationSecrets({
|
||||
const response = await syncIntegrationSecrets({
|
||||
createManySecretsRawFn,
|
||||
updateManySecretsRawFn,
|
||||
integrationDAL,
|
||||
@@ -568,13 +562,15 @@ export const secretQueueFactory = ({
|
||||
await integrationDAL.updateById(integration.id, {
|
||||
lastSyncJobId: job.id,
|
||||
lastUsed: new Date(),
|
||||
syncMessage: "",
|
||||
isSynced: true
|
||||
syncMessage: response?.syncMessage ?? "",
|
||||
isSynced: response?.isSynced ?? true
|
||||
});
|
||||
} catch (err: unknown) {
|
||||
} catch (err) {
|
||||
logger.info("Secret integration sync error: %o", err);
|
||||
|
||||
const message =
|
||||
err instanceof AxiosError ? JSON.stringify((err as AxiosError)?.response?.data) : (err as Error)?.message;
|
||||
(err instanceof AxiosError ? JSON.stringify(err?.response?.data) : (err as Error)?.message) ||
|
||||
"Unknown error occurred.";
|
||||
|
||||
await integrationDAL.updateById(integration.id, {
|
||||
lastSyncJobId: job.id,
|
||||
|
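The queue now persists whatever status the provider sync returned, and falls back to a readable message when the sync threw. A sketch of that bookkeeping with the integration update reduced to a callback; only the `AxiosError` handling is taken directly from the hunk:

```ts
import { AxiosError } from "axios";

type SyncOutcome = { isSynced: boolean; syncMessage: string } | null;

// Record the result of one integration sync: prefer the provider's own status when it reported one,
// and derive a message from the thrown error otherwise.
const recordSyncOutcome = async (
  updateIntegration: (data: { isSynced: boolean; syncMessage: string }) => Promise<void>,
  run: () => Promise<SyncOutcome>
) => {
  try {
    const response = await run();
    await updateIntegration({ isSynced: response?.isSynced ?? true, syncMessage: response?.syncMessage ?? "" });
  } catch (err) {
    const message =
      (err instanceof AxiosError ? JSON.stringify(err?.response?.data) : (err as Error)?.message) ||
      "Unknown error occurred.";
    await updateIntegration({ isSynced: false, syncMessage: message });
  }
};
```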
@@ -1,6 +1,10 @@
|
||||
import bcrypt from "bcrypt";
|
||||
|
||||
import { TSuperAdmin, TSuperAdminUpdate } from "@app/db/schemas";
|
||||
import { TKeyStoreFactory } from "@app/keystore/keystore";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
|
||||
import { getUserPrivateKey } from "@app/lib/crypto/srp";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
|
||||
import { TAuthLoginFactory } from "../auth/auth-login-service";
|
||||
@@ -77,6 +81,7 @@ export const superAdminServiceFactory = ({
|
||||
firstName,
|
||||
salt,
|
||||
email,
|
||||
password,
|
||||
verifier,
|
||||
publicKey,
|
||||
protectedKey,
|
||||
@@ -92,6 +97,18 @@ export const superAdminServiceFactory = ({
|
||||
const existingUser = await userDAL.findOne({ email });
|
||||
if (existingUser) throw new BadRequestError({ name: "Admin sign up", message: "User already exist" });
|
||||
|
||||
const privateKey = await getUserPrivateKey(password, {
|
||||
encryptionVersion: 2,
|
||||
salt,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
encryptedPrivateKey,
|
||||
iv: encryptedPrivateKeyIV,
|
||||
tag: encryptedPrivateKeyTag
|
||||
});
|
||||
const hashedPassword = await bcrypt.hash(password, appCfg.BCRYPT_SALT_ROUND);
|
||||
const { iv, tag, ciphertext, encoding } = infisicalSymmetricEncypt(privateKey);
|
||||
const userInfo = await userDAL.transaction(async (tx) => {
|
||||
const newUser = await userDAL.create(
|
||||
{
|
||||
@@ -119,7 +136,12 @@ export const superAdminServiceFactory = ({
|
||||
iv: encryptedPrivateKeyIV,
|
||||
tag: encryptedPrivateKeyTag,
|
||||
verifier,
|
||||
userId: newUser.id
|
||||
userId: newUser.id,
|
||||
hashedPassword,
|
||||
serverEncryptedPrivateKey: ciphertext,
|
||||
serverEncryptedPrivateKeyIV: iv,
|
||||
serverEncryptedPrivateKeyTag: tag,
|
||||
serverEncryptedPrivateKeyEncoding: encoding
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
@@ -1,5 +1,6 @@
export type TAdminSignUpDTO = {
  email: string;
  password: string;
  publicKey: string;
  salt: string;
  lastName?: string;
@@ -1,4 +1,5 @@
export enum UserAliasType {
  LDAP = "ldap",
  SAML = "saml"
  SAML = "saml",
  OIDC = "oidc"
}
@@ -1,3 +1,5 @@
import { SecretKeyEncoding } from "@app/db/schemas";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { BadRequestError } from "@app/lib/errors";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TokenType } from "@app/services/auth-token/auth-token-types";
@@ -230,6 +232,21 @@ export const userServiceFactory = ({
    );
  };

  const getUserPrivateKey = async (userId: string) => {
    const user = await userDAL.findUserEncKeyByUserId(userId);
    if (!user?.serverEncryptedPrivateKey || !user.serverEncryptedPrivateKeyIV || !user.serverEncryptedPrivateKeyTag) {
      throw new BadRequestError({ message: "Private key not found. Please login again" });
    }
    const privateKey = infisicalSymmetricDecrypt({
      ciphertext: user.serverEncryptedPrivateKey,
      tag: user.serverEncryptedPrivateKeyTag,
      iv: user.serverEncryptedPrivateKeyIV,
      keyEncoding: user.serverEncryptedPrivateKeyEncoding as SecretKeyEncoding
    });

    return privateKey;
  };

  return {
    sendEmailVerificationCode,
    verifyEmailVerificationCode,
@@ -240,6 +257,7 @@ export const userServiceFactory = ({
    getMe,
    createUserAction,
    getUserAction,
    unlockUser
    unlockUser,
    getUserPrivateKey
  };
};
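`getUserPrivateKey` is the decryption counterpart to the server-side encryption added in the signup service. A sketch of that recovery step, assuming the same AES-256-GCM layout as the encryption sketch earlier; the root key handling and type names are illustrative, not the library's `infisicalSymmetricDecrypt`:

```ts
import crypto from "crypto";

type ServerEncryptedKey = { ciphertext: string; iv: string; tag: string; encoding: BufferEncoding };

// Recover the user's private key from its server-encrypted copy.
const decryptPrivateKeyFromServer = (rootKey: Buffer, enc: ServerEncryptedKey) => {
  const decipher = crypto.createDecipheriv("aes-256-gcm", rootKey, Buffer.from(enc.iv, enc.encoding));
  decipher.setAuthTag(Buffer.from(enc.tag, enc.encoding));
  return Buffer.concat([decipher.update(Buffer.from(enc.ciphertext, enc.encoding)), decipher.final()]).toString("utf8");
};
```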
cli/go.mod (55 changed lines)
@@ -10,6 +10,7 @@ require (
|
||||
github.com/fatih/semgroup v1.2.0
|
||||
github.com/gitleaks/go-gitdiff v0.8.0
|
||||
github.com/h2non/filetype v1.1.3
|
||||
github.com/infisical/go-sdk v0.2.0
|
||||
github.com/mattn/go-isatty v0.0.14
|
||||
github.com/muesli/ansi v0.0.0-20221106050444-61f0cd9a192a
|
||||
github.com/muesli/mango-cobra v1.2.0
|
||||
@@ -22,23 +23,48 @@ require (
|
||||
github.com/rs/zerolog v1.26.1
|
||||
github.com/spf13/cobra v1.6.1
|
||||
github.com/spf13/viper v1.8.1
|
||||
github.com/stretchr/testify v1.8.1
|
||||
golang.org/x/crypto v0.14.0
|
||||
golang.org/x/term v0.13.0
|
||||
github.com/stretchr/testify v1.9.0
|
||||
golang.org/x/crypto v0.23.0
|
||||
golang.org/x/term v0.20.0
|
||||
gopkg.in/yaml.v2 v2.4.0
|
||||
)
|
||||
|
||||
require (
|
||||
cloud.google.com/go/auth v0.5.1 // indirect
|
||||
cloud.google.com/go/auth/oauth2adapt v0.2.2 // indirect
|
||||
cloud.google.com/go/compute/metadata v0.3.0 // indirect
|
||||
cloud.google.com/go/iam v1.1.8 // indirect
|
||||
github.com/alessio/shellescape v1.4.1 // indirect
|
||||
github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef // indirect
|
||||
github.com/aws/aws-sdk-go-v2 v1.27.2 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/config v1.27.18 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.17.18 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.5 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.9 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.9 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.11 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/sso v1.20.11 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.24.5 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/sts v1.28.12 // indirect
|
||||
github.com/aws/smithy-go v1.20.2 // indirect
|
||||
github.com/chzyer/readline v1.5.1 // indirect
|
||||
github.com/danieljoos/wincred v1.2.0 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/dvsekhvalnov/jose2go v1.5.0 // indirect
|
||||
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||
github.com/fsnotify/fsnotify v1.4.9 // indirect
|
||||
github.com/go-logr/logr v1.4.1 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
github.com/go-openapi/errors v0.20.2 // indirect
|
||||
github.com/go-openapi/strfmt v0.21.3 // indirect
|
||||
github.com/godbus/dbus/v5 v5.1.0 // indirect
|
||||
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
|
||||
github.com/golang/protobuf v1.5.4 // indirect
|
||||
github.com/google/s2a-go v0.1.7 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.12.4 // indirect
|
||||
github.com/hashicorp/hcl v1.0.0 // indirect
|
||||
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
|
||||
github.com/magiconair/properties v1.8.5 // indirect
|
||||
@@ -59,17 +85,30 @@ require (
|
||||
github.com/subosito/gotenv v1.2.0 // indirect
|
||||
github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c // indirect
|
||||
go.mongodb.org/mongo-driver v1.10.0 // indirect
|
||||
golang.org/x/net v0.17.0 // indirect
|
||||
golang.org/x/sync v0.1.0 // indirect
|
||||
golang.org/x/sys v0.13.0 // indirect
|
||||
golang.org/x/text v0.13.0 // indirect
|
||||
go.opencensus.io v0.24.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 // indirect
|
||||
go.opentelemetry.io/otel v1.24.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.24.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.24.0 // indirect
|
||||
golang.org/x/net v0.25.0 // indirect
|
||||
golang.org/x/oauth2 v0.21.0 // indirect
|
||||
golang.org/x/sync v0.7.0 // indirect
|
||||
golang.org/x/sys v0.20.0 // indirect
|
||||
golang.org/x/text v0.15.0 // indirect
|
||||
golang.org/x/time v0.5.0 // indirect
|
||||
google.golang.org/api v0.183.0 // indirect
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20240521202816-d264139d666e // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20240528184218-531527333157 // indirect
|
||||
google.golang.org/grpc v1.64.0 // indirect
|
||||
google.golang.org/protobuf v1.34.1 // indirect
|
||||
gopkg.in/ini.v1 v1.62.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/fatih/color v1.13.0
|
||||
github.com/go-resty/resty/v2 v2.10.0
|
||||
github.com/go-resty/resty/v2 v2.13.1
|
||||
github.com/inconshreveable/mousetrap v1.0.1 // indirect
|
||||
github.com/jedib0t/go-pretty v4.3.0+incompatible
|
||||
github.com/manifoldco/promptui v0.9.0
|
||||
|
cli/go.sum (125 changed lines)
@@ -18,15 +18,23 @@ cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmW
|
||||
cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg=
|
||||
cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8=
|
||||
cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0=
|
||||
cloud.google.com/go/auth v0.5.1 h1:0QNO7VThG54LUzKiQxv8C6x1YX7lUrzlAa1nVLF8CIw=
|
||||
cloud.google.com/go/auth v0.5.1/go.mod h1:vbZT8GjzDf3AVqCcQmqeeM32U9HBFc32vVVAbwDsa6s=
|
||||
cloud.google.com/go/auth/oauth2adapt v0.2.2 h1:+TTV8aXpjeChS9M+aTtN/TjdQnzJvmzKFt//oWu7HX4=
|
||||
cloud.google.com/go/auth/oauth2adapt v0.2.2/go.mod h1:wcYjgpZI9+Yu7LyYBg4pqSiaRkfEK3GQcpb7C/uyF1Q=
|
||||
cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
|
||||
cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
|
||||
cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc=
|
||||
cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg=
|
||||
cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc=
|
||||
cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ=
|
||||
cloud.google.com/go/compute/metadata v0.3.0 h1:Tz+eQXMEqDIKRsmY3cHTL6FVaynIjX2QxYC4trgAKZc=
|
||||
cloud.google.com/go/compute/metadata v0.3.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k=
|
||||
cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
|
||||
cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk=
|
||||
cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk=
|
||||
cloud.google.com/go/iam v1.1.8 h1:r7umDwhj+BQyz0ScZMp4QrGXjSTI3ZINnpgU2nlB/K0=
|
||||
cloud.google.com/go/iam v1.1.8/go.mod h1:GvE6lyMmfxXauzNq8NbgJbeVQNspG+tcdL/W8QO1+zE=
|
||||
cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
|
||||
cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=
|
||||
cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA=
|
||||
@@ -49,6 +57,32 @@ github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmV
|
||||
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
|
||||
github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef h1:46PFijGLmAjMPwCCCo7Jf0W6f9slllCkkv7vyc1yOSg=
|
||||
github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw=
|
||||
github.com/aws/aws-sdk-go-v2 v1.27.2 h1:pLsTXqX93rimAOZG2FIYraDQstZaaGVVN4tNw65v0h8=
|
||||
github.com/aws/aws-sdk-go-v2 v1.27.2/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM=
|
||||
github.com/aws/aws-sdk-go-v2/config v1.27.18 h1:wFvAnwOKKe7QAyIxziwSKjmer9JBMH1vzIL6W+fYuKk=
|
||||
github.com/aws/aws-sdk-go-v2/config v1.27.18/go.mod h1:0xz6cgdX55+kmppvPm2IaKzIXOheGJhAufacPJaXZ7c=
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.17.18 h1:D/ALDWqK4JdY3OFgA2thcPO1c9aYTT5STS/CvnkqY1c=
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.17.18/go.mod h1:JuitCWq+F5QGUrmMPsk945rop6bB57jdscu+Glozdnc=
|
||||
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.5 h1:dDgptDO9dxeFkXy+tEgVkzSClHZje/6JkPW5aZyEvrQ=
|
||||
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.5/go.mod h1:gjvE2KBUgUQhcv89jqxrIxH9GaKs1JbZzWejj/DaHGA=
|
||||
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.9 h1:cy8ahBJuhtM8GTTSyOkfy6WVPV1IE+SS5/wfXUYuulw=
|
||||
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.9/go.mod h1:CZBXGLaJnEZI6EVNcPd7a6B5IC5cA/GkRWtu9fp3S6Y=
|
||||
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.9 h1:A4SYk07ef04+vxZToz9LWvAXl9LW0NClpPpMsi31cz0=
|
||||
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.9/go.mod h1:5jJcHuwDagxN+ErjQ3PU3ocf6Ylc/p9x+BLO/+X4iXw=
|
||||
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 h1:hT8rVHwugYE2lEfdFE0QWVo81lF7jMrYJVDWI+f+VxU=
|
||||
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0/go.mod h1:8tu/lYfQfFe6IGnaOdrpVgEL2IrrDOf6/m9RQum4NkY=
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 h1:Ji0DY1xUsUr3I8cHps0G+XM3WWU16lP6yG8qu1GAZAs=
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2/go.mod h1:5CsjAbs3NlGQyZNFACh+zztPDI7fU6eW9QsxjfnuBKg=
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.11 h1:o4T+fKxA3gTMcluBNZZXE9DNaMkJuUL1O3mffCUjoJo=
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.11/go.mod h1:84oZdJ+VjuJKs9v1UTC9NaodRZRseOXCTgku+vQJWR8=
|
||||
github.com/aws/aws-sdk-go-v2/service/sso v1.20.11 h1:gEYM2GSpr4YNWc6hCd5nod4+d4kd9vWIAWrmGuLdlMw=
|
||||
github.com/aws/aws-sdk-go-v2/service/sso v1.20.11/go.mod h1:gVvwPdPNYehHSP9Rs7q27U1EU+3Or2ZpXvzAYJNh63w=
|
||||
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.24.5 h1:iXjh3uaH3vsVcnyZX7MqCoCfcyxIrVE9iOQruRaWPrQ=
|
||||
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.24.5/go.mod h1:5ZXesEuy/QcO0WUnt+4sDkxhdXRHTu2yG0uCSH8B6os=
|
||||
github.com/aws/aws-sdk-go-v2/service/sts v1.28.12 h1:M/1u4HBpwLuMtjlxuI2y6HoVLzF5e2mfxHCg7ZVMYmk=
|
||||
github.com/aws/aws-sdk-go-v2/service/sts v1.28.12/go.mod h1:kcfd+eTdEi/40FIbLq4Hif3XMXnl5b/+t/KTfLt9xIk=
|
||||
github.com/aws/smithy-go v1.20.2 h1:tbp628ireGtzcHDDmLT/6ADHidqnwgF57XOXZe6tp4Q=
|
||||
github.com/aws/smithy-go v1.20.2/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E=
|
||||
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
|
||||
github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM=
|
||||
github.com/bradleyjkemp/cupaloy/v2 v2.8.0 h1:any4BmKE+jGIaMpnU8YgH/I2LPiLBufr6oMMlVBbn9M=
|
||||
@@ -97,6 +131,8 @@ github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w=
|
||||
github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk=
|
||||
github.com/fatih/semgroup v1.2.0 h1:h/OLXwEM+3NNyAdZEpMiH1OzfplU09i2qXPVThGZvyg=
|
||||
github.com/fatih/semgroup v1.2.0/go.mod h1:1KAD4iIYfXjE4U13B48VM4z9QUwV5Tt8O4rS879kgm8=
|
||||
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
|
||||
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
|
||||
github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
|
||||
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
|
||||
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
|
||||
@@ -105,12 +141,17 @@ github.com/gitleaks/go-gitdiff v0.8.0/go.mod h1:pKz0X4YzCKZs30BL+weqBIG7mx0jl4tF
|
||||
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
|
||||
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
|
||||
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
|
||||
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
|
||||
github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
|
||||
github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
||||
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
|
||||
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
|
||||
github.com/go-openapi/errors v0.20.2 h1:dxy7PGTqEh94zj2E3h1cUmQQWiM1+aeCROfAr02EmK8=
|
||||
github.com/go-openapi/errors v0.20.2/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M=
|
||||
github.com/go-openapi/strfmt v0.21.3 h1:xwhj5X6CjXEZZHMWy1zKJxvW9AfHC9pkyUjLvHtKG7o=
|
||||
github.com/go-openapi/strfmt v0.21.3/go.mod h1:k+RzNO0Da+k3FrrynSNN8F7n/peCmQQqbbXjtDfvmGg=
|
||||
github.com/go-resty/resty/v2 v2.10.0 h1:Qla4W/+TMmv0fOeeRqzEpXPLfTUnR5HZ1+lGs+CkiCo=
|
||||
github.com/go-resty/resty/v2 v2.10.0/go.mod h1:iiP/OpA0CkcL3IGt1O0+/SIItFUbkkyw5BGXiVdTu+A=
|
||||
github.com/go-resty/resty/v2 v2.13.1 h1:x+LHXBI2nMB1vqndymf26quycC4aggYJ7DECYbiz03g=
|
||||
github.com/go-resty/resty/v2 v2.13.1/go.mod h1:GznXlLxkq6Nh4sU59rPmUw3VtgpO3aS96ORAI6Q7d+0=
|
||||
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
||||
github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk=
|
||||
github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
||||
@@ -119,6 +160,8 @@ github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfU
|
||||
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
|
||||
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
|
||||
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
|
||||
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
|
||||
@@ -144,6 +187,8 @@ github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw
|
||||
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
|
||||
github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM=
|
||||
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
|
||||
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
|
||||
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
|
||||
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
|
||||
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
|
||||
@@ -157,8 +202,9 @@ github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
|
||||
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU=
|
||||
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
|
||||
github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
|
||||
@@ -175,11 +221,18 @@ github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLe
|
||||
github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
||||
github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
||||
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
|
||||
github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o=
|
||||
github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw=
|
||||
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y=
|
||||
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs=
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0=
|
||||
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
|
||||
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
|
||||
github.com/googleapis/gax-go/v2 v2.12.4 h1:9gWcmF85Wvq4ryPFvGFaOgPIs1AQX0d0bcbGw4Z96qg=
|
||||
github.com/googleapis/gax-go/v2 v2.12.4/go.mod h1:KYEYLorsnIGDi/rPC8b5TdlB9kbKoFubselGIoBMCwI=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
|
||||
@@ -210,6 +263,8 @@ github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:
|
||||
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
|
||||
github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc=
|
||||
github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||
github.com/infisical/go-sdk v0.2.0 h1:n1/KNdYpeQavSqVwC9BfeV8VRzf3N2X9zO1tzQOSj5Q=
|
||||
github.com/infisical/go-sdk v0.2.0/go.mod h1:vHTDVw3k+wfStXab513TGk1n53kaKF2xgLqpw/xvtl4=
|
||||
github.com/jedib0t/go-pretty v4.3.0+incompatible h1:CGs8AVhEKg/n9YbUenWmNStRW2PHJzaeDodcfvRAbIo=
|
||||
github.com/jedib0t/go-pretty v4.3.0+incompatible/go.mod h1:XemHduiw8R651AF9Pt4FwCTKeG3oo7hrHJAoznj9nag=
|
||||
github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||
@@ -330,8 +385,9 @@ github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
|
||||
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||
@@ -340,8 +396,9 @@ github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4=
@@ -372,6 +429,18 @@ go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk=
go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E=
go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 h1:4Pp6oUg3+e/6M4C0A/3kJ2VYa++dsWVTtGgLVj5xtHg=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw=
go.opentelemetry.io/otel v1.24.0 h1:0LAOdjNmQeSTzGBzduGe/rU4tZhMwL5rWgtp9Ku5Jfo=
go.opentelemetry.io/otel v1.24.0/go.mod h1:W7b9Ozg4nkF5tWI5zsXkaKKDjdVjpD4oAt9Qi/MArHo=
go.opentelemetry.io/otel/metric v1.24.0 h1:6EhoGWWK28x1fbpA4tYTOWBkPefTDQnb8WSGXlc88kI=
go.opentelemetry.io/otel/metric v1.24.0/go.mod h1:VYhLe1rFfxuTXLgj4CBiyz+9WYBA8pNGJgDcSFRKBco=
go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI=
go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=
go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo=
@@ -385,8 +454,9 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20211215165025-cf75a172585e/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc=
golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -465,8 +535,9 @@ golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qx
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM=
golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@@ -479,6 +550,8 @@ golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ
golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.21.0 h1:tsimM75w1tF/uws5rbeHzIWxEqElMehnc+iW793zsZs=
golang.org/x/oauth2 v0.21.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -491,8 +564,9 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -546,14 +620,16 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE=
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek=
golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.20.0 h1:VnkxpohqXaOBYJtBmEppKUG6mXpi+4O6purfc2+sMhw=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -565,13 +641,14 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4=
golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk=
golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
@@ -652,6 +729,8 @@ google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjR
google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU=
google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94=
google.golang.org/api v0.44.0/go.mod h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8=
google.golang.org/api v0.183.0 h1:PNMeRDwo1pJdgNcFQ9GstuLe/noWKIc89pRWRLMvLwE=
google.golang.org/api v0.183.0/go.mod h1:q43adC5/pHoSZTx5h2mSmdF7NcyfW9JuDyIOJAgS9ZQ=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
@@ -700,6 +779,10 @@ google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6D
google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A=
google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=
google.golang.org/genproto/googleapis/api v0.0.0-20240521202816-d264139d666e h1:SkdGTrROJl2jRGT/Fxv5QUf9jtdKCQh4KQJXbXVLAi0=
google.golang.org/genproto/googleapis/api v0.0.0-20240521202816-d264139d666e/go.mod h1:LweJcLbyVij6rCex8YunD8DYR5VDonap/jYl3ZRxcIU=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240528184218-531527333157 h1:Zy9XzmMEflZ/MAaA7vNcoebnRAld7FsPW1EeBB7V0m8=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240528184218-531527333157/go.mod h1:EfXuqaE1J41VCDicxHzUDm+8rk+7ZdXzHV0IhO/I6s0=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
@@ -720,6 +803,8 @@ google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAG
google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
google.golang.org/grpc v1.64.0 h1:KH3VH9y/MgNQg1dE7b3XfVK0GsPSIzJwdF617gUSbvY=
google.golang.org/grpc v1.64.0/go.mod h1:oxjF8E3FBnjp+/gVFYdWacaLDx9na1aqy9oovLpxQYg=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
@@ -732,6 +817,8 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg=
google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@@ -391,6 +391,7 @@ func CallCreateSecretsV3(httpClient *resty.Client, request CreateSecretV3Request
}

func CallDeleteSecretsV3(httpClient *resty.Client, request DeleteSecretV3Request) error {

var secretsResponse GetEncryptedSecretsV3Response
response, err := httpClient.
R().
@@ -490,7 +491,7 @@ func CallUniversalAuthLogin(httpClient *resty.Client, request UniversalAuthLogin
return universalAuthLoginResponse, nil
}

func CallUniversalAuthRefreshAccessToken(httpClient *resty.Client, request UniversalAuthRefreshRequest) (UniversalAuthRefreshResponse, error) {
func CallMachineIdentityRefreshAccessToken(httpClient *resty.Client, request UniversalAuthRefreshRequest) (UniversalAuthRefreshResponse, error) {
var universalAuthRefreshResponse UniversalAuthRefreshResponse
response, err := httpClient.
R().
@@ -500,11 +501,11 @@ func CallUniversalAuthRefreshAccessToken(httpClient *resty.Client, request Unive
Post(fmt.Sprintf("%v/v1/auth/token/renew", config.INFISICAL_URL))

if err != nil {
return UniversalAuthRefreshResponse{}, fmt.Errorf("CallUniversalAuthRefreshAccessToken: Unable to complete api request [err=%s]", err)
return UniversalAuthRefreshResponse{}, fmt.Errorf("CallMachineIdentityRefreshAccessToken: Unable to complete api request [err=%s]", err)
}

if response.IsError() {
return UniversalAuthRefreshResponse{}, fmt.Errorf("CallUniversalAuthRefreshAccessToken: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
return UniversalAuthRefreshResponse{}, fmt.Errorf("CallMachineIdentityRefreshAccessToken: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
}

return universalAuthRefreshResponse, nil
@@ -566,3 +567,39 @@ func CallCreateDynamicSecretLeaseV1(httpClient *resty.Client, request CreateDyna

return createDynamicSecretLeaseResponse, nil
}

func CallCreateRawSecretsV3(httpClient *resty.Client, request CreateRawSecretV3Request) error {
response, err := httpClient.
R().
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
Post(fmt.Sprintf("%v/v3/secrets/raw/%s", config.INFISICAL_URL, request.SecretName))

if err != nil {
return fmt.Errorf("CallCreateRawSecretsV3: Unable to complete api request [err=%w]", err)
}

if response.IsError() {
return fmt.Errorf("CallCreateRawSecretsV3: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
}

return nil
}

func CallUpdateRawSecretsV3(httpClient *resty.Client, request UpdateRawSecretByNameV3Request) error {
response, err := httpClient.
R().
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
Patch(fmt.Sprintf("%v/v3/secrets/raw/%s", config.INFISICAL_URL, request.SecretName))

if err != nil {
return fmt.Errorf("CallUpdateRawSecretsV3: Unable to complete api request [err=%w]", err)
}

if response.IsError() {
return fmt.Errorf("CallUpdateRawSecretsV3: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
}

return nil
}
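The two raw-secret calls added above compose into a simple upsert: try the POST, fall back to the PATCH when the create is rejected. A minimal sketch, assuming the resty client already carries a machine identity access token and that a rejected create means the secret already exists (that fallback condition is an assumption, not something this diff guarantees); upsertRawSecret is a hypothetical helper written for illustration, not part of the change:

// Illustrative sketch only: upsert a raw secret by trying create, then update.
// Assumes httpClient already has SetAuthToken(accessToken) applied.
func upsertRawSecret(httpClient *resty.Client, workspaceID, env, path, name, value string) error {
    createReq := api.CreateRawSecretV3Request{
        SecretName:  name,
        WorkspaceID: workspaceID,
        Environment: env,
        SecretPath:  path,
        SecretValue: value,
    }
    if err := api.CallCreateRawSecretsV3(httpClient, createReq); err == nil {
        return nil
    }
    // Assumed: a failed create indicates the secret exists, so fall back to a PATCH.
    updateReq := api.UpdateRawSecretByNameV3Request{
        SecretName:  name,
        WorkspaceID: workspaceID,
        Environment: env,
        SecretPath:  path,
        SecretValue: value,
    }
    return api.CallUpdateRawSecretsV3(httpClient, updateReq)
}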
@@ -161,6 +161,14 @@ type Secret struct {
PlainTextKey string `json:"plainTextKey"`
}

type RawSecret struct {
SecretKey string `json:"secretKey,omitempty"`
SecretValue string `json:"secretValue,omitempty"`
Type string `json:"type,omitempty"`
SecretComment string `json:"secretComment,omitempty"`
ID string `json:"id,omitempty"`
}

type GetEncryptedWorkspaceKeyRequest struct {
WorkspaceId string `json:"workspaceId"`
}
@@ -233,6 +241,7 @@ type GetLoginOneV2Response struct {
type GetLoginTwoV2Request struct {
Email string `json:"email"`
ClientProof string `json:"clientProof"`
Password string `json:"password"`
}

type GetLoginTwoV2Response struct {
@@ -409,12 +418,23 @@ type CreateSecretV3Request struct {
SecretPath string `json:"secretPath"`
}

type CreateRawSecretV3Request struct {
SecretName string `json:"-"`
WorkspaceID string `json:"workspaceId"`
Type string `json:"type,omitempty"`
Environment string `json:"environment"`
SecretPath string `json:"secretPath,omitempty"`
SecretValue string `json:"secretValue"`
SecretComment string `json:"secretComment,omitempty"`
SkipMultilineEncoding bool `json:"skipMultilineEncoding,omitempty"`
}

type DeleteSecretV3Request struct {
SecretName string `json:"secretName"`
WorkspaceId string `json:"workspaceId"`
Environment string `json:"environment"`
Type string `json:"type"`
SecretPath string `json:"secretPath"`
Type string `json:"type,omitempty"`
SecretPath string `json:"secretPath,omitempty"`
}

type UpdateSecretByNameV3Request struct {
@@ -427,6 +447,15 @@ type UpdateSecretByNameV3Request struct {
SecretValueTag string `json:"secretValueTag"`
}

type UpdateRawSecretByNameV3Request struct {
SecretName string `json:"-"`
WorkspaceID string `json:"workspaceId"`
Environment string `json:"environment"`
SecretPath string `json:"secretPath,omitempty"`
SecretValue string `json:"secretValue"`
Type string `json:"type,omitempty"`
}

type GetSingleSecretByNameV3Request struct {
SecretName string `json:"secretName"`
WorkspaceId string `json:"workspaceId"`
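Worth noting in the raw request types above: SecretName carries the `json:"-"` tag, so it never enters the JSON body; the API helpers interpolate it into the /v3/secrets/raw/<name> URL instead. A small hedged illustration with placeholder values (requires "encoding/json"):

// Illustration only: the json:"-" tag keeps SecretName out of the marshalled body.
req := api.CreateRawSecretV3Request{
    SecretName:  "DB_PASSWORD", // used in the URL path, not the payload
    WorkspaceID: "<project-id>",
    Environment: "dev",
    SecretPath:  "/",
    SecretValue: "example-value",
}
body, _ := json.Marshal(req)
// body is roughly {"workspaceId":"<project-id>","environment":"dev","secretPath":"/","secretValue":"example-value"}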
@@ -20,6 +20,7 @@ import (
"text/template"
"time"

infisicalSdk "github.com/infisical/go-sdk"
"github.com/rs/zerolog/log"
"gopkg.in/yaml.v2"

@@ -59,9 +60,26 @@ type UniversalAuth struct {
RemoveClientSecretOnRead bool `yaml:"remove_client_secret_on_read"`
}

type OAuthConfig struct {
ClientID string `yaml:"client-id"`
ClientSecret string `yaml:"client-secret"`
type KubernetesAuth struct {
IdentityID string `yaml:"identity-id"`
ServiceAccountToken string `yaml:"service-account-token"`
}

type AzureAuth struct {
IdentityID string `yaml:"identity-id"`
}

type GcpIdTokenAuth struct {
IdentityID string `yaml:"identity-id"`
}

type GcpIamAuth struct {
IdentityID string `yaml:"identity-id"`
ServiceAccountKey string `yaml:"service-account-key"`
}

type AwsIamAuth struct {
IdentityID string `yaml:"identity-id"`
}

type Sink struct {
@@ -87,15 +105,6 @@ type Template struct {
} `yaml:"config"`
}

func newAgentTemplateChannels(templates []Template) map[string]chan bool {
// we keep each destination as an identifier for various channel
templateChannel := make(map[string]chan bool)
for _, template := range templates {
templateChannel[template.DestinationPath] = make(chan bool)
}
return templateChannel
}

type DynamicSecretLease struct {
LeaseID string
ExpireAt time.Time
@@ -256,6 +265,14 @@ func WriteBytesToFile(data *bytes.Buffer, outputPath string) error {
return err
}

func ParseAuthConfig(authConfigFile []byte, destination interface{}) error {
if err := yaml.Unmarshal(authConfigFile, destination); err != nil {
return err
}

return nil
}
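ParseAuthConfig is intentionally generic: the agent keeps the raw auth.config block as YAML bytes and each auth strategy unmarshals it into its own struct later. A sketch of that round trip for a kubernetes-auth block, using the yaml tags defined above (the values are placeholders and this snippet is illustrative, not part of the diff):

// Illustrative sketch: the keys mirror the yaml tags on KubernetesAuth.
authConfigBytes := []byte(`
identity-id: "<machine-identity-id>"
service-account-token: "/var/run/secrets/kubernetes.io/serviceaccount/token"
`)

var k8sAuth KubernetesAuth
if err := ParseAuthConfig(authConfigBytes, &k8sAuth); err != nil {
    log.Error().Msgf("unable to parse auth config: %v", err)
}
// k8sAuth.IdentityID and k8sAuth.ServiceAccountToken are now populated.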
func ParseAgentConfig(configFile []byte) (*Config, error) {
var rawConfig struct {
Infisical InfisicalConfig `yaml:"infisical"`
@@ -283,36 +300,13 @@ func ParseAgentConfig(configFile []byte) (*Config, error) {
config := &Config{
Infisical: rawConfig.Infisical,
Auth: AuthConfig{
Type: rawConfig.Auth.Type,
Type: rawConfig.Auth.Type,
Config: rawConfig.Auth.Config,
},
Sinks: rawConfig.Sinks,
Templates: rawConfig.Templates,
}

// Marshal and then unmarshal the config based on the type
configBytes, err := yaml.Marshal(rawConfig.Auth.Config)
if err != nil {
return nil, err
}

switch rawConfig.Auth.Type {
case "universal-auth":
var tokenConfig UniversalAuth
if err := yaml.Unmarshal(configBytes, &tokenConfig); err != nil {
return nil, err
}

config.Auth.Config = tokenConfig
case "oauth": // aws, gcp, k8s service account, etc
var oauthConfig OAuthConfig
if err := yaml.Unmarshal(configBytes, &oauthConfig); err != nil {
return nil, err
}
config.Auth.Config = oauthConfig
default:
return nil, fmt.Errorf("unknown auth type: %s", rawConfig.Auth.Type)
}

return config, nil
}

@@ -337,7 +331,7 @@ func dynamicSecretTemplateFunction(accessToken string, dynamicSecretManager *Dyn
return func(args ...string) (map[string]interface{}, error) {
argLength := len(args)
if argLength != 4 && argLength != 5 {
return nil, fmt.Errorf("Invalid arguments found for dynamic-secret function. Check template %i", templateId)
return nil, fmt.Errorf("invalid arguments found for dynamic-secret function. Check template %d", templateId)
}

projectSlug, envSlug, secretPath, slug, ttl := args[0], args[1], args[2], args[3], ""
@@ -421,32 +415,54 @@ func ProcessBase64Template(templateId int, encodedTemplate string, data interfac
}

type AgentManager struct {
accessToken string
accessTokenTTL time.Duration
accessTokenMaxTTL time.Duration
accessTokenFetchedTime time.Time
accessTokenRefreshedTime time.Time
mutex sync.Mutex
filePaths []Sink // Store file paths if needed
templates []Template
dynamicSecretLeases *DynamicSecretLeaseManager
clientIdPath string
clientSecretPath string
newAccessTokenNotificationChan chan bool
removeClientSecretOnRead bool
cachedClientSecret string
exitAfterAuth bool
accessToken string
accessTokenTTL time.Duration
accessTokenMaxTTL time.Duration
accessTokenFetchedTime time.Time
accessTokenRefreshedTime time.Time
mutex sync.Mutex
filePaths []Sink // Store file paths if needed
templates []Template
dynamicSecretLeases *DynamicSecretLeaseManager

authConfigBytes []byte
authStrategy util.AuthStrategyType

newAccessTokenNotificationChan chan bool
removeUniversalAuthClientSecretOnRead bool
cachedUniversalAuthClientSecret string
exitAfterAuth bool

infisicalClient infisicalSdk.InfisicalClientInterface
}

func NewAgentManager(fileDeposits []Sink, templates []Template, clientIdPath string, clientSecretPath string, newAccessTokenNotificationChan chan bool, removeClientSecretOnRead bool, exitAfterAuth bool) *AgentManager {
type NewAgentMangerOptions struct {
FileDeposits []Sink
Templates []Template

AuthConfigBytes []byte
AuthStrategy util.AuthStrategyType

NewAccessTokenNotificationChan chan bool
ExitAfterAuth bool
}

func NewAgentManager(options NewAgentMangerOptions) *AgentManager {

return &AgentManager{
filePaths: fileDeposits,
templates: templates,
clientIdPath: clientIdPath,
clientSecretPath: clientSecretPath,
newAccessTokenNotificationChan: newAccessTokenNotificationChan,
removeClientSecretOnRead: removeClientSecretOnRead,
exitAfterAuth: exitAfterAuth,
filePaths: options.FileDeposits,
templates: options.Templates,

authConfigBytes: options.AuthConfigBytes,
authStrategy: options.AuthStrategy,

newAccessTokenNotificationChan: options.NewAccessTokenNotificationChan,
exitAfterAuth: options.ExitAfterAuth,

infisicalClient: infisicalSdk.NewInfisicalClient(infisicalSdk.Config{
SiteUrl: config.INFISICAL_URL,
UserAgent: api.USER_AGENT, // ? Should we perhaps use a different user agent for the Agent for better analytics?
}),
}

}
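The positional constructor gives way to a single options struct, and the untyped auth.config block now reaches the manager as marshalled YAML bytes. A sketch of the wiring, mirroring what the agent command does further down in this diff (the values here are placeholders):

// Sketch of constructing the manager with the new options struct.
configBytes, err := yaml.Marshal(agentConfig.Auth.Config)
if err != nil {
    log.Error().Msgf("unable to marshal auth config because %v", err)
    return
}

tm := NewAgentManager(NewAgentMangerOptions{
    FileDeposits:                   agentConfig.Sinks,
    Templates:                      agentConfig.Templates,
    AuthConfigBytes:                configBytes,
    AuthStrategy:                   authStrategy, // from util.IsAuthMethodValid(agentConfig.Auth.Type, false)
    NewAccessTokenNotificationChan: make(chan bool),
    ExitAfterAuth:                  agentConfig.Infisical.ExitAfterAuth,
})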
@@ -469,52 +485,164 @@ func (tm *AgentManager) GetToken() string {
return tm.accessToken
}

func (tm *AgentManager) FetchUniversalAuthAccessToken() (credential infisicalSdk.MachineIdentityCredential, e error) {

var universalAuthConfig UniversalAuth
if err := ParseAuthConfig(tm.authConfigBytes, &universalAuthConfig); err != nil {
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to parse auth config due to error: %v", err)
}

clientID, err := util.GetEnvVarOrFileContent(util.INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME, universalAuthConfig.ClientIDPath)
if err != nil {
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to get client id: %v", err)
}

clientSecret, err := util.GetEnvVarOrFileContent("INFISICAL_UNIVERSAL_CLIENT_SECRET", universalAuthConfig.ClientSecretPath)
if err != nil {
if len(tm.cachedUniversalAuthClientSecret) == 0 {
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to get client secret: %v", err)
}
clientSecret = tm.cachedUniversalAuthClientSecret
}

tm.cachedUniversalAuthClientSecret = clientSecret
if tm.removeUniversalAuthClientSecretOnRead {
defer os.Remove(universalAuthConfig.ClientSecretPath)
}

return tm.infisicalClient.Auth().UniversalAuthLogin(clientID, clientSecret)

}

func (tm *AgentManager) FetchKubernetesAuthAccessToken() (credential infisicalSdk.MachineIdentityCredential, err error) {

var kubernetesAuthConfig KubernetesAuth
if err := ParseAuthConfig(tm.authConfigBytes, &kubernetesAuthConfig); err != nil {
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to parse auth config due to error: %v", err)
}

identityId, err := util.GetEnvVarOrFileContent(util.INFISICAL_MACHINE_IDENTITY_ID_NAME, kubernetesAuthConfig.IdentityID)
if err != nil {
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to get identity id: %v", err)
}

serviceAccountTokenPath := os.Getenv(util.INFISICAL_KUBERNETES_SERVICE_ACCOUNT_TOKEN_NAME)
if serviceAccountTokenPath == "" {
serviceAccountTokenPath = kubernetesAuthConfig.ServiceAccountToken
if serviceAccountTokenPath == "" {
serviceAccountTokenPath = "/var/run/secrets/kubernetes.io/serviceaccount/token"
}
}

return tm.infisicalClient.Auth().KubernetesAuthLogin(identityId, serviceAccountTokenPath)

}

func (tm *AgentManager) FetchAzureAuthAccessToken() (credential infisicalSdk.MachineIdentityCredential, err error) {

var azureAuthConfig AzureAuth
if err := ParseAuthConfig(tm.authConfigBytes, &azureAuthConfig); err != nil {
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to parse auth config due to error: %v", err)
}

identityId, err := util.GetEnvVarOrFileContent(util.INFISICAL_MACHINE_IDENTITY_ID_NAME, azureAuthConfig.IdentityID)
if err != nil {
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to get identity id: %v", err)
}

return tm.infisicalClient.Auth().AzureAuthLogin(identityId)

}

func (tm *AgentManager) FetchGcpIdTokenAuthAccessToken() (credential infisicalSdk.MachineIdentityCredential, err error) {

var gcpIdTokenAuthConfig GcpIdTokenAuth
if err := ParseAuthConfig(tm.authConfigBytes, &gcpIdTokenAuthConfig); err != nil {
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to parse auth config due to error: %v", err)
}

identityId, err := util.GetEnvVarOrFileContent(util.INFISICAL_MACHINE_IDENTITY_ID_NAME, gcpIdTokenAuthConfig.IdentityID)
if err != nil {
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to get identity id: %v", err)
}

return tm.infisicalClient.Auth().GcpIdTokenAuthLogin(identityId)

}

func (tm *AgentManager) FetchGcpIamAuthAccessToken() (credential infisicalSdk.MachineIdentityCredential, err error) {

var gcpIamAuthConfig GcpIamAuth
if err := ParseAuthConfig(tm.authConfigBytes, &gcpIamAuthConfig); err != nil {
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to parse auth config due to error: %v", err)
}

identityId, err := util.GetEnvVarOrFileContent(util.INFISICAL_MACHINE_IDENTITY_ID_NAME, gcpIamAuthConfig.IdentityID)
if err != nil {
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to get identity id: %v", err)
}

serviceAccountKeyPath := os.Getenv(util.INFISICAL_GCP_IAM_SERVICE_ACCOUNT_KEY_FILE_PATH_NAME)
if serviceAccountKeyPath == "" {
// we don't need to read this file, because the service account key path is directly read inside the sdk
serviceAccountKeyPath = gcpIamAuthConfig.ServiceAccountKey
if serviceAccountKeyPath == "" {
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("gcp service account key path not found")
}
}

return tm.infisicalClient.Auth().GcpIamAuthLogin(identityId, serviceAccountKeyPath)

}

func (tm *AgentManager) FetchAwsIamAuthAccessToken() (credential infisicalSdk.MachineIdentityCredential, err error) {

var awsIamAuthConfig AwsIamAuth
if err := ParseAuthConfig(tm.authConfigBytes, &awsIamAuthConfig); err != nil {
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to parse auth config due to error: %v", err)
}

identityId, err := util.GetEnvVarOrFileContent(util.INFISICAL_MACHINE_IDENTITY_ID_NAME, awsIamAuthConfig.IdentityID)

if err != nil {
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to get identity id: %v", err)
}

return tm.infisicalClient.Auth().AwsIamAuthLogin(identityId)

}
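Every Fetch* method above resolves its identity settings through util.GetEnvVarOrFileContent. Its implementation is not shown in this diff; based on the name and the call sites, it presumably prefers the environment variable and falls back to reading the referenced file, roughly like this hypothetical equivalent:

// Hypothetical stand-in for util.GetEnvVarOrFileContent; the real helper lives in the util package.
func getEnvVarOrFileContent(envName string, filePath string) (string, error) {
    if value := os.Getenv(envName); value != "" {
        return value, nil
    }
    content, err := os.ReadFile(filePath)
    if err != nil {
        return "", fmt.Errorf("unable to read %s: %w", filePath, err)
    }
    return strings.TrimSpace(string(content)), nil
}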
// Fetches a new access token using client credentials
func (tm *AgentManager) FetchNewAccessToken() error {
clientID := os.Getenv(util.INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME)
if clientID == "" {
clientIDAsByte, err := ReadFile(tm.clientIdPath)
if err != nil {
return fmt.Errorf("unable to read client id from file path '%s' due to error: %v", tm.clientIdPath, err)
}
clientID = string(clientIDAsByte)

authStrategies := map[util.AuthStrategyType]func() (credential infisicalSdk.MachineIdentityCredential, e error){
util.AuthStrategy.UNIVERSAL_AUTH: tm.FetchUniversalAuthAccessToken,
util.AuthStrategy.KUBERNETES_AUTH: tm.FetchKubernetesAuthAccessToken,
util.AuthStrategy.AZURE_AUTH: tm.FetchAzureAuthAccessToken,
util.AuthStrategy.GCP_ID_TOKEN_AUTH: tm.FetchGcpIdTokenAuthAccessToken,
util.AuthStrategy.GCP_IAM_AUTH: tm.FetchGcpIamAuthAccessToken,
util.AuthStrategy.AWS_IAM_AUTH: tm.FetchAwsIamAuthAccessToken,
}

clientSecret := os.Getenv("INFISICAL_UNIVERSAL_CLIENT_SECRET")
if clientSecret == "" {
clientSecretAsByte, err := ReadFile(tm.clientSecretPath)
if err != nil {
if len(tm.cachedClientSecret) == 0 {
return fmt.Errorf("unable to read client secret from file and no cached client secret found: %v", err)
} else {
clientSecretAsByte = []byte(tm.cachedClientSecret)
}
}
clientSecret = string(clientSecretAsByte)
if _, ok := authStrategies[tm.authStrategy]; !ok {
return fmt.Errorf("auth strategy %s not found", tm.authStrategy)
}

// remove client secret after first read
if tm.removeClientSecretOnRead {
os.Remove(tm.clientSecretPath)
}
credential, err := authStrategies[tm.authStrategy]()

// save as cache in memory
tm.cachedClientSecret = clientSecret

loginResponse, err := util.UniversalAuthLogin(clientID, clientSecret)
if err != nil {
return err
}

accessTokenTTL := time.Duration(loginResponse.AccessTokenTTL * int(time.Second))
accessTokenMaxTTL := time.Duration(loginResponse.AccessTokenMaxTTL * int(time.Second))
accessTokenTTL := time.Duration(credential.ExpiresIn * int64(time.Second))
accessTokenMaxTTL := time.Duration(credential.AccessTokenMaxTTL * int64(time.Second))

if accessTokenTTL <= time.Duration(5)*time.Second {
util.PrintErrorMessageAndExit("At this this, agent does not support refresh of tokens with 5 seconds or less ttl. Please increase access token ttl and try again")
util.PrintErrorMessageAndExit("At this time, agent does not support refresh of tokens with 5 seconds or less ttl. Please increase access token ttl and try again")
}

tm.accessTokenFetchedTime = time.Now()
tm.SetToken(loginResponse.AccessToken, accessTokenTTL, accessTokenMaxTTL)
tm.SetToken(credential.AccessToken, accessTokenTTL, accessTokenMaxTTL)

return nil
}
@@ -527,7 +655,7 @@ func (tm *AgentManager) RefreshAccessToken() error {
SetRetryWaitTime(5 * time.Second)

accessToken := tm.GetToken()
response, err := api.CallUniversalAuthRefreshAccessToken(httpClient, api.UniversalAuthRefreshRequest{AccessToken: accessToken})
response, err := api.CallMachineIdentityRefreshAccessToken(httpClient, api.UniversalAuthRefreshRequest{AccessToken: accessToken})
if err != nil {
return err
}
@@ -564,6 +692,7 @@ func (tm *AgentManager) ManageTokenLifecycle() {
continue
}
} else if time.Now().After(accessTokenMaxTTLExpiresInTime) {
// case: token has reached max ttl and we should re-authenticate entirely (cannot refresh)
log.Info().Msgf("token has reached max ttl, attempting to re authenticate...")
err := tm.FetchNewAccessToken()
if err != nil {
@@ -574,6 +703,7 @@ func (tm *AgentManager) ManageTokenLifecycle() {
continue
}
} else {
// case: token ttl has expired, but the token is still within max ttl, so we can refresh
log.Info().Msgf("attempting to refresh existing token...")
err := tm.RefreshAccessToken()
if err != nil {
@@ -770,18 +900,33 @@ var agentCmd = &cobra.Command{
return
}

if agentConfig.Auth.Type != "universal-auth" {
util.PrintErrorMessageAndExit("Only auth type of 'universal-auth' is supported at this time")
}
authMethodValid, authStrategy := util.IsAuthMethodValid(agentConfig.Auth.Type, false)

configUniversalAuthType := agentConfig.Auth.Config.(UniversalAuth)
if !authMethodValid {
util.PrintErrorMessageAndExit(fmt.Sprintf("The auth method '%s' is not supported.", agentConfig.Auth.Type))
}

tokenRefreshNotifier := make(chan bool)
sigChan := make(chan os.Signal, 1)
signal.Notify(sigChan, syscall.SIGINT, syscall.SIGTERM)

filePaths := agentConfig.Sinks
tm := NewAgentManager(filePaths, agentConfig.Templates, configUniversalAuthType.ClientIDPath, configUniversalAuthType.ClientSecretPath, tokenRefreshNotifier, configUniversalAuthType.RemoveClientSecretOnRead, agentConfig.Infisical.ExitAfterAuth)

configBytes, err := yaml.Marshal(agentConfig.Auth.Config)
if err != nil {
log.Error().Msgf("unable to marshal auth config because %v", err)
return
}

tm := NewAgentManager(NewAgentMangerOptions{
FileDeposits: filePaths,
Templates: agentConfig.Templates,
AuthConfigBytes: configBytes,
NewAccessTokenNotificationChan: tokenRefreshNotifier,
ExitAfterAuth: agentConfig.Infisical.ExitAfterAuth,
AuthStrategy: authStrategy,
})

tm.dynamicSecretLeases = NewDynamicSecretLeaseManager(sigChan)

go tm.ManageTokenLifecycle()
@@ -55,6 +55,11 @@ var exportCmd = &cobra.Command{
util.HandleError(err)
}

token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}

format, err := cmd.Flags().GetString("format")
if err != nil {
util.HandleError(err)
@@ -70,11 +75,6 @@ var exportCmd = &cobra.Command{
util.HandleError(err, "Unable to parse flag")
}

token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}

tagSlugs, err := cmd.Flags().GetString("tags")
if err != nil {
util.HandleError(err, "Unable to parse flag")
@@ -169,9 +169,9 @@ func init() {
exportCmd.Flags().StringP("format", "f", "dotenv", "Set the format of the output file (dotenv, json, csv)")
exportCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
exportCmd.Flags().Bool("include-imports", true, "Imported linked secrets")
exportCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
exportCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
exportCmd.Flags().StringP("tags", "t", "", "filter secrets by tag slugs")
exportCmd.Flags().String("projectId", "", "manually set the projectId to fetch secrets from")
exportCmd.Flags().String("projectId", "", "manually set the projectId to export secrets from")
exportCmd.Flags().String("path", "/", "get secrets within a folder path")
exportCmd.Flags().String("template", "", "The path to the template file used to render secrets")
}
@@ -1,6 +1,7 @@
package cmd

import (
"errors"
"fmt"

"github.com/Infisical/infisical-merge/packages/models"
@@ -71,10 +72,6 @@ var getCmd = &cobra.Command{
var createCmd = &cobra.Command{
Use: "create",
Short: "Create a folder",
PersistentPreRun: func(cmd *cobra.Command, args []string) {
util.RequireLogin()
util.RequireLocalWorkspaceFile()
},
Run: func(cmd *cobra.Command, args []string) {
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
@@ -84,6 +81,16 @@ var createCmd = &cobra.Command{
}
}

token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}

projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}

folderPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse flag")
@@ -95,19 +102,31 @@ var createCmd = &cobra.Command{
}

if folderName == "" {
util.HandleError(fmt.Errorf("Invalid folder name"), "Folder name cannot be empty")
util.HandleError(errors.New("invalid folder name, folder name cannot be empty"))
}

workspaceFile, err := util.GetWorkSpaceFromFile()
if err != nil {
util.HandleError(err, "Unable to get workspace file")
}

if projectId == "" {
workspaceFile, err := util.GetWorkSpaceFromFile()
if err != nil {
util.HandleError(err, "Unable to get workspace file")
}

projectId = workspaceFile.WorkspaceId
}

params := models.CreateFolderParameters{
FolderName: folderName,
WorkspaceId: workspaceFile.WorkspaceId,
Environment: environmentName,
FolderPath: folderPath,
WorkspaceId: projectId,
}

if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
params.InfisicalToken = token.Token
}

_, err = util.CreateFolder(params)
@@ -124,10 +143,6 @@ var createCmd = &cobra.Command{
var deleteCmd = &cobra.Command{
Use: "delete",
Short: "Delete a folder",
PersistentPreRun: func(cmd *cobra.Command, args []string) {
util.RequireLogin()
util.RequireLocalWorkspaceFile()
},
Run: func(cmd *cobra.Command, args []string) {

environmentName, _ := cmd.Flags().GetString("env")
@@ -138,6 +153,16 @@ var deleteCmd = &cobra.Command{
}
}

token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}

projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}

folderPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse flag")
@@ -149,21 +174,29 @@ var deleteCmd = &cobra.Command{
}

if folderName == "" {
util.HandleError(fmt.Errorf("Invalid folder name"), "Folder name cannot be empty")
util.HandleError(errors.New("invalid folder name, folder name cannot be empty"))
}

workspaceFile, err := util.GetWorkSpaceFromFile()
if err != nil {
util.HandleError(err, "Unable to get workspace file")
if projectId == "" {
workspaceFile, err := util.GetWorkSpaceFromFile()
if err != nil {
util.HandleError(err, "Unable to get workspace file")
}

projectId = workspaceFile.WorkspaceId
}

params := models.DeleteFolderParameters{
FolderName: folderName,
WorkspaceId: workspaceFile.WorkspaceId,
WorkspaceId: projectId,
Environment: environmentName,
FolderPath: folderPath,
}

if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
params.InfisicalToken = token.Token
}

_, err = util.DeleteFolder(params)
if err != nil {
util.HandleError(err, "Unable to delete folder")
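Both folder subcommands now repeat the same lookup: use --projectId when it is supplied, otherwise fall back to the workspace id stored in the local workspace file. The pattern, factored into a hypothetical helper purely for illustration (the diff inlines this logic in both commands):

// Hypothetical helper illustrating the projectId fallback used by create and delete above.
func resolveProjectId(projectIdFlag string) string {
    if projectIdFlag != "" {
        return projectIdFlag
    }
    workspaceFile, err := util.GetWorkSpaceFromFile()
    if err != nil {
        util.HandleError(err, "Unable to get workspace file")
    }
    return workspaceFile.WorkspaceId
}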
@@ -34,6 +34,8 @@ import (
"github.com/spf13/cobra"
"golang.org/x/crypto/argon2"
"golang.org/x/term"

infisicalSdk "github.com/infisical/go-sdk"
)

type params struct {
@@ -44,6 +46,86 @@ type params struct {
keyLength uint32
}

func handleUniversalAuthLogin(cmd *cobra.Command, infisicalClient infisicalSdk.InfisicalClientInterface) (credential infisicalSdk.MachineIdentityCredential, e error) {

clientId, err := util.GetCmdFlagOrEnv(cmd, "client-id", util.INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME)

if err != nil {
return infisicalSdk.MachineIdentityCredential{}, err
}

clientSecret, err := util.GetCmdFlagOrEnv(cmd, "client-secret", util.INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET_NAME)
if err != nil {
return infisicalSdk.MachineIdentityCredential{}, err
}

return infisicalClient.Auth().UniversalAuthLogin(clientId, clientSecret)
}

func handleKubernetesAuthLogin(cmd *cobra.Command, infisicalClient infisicalSdk.InfisicalClientInterface) (credential infisicalSdk.MachineIdentityCredential, e error) {

identityId, err := util.GetCmdFlagOrEnv(cmd, "machine-identity-id", util.INFISICAL_MACHINE_IDENTITY_ID_NAME)
if err != nil {
return infisicalSdk.MachineIdentityCredential{}, err
}

serviceAccountTokenPath, err := util.GetCmdFlagOrEnv(cmd, "service-account-token-path", util.INFISICAL_KUBERNETES_SERVICE_ACCOUNT_TOKEN_NAME)
if err != nil {
return infisicalSdk.MachineIdentityCredential{}, err
}

return infisicalClient.Auth().KubernetesAuthLogin(identityId, serviceAccountTokenPath)
}

func handleAzureAuthLogin(cmd *cobra.Command, infisicalClient infisicalSdk.InfisicalClientInterface) (credential infisicalSdk.MachineIdentityCredential, e error) {

identityId, err := util.GetCmdFlagOrEnv(cmd, "machine-identity-id", util.INFISICAL_MACHINE_IDENTITY_ID_NAME)
if err != nil {
return infisicalSdk.MachineIdentityCredential{}, err
}

return infisicalClient.Auth().AzureAuthLogin(identityId)
}

func handleGcpIdTokenAuthLogin(cmd *cobra.Command, infisicalClient infisicalSdk.InfisicalClientInterface) (credential infisicalSdk.MachineIdentityCredential, e error) {

identityId, err := util.GetCmdFlagOrEnv(cmd, "machine-identity-id", util.INFISICAL_MACHINE_IDENTITY_ID_NAME)
if err != nil {
return infisicalSdk.MachineIdentityCredential{}, err
}

return infisicalClient.Auth().GcpIdTokenAuthLogin(identityId)
}

func handleGcpIamAuthLogin(cmd *cobra.Command, infisicalClient infisicalSdk.InfisicalClientInterface) (credential infisicalSdk.MachineIdentityCredential, e error) {

identityId, err := util.GetCmdFlagOrEnv(cmd, "machine-identity-id", util.INFISICAL_MACHINE_IDENTITY_ID_NAME)
if err != nil {
return infisicalSdk.MachineIdentityCredential{}, err
}

serviceAccountKeyFilePath, err := util.GetCmdFlagOrEnv(cmd, "service-account-key-file-path", util.INFISICAL_GCP_IAM_SERVICE_ACCOUNT_KEY_FILE_PATH_NAME)
if err != nil {
return infisicalSdk.MachineIdentityCredential{}, err
}

return infisicalClient.Auth().GcpIamAuthLogin(identityId, serviceAccountKeyFilePath)
}

func handleAwsIamAuthLogin(cmd *cobra.Command, infisicalClient infisicalSdk.InfisicalClientInterface) (credential infisicalSdk.MachineIdentityCredential, e error) {

identityId, err := util.GetCmdFlagOrEnv(cmd, "machine-identity-id", util.INFISICAL_MACHINE_IDENTITY_ID_NAME)
if err != nil {
return infisicalSdk.MachineIdentityCredential{}, err
}

return infisicalClient.Auth().AwsIamAuthLogin(identityId)
}

func formatAuthMethod(authMethod string) string {
return strings.ReplaceAll(authMethod, "-", " ")
}

const ADD_USER = "Add a new account login"
const REPLACE_USER = "Override current logged in user"
const EXIT_USER_MENU = "Exit"
@@ -56,6 +138,11 @@ var loginCmd = &cobra.Command{
DisableFlagsInUseLine: true,
Run: func(cmd *cobra.Command, args []string) {

infisicalClient := infisicalSdk.NewInfisicalClient(infisicalSdk.Config{
SiteUrl: config.INFISICAL_URL,
UserAgent: api.USER_AGENT,
})

loginMethod, err := cmd.Flags().GetString("method")
if err != nil {
util.HandleError(err)
@@ -65,12 +152,13 @@ var loginCmd = &cobra.Command{
util.HandleError(err)
}

if loginMethod != "user" && loginMethod != "universal-auth" {
util.PrintErrorMessageAndExit("Invalid login method. Please use either 'user' or 'universal-auth'")
authMethodValid, strategy := util.IsAuthMethodValid(loginMethod, true)
if !authMethodValid {
util.PrintErrorMessageAndExit(fmt.Sprintf("Invalid login method: %s", loginMethod))
}

// standalone user auth
if loginMethod == "user" {

currentLoggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
// if the key can't be found or there is an error getting current credentials from key ring, allow them to override
if err != nil && (strings.Contains(err.Error(), "we couldn't find your logged in details")) {
@@ -133,7 +221,7 @@ var loginCmd = &cobra.Command{

err = util.StoreUserCredsInKeyRing(&userCredentialsToBeStored)
if err != nil {
log.Error().Msgf("Unable to store your credentials in system vault [%s]")
log.Error().Msgf("Unable to store your credentials in system vault")
log.Error().Msgf("\nTo trouble shoot further, read https://infisical.com/docs/cli/faq")
log.Debug().Err(err)
//return here
@@ -160,47 +248,33 @@ var loginCmd = &cobra.Command{
fmt.Println("- Learn to inject secrets into your application at https://infisical.com/docs/cli/usage")
fmt.Println("- Stuck? Join our slack for quick support https://infisical.com/slack")
Telemetry.CaptureEvent("cli-command:login", posthog.NewProperties().Set("infisical-backend", config.INFISICAL_URL).Set("version", util.CLI_VERSION))
} else if loginMethod == "universal-auth" {
} else {

clientId, err := cmd.Flags().GetString("client-id")
if err != nil {
util.HandleError(err)
authStrategies := map[util.AuthStrategyType]func(cmd *cobra.Command, infisicalClient infisicalSdk.InfisicalClientInterface) (credential infisicalSdk.MachineIdentityCredential, e error){
util.AuthStrategy.UNIVERSAL_AUTH: handleUniversalAuthLogin,
util.AuthStrategy.KUBERNETES_AUTH: handleKubernetesAuthLogin,
util.AuthStrategy.AZURE_AUTH: handleAzureAuthLogin,
util.AuthStrategy.GCP_ID_TOKEN_AUTH: handleGcpIdTokenAuthLogin,
util.AuthStrategy.GCP_IAM_AUTH: handleGcpIamAuthLogin,
util.AuthStrategy.AWS_IAM_AUTH: handleAwsIamAuthLogin,
}

clientSecret, err := cmd.Flags().GetString("client-secret")
if err != nil {
util.HandleError(err)
}

if clientId == "" {
clientId = os.Getenv(util.INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME)
if clientId == "" {
util.PrintErrorMessageAndExit("Please provide client-id")
}
}
if clientSecret == "" {
clientSecret = os.Getenv(util.INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET_NAME)
if clientSecret == "" {
util.PrintErrorMessageAndExit("Please provide client-secret")
}
}

res, err := util.UniversalAuthLogin(clientId, clientSecret)
credential, err := authStrategies[strategy](cmd, infisicalClient)

if err != nil {
util.HandleError(err)
util.HandleError(fmt.Errorf("unable to authenticate with %s [err=%v]", formatAuthMethod(loginMethod), err))
}

if plainOutput {
fmt.Println(res.AccessToken)
fmt.Println(credential.AccessToken)
return
}

boldGreen := color.New(color.FgGreen).Add(color.Bold)
boldPlain := color.New(color.Bold)
time.Sleep(time.Second * 1)
boldGreen.Printf(">>>> Successfully authenticated with Universal Auth!\n\n")
boldPlain.Printf("Universal Auth Access Token:\n%v", res.AccessToken)
boldGreen.Printf(">>>> Successfully authenticated with %s!\n\n", formatAuthMethod(loginMethod))
boldPlain.Printf("Access Token:\n%v", credential.AccessToken)

plainBold := color.New(color.Bold)
plainBold.Println("\n\nYou can use this access token to authenticate through other commands in the CLI.")
@@ -376,9 +450,12 @@ func init() {
rootCmd.AddCommand(loginCmd)
loginCmd.Flags().BoolP("interactive", "i", false, "login via the command line")
loginCmd.Flags().String("method", "user", "login method [user, universal-auth]")
loginCmd.Flags().String("client-id", "", "client id for universal auth")
loginCmd.Flags().Bool("plain", false, "only output the token without any formatting")
loginCmd.Flags().String("client-id", "", "client id for universal auth")
loginCmd.Flags().String("client-secret", "", "client secret for universal auth")
loginCmd.Flags().String("machine-identity-id", "", "machine identity id for kubernetes, azure, gcp-id-token, gcp-iam, and aws-iam auth methods")
loginCmd.Flags().String("service-account-token-path", "", "service account token path for kubernetes auth")
loginCmd.Flags().String("service-account-key-file-path", "", "service account key file path for GCP IAM auth")
}

func DomainOverridePrompt() (bool, error) {
@@ -539,6 +616,7 @@ func getFreshUserCredentials(email string, password string) (*api.GetLoginOneV2R
loginTwoResponseResult, err := api.CallLogin2V2(httpClient, api.GetLoginTwoV2Request{
Email: email,
ClientProof: hex.EncodeToString(srpM1),
Password: password,
})

if err != nil {
@@ -237,8 +237,8 @@ func filterReservedEnvVars(env map[string]models.SingleEnvironmentVariable) {

func init() {
rootCmd.AddCommand(runCmd)
runCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
runCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity")
runCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
runCmd.Flags().String("projectId", "", "manually set the project ID to fetch secrets from when using machine identity based auth")
runCmd.Flags().StringP("env", "e", "dev", "Set the environment (dev, prod, etc.) from which your secrets should be pulled from")
runCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
runCmd.Flags().Bool("include-imports", true, "Import linked secrets ")
@@ -4,23 +4,17 @@ Copyright (c) 2023 Infisical Inc.
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"os"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
"unicode"
|
||||
|
||||
"github.com/Infisical/infisical-merge/packages/api"
|
||||
"github.com/Infisical/infisical-merge/packages/crypto"
|
||||
"github.com/Infisical/infisical-merge/packages/models"
|
||||
"github.com/Infisical/infisical-merge/packages/util"
|
||||
"github.com/Infisical/infisical-merge/packages/visualize"
|
||||
"github.com/go-resty/resty/v2"
|
||||
"github.com/posthog/posthog-go"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
@@ -160,14 +154,19 @@ var secretsSetCmd = &cobra.Command{
|
||||
}
|
||||
}
|
||||
|
||||
secretsPath, err := cmd.Flags().GetString("path")
|
||||
token, err := util.GetInfisicalToken(cmd)
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
projectId, err := cmd.Flags().GetString("projectId")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get your local config details")
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secretsPath, err := cmd.Flags().GetString("path")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secretType, err := cmd.Flags().GetString("type")
|
||||
@@ -175,196 +174,18 @@ var secretsSetCmd = &cobra.Command{
|
||||
util.HandleError(err, "Unable to parse secret type")
|
||||
}
|
||||
|
||||
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
|
||||
var secretOperations []models.SecretSetOperation
|
||||
if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
|
||||
secretOperations, err = util.SetRawSecrets(args, secretType, environmentName, secretsPath, projectId, token)
|
||||
} else {
|
||||
util.RequireLogin()
|
||||
util.RequireLocalWorkspaceFile()
|
||||
|
||||
secretOperations, err = util.SetEncryptedSecrets(args, secretType, environmentName, secretsPath)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to authenticate")
|
||||
}
|
||||
|
||||
if loggedInUserDetails.LoginExpired {
|
||||
util.PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again")
|
||||
}
|
||||
|
||||
|
||||
httpClient := resty.New().
|
||||
SetAuthToken(loggedInUserDetails.UserCredentials.JTWToken).
|
||||
SetHeader("Accept", "application/json")
|
||||
|
||||
request := api.GetEncryptedWorkspaceKeyRequest{
|
||||
WorkspaceId: workspaceFile.WorkspaceId,
|
||||
}
|
||||
|
||||
workspaceKeyResponse, err := api.CallGetEncryptedWorkspaceKey(httpClient, request)
|
||||
if err != nil {
|
||||
util.HandleError(err, "unable to get your encrypted workspace key")
|
||||
}
|
||||
|
||||
encryptedWorkspaceKey, _ := base64.StdEncoding.DecodeString(workspaceKeyResponse.EncryptedKey)
|
||||
encryptedWorkspaceKeySenderPublicKey, _ := base64.StdEncoding.DecodeString(workspaceKeyResponse.Sender.PublicKey)
|
||||
encryptedWorkspaceKeyNonce, _ := base64.StdEncoding.DecodeString(workspaceKeyResponse.Nonce)
|
||||
currentUsersPrivateKey, _ := base64.StdEncoding.DecodeString(loggedInUserDetails.UserCredentials.PrivateKey)
|
||||
|
||||
if len(currentUsersPrivateKey) == 0 || len(encryptedWorkspaceKeySenderPublicKey) == 0 {
|
||||
log.Debug().Msgf("Missing credentials for generating plainTextEncryptionKey: [currentUsersPrivateKey=%s] [encryptedWorkspaceKeySenderPublicKey=%s]", currentUsersPrivateKey, encryptedWorkspaceKeySenderPublicKey)
|
||||
util.PrintErrorMessageAndExit("Some required user credentials are missing to generate your [plainTextEncryptionKey]. Please run [infisical login] then try again")
|
||||
}
|
||||
|
||||
// decrypt workspace key
|
||||
plainTextEncryptionKey := crypto.DecryptAsymmetric(encryptedWorkspaceKey, encryptedWorkspaceKeyNonce, encryptedWorkspaceKeySenderPublicKey, currentUsersPrivateKey)
|
||||
|
||||
infisicalTokenEnv := os.Getenv(util.INFISICAL_TOKEN_NAME)
|
||||
|
||||
// pull current secrets
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, SecretsPath: secretsPath, InfisicalToken: infisicalTokenEnv}, "")
|
||||
if err != nil {
|
||||
util.HandleError(err, "unable to retrieve secrets")
|
||||
}
|
||||
|
||||
type SecretSetOperation struct {
|
||||
SecretKey string
|
||||
SecretValue string
|
||||
SecretOperation string
|
||||
}
|
||||
|
||||
secretsToCreate := []api.Secret{}
|
||||
secretsToModify := []api.Secret{}
|
||||
secretOperations := []SecretSetOperation{}
|
||||
|
||||
sharedSecretMapByName := make(map[string]models.SingleEnvironmentVariable, len(secrets))
|
||||
personalSecretMapByName := make(map[string]models.SingleEnvironmentVariable, len(secrets))
|
||||
|
||||
for _, secret := range secrets {
|
||||
if secret.Type == util.SECRET_TYPE_PERSONAL {
|
||||
personalSecretMapByName[secret.Key] = secret
|
||||
} else {
|
||||
sharedSecretMapByName[secret.Key] = secret
|
||||
}
|
||||
}
|
||||
|
||||
for _, arg := range args {
|
||||
splitKeyValueFromArg := strings.SplitN(arg, "=", 2)
|
||||
if splitKeyValueFromArg[0] == "" || splitKeyValueFromArg[1] == "" {
|
||||
util.PrintErrorMessageAndExit("ensure that each secret has a none empty key and value. Modify the input and try again")
|
||||
}
|
||||
|
||||
if unicode.IsNumber(rune(splitKeyValueFromArg[0][0])) {
|
||||
util.PrintErrorMessageAndExit("keys of secrets cannot start with a number. Modify the key name(s) and try again")
|
||||
}
|
||||
|
||||
// Key and value from argument
|
||||
key := splitKeyValueFromArg[0]
|
||||
value := splitKeyValueFromArg[1]
|
||||
|
||||
hashedKey := fmt.Sprintf("%x", sha256.Sum256([]byte(key)))
|
||||
encryptedKey, err := crypto.EncryptSymmetric([]byte(key), []byte(plainTextEncryptionKey))
|
||||
if err != nil {
|
||||
util.HandleError(err, "unable to encrypt your secrets")
|
||||
}
|
||||
|
||||
hashedValue := fmt.Sprintf("%x", sha256.Sum256([]byte(value)))
|
||||
encryptedValue, err := crypto.EncryptSymmetric([]byte(value), []byte(plainTextEncryptionKey))
|
||||
if err != nil {
|
||||
util.HandleError(err, "unable to encrypt your secrets")
|
||||
}
|
||||
|
||||
var existingSecret models.SingleEnvironmentVariable
|
||||
var doesSecretExist bool
|
||||
|
||||
if secretType == util.SECRET_TYPE_SHARED {
|
||||
existingSecret, doesSecretExist = sharedSecretMapByName[key]
|
||||
} else {
|
||||
existingSecret, doesSecretExist = personalSecretMapByName[key]
|
||||
}
|
||||
|
||||
if doesSecretExist {
|
||||
// case: secret exists in project so it needs to be modified
|
||||
encryptedSecretDetails := api.Secret{
|
||||
ID: existingSecret.ID,
|
||||
SecretValueCiphertext: base64.StdEncoding.EncodeToString(encryptedValue.CipherText),
|
||||
SecretValueIV: base64.StdEncoding.EncodeToString(encryptedValue.Nonce),
|
||||
SecretValueTag: base64.StdEncoding.EncodeToString(encryptedValue.AuthTag),
|
||||
SecretValueHash: hashedValue,
|
||||
PlainTextKey: key,
|
||||
Type: existingSecret.Type,
|
||||
}
|
||||
|
||||
// Only add to modifications if the value is different
|
||||
if existingSecret.Value != value {
|
||||
secretsToModify = append(secretsToModify, encryptedSecretDetails)
|
||||
secretOperations = append(secretOperations, SecretSetOperation{
|
||||
SecretKey: key,
|
||||
SecretValue: value,
|
||||
SecretOperation: "SECRET VALUE MODIFIED",
|
||||
})
|
||||
} else {
|
||||
// Current value is the same as existing so no change
|
||||
secretOperations = append(secretOperations, SecretSetOperation{
|
||||
SecretKey: key,
|
||||
SecretValue: value,
|
||||
SecretOperation: "SECRET VALUE UNCHANGED",
|
||||
})
|
||||
}
|
||||
|
||||
} else {
|
||||
// case: secret doesn't exist in project so it needs to be created
|
||||
encryptedSecretDetails := api.Secret{
|
||||
SecretKeyCiphertext: base64.StdEncoding.EncodeToString(encryptedKey.CipherText),
|
||||
SecretKeyIV: base64.StdEncoding.EncodeToString(encryptedKey.Nonce),
|
||||
SecretKeyTag: base64.StdEncoding.EncodeToString(encryptedKey.AuthTag),
|
||||
SecretKeyHash: hashedKey,
|
||||
SecretValueCiphertext: base64.StdEncoding.EncodeToString(encryptedValue.CipherText),
|
||||
SecretValueIV: base64.StdEncoding.EncodeToString(encryptedValue.Nonce),
|
||||
SecretValueTag: base64.StdEncoding.EncodeToString(encryptedValue.AuthTag),
|
||||
SecretValueHash: hashedValue,
|
||||
Type: secretType,
|
||||
PlainTextKey: key,
|
||||
}
|
||||
secretsToCreate = append(secretsToCreate, encryptedSecretDetails)
|
||||
secretOperations = append(secretOperations, SecretSetOperation{
|
||||
SecretKey: key,
|
||||
SecretValue: value,
|
||||
SecretOperation: "SECRET CREATED",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
for _, secret := range secretsToCreate {
|
||||
createSecretRequest := api.CreateSecretV3Request{
|
||||
WorkspaceID: workspaceFile.WorkspaceId,
|
||||
Environment: environmentName,
|
||||
SecretName: secret.PlainTextKey,
|
||||
SecretKeyCiphertext: secret.SecretKeyCiphertext,
|
||||
SecretKeyIV: secret.SecretKeyIV,
|
||||
SecretKeyTag: secret.SecretKeyTag,
|
||||
SecretValueCiphertext: secret.SecretValueCiphertext,
|
||||
SecretValueIV: secret.SecretValueIV,
|
||||
SecretValueTag: secret.SecretValueTag,
|
||||
Type: secret.Type,
|
||||
SecretPath: secretsPath,
|
||||
}
|
||||
|
||||
err = api.CallCreateSecretsV3(httpClient, createSecretRequest)
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to process new secret creations")
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
for _, secret := range secretsToModify {
|
||||
updateSecretRequest := api.UpdateSecretByNameV3Request{
|
||||
WorkspaceID: workspaceFile.WorkspaceId,
|
||||
Environment: environmentName,
|
||||
SecretValueCiphertext: secret.SecretValueCiphertext,
|
||||
SecretValueIV: secret.SecretValueIV,
|
||||
SecretValueTag: secret.SecretValueTag,
|
||||
Type: secret.Type,
|
||||
SecretPath: secretsPath,
|
||||
}
|
||||
|
||||
err = api.CallUpdateSecretsV3(httpClient, updateSecretRequest, secret.PlainTextKey)
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to process secret update request")
|
||||
return
|
||||
}
|
||||
util.HandleError(err, "Unable to set secrets")
|
||||
}
|
||||
|
||||
// Print secret operations
|
||||
@@ -395,6 +216,16 @@ var secretsDeleteCmd = &cobra.Command{
|
||||
}
|
||||
}
|
||||
|
||||
token, err := util.GetInfisicalToken(cmd)
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
projectId, err := cmd.Flags().GetString("projectId")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secretsPath, err := cmd.Flags().GetString("path")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
@@ -405,33 +236,44 @@ var secretsDeleteCmd = &cobra.Command{
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to authenticate")
|
||||
httpClient := resty.New().
|
||||
SetHeader("Accept", "application/json")
|
||||
|
||||
if projectId == "" {
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get local project details")
|
||||
}
|
||||
projectId = workspaceFile.WorkspaceId
|
||||
}
|
||||
|
||||
if loggedInUserDetails.LoginExpired {
|
||||
util.PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again")
|
||||
}
|
||||
if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
|
||||
httpClient.SetAuthToken(token.Token)
|
||||
} else {
|
||||
util.RequireLogin()
|
||||
util.RequireLocalWorkspaceFile()
|
||||
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get local project details")
|
||||
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to authenticate")
|
||||
}
|
||||
|
||||
if loggedInUserDetails.LoginExpired {
|
||||
util.PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again")
|
||||
}
|
||||
|
||||
httpClient.SetAuthToken(loggedInUserDetails.UserCredentials.JTWToken)
|
||||
}
|
||||
|
||||
for _, secretName := range args {
|
||||
request := api.DeleteSecretV3Request{
|
||||
WorkspaceId: workspaceFile.WorkspaceId,
|
||||
WorkspaceId: projectId,
|
||||
Environment: environmentName,
|
||||
SecretName: secretName,
|
||||
Type: secretType,
|
||||
SecretPath: secretsPath,
|
||||
}
|
||||
|
||||
httpClient := resty.New().
|
||||
SetAuthToken(loggedInUserDetails.UserCredentials.JTWToken).
|
||||
SetHeader("Accept", "application/json")
|
||||
|
||||
err = api.CallDeleteSecretsV3(httpClient, request)
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to complete your delete request")
|
||||
@@ -789,13 +631,13 @@ func getSecretsByKeys(secrets []models.SingleEnvironmentVariable) map[string]mod
|
||||
}
|
||||
|
||||
func init() {
|
||||
secretsGenerateExampleEnvCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
|
||||
secretsGenerateExampleEnvCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity")
|
||||
secretsGenerateExampleEnvCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
|
||||
secretsGenerateExampleEnvCmd.Flags().String("projectId", "", "manually set the projectId when using machine identity based auth")
|
||||
secretsGenerateExampleEnvCmd.Flags().String("path", "/", "Fetch secrets from within a folder path")
|
||||
secretsCmd.AddCommand(secretsGenerateExampleEnvCmd)
|
||||
|
||||
secretsGetCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
|
||||
secretsGetCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity")
|
||||
secretsGetCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
|
||||
secretsGetCmd.Flags().String("projectId", "", "manually set the project ID to fetch secrets from when using machine identity based auth")
|
||||
secretsGetCmd.Flags().String("path", "/", "get secrets within a folder path")
|
||||
secretsGetCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
|
||||
secretsGetCmd.Flags().Bool("raw-value", false, "Returns only the value of secret, only works with one secret")
|
||||
@@ -804,41 +646,37 @@ func init() {
|
||||
|
||||
secretsCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
|
||||
secretsCmd.AddCommand(secretsSetCmd)
|
||||
secretsSetCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
|
||||
secretsSetCmd.Flags().String("projectId", "", "manually set the project ID to for setting secrets when using machine identity based auth")
|
||||
secretsSetCmd.Flags().String("path", "/", "set secrets within a folder path")
|
||||
secretsSetCmd.Flags().String("type", util.SECRET_TYPE_SHARED, "the type of secret to create: personal or shared")
|
||||
|
||||
// Only supports logged in users (JWT auth)
|
||||
secretsSetCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
|
||||
util.RequireLogin()
|
||||
util.RequireLocalWorkspaceFile()
|
||||
}
|
||||
|
||||
secretsDeleteCmd.Flags().String("type", "personal", "the type of secret to delete: personal or shared (default: personal)")
|
||||
secretsDeleteCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
|
||||
secretsDeleteCmd.Flags().String("projectId", "", "manually set the projectId to delete secrets from when using machine identity based auth")
|
||||
secretsDeleteCmd.Flags().String("path", "/", "get secrets within a folder path")
|
||||
secretsCmd.AddCommand(secretsDeleteCmd)
|
||||
|
||||
// Only supports logged in users (JWT auth)
|
||||
secretsDeleteCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
|
||||
util.RequireLogin()
|
||||
util.RequireLocalWorkspaceFile()
|
||||
}
|
||||
|
||||
// *** Folders sub command ***
|
||||
folderCmd.PersistentFlags().String("env", "dev", "Used to select the environment name on which actions should be taken on")
|
||||
|
||||
// Add getCmd, createCmd and deleteCmd flags here
|
||||
getCmd.Flags().StringP("path", "p", "/", "The path from where folders should be fetched from")
|
||||
getCmd.Flags().String("token", "", "Fetch folders using the infisical token")
|
||||
getCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity")
|
||||
getCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
|
||||
getCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from when using machine identity based auth")
|
||||
folderCmd.AddCommand(getCmd)
|
||||
|
||||
// Add createCmd flags here
|
||||
createCmd.Flags().StringP("path", "p", "/", "Path to where the folder should be created")
|
||||
createCmd.Flags().StringP("name", "n", "", "Name of the folder to be created in selected `--path`")
|
||||
createCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
|
||||
createCmd.Flags().String("projectId", "", "manually set the project ID for creating folders in when using machine identity based auth")
|
||||
folderCmd.AddCommand(createCmd)
|
||||
|
||||
// Add deleteCmd flags here
|
||||
deleteCmd.Flags().StringP("path", "p", "/", "Path to the folder to be deleted")
|
||||
deleteCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
|
||||
deleteCmd.Flags().String("projectId", "", "manually set the projectId to delete folders when using machine identity based auth")
|
||||
deleteCmd.Flags().StringP("name", "n", "", "Name of the folder to be deleted within selected `--path`")
|
||||
folderCmd.AddCommand(deleteCmd)
|
||||
|
||||
@@ -846,8 +684,8 @@ func init() {
|
||||
|
||||
// ** End of folders sub command
|
||||
|
||||
secretsCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
|
||||
secretsCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity")
|
||||
secretsCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
|
||||
secretsCmd.Flags().String("projectId", "", "manually set the projectId to fetch secrets when using machine identity based auth")
|
||||
secretsCmd.PersistentFlags().String("env", "dev", "Used to select the environment name on which actions should be taken on")
|
||||
secretsCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
|
||||
secretsCmd.Flags().Bool("include-imports", true, "Imported linked secrets ")
|
||||
|
@@ -39,7 +39,7 @@ var tokenRenewCmd = &cobra.Command{
|
||||
util.PrintErrorMessageAndExit("You are trying to renew a service token. You can only renew universal auth access tokens.")
|
||||
}
|
||||
|
||||
renewedAccessToken, err := util.RenewUniversalAuthAccessToken(token)
|
||||
renewedAccessToken, err := util.RenewMachineIdentityAccessToken(token)
|
||||
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to renew token")
|
||||
|
@@ -134,3 +134,9 @@ type MachineIdentityCredentials struct {
|
||||
ClientId string
|
||||
ClientSecret string
|
||||
}
|
||||
|
||||
type SecretSetOperation struct {
|
||||
SecretKey string
|
||||
SecretValue string
|
||||
SecretOperation string
|
||||
}
|
||||
|
42
cli/packages/util/auth.go
Normal file
@@ -0,0 +1,42 @@
package util

type AuthStrategyType string

var AuthStrategy = struct {
	UNIVERSAL_AUTH    AuthStrategyType
	KUBERNETES_AUTH   AuthStrategyType
	AZURE_AUTH        AuthStrategyType
	GCP_ID_TOKEN_AUTH AuthStrategyType
	GCP_IAM_AUTH      AuthStrategyType
	AWS_IAM_AUTH      AuthStrategyType
}{
	UNIVERSAL_AUTH:    "universal-auth",
	KUBERNETES_AUTH:   "kubernetes",
	AZURE_AUTH:        "azure",
	GCP_ID_TOKEN_AUTH: "gcp-id-token",
	GCP_IAM_AUTH:      "gcp-iam",
	AWS_IAM_AUTH:      "aws-iam",
}

var AVAILABLE_AUTH_STRATEGIES = []AuthStrategyType{
	AuthStrategy.UNIVERSAL_AUTH,
	AuthStrategy.KUBERNETES_AUTH,
	AuthStrategy.AZURE_AUTH,
	AuthStrategy.GCP_ID_TOKEN_AUTH,
	AuthStrategy.GCP_IAM_AUTH,
	AuthStrategy.AWS_IAM_AUTH,
}

func IsAuthMethodValid(authMethod string, allowUserAuth bool) (isValid bool, strategy AuthStrategyType) {
	if authMethod == "user" && allowUserAuth {
		return true, ""
	}

	for _, strategy := range AVAILABLE_AUTH_STRATEGIES {
		if string(strategy) == authMethod {
			return true, strategy
		}
	}
	return false, ""
}
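For reference, a minimal sketch of how this new helper might be exercised when validating a `--method` value; the `main` wrapper and the sample method string are illustrative assumptions, not part of this diff.

```go
// Hypothetical caller: validating an auth method before attempting login.
// IsAuthMethodValid and AuthStrategy come from the new auth.go above.
package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/packages/util"
)

func main() {
	method := "universal-auth" // e.g. the value passed to `infisical login --method=...`

	valid, strategy := util.IsAuthMethodValid(method, true)
	if !valid {
		fmt.Printf("invalid auth method %q\n", method)
		return
	}

	if strategy == util.AuthStrategy.UNIVERSAL_AUTH {
		fmt.Println("proceed with universal auth login")
	}
}
```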
|
@@ -1,20 +1,32 @@
|
||||
package util
|
||||
|
||||
const (
|
||||
CONFIG_FILE_NAME = "infisical-config.json"
|
||||
CONFIG_FOLDER_NAME = ".infisical"
|
||||
INFISICAL_DEFAULT_API_URL = "https://app.infisical.com/api"
|
||||
INFISICAL_DEFAULT_URL = "https://app.infisical.com"
|
||||
INFISICAL_WORKSPACE_CONFIG_FILE_NAME = ".infisical.json"
|
||||
INFISICAL_TOKEN_NAME = "INFISICAL_TOKEN"
|
||||
CONFIG_FILE_NAME = "infisical-config.json"
|
||||
CONFIG_FOLDER_NAME = ".infisical"
|
||||
INFISICAL_DEFAULT_API_URL = "https://app.infisical.com/api"
|
||||
INFISICAL_DEFAULT_URL = "https://app.infisical.com"
|
||||
INFISICAL_WORKSPACE_CONFIG_FILE_NAME = ".infisical.json"
|
||||
INFISICAL_TOKEN_NAME = "INFISICAL_TOKEN"
|
||||
INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN_NAME = "INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN"
|
||||
|
||||
// Universal Auth
|
||||
INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME = "INFISICAL_UNIVERSAL_AUTH_CLIENT_ID"
|
||||
INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET_NAME = "INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET"
|
||||
INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN_NAME = "INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN"
|
||||
SECRET_TYPE_PERSONAL = "personal"
|
||||
SECRET_TYPE_SHARED = "shared"
|
||||
KEYRING_SERVICE_NAME = "infisical"
|
||||
PERSONAL_SECRET_TYPE_NAME = "personal"
|
||||
SHARED_SECRET_TYPE_NAME = "shared"
|
||||
|
||||
// Kubernetes auth
|
||||
INFISICAL_KUBERNETES_SERVICE_ACCOUNT_TOKEN_NAME = "INFISICAL_KUBERNETES_SERVICE_ACCOUNT_TOKEN_PATH"
|
||||
|
||||
// GCP Auth
|
||||
INFISICAL_GCP_IAM_SERVICE_ACCOUNT_KEY_FILE_PATH_NAME = "INFISICAL_GCP_IAM_SERVICE_ACCOUNT_KEY_FILE_PATH"
|
||||
|
||||
// Generic env variable used for auth methods that require a machine identity ID
|
||||
INFISICAL_MACHINE_IDENTITY_ID_NAME = "INFISICAL_MACHINE_IDENTITY_ID"
|
||||
|
||||
SECRET_TYPE_PERSONAL = "personal"
|
||||
SECRET_TYPE_SHARED = "shared"
|
||||
KEYRING_SERVICE_NAME = "infisical"
|
||||
PERSONAL_SECRET_TYPE_NAME = "personal"
|
||||
SHARED_SECRET_TYPE_NAME = "shared"
|
||||
|
||||
SERVICE_TOKEN_IDENTIFIER = "service-token"
|
||||
UNIVERSAL_AUTH_TOKEN_IDENTIFIER = "universal-auth-token"
|
||||
|
@@ -172,19 +172,28 @@ func GetFoldersViaMachineIdentity(accessToken string, workspaceId string, envSlu
|
||||
|
||||
// CreateFolder creates a folder in Infisical
|
||||
func CreateFolder(params models.CreateFolderParameters) (models.SingleFolder, error) {
|
||||
loggedInUserDetails, err := GetCurrentLoggedInUserDetails()
|
||||
if err != nil {
|
||||
return models.SingleFolder{}, err
|
||||
}
|
||||
|
||||
if loggedInUserDetails.LoginExpired {
|
||||
PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again")
|
||||
// If no token is provided, we will try to get the token from the current logged in user
|
||||
if params.InfisicalToken == "" {
|
||||
RequireLogin()
|
||||
RequireLocalWorkspaceFile()
|
||||
loggedInUserDetails, err := GetCurrentLoggedInUserDetails()
|
||||
|
||||
if err != nil {
|
||||
return models.SingleFolder{}, err
|
||||
}
|
||||
|
||||
if loggedInUserDetails.LoginExpired {
|
||||
PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again")
|
||||
}
|
||||
|
||||
params.InfisicalToken = loggedInUserDetails.UserCredentials.JTWToken
|
||||
}
|
||||
|
||||
// set up resty client
|
||||
httpClient := resty.New()
|
||||
httpClient.
|
||||
SetAuthToken(loggedInUserDetails.UserCredentials.JTWToken).
|
||||
SetAuthToken(params.InfisicalToken).
|
||||
SetHeader("Accept", "application/json").
|
||||
SetHeader("Content-Type", "application/json")
|
||||
|
||||
@@ -209,19 +218,29 @@ func CreateFolder(params models.CreateFolderParameters) (models.SingleFolder, er
|
||||
}
|
||||
|
||||
func DeleteFolder(params models.DeleteFolderParameters) ([]models.SingleFolder, error) {
|
||||
loggedInUserDetails, err := GetCurrentLoggedInUserDetails()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if loggedInUserDetails.LoginExpired {
|
||||
PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again")
|
||||
// If no token is provided, we will try to get the token from the current logged in user
|
||||
if params.InfisicalToken == "" {
|
||||
RequireLogin()
|
||||
RequireLocalWorkspaceFile()
|
||||
|
||||
loggedInUserDetails, err := GetCurrentLoggedInUserDetails()
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if loggedInUserDetails.LoginExpired {
|
||||
PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again")
|
||||
}
|
||||
|
||||
params.InfisicalToken = loggedInUserDetails.UserCredentials.JTWToken
|
||||
}
|
||||
|
||||
// set up resty client
|
||||
httpClient := resty.New()
|
||||
httpClient.
|
||||
SetAuthToken(loggedInUserDetails.UserCredentials.JTWToken).
|
||||
SetAuthToken(params.InfisicalToken).
|
||||
SetHeader("Accept", "application/json").
|
||||
SetHeader("Content-Type", "application/json")
|
||||
|
||||
|
@@ -123,7 +123,7 @@ func UniversalAuthLogin(clientId string, clientSecret string) (api.UniversalAuth
|
||||
return tokenResponse, nil
|
||||
}
|
||||
|
||||
func RenewUniversalAuthAccessToken(accessToken string) (string, error) {
|
||||
func RenewMachineIdentityAccessToken(accessToken string) (string, error) {
|
||||
|
||||
httpClient := resty.New()
|
||||
httpClient.SetRetryCount(10000).
|
||||
@@ -134,7 +134,7 @@ func RenewUniversalAuthAccessToken(accessToken string) (string, error) {
|
||||
AccessToken: accessToken,
|
||||
}
|
||||
|
||||
tokenResponse, err := api.CallUniversalAuthRefreshAccessToken(httpClient, request)
|
||||
tokenResponse, err := api.CallMachineIdentityRefreshAccessToken(httpClient, request)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
@@ -246,3 +246,44 @@ func AppendAPIEndpoint(address string) string {
	}
	return address + "/api"
}

func ReadFileAsString(filePath string) (string, error) {
	fileBytes, err := os.ReadFile(filePath)

	if err != nil {
		return "", err
	}

	return string(fileBytes), nil
}

func GetEnvVarOrFileContent(envName string, filePath string) (string, error) {
	// First check if the environment variable is set
	if envVarValue := os.Getenv(envName); envVarValue != "" {
		return envVarValue, nil
	}

	// If it's not set, try to read the file
	fileContent, err := ReadFileAsString(filePath)

	if err != nil {
		return "", fmt.Errorf("unable to read file content from file path '%s' [err=%v]", filePath, err)
	}

	return fileContent, nil
}

func GetCmdFlagOrEnv(cmd *cobra.Command, flag, envName string) (string, error) {
	value, flagsErr := cmd.Flags().GetString(flag)
	if flagsErr != nil {
		return "", flagsErr
	}
	if value == "" {
		value = os.Getenv(envName)
	}
	if value == "" {
		return "", fmt.Errorf("please provide %s flag", flag)
	}
	return value, nil
}
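A hedged sketch of how these flag/env fallback helpers might be wired into a cobra command; the command, flag name, and default token path below are assumptions for illustration, not code from this diff.

```go
// Hypothetical command using the new helpers:
// GetCmdFlagOrEnv falls back to an env var when the flag is empty, and
// GetEnvVarOrFileContent falls back to reading a file when the env var is unset.
package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/packages/util"
	"github.com/spf13/cobra"
)

var exampleCmd = &cobra.Command{
	Use: "example",
	Run: func(cmd *cobra.Command, args []string) {
		// --machine-identity-id flag, or the INFISICAL_MACHINE_IDENTITY_ID env var
		identityID, err := util.GetCmdFlagOrEnv(cmd, "machine-identity-id", util.INFISICAL_MACHINE_IDENTITY_ID_NAME)
		if err != nil {
			fmt.Println(err)
			return
		}

		// Kubernetes service account JWT from an env var, or from the default token path
		saToken, err := util.GetEnvVarOrFileContent(util.INFISICAL_KUBERNETES_SERVICE_ACCOUNT_TOKEN_NAME, "/var/run/secrets/kubernetes.io/serviceaccount/token")
		if err != nil {
			fmt.Println(err)
			return
		}

		fmt.Println(identityID, len(saToken))
	},
}

func main() {
	exampleCmd.Flags().String("machine-identity-id", "", "machine identity ID")
	_ = exampleCmd.Execute()
}
```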
|
||||
|
@@ -1,6 +1,7 @@
|
||||
package util
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
@@ -9,6 +10,7 @@ import (
|
||||
"path"
|
||||
"regexp"
|
||||
"strings"
|
||||
"unicode"
|
||||
|
||||
"github.com/Infisical/infisical-merge/packages/api"
|
||||
"github.com/Infisical/infisical-merge/packages/crypto"
|
||||
@@ -806,3 +808,336 @@ func GetPlainTextWorkspaceKey(authenticationToken string, receiverPrivateKey str
|
||||
|
||||
return crypto.DecryptAsymmetric(encryptedWorkspaceKey, encryptedWorkspaceKeyNonce, encryptedWorkspaceKeySenderPublicKey, currentUsersPrivateKey), nil
|
||||
}
|
||||
|
||||
func SetEncryptedSecrets(secretArgs []string, secretType string, environmentName string, secretsPath string) ([]models.SecretSetOperation, error) {
|
||||
|
||||
workspaceFile, err := GetWorkSpaceFromFile()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to get your local config details [err=%v]", err)
|
||||
}
|
||||
|
||||
loggedInUserDetails, err := GetCurrentLoggedInUserDetails()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to authenticate [err=%v]", err)
|
||||
}
|
||||
|
||||
if loggedInUserDetails.LoginExpired {
|
||||
PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again")
|
||||
}
|
||||
|
||||
httpClient := resty.New().
|
||||
SetAuthToken(loggedInUserDetails.UserCredentials.JTWToken).
|
||||
SetHeader("Accept", "application/json")
|
||||
|
||||
request := api.GetEncryptedWorkspaceKeyRequest{
|
||||
WorkspaceId: workspaceFile.WorkspaceId,
|
||||
}
|
||||
|
||||
workspaceKeyResponse, err := api.CallGetEncryptedWorkspaceKey(httpClient, request)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to get your encrypted workspace key [err=%v]", err)
|
||||
}
|
||||
|
||||
encryptedWorkspaceKey, _ := base64.StdEncoding.DecodeString(workspaceKeyResponse.EncryptedKey)
|
||||
encryptedWorkspaceKeySenderPublicKey, _ := base64.StdEncoding.DecodeString(workspaceKeyResponse.Sender.PublicKey)
|
||||
encryptedWorkspaceKeyNonce, _ := base64.StdEncoding.DecodeString(workspaceKeyResponse.Nonce)
|
||||
currentUsersPrivateKey, _ := base64.StdEncoding.DecodeString(loggedInUserDetails.UserCredentials.PrivateKey)
|
||||
|
||||
if len(currentUsersPrivateKey) == 0 || len(encryptedWorkspaceKeySenderPublicKey) == 0 {
|
||||
log.Debug().Msgf("Missing credentials for generating plainTextEncryptionKey: [currentUsersPrivateKey=%s] [encryptedWorkspaceKeySenderPublicKey=%s]", currentUsersPrivateKey, encryptedWorkspaceKeySenderPublicKey)
|
||||
PrintErrorMessageAndExit("Some required user credentials are missing to generate your [plainTextEncryptionKey]. Please run [infisical login] then try again")
|
||||
}
|
||||
|
||||
// decrypt workspace key
|
||||
plainTextEncryptionKey := crypto.DecryptAsymmetric(encryptedWorkspaceKey, encryptedWorkspaceKeyNonce, encryptedWorkspaceKeySenderPublicKey, currentUsersPrivateKey)
|
||||
|
||||
infisicalTokenEnv := os.Getenv(INFISICAL_TOKEN_NAME)
|
||||
|
||||
// pull current secrets
|
||||
secrets, err := GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, SecretsPath: secretsPath, InfisicalToken: infisicalTokenEnv}, "")
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to retrieve secrets [err=%v]", err)
|
||||
}
|
||||
|
||||
secretsToCreate := []api.Secret{}
|
||||
secretsToModify := []api.Secret{}
|
||||
secretOperations := []models.SecretSetOperation{}
|
||||
|
||||
sharedSecretMapByName := make(map[string]models.SingleEnvironmentVariable, len(secrets))
|
||||
personalSecretMapByName := make(map[string]models.SingleEnvironmentVariable, len(secrets))
|
||||
|
||||
for _, secret := range secrets {
|
||||
if secret.Type == SECRET_TYPE_PERSONAL {
|
||||
personalSecretMapByName[secret.Key] = secret
|
||||
} else {
|
||||
sharedSecretMapByName[secret.Key] = secret
|
||||
}
|
||||
}
|
||||
|
||||
for _, arg := range secretArgs {
|
||||
splitKeyValueFromArg := strings.SplitN(arg, "=", 2)
|
||||
if splitKeyValueFromArg[0] == "" || splitKeyValueFromArg[1] == "" {
|
||||
PrintErrorMessageAndExit("ensure that each secret has a none empty key and value. Modify the input and try again")
|
||||
}
|
||||
|
||||
if unicode.IsNumber(rune(splitKeyValueFromArg[0][0])) {
|
||||
PrintErrorMessageAndExit("keys of secrets cannot start with a number. Modify the key name(s) and try again")
|
||||
}
|
||||
|
||||
// Key and value from argument
|
||||
key := splitKeyValueFromArg[0]
|
||||
value := splitKeyValueFromArg[1]
|
||||
|
||||
hashedKey := fmt.Sprintf("%x", sha256.Sum256([]byte(key)))
|
||||
encryptedKey, err := crypto.EncryptSymmetric([]byte(key), []byte(plainTextEncryptionKey))
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to encrypt your secrets [err=%v]", err)
|
||||
}
|
||||
|
||||
hashedValue := fmt.Sprintf("%x", sha256.Sum256([]byte(value)))
|
||||
encryptedValue, err := crypto.EncryptSymmetric([]byte(value), []byte(plainTextEncryptionKey))
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to encrypt your secrets [err=%v]", err)
|
||||
}
|
||||
|
||||
var existingSecret models.SingleEnvironmentVariable
|
||||
var doesSecretExist bool
|
||||
|
||||
if secretType == SECRET_TYPE_SHARED {
|
||||
existingSecret, doesSecretExist = sharedSecretMapByName[key]
|
||||
} else {
|
||||
existingSecret, doesSecretExist = personalSecretMapByName[key]
|
||||
}
|
||||
|
||||
if doesSecretExist {
|
||||
// case: secret exists in project so it needs to be modified
|
||||
encryptedSecretDetails := api.Secret{
|
||||
ID: existingSecret.ID,
|
||||
SecretValueCiphertext: base64.StdEncoding.EncodeToString(encryptedValue.CipherText),
|
||||
SecretValueIV: base64.StdEncoding.EncodeToString(encryptedValue.Nonce),
|
||||
SecretValueTag: base64.StdEncoding.EncodeToString(encryptedValue.AuthTag),
|
||||
SecretValueHash: hashedValue,
|
||||
PlainTextKey: key,
|
||||
Type: existingSecret.Type,
|
||||
}
|
||||
|
||||
// Only add to modifications if the value is different
|
||||
if existingSecret.Value != value {
|
||||
secretsToModify = append(secretsToModify, encryptedSecretDetails)
|
||||
secretOperations = append(secretOperations, models.SecretSetOperation{
|
||||
SecretKey: key,
|
||||
SecretValue: value,
|
||||
SecretOperation: "SECRET VALUE MODIFIED",
|
||||
})
|
||||
} else {
|
||||
// Current value is the same as existing so no change
|
||||
secretOperations = append(secretOperations, models.SecretSetOperation{
|
||||
SecretKey: key,
|
||||
SecretValue: value,
|
||||
SecretOperation: "SECRET VALUE UNCHANGED",
|
||||
})
|
||||
}
|
||||
|
||||
} else {
|
||||
// case: secret doesn't exist in project so it needs to be created
|
||||
encryptedSecretDetails := api.Secret{
|
||||
SecretKeyCiphertext: base64.StdEncoding.EncodeToString(encryptedKey.CipherText),
|
||||
SecretKeyIV: base64.StdEncoding.EncodeToString(encryptedKey.Nonce),
|
||||
SecretKeyTag: base64.StdEncoding.EncodeToString(encryptedKey.AuthTag),
|
||||
SecretKeyHash: hashedKey,
|
||||
SecretValueCiphertext: base64.StdEncoding.EncodeToString(encryptedValue.CipherText),
|
||||
SecretValueIV: base64.StdEncoding.EncodeToString(encryptedValue.Nonce),
|
||||
SecretValueTag: base64.StdEncoding.EncodeToString(encryptedValue.AuthTag),
|
||||
SecretValueHash: hashedValue,
|
||||
Type: secretType,
|
||||
PlainTextKey: key,
|
||||
}
|
||||
secretsToCreate = append(secretsToCreate, encryptedSecretDetails)
|
||||
secretOperations = append(secretOperations, models.SecretSetOperation{
|
||||
SecretKey: key,
|
||||
SecretValue: value,
|
||||
SecretOperation: "SECRET CREATED",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
for _, secret := range secretsToCreate {
|
||||
createSecretRequest := api.CreateSecretV3Request{
|
||||
WorkspaceID: workspaceFile.WorkspaceId,
|
||||
Environment: environmentName,
|
||||
SecretName: secret.PlainTextKey,
|
||||
SecretKeyCiphertext: secret.SecretKeyCiphertext,
|
||||
SecretKeyIV: secret.SecretKeyIV,
|
||||
SecretKeyTag: secret.SecretKeyTag,
|
||||
SecretValueCiphertext: secret.SecretValueCiphertext,
|
||||
SecretValueIV: secret.SecretValueIV,
|
||||
SecretValueTag: secret.SecretValueTag,
|
||||
Type: secret.Type,
|
||||
SecretPath: secretsPath,
|
||||
}
|
||||
|
||||
err = api.CallCreateSecretsV3(httpClient, createSecretRequest)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to process new secret creations [err=%v]", err)
|
||||
}
|
||||
}
|
||||
|
||||
for _, secret := range secretsToModify {
|
||||
updateSecretRequest := api.UpdateSecretByNameV3Request{
|
||||
WorkspaceID: workspaceFile.WorkspaceId,
|
||||
Environment: environmentName,
|
||||
SecretValueCiphertext: secret.SecretValueCiphertext,
|
||||
SecretValueIV: secret.SecretValueIV,
|
||||
SecretValueTag: secret.SecretValueTag,
|
||||
Type: secret.Type,
|
||||
SecretPath: secretsPath,
|
||||
}
|
||||
|
||||
err = api.CallUpdateSecretsV3(httpClient, updateSecretRequest, secret.PlainTextKey)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to process secret update request [err=%v]", err)
|
||||
}
|
||||
}
|
||||
|
||||
return secretOperations, nil
|
||||
|
||||
}
|
||||
|
||||
func SetRawSecrets(secretArgs []string, secretType string, environmentName string, secretsPath string, projectId string, tokenDetails *models.TokenDetails) ([]models.SecretSetOperation, error) {
|
||||
|
||||
if tokenDetails == nil {
|
||||
return nil, fmt.Errorf("unable to process set secret operations, token details are missing")
|
||||
}
|
||||
|
||||
getAllEnvironmentVariablesRequest := models.GetAllSecretsParameters{Environment: environmentName, SecretsPath: secretsPath, WorkspaceId: projectId}
|
||||
if tokenDetails.Type == UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
|
||||
getAllEnvironmentVariablesRequest.UniversalAuthAccessToken = tokenDetails.Token
|
||||
} else {
|
||||
getAllEnvironmentVariablesRequest.InfisicalToken = tokenDetails.Token
|
||||
}
|
||||
|
||||
httpClient := resty.New().
|
||||
SetAuthToken(tokenDetails.Token).
|
||||
SetHeader("Accept", "application/json")
|
||||
|
||||
// pull current secrets
|
||||
secrets, err := GetAllEnvironmentVariables(getAllEnvironmentVariablesRequest, "")
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to retrieve secrets [err=%v]", err)
|
||||
}
|
||||
|
||||
secretsToCreate := []api.RawSecret{}
|
||||
secretsToModify := []api.RawSecret{}
|
||||
secretOperations := []models.SecretSetOperation{}
|
||||
|
||||
sharedSecretMapByName := make(map[string]models.SingleEnvironmentVariable, len(secrets))
|
||||
personalSecretMapByName := make(map[string]models.SingleEnvironmentVariable, len(secrets))
|
||||
|
||||
for _, secret := range secrets {
|
||||
if secret.Type == SECRET_TYPE_PERSONAL {
|
||||
personalSecretMapByName[secret.Key] = secret
|
||||
} else {
|
||||
sharedSecretMapByName[secret.Key] = secret
|
||||
}
|
||||
}
|
||||
|
||||
for _, arg := range secretArgs {
|
||||
splitKeyValueFromArg := strings.SplitN(arg, "=", 2)
|
||||
if splitKeyValueFromArg[0] == "" || splitKeyValueFromArg[1] == "" {
|
||||
PrintErrorMessageAndExit("ensure that each secret has a none empty key and value. Modify the input and try again")
|
||||
}
|
||||
|
||||
if unicode.IsNumber(rune(splitKeyValueFromArg[0][0])) {
|
||||
PrintErrorMessageAndExit("keys of secrets cannot start with a number. Modify the key name(s) and try again")
|
||||
}
|
||||
|
||||
// Key and value from argument
|
||||
key := splitKeyValueFromArg[0]
|
||||
value := splitKeyValueFromArg[1]
|
||||
|
||||
var existingSecret models.SingleEnvironmentVariable
|
||||
var doesSecretExist bool
|
||||
|
||||
if secretType == SECRET_TYPE_SHARED {
|
||||
existingSecret, doesSecretExist = sharedSecretMapByName[key]
|
||||
} else {
|
||||
existingSecret, doesSecretExist = personalSecretMapByName[key]
|
||||
}
|
||||
|
||||
if doesSecretExist {
|
||||
// case: secret exists in project so it needs to be modified
|
||||
encryptedSecretDetails := api.RawSecret{
|
||||
ID: existingSecret.ID,
|
||||
SecretValue: value,
|
||||
SecretKey: key,
|
||||
Type: existingSecret.Type,
|
||||
}
|
||||
|
||||
// Only add to modifications if the value is different
|
||||
if existingSecret.Value != value {
|
||||
secretsToModify = append(secretsToModify, encryptedSecretDetails)
|
||||
secretOperations = append(secretOperations, models.SecretSetOperation{
|
||||
SecretKey: key,
|
||||
SecretValue: value,
|
||||
SecretOperation: "SECRET VALUE MODIFIED",
|
||||
})
|
||||
} else {
|
||||
// Current value is same as existing so no change
|
||||
secretOperations = append(secretOperations, models.SecretSetOperation{
|
||||
SecretKey: key,
|
||||
SecretValue: value,
|
||||
SecretOperation: "SECRET VALUE UNCHANGED",
|
||||
})
|
||||
}
|
||||
|
||||
} else {
|
||||
// case: secret doesn't exist in project so it needs to be created
|
||||
encryptedSecretDetails := api.RawSecret{
|
||||
SecretKey: key,
|
||||
SecretValue: value,
|
||||
Type: secretType,
|
||||
}
|
||||
secretsToCreate = append(secretsToCreate, encryptedSecretDetails)
|
||||
secretOperations = append(secretOperations, models.SecretSetOperation{
|
||||
SecretKey: key,
|
||||
SecretValue: value,
|
||||
SecretOperation: "SECRET CREATED",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
for _, secret := range secretsToCreate {
|
||||
createSecretRequest := api.CreateRawSecretV3Request{
|
||||
SecretName: secret.SecretKey,
|
||||
SecretValue: secret.SecretValue,
|
||||
Type: secret.Type,
|
||||
SecretPath: secretsPath,
|
||||
WorkspaceID: projectId,
|
||||
Environment: environmentName,
|
||||
}
|
||||
|
||||
err = api.CallCreateRawSecretsV3(httpClient, createSecretRequest)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to process new secret creations [err=%v]", err)
|
||||
}
|
||||
}
|
||||
|
||||
for _, secret := range secretsToModify {
|
||||
updateSecretRequest := api.UpdateRawSecretByNameV3Request{
|
||||
SecretName: secret.SecretKey,
|
||||
SecretValue: secret.SecretValue,
|
||||
SecretPath: secretsPath,
|
||||
WorkspaceID: projectId,
|
||||
Environment: environmentName,
|
||||
Type: secret.Type,
|
||||
}
|
||||
|
||||
err = api.CallUpdateRawSecretsV3(httpClient, updateSecretRequest)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to process secret update request [err=%v]", err)
|
||||
}
|
||||
}
|
||||
|
||||
return secretOperations, nil
|
||||
|
||||
}
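A hypothetical call site for `SetRawSecrets`, mirroring the machine-identity path of the updated `secrets set` command; the token value, project ID, and key/value pairs below are placeholders, not values from this diff.

```go
// Sketch: set two secrets in the "dev" environment using a machine identity access token.
package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/packages/models"
	"github.com/Infisical/infisical-merge/packages/util"
)

func main() {
	token := &models.TokenDetails{
		Type:  util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER,
		Token: "<machine-identity-access-token>", // placeholder
	}

	ops, err := util.SetRawSecrets(
		[]string{"DB_HOST=localhost", "DB_PORT=5432"}, // KEY=value pairs
		util.SECRET_TYPE_SHARED,                       // secret type
		"dev",                                         // environment slug
		"/",                                           // secrets path
		"<project-id>",                                // placeholder project ID
		token,
	)
	if err != nil {
		fmt.Println(err)
		return
	}

	for _, op := range ops {
		fmt.Printf("%s: %s\n", op.SecretKey, op.SecretOperation)
	}
}
```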
|
||||
|
@@ -4,59 +4,63 @@ sidebarTitle: "What is Infisical?"
|
||||
description: "An Introduction to the Infisical secret management platform."
|
||||
---
|
||||
|
||||
Infisical is an [open-source](https://github.com/infisical/infisical) secret management platform for developers.
|
||||
It provides capabilities for storing, managing, and syncing application configuration and secrets like API keys, database
|
||||
credentials, and certificates across infrastructure. In addition, Infisical prevents secrets leaks to git and enables secure
|
||||
sharing of secrets among engineers.
|
||||
**[Infisical](https://infisical.com)** is the open source secret management platform that developers use to centralize their application configuration and secrets like API keys and database credentials as well as manage their internal PKI. In addition, developers use Infisical to prevent secrets leaks to git and securely share secrets amongst engineers.
|
||||
|
||||
Start managing secrets securely with [Infisical Cloud](https://app.infisical.com) or learn how to [host Infisical](/self-hosting/overview) yourself.
|
||||
|
||||
<CardGroup cols={2}>
|
||||
<Card
|
||||
title="Infisical Cloud"
|
||||
href="https://app.infisical.com/signup"
|
||||
icon="cloud"
|
||||
color="#000000"
|
||||
>
|
||||
Get started with Infisical Cloud in just a few minutes.
|
||||
</Card>
|
||||
<Card
|
||||
href="/self-hosting/overview"
|
||||
title="Self-hosting"
|
||||
icon="server"
|
||||
color="#000000"
|
||||
>
|
||||
Self-host Infisical on your own infrastructure.
|
||||
</Card>
|
||||
<Card
|
||||
title="Infisical Cloud"
|
||||
href="https://app.infisical.com/signup"
|
||||
icon="cloud"
|
||||
color="#000000"
|
||||
>
|
||||
Get started with Infisical Cloud in just a few minutes.
|
||||
</Card>
|
||||
<Card
|
||||
href="/self-hosting/overview"
|
||||
title="Self-hosting"
|
||||
icon="server"
|
||||
color="#000000"
|
||||
>
|
||||
Self-host Infisical on your own infrastructure.
|
||||
</Card>
|
||||
</CardGroup>
|
||||
|
||||
## Why Infisical?

Infisical helps developers achieve secure centralized secret management and provides all the tools to easily manage secrets in various environments and infrastructure components. In particular, here are some of the most common points that developers mention after adopting Infisical:

- Streamlined **local development** processes (switching .env files to [Infisical CLI](/cli/commands/run) and removing secrets from developer machines).
- **Best-in-class developer experience** with an easy-to-use [Web Dashboard](/documentation/platform/project).
- Simple secret management inside **[CI/CD pipelines](/integrations/cicd/githubactions)** and staging environments.
- Secure and compliant secret management practices in **[production environments](/sdks/overview)**.
- **Facilitated workflows** around [secret change management](/documentation/platform/pr-workflows), [access requests](/documentation/platform/access-controls/access-requests), [temporary access provisioning](/documentation/platform/access-controls/temporary-access), and more.
- **Improved security posture** thanks to [secret scanning](/cli/scanning-overview), [granular access control policies](/documentation/platform/access-controls/overview), [automated secret rotation](https://infisical.com/docs/documentation/platform/secret-rotation/overview), and [dynamic secrets](/documentation/platform/dynamic-secrets/overview) capabilities.

## How does Infisical work?

To make secret management effortless and secure, Infisical follows a certain structure for enabling secret management workflows as defined below.

**Identities** in Infisical are users or machines which have a certain set of roles and permissions assigned to them. Such identities are able to manage secrets in various **Clients** throughout the entire infrastructure. To do that, identities have to verify themselves through one of the available **Authentication Methods**.

As a result, the 3 main concepts that are important to understand are:

- **[Identities](/documentation/platform/identities/overview)**: users or machines with a set of permissions assigned to them.
- **[Clients](/integrations/platforms/kubernetes)**: Infisical-developed tools for managing secrets in various infrastructure components (e.g., [Kubernetes Operator](/integrations/platforms/kubernetes), [Infisical Agent](/integrations/platforms/infisical-agent), [CLI](/cli/usage), [SDKs](/sdks/overview), [API](/api-reference/overview/introduction), [Web Dashboard](/documentation/platform/organization)).
- **[Authentication Methods](/documentation/platform/identities/universal-auth)**: ways for Identities to authenticate inside different clients (e.g., SAML SSO for Web Dashboard, Universal Auth for Infisical Agent, etc.).

## How to get started with Infisical?

Depending on your use case, it might be helpful to look into some of the resources and guides provided below.
|
||||
|
||||
<CardGroup cols={2}>
|
||||
<Card href="../../cli/overview" title="Command Line Interface (CLI)" icon="square-terminal" color="#000000">
|
||||
<Card
|
||||
href="../../cli/overview"
|
||||
title="Command Line Interface (CLI)"
|
||||
icon="square-terminal"
|
||||
color="#000000"
|
||||
>
|
||||
Inject secrets into any application process/environment.
|
||||
</Card>
|
||||
<Card
|
||||
@@ -67,7 +71,12 @@ Depending on your use case, it might be helpful to look into some of the resourc
|
||||
>
|
||||
Fetch secrets with any programming language on demand.
|
||||
</Card>
|
||||
<Card href="../../integrations/platforms/docker-intro" title="Docker" icon="docker" color="#000000">
|
||||
<Card
|
||||
href="../../integrations/platforms/docker-intro"
|
||||
title="Docker"
|
||||
icon="docker"
|
||||
color="#000000"
|
||||
>
|
||||
Inject secrets into Docker containers.
|
||||
</Card>
|
||||
<Card
|
||||
|
4
docs/api-reference/endpoints/secret-tags/get-by-id.mdx
Normal file
@@ -0,0 +1,4 @@
|
||||
---
|
||||
title: "Get By ID"
|
||||
openapi: "GET /api/v1/workspace/{projectId}/tags/{tagId}"
|
||||
---
|
4
docs/api-reference/endpoints/secret-tags/get-by-slug.mdx
Normal file
@@ -0,0 +1,4 @@
|
||||
---
|
||||
title: "Get By Slug"
|
||||
openapi: "GET /api/v1/workspace/{projectId}/tags/slug/{tagSlug}"
|
||||
---
|
4
docs/api-reference/endpoints/secret-tags/update.mdx
Normal file
@@ -0,0 +1,4 @@
|
||||
---
|
||||
title: "Update"
|
||||
openapi: "PATCH /api/v1/workspace/{projectId}/tags/{tagId}"
|
||||
---
|
@@ -4,6 +4,25 @@ title: "Changelog"
|
||||
|
||||
The changelog below reflects new product developments and updates on a monthly basis.
|
||||
|
||||
## May 2024
|
||||
- Released [AWS](https://infisical.com/docs/documentation/platform/identities/aws-auth), [GCP](https://infisical.com/docs/documentation/platform/identities/gcp-auth), [Azure](https://infisical.com/docs/documentation/platform/identities/azure-auth), and [Kubernetes](https://infisical.com/docs/documentation/platform/identities/kubernetes-auth) Native Auth Methods.
|
||||
- Added [Secret Sharing](https://infisical.com/docs/documentation/platform/secret-sharing) functionality for sharing sensitive data through encrypted links – within and outside of an organization.
|
||||
- Updated [Secret Referencing](https://infisical.com/docs/documentation/platform/secret-reference) to be supported in all Infisical clients. Infisical UI is now able to provide automatic reference suggestions when typing.
|
||||
- Released new [Infisical Jenkins Plugin](https://infisical.com/docs/integrations/cicd/jenkins).
|
||||
- Added statuses and manual sync option to integrations in the Dashboard UI.
|
||||
- Released universal [Audit Log Streaming](https://infisical.com/docs/documentation/platform/audit-log-streams).
|
||||
- Added [Dynamic Secret template for AWS IAM](https://infisical.com/docs/documentation/platform/dynamic-secrets/aws-iam).
|
||||
- Added support for syncing tags and custom KMS keys to [AWS Secrets Manager](https://infisical.com/docs/integrations/cloud/aws-secret-manager) and [Parameter Store](https://infisical.com/docs/integrations/cloud/aws-parameter-store) Integrations.
|
||||
- Officially released Infisical on [AWS Marketplace](https://infisical.com/blog/infisical-launches-on-aws-marketplace).
|
||||
|
||||
## April 2024
|
||||
- Added [Access Requests](https://infisical.com/docs/documentation/platform/access-controls/access-requests) as part of self-serve secrets management workflows.
|
||||
- Added [Temporary Access Provisioning](https://infisical.com/docs/documentation/platform/access-controls/temporary-access) for roles and additional privileges.
|
||||
|
||||
## March 2024
|
||||
- Released support for [Dynamic Secrets](https://infisical.com/docs/documentation/platform/dynamic-secrets/overview).
|
||||
- Released the concept of [Additional Privileges](https://infisical.com/docs/documentation/platform/access-controls/additional-privileges) on top of user/machine roles.
|
||||
|
||||
## Feb 2024
|
||||
- Added org-scoped authentication enforcement for SAML
|
||||
- Added support for [SCIM](https://infisical.com/docs/documentation/platform/scim/overview) along with instructions for setting it up with [Okta](https://infisical.com/docs/documentation/platform/scim/okta), [Azure](https://infisical.com/docs/documentation/platform/scim/azure), and [JumpCloud](https://infisical.com/docs/documentation/platform/scim/jumpcloud).
|
||||
|
@@ -7,32 +7,38 @@ description: "Login into Infisical from the CLI"
|
||||
infisical login
|
||||
```
|
||||
|
||||
## Description
|
||||
### Description
|
||||
The CLI uses authentication to verify your identity. When you enter the correct email and password for your account, a token is generated and saved in your system Keyring to allow you to make future interactions with the CLI.
|
||||
|
||||
To change where the login credentials are stored, visit the [vaults command](./vault).
|
||||
|
||||
If you have added multiple users, you can switch between the users by using the [user command](./user).
|
||||
|
||||
<Info>
|
||||
When you authenticate with **any other method than `user`**, an access token will be printed to the console upon successful login. This token can be used to authenticate with the Infisical API and the CLI by passing it in the `--token` flag when applicable.
|
||||
|
||||
Use flag `--plain` along with `--silent` to print only the token in plain text when using a machine identity auth method.
|
||||
|
||||
</Info>
|
||||
|
||||
|
||||
### Flags
|
||||
The login command supports a number of flags that you can use for different authentication methods. Below is a list of all the flags that can be used with the login command.
|
||||
|
||||
<AccordionGroup>
|
||||
<Accordion title="--method">
|
||||
```bash
|
||||
infisical login --method=<auth-method> # Optional, will default to 'user'.
|
||||
```
|
||||
|
||||
#### Valid values for the `method` flag are:
|
||||
- `user`: Login using email and password.
|
||||
- `user`: Login using email and password. (default)
|
||||
- `universal-auth`: Login using a universal auth client ID and client secret.
|
||||
|
||||
<Info>
|
||||
When `method` is set to `universal-auth`, the `client-id` and `client-secret` flags are required. Optionally you can set the `INFISICAL_UNIVERSAL_AUTH_CLIENT_ID` and `INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET` environment variables instead of using the flags.
|
||||
|
||||
When you authenticate with universal auth, an access token will be printed to the console upon successful login. This token can be used to authenticate with the Infisical API and the CLI by passing it in the `--token` flag when applicable.
|
||||
|
||||
Use flag `--plain` along with `--silent` to print only the token in plain text when using the `universal-auth` method.
|
||||
|
||||
</Info>
|
||||
- `kubernetes`: Login using a Kubernetes native auth.
|
||||
- `azure`: Login using an Azure native auth.
|
||||
- `gcp-id-token`: Login using a GCP ID token native auth.
|
||||
- `gcp-iam`: Login using a GCP IAM.
|
||||
- `aws-iam`: Login using an AWS IAM native auth.
|
||||
|
||||
</Accordion>
|
||||
<Accordion title="--client-id">
|
||||
@@ -41,7 +47,7 @@ If you have added multiple users, you can switch between the users by using the
|
||||
```
|
||||
|
||||
#### Description
|
||||
The client ID of the universal auth client. This is required if the `--method` flag is set to `universal-auth`.
|
||||
The client ID of the universal auth machine identity. This is required if the `--method` flag is set to `universal-auth`.
|
||||
|
||||
<Tip>
|
||||
The `client-id` flag can be substituted with the `INFISICAL_UNIVERSAL_AUTH_CLIENT_ID` environment variable.
|
||||
@@ -52,13 +58,245 @@ If you have added multiple users, you can switch between the users by using the
|
||||
infisical login --client-secret=<client-secret> # Optional, required if --method=universal-auth.
|
||||
```
|
||||
#### Description
|
||||
The client secret of the universal auth client. This is required if the `--method` flag is set to `universal-auth`.
|
||||
The client secret of the universal auth machine identity. This is required if the `--method` flag is set to `universal-auth`.
|
||||
|
||||
<Tip>
|
||||
The `client-secret` flag can be substituted with the `INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET` environment variable.
|
||||
</Tip>
|
||||
|
||||
</Accordion>
|
||||
<Accordion title="--machine-identity-id">
|
||||
```bash
|
||||
infisical login --machine-identity-id=<your-machine-identity-id> # Optional, required if --method=kubernetes, azure, gcp-id-token, gcp-iam, or aws-iam.
|
||||
```
|
||||
|
||||
#### Description
|
||||
The ID of the machine identity. This is required if the `--method` flag is set to `kubernetes`, `azure`, `gcp-id-token`, `gcp-iam`, or `aws-iam`.
|
||||
|
||||
<Tip>
|
||||
The `machine-identity-id` flag can be substituted with the `INFISICAL_MACHINE_IDENTITY_ID` environment variable.
|
||||
</Tip>
|
||||
</Accordion>
|
||||
<Accordion title="--service-account-token-path">
|
||||
```bash
|
||||
infisical login --service-account-token-path=<service-account-token-path> # Optional. Will default to '/var/run/secrets/kubernetes.io/serviceaccount/token'.
|
||||
```
|
||||
|
||||
#### Description
|
||||
The path to the Kubernetes service account token to use for authentication.
|
||||
This is optional and will default to `/var/run/secrets/kubernetes.io/serviceaccount/token`.
|
||||
|
||||
<Tip>
|
||||
The `service-account-token-path` flag can be substituted with the `INFISICAL_KUBERNETES_SERVICE_ACCOUNT_TOKEN_PATH` environment variable.
|
||||
</Tip>
|
||||
</Accordion>
|
||||
<Accordion title="--service-account-key-file-path">
|
||||
```bash
|
||||
infisical login --service-account-key-file-path=<gcp-service-account-key-file-path> # Optional, but required if --method=gcp-iam.
|
||||
```
|
||||
|
||||
#### Description
|
||||
The path to your GCP service account key file. This is required if the `--method` flag is set to `gcp-iam`.
|
||||
|
||||
<Tip>
|
||||
The `service-account-key-file-path` flag can be substituted with the `INFISICAL_GCP_IAM_SERVICE_ACCOUNT_KEY_FILE_PATH` environment variable.
|
||||
</Tip>
|
||||
</Accordion>
|
||||
</AccordionGroup>
|
||||
|
||||
|
||||
|
||||
### Authentication Methods
|
||||
|
||||
The Infisical CLI supports multiple authentication methods. Below are the available methods and their respective flags.
|
||||
|
||||
<AccordionGroup>
|
||||
<Accordion title="Universal Auth">
|
||||
The Universal Auth method is a simple and secure way to authenticate with Infisical. It requires a client ID and a client secret.
|
||||
|
||||
<ParamField query="Flags">
|
||||
<Expandable title="properties">
|
||||
<ParamField query="client-id" type="string" required>
|
||||
Your machine identity client ID.
|
||||
</ParamField>
|
||||
<ParamField query="client-secret" type="string" required>
|
||||
Your machine identity client secret.
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
<Steps>
|
||||
<Step title="Create a universal auth machine identity">
|
||||
To create a universal auth machine identity, follow the step-by-step guide outlined [here](/documentation/platform/identities/universal-auth).
|
||||
</Step>
|
||||
<Step title="Obtain an access token">
|
||||
Run the `login` command with the following flags to obtain an access token:
|
||||
|
||||
```bash
|
||||
infisical login --method=universal-auth --client-id=<client-id> --client-secret=<client-secret>
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
</Accordion>
|
||||
<Accordion title="Native Kubernetes">
|
||||
The Native Kubernetes method is used to authenticate with Infisical when running in a Kubernetes environment. It requires a Kubernetes service account token.
|
||||
|
||||
<ParamField query="Flags">
|
||||
<Expandable title="properties">
|
||||
<ParamField query="machine-identity-id" type="string" required>
|
||||
Your machine identity ID.
|
||||
</ParamField>
|
||||
<ParamField query="service-account-token-path" type="string" optional>
|
||||
Path to the Kubernetes service account token to use. Default: `/var/run/secrets/kubernetes.io/serviceaccount/token`.
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
<Steps>
|
||||
<Step title="Create a Kubernetes machine identity">
|
||||
To create a Kubernetes machine identity, follow the step-by-step guide outlined [here](/documentation/platform/identities/kubernetes-auth).
|
||||
</Step>
|
||||
<Step title="Obtain access an token">
|
||||
Run the `login` command with the following flags to obtain an access token:
|
||||
|
||||
```bash
|
||||
# --service-account-token-path is optional, and will default to '/var/run/secrets/kubernetes.io/serviceaccount/token' if not provided.
|
||||
infisical login --method=kubernetes --machine-identity-id=<machine-identity-id> --service-account-token-path=<service-account-token-path>
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
</Accordion>
|
||||
<Accordion title="Native Azure">
|
||||
The Native Azure method is used to authenticate with Infisical when running in an Azure environment.
|
||||
|
||||
<ParamField query="Flags">
|
||||
<Expandable title="properties">
|
||||
<ParamField query="machine-identity-id" type="string" required>
|
||||
Your machine identity ID.
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
<Steps>
|
||||
<Step title="Create an Azure machine identity">
|
||||
To create an Azure machine identity, follow the step-by-step guide outlined [here](/documentation/platform/identities/azure-auth).
|
||||
</Step>
|
||||
<Step title="Obtain an access token">
|
||||
Run the `login` command with the following flags to obtain an access token:
|
||||
|
||||
```bash
|
||||
infisical login --method=azure --machine-identity-id=<machine-identity-id>
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
</Accordion>
|
||||
<Accordion title="Native GCP ID Token">
|
||||
The Native GCP ID Token method is used to authenticate with Infisical when running in a GCP environment.
|
||||
|
||||
<ParamField query="Flags">
|
||||
<Expandable title="properties">
|
||||
<ParamField query="machine-identity-id" type="string" required>
|
||||
Your machine identity ID.
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
<Steps>
|
||||
<Step title="Create a GCP machine identity">
|
||||
To create a GCP machine identity, follow the step-by-step guide outlined [here](/documentation/platform/identities/gcp-auth).
|
||||
</Step>
|
||||
<Step title="Obtain an access token">
|
||||
Run the `login` command with the following flags to obtain an access token:
|
||||
|
||||
```bash
|
||||
infisical login --method=gcp-id-token --machine-identity-id=<machine-identity-id>
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
</Accordion>
|
||||
<Accordion title="GCP IAM">
|
||||
The GCP IAM method is used to authenticate with Infisical using a GCP service account key.
|
||||
|
||||
<ParamField query="Flags">
|
||||
<Expandable title="properties">
|
||||
<ParamField query="machine-identity-id" type="string" required>
|
||||
Your machine identity ID.
|
||||
</ParamField>
|
||||
<ParamField query="service-account-key-file-path" type="string" required>
|
||||
Path to your GCP service account key file _(Must be in JSON format!)_
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
<Steps>
|
||||
<Step title="Create a GCP machine identity">
|
||||
To create a GCP machine identity, follow the step-by-step guide outlined [here](/documentation/platform/identities/gcp-auth).
|
||||
</Step>
|
||||
<Step title="Obtain an access token">
|
||||
Run the `login` command with the following flags to obtain an access token:
|
||||
|
||||
```bash
|
||||
infisical login --method=gcp-iam --machine-identity-id=<machine-identity-id> --service-account-key-file-path=<service-account-key-file-path>
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
</Accordion>
|
||||
<Accordion title="Native AWS IAM">
|
||||
The AWS IAM method is used to authenticate with Infisical using an AWS IAM role while running in an AWS environment such as EC2 or Lambda.
|
||||
|
||||
<ParamField query="Flags">
|
||||
<Expandable title="properties">
|
||||
<ParamField query="machine-identity-id" type="string" required>
|
||||
Your machine identity ID.
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
<Steps>
|
||||
<Step title="Create an AWS machine identity">
|
||||
To create an AWS machine identity, follow the step-by-step guide outlined [here](/documentation/platform/identities/aws-auth).
|
||||
</Step>
|
||||
<Step title="Obtain an access token">
|
||||
Run the `login` command with the following flags to obtain an access token:
|
||||
|
||||
```bash
|
||||
infisical login --method=aws-iam --machine-identity-id=<machine-identity-id>
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
</Accordion>
|
||||
</AccordionGroup>
|
||||
|
||||
### Machine Identity Authentication Quick Start
|
||||
In this example, we'll use the `universal-auth` method to log in and obtain an Infisical access token, which we will then use to fetch secrets.
|
||||
|
||||
<Steps>
|
||||
<Step title="Obtain an access token">
|
||||
```bash
|
||||
export INFISICAL_TOKEN=$(infisical login --method=universal-auth --client-id=<client-id> --client-secret=<client-secret> --silent --plain) # --silent and --plain are important to ensure only the token itself is printed, so it can easily be set as an environment variable.
|
||||
```
|
||||
|
||||
Now that we've set the `INFISICAL_TOKEN` environment variable, we can use the CLI to interact with Infisical. The CLI will automatically check for the presence of the `INFISICAL_TOKEN` environment variable and use it for authentication.
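For instance, with the environment variable set, a subsequent command needs no explicit token flag (the project ID below is an illustrative placeholder):

```bash
# The CLI picks up INFISICAL_TOKEN automatically, so no --token flag is required
infisical secrets --projectId=<your-project-id> --env=dev
```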
|
||||
|
||||
|
||||
Alternatively, if you would rather use the `--token` flag to pass the token directly, you can do so by running the following command:
|
||||
|
||||
```bash
|
||||
infisical [command] --token=<your-access-token> # The token output from the login command.
|
||||
```
|
||||
</Step>
|
||||
|
||||
<Step title="Fetch all secrets from an evironment">
|
||||
```bash
|
||||
infisical secrets --projectId=<your-project-id> --env=dev --recursive
|
||||
```
|
||||
|
||||
This command will fetch all secrets from the `dev` environment in your project, including all secrets in subfolders.
|
||||
|
||||
<Info>
|
||||
The `--recursive` and `--env` flags are optional. The `--recursive` flag fetches all secrets in subfolders, and the environment defaults to `dev` if no `--env` flag is provided.
|
||||
</Info>
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
And that's it! You're now ready to start using the Infisical CLI to interact with your secrets using Machine Identities.
|
||||
|
@@ -4,10 +4,7 @@ sidebarTitle: "What is Infisical?"
|
||||
description: "An Introduction to the Infisical secret management platform."
|
||||
---
|
||||
|
||||
Infisical is an [open-source](https://github.com/infisical/infisical) secret management platform for developers.
|
||||
It provides capabilities for storing, managing, and syncing application configuration and secrets like API keys, database
|
||||
credentials, and certificates across infrastructure. In addition, Infisical prevents secrets leaks to git and enables secure
|
||||
sharing of secrets among engineers.
|
||||
**[Infisical](https://infisical.com)** is the open source secret management platform that developers use to centralize their application configuration and secrets like API keys and database credentials as well as manage their internal PKI. Additionally, developers use Infisical to prevent secrets leaks to git and securely share secrets amongst engineers.
|
||||
|
||||
Start managing secrets securely with [Infisical Cloud](https://app.infisical.com) or learn how to [host Infisical](/self-hosting/overview) yourself.
|
||||
|
||||
|
@@ -12,14 +12,14 @@ From there, you can invite external members to the organization and start creati
|
||||
### Projects
|
||||
|
||||
The **Projects** page shows you all the projects that you have access to within your organization.
|
||||
Here, you can also create a new project.
|
||||
Here, you can also create a new project.
|
||||
|
||||

|
||||
|
||||
### Members
|
||||
|
||||
The **Members** page lets you add or remove external members to your organization.
|
||||
Note that you can configure your organization in Infisical to have members authenticate with the platform via protocols like SAML 2.0.
|
||||
The **Members** page lets you add or remove external members to your organization.
|
||||
Note that you can configure your organization in Infisical to have members authenticate with the platform via protocols like SAML 2.0 and OpenID Connect.
|
||||
|
||||

|
||||
|
||||
@@ -35,13 +35,14 @@ The **Secrets Overview** screen provides a bird's-eye view of all the secrets in
|
||||

|
||||
|
||||
In the above image, you can already see that:
|
||||
|
||||
- `STRIPE_API_KEY` is missing from the **Staging** environment.
|
||||
- `JWT_SECRET` is missing from the **Production** environment.
|
||||
- `BAR` is `EMPTY` in the **Production** environment.
|
||||
|
||||
### Dashboard
|
||||
|
||||
The secrets dashboard lets you manage secrets for a specific environment in a project.
|
||||
The secrets dashboard lets you manage secrets for a specific environment in a project.
|
||||
Here, developers can override secrets, version secrets, rollback projects to any point in time and much more.
|
||||
|
||||

|
||||
@@ -61,4 +62,4 @@ which you can assign to members.
|
||||
|
||||
That's it for the platform quickstart! — We encourage you to continue exploring the documentation to gain a deeper understanding of the extensive features and functionalities that Infisical has to offer.
|
||||
|
||||
Next, head back to [Getting Started > Introduction](/documentation/getting-started/overview) to explore ways to fetch secrets from Infisical to your apps and infrastructure.
|
||||
Next, head back to [Getting Started > Introduction](/documentation/getting-started/overview) to explore ways to fetch secrets from Infisical to your apps and infrastructure.
|
||||
|
@@ -26,13 +26,6 @@ A typical workflow for using identities consists of four steps:
|
||||
3. Authenticating the identity with the Infisical API based on the configured authentication method on it and receiving a short-lived access token back.
|
||||
4. Authenticating subsequent requests with the Infisical API using the short-lived access token.
|
||||
|
||||
<Note>
|
||||
Currently, identities can only be used to make authenticated requests to the Infisical API, SDKs, Terraform, Kubernetes Operator, and Infisical Agent. They do not work with clients such as CLI, Ansible look up plugin, etc.
|
||||
|
||||
Machine Identity support for the rest of the clients is planned to be released in the current quarter.
|
||||
|
||||
</Note>
|
||||
|
||||
## Authentication Methods
|
||||
|
||||
To interact with various resources in Infisical, Machine Identities are able to authenticate using:
|
||||
|
@@ -14,8 +14,6 @@ then you should contact sales@infisical.com to purchase an enterprise license to
|
||||
|
||||
You can configure your organization in Infisical to have members authenticate with the platform via [LDAP](https://en.wikipedia.org/wiki/Lightweight_Directory_Access_Protocol).
|
||||
|
||||
To note, configuring LDAP retains the end-to-end encrypted nature of authentication in Infisical because we decouple the authentication and decryption steps; the LDAP server cannot and will not have access to the decryption key needed to decrypt your secrets.
|
||||
|
||||
LDAP providers:
|
||||
|
||||
- Active Directory
|
||||
|
@@ -21,20 +21,19 @@ The **Settings** page lets you manage information about your organization includ
|
||||
|
||||

|
||||
|
||||
|
||||
- Security and Authentication: A set of setting to enforce or manage [SAML](/documentation/platform/sso/overview), [SCIM](/documentation/platform/scim/overview), [LDAP](/documentation/platform/ldap/overview), and other authentication configurations.
|
||||
- Security and Authentication: A set of settings to enforce or manage [SAML](/documentation/platform/sso/overview), [OIDC](/documentation/platform/sso/overview), [SCIM](/documentation/platform/scim/overview), [LDAP](/documentation/platform/ldap/overview), and other authentication configurations.
|
||||
|
||||

|
||||
|
||||
## Access Control
|
||||
|
||||
The **Access Control** page is where you can manage identities (both people and machines) that are part of your organization.
|
||||
The **Access Control** page is where you can manage identities (both people and machines) that are part of your organization.
|
||||
You can add or remove additional members as well as modify their permissions.
|
||||
|
||||

|
||||

|
||||
|
||||
In the **Organization Roles** tab, you can edit current or create new custom roles for members within the organization.
|
||||
In the **Organization Roles** tab, you can edit current or create new custom roles for members within the organization.
|
||||
|
||||
<Info>
|
||||
Note that Role-Based Access Control (RBAC) is partly a paid feature.
|
||||
@@ -42,13 +41,14 @@ In the **Organization Roles** tab, you can edit current or create new custom rol
|
||||
Infisical provides immutable roles like `admin`, `member`, etc.
|
||||
at the organization and project level for free.
|
||||
|
||||
If you're using Infisical Cloud, the ability to create custom roles is available under the **Pro Tier**.
|
||||
If you're self-hosting Infisical, then you should contact sales@infisical.com to purchase an enterprise license to use it.
|
||||
If you're using Infisical Cloud, the ability to create custom roles is available under the **Pro Tier**.
|
||||
If you're self-hosting Infisical, then you should contact sales@infisical.com to purchase an enterprise license to use it.
|
||||
|
||||
</Info>
|
||||
|
||||

|
||||
|
||||
As you can see next, Infisical supports granular permissions that you can tailor to each role.
|
||||
As you can see next, Infisical supports granular permissions that you can tailor to each role.
|
||||
If you need certain members to only be able to access billing details, for example, then you can
|
||||
assign them that permission only.
|
||||
|
||||
@@ -66,4 +66,4 @@ This includes the following items:
|
||||
- Receipts: The receipts of monthly/annual invoices.
|
||||
- Billing: The billing details of your organization including payment methods on file, tax IDs (if applicable), etc.
|
||||
|
||||

|
||||

|
||||
|
66
docs/documentation/platform/sso/auth0-oidc.mdx
Normal file
@@ -0,0 +1,66 @@
|
||||
---
|
||||
title: "Auth0 OIDC"
|
||||
description: "Learn how to configure Auth0 OIDC for Infisical SSO."
|
||||
---
|
||||
|
||||
<Info>
|
||||
Auth0 OIDC SSO is a paid feature. If you're using Infisical Cloud, then it is
|
||||
available under the **Pro Tier**. If you're self-hosting Infisical, then you
|
||||
should contact sales@infisical.com to purchase an enterprise license to use
|
||||
it.
|
||||
</Info>
|
||||
|
||||
<Steps>
|
||||
<Step title="Setup application in Auth0">
|
||||
1.1. From the Applications page, navigate to the Settings tab of the Auth0 application you want to integrate with Infisical.
|
||||

|
||||
|
||||
1.2. In the Application URIs section, set the **Application Login URI** and **Allowed Web Origins** fields to `https://app.infisical.com` and the **Allowed Callback URL** field to `https://app.infisical.com/api/v1/sso/oidc/callback`.
|
||||

|
||||

|
||||
<Info>
|
||||
If you’re self-hosting Infisical, then you will want to replace https://app.infisical.com with your own domain.
|
||||
</Info>
|
||||
|
||||
Once done, click **Save Changes**.
|
||||
|
||||
1.3. Proceed to the Connections Tab and enable desired connections.
|
||||

|
||||
|
||||
</Step>
|
||||
<Step title="Retrieve Identity Provider (IdP) Information from Auth0">
|
||||
2.1. From the application settings page, retrieve the **Client ID** and **Client Secret**
|
||||

|
||||
|
||||
2.2. In the advanced settings (bottom-most section), retrieve the **OpenID Configuration URL** from the Endpoints tab.
|
||||

|
||||
|
||||
Keep these values handy as we will need them in the next steps.
|
||||
|
||||
</Step>
|
||||
<Step title="Finish configuring OIDC in Infisical">
|
||||
3.1. Back in Infisical, in the Organization settings > Security > OIDC, click **Manage**.
|
||||

|
||||
|
||||
3.2. For configuration type, select **Discovery URL**. Then, set **Discovery Document URL**, **Client ID**, and **Client Secret** from steps 2.1 and 2.2.
|
||||

|
||||
|
||||
Once you've done that, press **Update** to complete the required configuration.
|
||||
|
||||
</Step>
|
||||
<Step title="Enable OIDC in Infisical">
|
||||
Enabling OIDC allows members in your organization to log into Infisical via Auth0.
|
||||
|
||||

|
||||
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
<Note>
|
||||
If you're configuring OIDC SSO on a self-hosted instance of Infisical, make
|
||||
sure to set the `AUTH_SECRET` and `SITE_URL` environment variables for it to
|
||||
work: - `AUTH_SECRET`: A secret key used for signing and verifying JWT. This
|
||||
can be a random 32-byte base64 string generated with `openssl rand -base64
|
||||
32`. - `SITE_URL`: The URL of your self-hosted instance of Infisical - should
|
||||
be an absolute URL including the protocol (e.g. https://app.infisical.com)
|
||||
</Note>
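As a quick sketch of the note above for a self-hosted deployment (the site URL below is a placeholder):

```bash
# Generate a random 32-byte base64 secret used for signing and verifying JWTs
export AUTH_SECRET=$(openssl rand -base64 32)
# Absolute URL of your self-hosted Infisical instance, including the protocol
export SITE_URL=https://infisical.example.com
```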
|
69
docs/documentation/platform/sso/general-oidc.mdx
Normal file
@@ -0,0 +1,69 @@
|
||||
---
|
||||
title: "General OIDC"
|
||||
description: "Learn how to configure OIDC for Infisical SSO with any OIDC-compliant identity provider"
|
||||
---
|
||||
|
||||
<Info>
|
||||
OIDC SSO is a paid feature. If you're using Infisical Cloud, then it is
|
||||
available under the **Pro Tier**. If you're self-hosting Infisical, then you
|
||||
should contact sales@infisical.com to purchase an enterprise license to use
|
||||
it.
|
||||
</Info>
|
||||
|
||||
You can configure your organization in Infisical to have members authenticate with the platform through identity providers via [OpenID Connect](https://openid.net/specs/openid-connect-core-1_0.html).
|
||||
|
||||
Prerequisites:
|
||||
|
||||
- The identity provider (Okta, Google, Azure AD, etc.) should support OIDC.
|
||||
- Users in the IdP should have a configured `email` and `given_name`.
|
||||
|
||||
<Steps>
|
||||
<Step title="Setup Identity Provider">
|
||||
1.1. Register your application with the IdP to obtain a **Client ID** and **Client Secret**. These credentials are used by Infisical to authenticate with your IdP.
|
||||
|
||||
1.2. Configure **Redirect URL** to be `https://app.infisical.com/api/v1/sso/oidc/callback`. If you're self-hosting Infisical, replace the domain with your own.
|
||||
|
||||
1.3. Configure the scopes needed by Infisical (email, profile, openid) and ensure that they are mapped to the ID token claims.
|
||||
|
||||
1.4. Access the IdP’s OIDC discovery document (usually located at `https://<idp-domain>/.well-known/openid-configuration`). This document contains important endpoints such as authorization, token, userinfo, and keys.
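For example, you can confirm the discovery document is reachable with a simple request (the domain is a placeholder):

```bash
# Fetch the OIDC discovery document listing the authorization, token, userinfo, and keys endpoints
curl https://<idp-domain>/.well-known/openid-configuration
```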
|
||||
</Step>
|
||||
<Step title="Finish configuring OIDC in Infisical">
|
||||
2.1. Back in Infisical, in the Organization settings > Security > OIDC, click Manage
|
||||

|
||||
|
||||
2.2. You can configure OIDC either through the Discovery URL (Recommended) or by inputting custom endpoints.
|
||||
|
||||
To configure OIDC via Discovery URL, set the **Configuration Type** field to **Discovery URL** and fill out the **Discovery Document URL** field.
|
||||
|
||||
<Note>
|
||||
Note that the Discovery Document URL typically takes the form: `https://<idp-domain>/.well-known/openid-configuration`.
|
||||
</Note>
|
||||
|
||||

|
||||
|
||||
To configure OIDC via the custom endpoints, set the **Configuration Type** field to **Custom** and input the required endpoint fields.
|
||||

|
||||
|
||||
2.3. Optionally, you can define a whitelist of allowed email domains.
|
||||
|
||||
Finally, fill out the **Client ID** and **Client Secret** fields and press **Update** to complete the required configuration.
|
||||
|
||||
</Step>
|
||||
|
||||
<Step title="Enable OIDC SSO in Infisical">
|
||||
Enabling OIDC SSO allows members in your organization to log into Infisical via the configured Identity Provider.
|
||||
|
||||

|
||||
|
||||
</Step>
|
||||
|
||||
</Steps>
|
||||
|
||||
<Note>
|
||||
If you're configuring OIDC SSO on a self-hosted instance of Infisical, make
|
||||
sure to set the `AUTH_SECRET` and `SITE_URL` environment variables for it to
|
||||
work: - `AUTH_SECRET`: A secret key used for signing and verifying JWT. This
|
||||
can be a random 32-byte base64 string generated with `openssl rand -base64
|
||||
32`. - `SITE_URL`: The URL of your self-hosted instance of Infisical - should
|
||||
be an absolute URL including the protocol (e.g. https://app.infisical.com)
|
||||
</Note>
|
92
docs/documentation/platform/sso/keycloak-oidc.mdx
Normal file
@@ -0,0 +1,92 @@
|
||||
---
|
||||
title: "Keycloak OIDC"
|
||||
description: "Learn how to configure Keycloak OIDC for Infisical SSO."
|
||||
---
|
||||
|
||||
<Info>
|
||||
Keycloak OIDC SSO is a paid feature. If you're using Infisical Cloud, then it
|
||||
is available under the **Pro Tier**. If you're self-hosting Infisical, then
|
||||
you should contact sales@infisical.com to purchase an enterprise license to
|
||||
use it.
|
||||
</Info>
|
||||
|
||||
<Steps>
|
||||
<Step title="Create an OIDC client application in Keycloak">
|
||||
1.1. In your realm, navigate to the **Clients** tab and click **Create client** to create a new client application.
|
||||
|
||||

|
||||
|
||||
<Info>
|
||||
You don’t typically need to make a realm dedicated to Infisical. We recommend adding Infisical as a client to your primary realm.
|
||||
</Info>
|
||||
|
||||
1.2. In the General Settings step, set **Client type** to **OpenID Connect**, the **Client ID** field to an appropriate identifier, and the **Name** field to a friendly name like **Infisical**.
|
||||
|
||||

|
||||
|
||||
1.3. Next, in the Capability Config step, ensure that **Client Authentication** is set to On and that **Standard flow** is enabled in the Authentication flow section.
|
||||
|
||||

|
||||
|
||||
1.4. In the Login Settings step, set the following values:
|
||||
- Root URL: `https://app.infisical.com`.
|
||||
- Home URL: `https://app.infisical.com`.
|
||||
- Valid Redirect URIs: `https://app.infisical.com/api/v1/sso/oidc/callback`.
|
||||
- Web origins: `https://app.infisical.com`.
|
||||
|
||||

|
||||
<Info>
|
||||
If you’re self-hosting Infisical, then you will want to replace https://app.infisical.com (base URL) with your own domain.
|
||||
</Info>
|
||||
|
||||
1.5. Next, navigate to the **Client scopes** tab and select the client's dedicated scope.
|
||||
|
||||

|
||||
|
||||
1.6. Next, click **Add predefined mapper**.
|
||||
|
||||

|
||||
|
||||
1.7. Select the **email**, **given name**, and **family name** attributes and click **Add**.
|
||||
|
||||

|
||||

|
||||
|
||||
Once you've completed the above steps, the list of mappers should look like the following:
|
||||

|
||||
|
||||
</Step>
|
||||
<Step title="Retrieve Identity Provider (IdP) Information from Keycloak">
|
||||
2.1. Back in Keycloak, navigate to Configure > Realm settings > General tab > Endpoints > OpenID Endpoint Configuration and copy the opened URL. This is what is referred to as the Discovery Document URL, and it takes the form: `https://keycloak-mysite.com/realms/myrealm/.well-known/openid-configuration`.
|
||||

|
||||
|
||||
2.2. From the Clients page, navigate to the Credentials tab and copy the **Client Secret** to be used in the next steps.
|
||||

|
||||
|
||||
</Step>
|
||||
<Step title="Finish configuring OIDC in Infisical">
|
||||
3.1. Back in Infisical, in the Organization settings > Security > OIDC, click Manage
|
||||

|
||||
|
||||
3.2. For configuration type, select Discovery URL. Then, set the appropriate values for **Discovery Document URL**, **Client ID**, and **Client Secret**.
|
||||

|
||||
|
||||
Once you've done that, press **Update** to complete the required configuration.
|
||||
|
||||
</Step>
|
||||
<Step title="Enable OIDC SSO in Infisical">
|
||||
Enabling OIDC SSO allows members in your organization to log into Infisical via Keycloak.
|
||||
|
||||

|
||||
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
<Note>
|
||||
If you're configuring OIDC SSO on a self-hosted instance of Infisical, make
|
||||
sure to set the `AUTH_SECRET` and `SITE_URL` environment variables for it to
|
||||
work: - `AUTH_SECRET`: A secret key used for signing and verifying JWT. This
|
||||
can be a random 32-byte base64 string generated with `openssl rand -base64
|
||||
32`. - `SITE_URL`: The URL of your self-hosted instance of Infisical - should
|
||||
be an absolute URL including the protocol (e.g. https://app.infisical.com)
|
||||
</Note>
|
@@ -7,16 +7,14 @@ description: "Learn how to log in to Infisical via SSO protocols."
|
||||
<Info>
|
||||
Infisical offers Google SSO and GitHub SSO for free across both Infisical
|
||||
Cloud and Infisical Self-hosted. Infisical also offers SAML SSO authentication
|
||||
but as paid features that can be unlocked on Infisical Cloud's **Pro** tier or
|
||||
via enterprise license on self-hosted instances of Infisical. On this front,
|
||||
we support industry-leading providers including Okta, Azure AD, and JumpCloud;
|
||||
with any questions, please reach out to team@infisical.com.
|
||||
and OpenID Connect (OIDC) but as paid features that can be unlocked on
|
||||
Infisical Cloud's **Pro** tier or via enterprise license on self-hosted
|
||||
instances of Infisical. On this front, we support industry-leading providers
|
||||
including Okta, Azure AD, and JumpCloud; with any questions, please reach out
|
||||
to team@infisical.com.
|
||||
</Info>
|
||||
|
||||
You can configure your organization in Infisical to have members authenticate with the platform via protocols like [SAML 2.0](https://en.wikipedia.org/wiki/SAML_2.0).
|
||||
|
||||
To note, Infisical's SSO implementation decouples the **authentication** and **decryption** steps – which implies that no
|
||||
Identity Provider can have access to the decryption key needed to decrypt your secrets (this also implies that Infisical requires entering the user's Master Password on top of authenticating with SSO).
|
||||
You can configure your organization in Infisical to have members authenticate with the platform via protocols like [SAML 2.0](https://en.wikipedia.org/wiki/SAML_2.0) or [OpenID Connect](https://openid.net/specs/openid-connect-core-1_0.html).
|
||||
|
||||
## Identity providers
|
||||
|
||||
@@ -30,6 +28,9 @@ Infisical supports these and many other identity providers:
|
||||
- [JumpCloud SAML](/documentation/platform/sso/jumpcloud)
|
||||
- [Keycloak SAML](/documentation/platform/sso/keycloak-saml)
|
||||
- [Google SAML](/documentation/platform/sso/google-saml)
|
||||
- [Keycloak OIDC](/documentation/platform/sso/keycloak-oidc)
|
||||
- [Auth0 OIDC](/documentation/platform/sso/auth0-oidc)
|
||||
- [General OIDC](/documentation/platform/sso/general-oidc)
|
||||
|
||||
If your required identity provider is not shown in the list above, please reach out to [team@infisical.com](mailto:team@infisical.com) for assistance.
|
||||
|
||||
|
BIN
docs/images/sso/auth0-oidc/application-connections.png
Normal file
After Width: | Height: | Size: 339 KiB |
BIN
docs/images/sso/auth0-oidc/application-credential.png
Normal file
After Width: | Height: | Size: 318 KiB |
BIN
docs/images/sso/auth0-oidc/application-origin.png
Normal file
After Width: | Height: | Size: 449 KiB |
BIN
docs/images/sso/auth0-oidc/application-settings.png
Normal file
After Width: | Height: | Size: 318 KiB |
BIN
docs/images/sso/auth0-oidc/application-uris.png
Normal file
After Width: | Height: | Size: 425 KiB |
BIN
docs/images/sso/auth0-oidc/application-urls.png
Normal file
After Width: | Height: | Size: 358 KiB |
BIN
docs/images/sso/auth0-oidc/enable-oidc.png
Normal file
After Width: | Height: | Size: 754 KiB |